Commits

Jimmy Yuen Ho Wong committed c6c4090

replaced mapping utils with all new collection utils that support recursive transformation

Comments (0)

Files changed (2)

src/blueberrypy/util.py

 import copy
 import hashlib
 import hmac
+import warnings
 
 from base64 import b64encode, urlsafe_b64encode
 
 from datetime import date, time, datetime, timedelta
 
 from dateutil.parser import parse as parse_date
-
-from sqlalchemy.orm import RelationshipProperty
+from sqlalchemy.orm import RelationshipProperty, Session, collections
 
 try:
     from geoalchemy.base import SpatialElement, WKTSpatialElement
     geos_support = True
 
 
-__all__ = ["to_mapping", "from_mapping", "CSRFToken",
+__all__ = ["to_collection", "to_mapping", "from_mapping", "CSRFToken",
            "pad_block_cipher_message", "unpad_block_cipher_message"]
 
 
-def to_mapping(value, includes=None, excludes=None, format=None, **json_kwargs):
-    """Utility function to convert a value to a mapping.
+def _get_model_properties(model, excludes):
+    props = {}
+    for prop in model.__mapper__.iterate_properties:
+        if isinstance(prop, RelationshipProperty):
+            props[prop.key] = prop
+            if prop.backref:
+                backref_prop_key = prop.backref[0]
+                for mapper in prop.mapper.polymorphic_iterator():
+                    excludes.setdefault(mapper.class_, set()).add(backref_prop_key)
+        else:
+            if prop.key.startswith("_"):
+                props[prop.columns[0].key] = prop
+            else:
+                props[prop.key] = prop
+    return props
+
+def _ensure_is_dict(key, inc_exc):
+
+    if inc_exc:
+        inc_exc = copy.deepcopy(inc_exc)
+
+        if isinstance(inc_exc, basestring):
+            inc_exc = {key: set([inc_exc])}
+        elif isinstance(inc_exc, (list, tuple, set, frozenset)):
+            inc_exc = {key: set(iter(inc_exc))}
+        elif not isinstance(inc_exc, dict):
+            raise TypeError(inc_exc, "Please provide a string, an iterable or a dict")
+
+        return inc_exc
+
+    return {}
+
+def to_mapping(value, includes=None, excludes=None, format=None, recursive=False, **json_kwargs):
+    warnings.warn("to_mapping() is deprecated and will be removed in 0.6, please use to_collection() instead.")
+    return to_collection(value, includes=includes, excludes=excludes, format=format,
+                         recursive=recursive, **json_kwargs)
+
+def to_collection(from_, includes=None, excludes=None, format=None, recursive=False, **json_kwargs):
+    """Utility function to convert complex values and SQLAlchemy declarative model objects to a Python collection.
     
-    This function has 2 modes:
-        - SQLAlchemy declarative model -> mapping
-        - complex value type (e.g. datetime types and GeoAlchemy SpatialElement) -> mapping
+    This function generally works very similar to `json.dump()`, with the
+    following enhancements:
     
     SQLAlchemy declarative model
     ----------------------------
-    If `value` is a SQLAlchemy declarative model value (identified by the
-    existance of an `__table__` attribute), `to_mapping()` will iterate through
-    all the value's column and put the column's name and its value into the
-    mapping object to be returned. In addition to basic Python data types, this
-    function will convert `datetime` values according to the following table:
+    If `from_` is a SQLAlchemy declarative model object (identified by the
+    existance of a `__mapper__` attribute), or a collection if it,
+    `to_collection()` will iterate through all the value's mapped properties
+    and put the mapped property's name and its value into the result object to
+    be returned. In addition to basic Python data types, this function will
+    convert `datetime` values according to the following table:
 
     ========== =========== =============
-    value type mapping key mapping value
+    value type result key  result value
     ========== =========== =============
     datetime   datetime    .isoformat()
     time       time        .isoformat()
     timedelta  interval    .seconds
     ========== =========== =============
     
-    In additional to `datetime` values, GeoAlchemy `SpatialElement values are
-    also converted to `geojson <http://geojson.org/>`_ format using
+    Furthermore, GeoAlchemy `SpatialElement values are also converted to
+    `geojson <http://geojson.org/>`_ format using
     `Shapely <http://toblerity.github.com/shapely/>_`.
     
-    Under SQLalchemy mode, if `includes` is provided, additional attribute(s) in
-    the model value will be included in the returned mapping. `includes` can be
-    a string or a list of strings. If `excludes` is provided, which can also be
-    a string or a list of strings, the attribute(s) will be exclude from the
-    returned mapping.
+    If `includes` is provided, additional attribute(s) in the model value(s)
+    will be included in the returned result. `includes` can be a string, an
+    iterable of strings, or a mapping of classes to iterables of strings. This
+    is usually used for getting the values of the un-mapped properties from the
+    model instances.
     
-    **Note:** columns with names starting with '_' and attributes that are
-    containers (e.g. relationship attributes) will not be included in the
-    returned mapping by default unless specified by `includes`.
+    If `excludes` is provided, which can also be a string, an iterable of
+    strings, or a mapping of classes to iterables of strings, the attribute(s)
+    will be excluded from the returned result.
     
+    Internally, `to_collection()` will convert the provided `includes` and
+    `excludes` property sets to a mapping of the classes of the values to lists
+    of property key strings.
+    
+    **Note:** Mapped property names starting with '_' will never be included in the
+    returned result.
+
+    If `recursive` is True, `to_collection` will recursively traverse the entire
+    object graph of the values and return a result representing the entire
+    object tree. The backrefs of the relationship properties will be
+    automatically added to the `excludes` set to prevent running into an
+    infinite loop. If you set `recursive` to True, and also supply either an
+    `includes` or `excludes` property sets, it is encouraged that you provide
+    mappings for explicitness.
+
     Complex values
     --------------
-    If `value` is not a a SQLAlchemy declarative model, a shallow copy of it
-    will be made and processed according to the same logic as SQLAlchemy mode's
-    column values. Namely `datatime` values and GeoAlchemy SpatialElement values
-    will be converted to their mapping representations.
+    If `from_` is not a a SQLAlchemy declarative model, it must be a Python
+    collection and its elements are processed according to the same logic as
+    SQLAlchemy mode. If `from_` is a collection, this function will recursively
+    convert all elements if `recursive` is True. `includes` and `excludes` will
+    have no effect under this mode unless some decendent objects are SQLAlchemy
+    declarative model objects, in which case processing will be the same as
+    described above.
     
-    If `format` is the string `json`, the mapping returned will be a JSON string
-    , otherwise a mapping object will be returned.
+    **Note:** If `from_` is an instance of a dict its keys will be converted to
+    a string regardless. All iterables besides a dict is returned as a list.
+    
+    
+    If `format` is the string `json`, the result returned will be a JSON string
+    , otherwise a Python collection object will be returned.
     
     If any `json_kwargs` is provided, they will be passed through to the
     underlying simplejson JSONDecoder.
     
     Examples:
     ---------
-    >>> to_mapping(legco) #doctest: +SKIP
+    >>> to_collection(legco) #doctest: +SKIP
     {'name': 'Hong Kong Legislative Council Building', 'founded': {'date': '1912-01-15'}, 'location': {'type': 'Point', 'coordinates': (22.280909, 114.160349)}}
     
-    >>> to_mapping(legco, excludes=['founded', 'location']) #doctest: +SKIP
+    >>> to_collection(legco, excludes=['founded', 'location']) #doctest: +SKIP
     {'name': 'Hong Kong Legislative Council Building'}
     
-    >>> to_mapping(legco, excludes=['founded'], format='json') #doctest: +SKIP
+    >>> to_collection(legco, excludes='founded', format='json') #doctest: +SKIP
     '{"name": "Hong Kong Legislative Council Building", 'location': {'type': 'Point', 'coordinates': [22.280909, 114.160349]}}'
+    
+    >>> to_collection([legco, hkpark], recursive=True, included={Location: set(['founded'])}) #doctest: +SKIP
+    [{'name': 'Hong Kong Legislative Council Building', 'founded': {'date': '1912-01-15'}, 'location': {'type': 'Point', 'coordinates': (22.280909, 114.160349)}},
+    {'name': 'Hong Kong Park', 'founded': {'date': '1991-05-23'}, 'location': {'type': 'Point', 'coordinates': [22.2771398, 114.1613993]}}]
     """
+    if hasattr(from_, "__mapper__"):
 
-    if hasattr(value, "__table__"):
-        includes = set([includes] if isinstance(includes, basestring) else includes and list(includes) or [])
-        excludes = set([excludes] if isinstance(excludes, basestring) else excludes and list(excludes) or [])
-        attrs = set([prop.key for prop in value.__mapper__.iterate_properties if not isinstance(prop, RelationshipProperty)])
-        attrs = includes | attrs - excludes
+        includes = _ensure_is_dict(from_.__class__, includes)
+        excludes = _ensure_is_dict(from_.__class__, excludes)
 
-        mapping = {}
-        for k in attrs:
-            v = getattr(value, k)
-            if not k.startswith("_") and not isinstance(v, (tuple, list, set, frozenset, dict)):
-                if isinstance(v, datetime):
-                    v = {"datetime": v.isoformat()}
-                elif isinstance(v, time):
-                    v = {"time": v.isoformat()}
-                elif isinstance(v, date):
-                    v = {"date": v.isoformat()}
-                elif isinstance(v, timedelta):
-                    v = {"interval": v.seconds}
-                elif geos_support and isinstance(v, SpatialElement):
-                    if isinstance(v, WKTSpatialElement):
-                        v = asGeoJSON(wkt_decode(v.geom_wkt))
-                    else:
-                        v = asGeoJSON(wkb_decode(str(v.geom_wkb)))
-                mapping[k] = v
+        props = _get_model_properties(from_, excludes)
+        attrs = set(props.iterkeys())
+        if includes and from_.__class__ in includes:
+            attrs |= includes[from_.__class__]
+        if excludes and from_.__class__ in excludes:
+            attrs -= excludes[from_.__class__]
 
-        if format == "json":
-            return json.dumps(mapping, **json_kwargs)
-        return mapping
+        result = {}
+        for attr in attrs:
+            if not attr.startswith("_"):
+                val = getattr(from_, attr)
+                val = to_collection(val, includes=includes, excludes=excludes, recursive=recursive)
+                result[attr] = val
     else:
-        v = copy.copy(value)
-        if isinstance(v, datetime):
-            v = {"datetime": v.isoformat()}
-        elif isinstance(v, time):
-            v = {"time": v.isoformat()}
-        elif isinstance(v, date):
-            v = {"date": v.isoformat()}
-        elif isinstance(v, timedelta):
-            v = {"interval": v.seconds}
-        elif geos_support and isinstance(v, SpatialElement):
-            if isinstance(v, WKTSpatialElement):
-                v = asGeoJSON(wkt_decode(v.geom_wkt))
+        if isinstance(from_, datetime):
+            result = {"datetime": from_.isoformat()}
+        elif isinstance(from_, time):
+            result = {"time": from_.isoformat()}
+        elif isinstance(from_, date):
+            result = {"date": from_.isoformat()}
+        elif isinstance(from_, timedelta):
+            result = {"interval": from_.seconds}
+        elif geos_support and isinstance(from_, SpatialElement):
+            if isinstance(from_, WKTSpatialElement):
+                result = asGeoJSON(wkt_decode(from_.geom_wkt))
             else:
-                v = asGeoJSON(wkb_decode(str(v.geom_wkb)))
+                result = asGeoJSON(wkb_decode(str(from_.geom_wkb)))
+        elif isinstance(from_, dict):
+            result = {}
+            for k, v in from_.items():
+                result[unicode(k)] = to_collection(v, includes=includes, excludes=excludes, recursive=recursive)
+        elif hasattr(from_, "__iter__"): # iterable collections, not strings
+            result = [to_collection(v, includes=includes, excludes=excludes, recursive=recursive)
+                      for v in from_] if recursive else list(from_)
+        else:
+            result = from_
 
-        if format == "json":
-            return json.dumps(v, **json_kwargs)
-        return v
+    if format == "json":
+        return json.dumps(result, **json_kwargs)
+
+    return result
+
+def from_mapping(mapping, instance, excludes=None, format=None):
+    warnings.warn("from_mapping() is deprecated and will be removed in 0.6, please use from_collection() instead.")
+    return from_collection(mapping, instance, excludes=excludes, format=format)
 
 # TODO: add validators support
-def from_mapping(mapping, instance, excludes=None, format=None):
-    """Utility function to set the column values of a SQLAlchemy declarative
-    model instance via a mapping.
+def from_collection(from_, to_, excludes=None, format=None, collection_handling="replace"):
+    """Utility function to apply data in a Python collection to SQLAlchemy declarative models objects.
     
     This function takes a `mapping` and an `instance` and sets the attributes
     on the SQLAlchemy declarative model instance using the key-value pairs from
     simply be skipped and not set on the instance.
     
     The values supplied is converted according to the similiar rules as
-    `to_mapping()`:
+    `to_collection()`:
     
     ============== ============================================
     column type    mapping value format
     time           {"time": "ISO-8601"}
     date           {"date": "ISO-8601"}
     timedelta      {"interval": seconds}
-    SpatialElement {"type": "Point", "coordinates": [lat, lng]}
+    SpatialElement GeoJSON
     ============== ============================================
     
     **Security Notice:** This function currently does not yet have integration 
     support for data validation. If you are using this function to directly 
     mass-assign user supplied data to your model instances, make sure you have 
     validated the data first. In a future version of blueberrypy, integration 
-    with a form validation library will be provided to ease this process.
-    
-    **Note:** If you supply collections values, the entire collection on the
-    entity is replaced instead of merging.
+    with a form validation library will be provided to ease this process. 
     """
+    if format == "json":
+        from_ = json.loads(from_)
 
-    if format == "json":
-        mapping = json.loads(mapping)
+    if not isinstance(from_, dict):
+        if hasattr(from_, "__iter__"):
+            if not hasattr(to_, "__iter__"):
+                raise TypeError("to_ must be an iterable if from_ is an iterable.")
+            elif len(from_) != len(to_):
+                raise ValueError("length of to_ must match length of from_.")
 
-    if not isinstance(mapping, dict):
-        raise TypeError(mapping, "mapping must be a dict")
+    if collection_handling != "replace" and collection_handling != "append":
+        raise ValueError("collection_handling must be 'replace' or 'append'.")
 
-    excludes = set([excludes] if isinstance(excludes, basestring) else excludes and list(excludes) or [])
-    attrs = set([prop.key for prop in instance.__mapper__.iterate_properties])
-    attrs = attrs - excludes
+    excludes = _ensure_is_dict(to_.__class__, excludes)
 
-    for k, v in mapping.iteritems():
+    if isinstance(from_, dict):
+        if isinstance(to_, dict):
+            for k in to_.iterkeys():
+                if k in from_:
+                    to_[k] = from_collection(from_[k], to_[k], excludes=excludes)
+        elif hasattr(to_, "__mapper__"):
+            props = _get_model_properties(to_, excludes)
+            attrs = set(props.iterkeys())
+            if excludes and to_.__class__ in excludes:
+                attrs -= excludes[to_.__class__]
 
-        if k in attrs:
-            if isinstance(v, dict):
-                if "date" in v:
-                    v = parse_date(v["date"]).date()
-                    setattr(instance, k, v)
-                elif "time" in v:
-                    v = parse_date(v["time"]).time()
-                    setattr(instance, k, v)
-                elif "datetime" in v:
-                    v = parse_date(v["datetime"])
-                    setattr(instance, k, v)
-                elif "interval" in v:
-                    v = timedelta(seconds=v["interval"])
-                    setattr(instance, k, v)
-                elif geos_support and "type" in v:
-                    v = asShape(v)
-                    setattr(instance, k, WKTSpatialElement(v.wkt))
-            else:
-                setattr(instance, k, v)
+            for attr in attrs:
+                if attr in from_:
+                    prop = props[attr]
+                    from_val = from_[attr]
+                    if isinstance(prop, RelationshipProperty):
+                        if not isinstance(from_val, list) and not isinstance(from_val, dict):
+                            raise ValueError("%r must be either a list or a dict" % attr)
 
-    return instance
+                        prop_cls = prop.mapper.class_
+
+                        if prop.uselist is None or prop.uselist:
+
+                            if collection_handling == "replace":
+                                col = collections.prepare_instrumentation(prop.collection_class or list)()
+                            elif collection_handling == "append":
+                                col = getattr(to_, attr)
+
+                            appender = col._sa_appender
+
+                            from_iterator = iter(from_val) if isinstance(from_val, list) else from_val.itervalues()
+
+                            for v in from_iterator:
+                                prop_pk_vals = tuple((v[pk_col.key] for pk_col in prop.mapper.primary_key if pk_col.key in v))
+                                if prop_pk_vals and Session.object_session(to_):
+                                    prop_inst = Session.object_session(to_).query(prop_cls).get(prop_pk_vals)
+                                elif prop.mapper.polymorphic_on is not None:
+                                    prop_inst = prop.mapper.polymorphic_map[v[prop.mapper.get_property_by_column(prop.mapper.polymorphic_on).key]].class_()
+                                else:
+                                    prop_inst = prop_cls()
+
+                                appender(from_collection(v, prop_inst, excludes=excludes))
+                            
+                            if collection_handling == "replace":
+                                setattr(to_, attr, col)
+                        else:
+                            prop_pk_vals = tuple((from_val[pk_col.key] for pk_col in prop.mapper.primary_key if pk_col.key in from_val))
+                            if prop_pk_vals and Session.object_session(to_):
+                                prop_inst = Session.object_session(to_).query(prop_cls).get(prop_pk_vals)
+                            elif prop_cls.__mapper__.polymorphic_on:
+                                prop_inst = prop.mapper.polymorphic_map[from_val[prop.mapper.get_property_by_column(prop.mapper.polymorphic_on).key]].class_()
+                            else:
+                                prop_inst = prop_cls()
+
+                            setattr(to_, attr, from_collection(from_val, prop_inst, excludes=excludes))
+                    else:
+                        setattr(to_, attr, from_collection(from_val, None, excludes=excludes))
+        else:
+            if "date" in from_:
+                to_ = parse_date(from_["date"]).date()
+            elif "time" in from_:
+                to_ = parse_date(from_["time"]).time()
+            elif "datetime" in from_:
+                to_ = parse_date(from_["datetime"])
+            elif "interval" in from_:
+                to_ = timedelta(seconds=from_["interval"])
+            elif geos_support and "type" in from_:
+                to_ = WKTSpatialElement(asShape(from_).wkt)
+
+    elif hasattr(from_, "__iter__") and hasattr(to_, "__iter__"):
+        to_ = [from_collection(f, t, excludes=excludes) for f, t in zip(from_, to_)]
+
+    else:
+        to_ = from_
+
+    return to_
 
 
 class CSRFToken(object):

tests/test_util.py

 
 from geoalchemy import GeometryColumn, Point, WKTSpatialElement, GeometryDDL
 from sqlalchemy import Column, Integer, Date, DateTime, Time, Interval, Enum, \
-    ForeignKey, engine_from_config
-from sqlalchemy.orm import sessionmaker, scoped_session, relationship
+    ForeignKey, UnicodeText, engine_from_config
+from sqlalchemy.orm import sessionmaker, scoped_session, relationship, backref
 from sqlalchemy.ext.declarative import declarative_base
 
 from blueberrypy.util import CSRFToken, pad_block_cipher_message, \
-    unpad_block_cipher_message, from_mapping, to_mapping
+    unpad_block_cipher_message, from_collection, to_collection
 
 
 engine = engine_from_config(testconfig.config["sqlalchemy_engine"], '')
     __tablename__ = "related"
 
     id = Column(Integer, autoincrement=True, primary_key=True)
+    key = Column(UnicodeText)
+
+    parent_id = Column(Integer, ForeignKey("testentity.id"))
+
+    discriminator = Column("type", Enum("related", "relatedsubclass",
+                                        name="searchoptiontype"))
+
+    __mapper_args__ = {"polymorphic_on": discriminator,
+                       "polymorphic_identity": "related"}
+
+
+class RelatedEntitySubclass(RelatedEntity):
+
+    __mapper_args__ = {"polymorphic_identity": "relatedsubclass"}
+
+    subclass_prop = Column(UnicodeText)
+
 
 # remember to setup postgis
 class TestEntity(Base):
     def combined(self):
         return datetime.combine(self.date, self.time)
 
-    related_id = Column(Integer, ForeignKey("related.id"))
-    related = relationship(RelatedEntity, uselist=False)
+    related = relationship(RelatedEntity, backref=backref("parent"))
 
 
 GeometryDDL(TestEntity.__table__)
         self.assertFalse(csrftoken.verify(testtoken))
 
 
-class MappingUtilTest(unittest.TestCase):
+class CollectionUtilTest(unittest.TestCase):
 
     @classmethod
     @orm_session
                                datetime=datetime(2012, 1, 1, 0, 0, 0),
                                interval=timedelta(seconds=3600),
                                geo=WKTSpatialElement("POINT(45.0 45.0)"))
+        session = Session()
+        session.add(te)
+
+        te.related = [RelatedEntity(key=u"related1"),
+                      RelatedEntitySubclass(key=u"related2", subclass_prop=u"sub1")]
+
+        session.commit()
+
+        te2 = TestEntity(id=2,
+                         date=date(2013, 2, 2),
+                         time=time(1, 1, 1),
+                         datetime=datetime(2013, 2, 2, 1, 1, 1),
+                         interval=timedelta(seconds=3601),
+                         geo=WKTSpatialElement("POINT(46.0 45.1)"))
 
         session = Session()
-        session.add(te)
+        session.add(te2)
+
+        te2.related = [RelatedEntity(key=u"related3"),
+                      RelatedEntity(key=u"related4")]
+
         session.commit()
-
-        te.related = RelatedEntity()
-        session.commit()
-
     setUpClass = setup_class
 
     @classmethod
     testDownClass = teardown_class
 
     @orm_session
-    def test_to_mapping(self):
+    def test_to_collection(self):
+
+        self.assertEqual(1, to_collection(1))
+        self.assertEqual(1.1, to_collection(1.1))
+        self.assertEqual("str", to_collection("str"))
+        self.assertEqual([1, 2, 3], to_collection([1, 2, 3]))
+        self.assertEqual([1, 2, 3], to_collection((1, 2, 3)))
+        self.assertEqual([1, 2, 3], to_collection(set([1, 2, 3])))
+        self.assertEqual([1, 2, 3], to_collection(frozenset([1, 2, 3])))
+        self.assertEqual({"1": [2]}, to_collection({1: [2]}))
+        self.assertEqual({"a": [1, 2], "b": 2}, to_collection({"a": set([1, 2]), "b": 2}))
+
         doc = {'date': {'date': '2012-01-01'},
                'time': {'time': '00:00:00'},
                'interval': {'interval': 3600},
                'id': 1,
                'discriminator': 'derived',
-               'related_id': 1,
                'derivedprop': 2,
                'datetime': {'datetime': '2012-01-01T00:00:00'},
                'geo': {'type': 'Point',
-                       'coordinates': (45.0, 45.0)}}
+                       'coordinates': (45.0, 45.0)},
+               'related': [{'id': 1,
+                            'discriminator': 'related',
+                            'key': u'related1',
+                            'parent_id': 1},
+                           {'id': 2,
+                            'discriminator': 'relatedsubclass',
+                            'key': u'related2',
+                            'parent_id': 1,
+                            'subclass_prop': u'sub1'}]}
 
         session = Session()
-        te = session.query(TestEntity).one()
-        result = to_mapping(te)
+        te = session.query(TestEntity).get(1)
+        result = to_collection(te, recursive=True)
 
         self.assertEqual(doc, result)
 
-        serialized_doc = '{"date": {"date": "2012-01-01"}, "datetime": {"datetime": "2012-01-01T00:00:00"}, "derivedprop": 2, "discriminator": "derived", "geo": {"coordinates": [45.0, 45.0], "type": "Point"}, "id": 1, "interval": {"interval": 3600}, "related_id": 1, "time": {"time": "00:00:00"}}'
-        self.assertEqual(serialized_doc, to_mapping(te, format="json",
-                                                    sort_keys=True))
+        serialized_doc = '{"date": {"date": "2012-01-01"}, "datetime": {"datetime": "2012-01-01T00:00:00"}, "derivedprop": 2, "discriminator": "derived", "geo": {"coordinates": [45.0, 45.0], "type": "Point"}, "id": 1, "interval": {"interval": 3600}, "related": [{"discriminator": "related", "id": 1, "key": "related1", "parent_id": 1}, {"discriminator": "relatedsubclass", "id": 2, "key": "related2", "parent_id": 1, "subclass_prop": "sub1"}], "time": {"time": "00:00:00"}}'
+        result = to_collection(te, format="json", recursive=True, sort_keys=True)
+        self.assertEqual(serialized_doc, result)
 
         doc = {'date': {'date': '2012-01-01'},
                'time': {'time': '00:00:00'},
                'discriminator': 'derived',
-               'related_id': 1,
                'datetime': {'datetime': '2012-01-01T00:00:00'},
                'combined': {'datetime': '2012-01-01T00:00:00'},
                'geo': {'type': 'Point', 'coordinates': (45.0, 45.0)}}
 
-        self.assertEqual(doc, to_mapping(te, includes=["combined"],
-                                         excludes=["id", "interval", "derivedprop"]))
+        self.assertEqual(doc, to_collection(te, includes=["combined"],
+                                            excludes=["id", "interval", "derivedprop", "related"]))
+        self.assertEqual("a", to_collection("a"))
+        self.assertEqual(1, to_collection(1))
+        self.assertEqual(1.1, to_collection(1.1))
+        self.assertEqual({'date': '2012-01-01'}, to_collection(date(2012, 1, 1)))
+        self.assertEqual({'time': '00:00:00'}, to_collection(time(0, 0, 0)))
+        self.assertEqual({'interval': 3600}, to_collection(timedelta(seconds=3600)))
+        self.assertEqual({'datetime': '2012-01-01T00:00:00'}, to_collection(datetime(2012, 1, 1, 0, 0, 0)))
+        self.assertEqual({'type': 'Point', 'coordinates': (45.0, 45.0)}, to_collection(te.geo))
 
-        self.assertEqual("a", to_mapping("a"))
-        self.assertEqual(1, to_mapping(1))
-        self.assertEqual(1.1, to_mapping(1.1))
-        self.assertEqual({'date': '2012-01-01'}, to_mapping(date(2012, 1, 1)))
-        self.assertEqual({'time': '00:00:00'}, to_mapping(time(0, 0, 0)))
-        self.assertEqual({'interval': 3600}, to_mapping(timedelta(seconds=3600)))
-        self.assertEqual({'datetime': '2012-01-01T00:00:00'}, to_mapping(datetime(2012, 1, 1, 0, 0, 0)))
-        self.assertEqual({'type': 'Point', 'coordinates': (45.0, 45.0)}, to_mapping(te.geo))
+        tes = session.query(TestEntity).all()
+        result = to_collection(tes, recursive=True,
+                               includes={DerivedTestEntity: set(['combined'])},
+                               excludes={DerivedTestEntity: set(['id', 'interval', 'derivedprop'])},
+                               format="json", sort_keys=True)
+
+        serialized_doc = '[{"combined": {"datetime": "2012-01-01T00:00:00"}, "date": {"date": "2012-01-01"}, "datetime": {"datetime": "2012-01-01T00:00:00"}, "discriminator": "derived", "geo": {"coordinates": [45.0, 45.0], "type": "Point"}, "related": [{"discriminator": "related", "id": 1, "key": "related1", "parent_id": 1}, {"discriminator": "relatedsubclass", "id": 2, "key": "related2", "parent_id": 1, "subclass_prop": "sub1"}], "time": {"time": "00:00:00"}}, {"combined": {"datetime": "2013-02-02T01:01:01"}, "date": {"date": "2013-02-02"}, "datetime": {"datetime": "2013-02-02T01:01:01"}, "discriminator": "derived", "geo": {"coordinates": [46.0, 45.1], "type": "Point"}, "related": [{"discriminator": "related", "id": 3, "key": "related3", "parent_id": 2}, {"discriminator": "related", "id": 4, "key": "related4", "parent_id": 2}], "time": {"time": "01:01:01"}}]'
+        self.assertEqual(serialized_doc, result)
 
     @orm_session
-    def test_from_mapping(self):
+    def test_from_collection(self):
+        """Exercise from_collection end to end: scalar passthrough,
+        iterable-to-list conversion, mapping -> entity hydration (including
+        relationships and polymorphic subclasses), updating a persisted
+        entity with excludes, collection_handling="append", and JSON input.
+        """
+
+        # Scalars pass through unchanged; any iterable comes back as a list.
+        self.assertEqual(1, from_collection(1, None))
+        self.assertEqual(1.1, from_collection(1.1, None))
+        self.assertEqual("str", from_collection("str", None))
+        self.assertEqual([1, 2, 3], from_collection([1, 2, 3], [4, 5, 6]))
+        self.assertEqual([1, 2, 3], from_collection((1, 2, 3), (4, 5, 6)))
+        self.assertEqual([1, 2, 3], from_collection(set([1, 2, 3]), set([4, 5, 6])))
+        self.assertEqual([1, 2, 3], from_collection(frozenset([1, 2, 3]), frozenset([4, 5, 6])))
 
+        # Hydrate a transient entity from a nested mapping, including a
+        # polymorphic 'related' collection (plain + subclass rows).
         doc = {'date': {'date': '2012-01-01'},
                'time': {'time': '00:00:00'},
                'id': 1,
                'derivedprop': 2,
                'datetime': {'datetime': '2012-01-01T00:00:00'},
-               'geo': {'type': 'Point', 'coordinates': (45.0, 45.0)}}
-
+               'geo': {'type': 'Point', 'coordinates': (45.0, 45.0)},
+               'related': [{'key': u'key1', 'parent_id': 1, 'discriminator': 'related'},
+                           {'key': u'key2', 'parent_id': 1, 'discriminator': 'relatedsubclass', 'subclass_prop': 'sub'}]}
+        
         te = DerivedTestEntity()
-        te = from_mapping(doc, te)
+        te = from_collection(doc, te)
         self.assertEqual(te.date, date(2012, 1, 1))
         self.assertEqual(te.time, time(0, 0, 0))
         self.assertEqual(te.interval, timedelta(seconds=3600))
         self.assertEqual(te.id, 1)
         self.assertEqual(te.derivedprop, 2)
         self.assertEqual(te.geo.geom_wkt, "POINT (45.0000000000000000 45.0000000000000000)")
+        # New (transient) children: no pk yet, discriminator selects the class.
+        self.assertIsNone(te.related[0].id)
+        self.assertEqual(te.related[0].parent_id, 1)
+        self.assertEqual(te.related[0].key, "key1")
+        self.assertEqual(te.related[0].discriminator, "related")
+        self.assertIsNone(te.related[1].id)
+        self.assertEqual(te.related[1].parent_id, 1)
+        self.assertEqual(te.related[1].key, "key2")
+        self.assertEqual(te.related[1].discriminator, "relatedsubclass")
+        self.assertEqual(te.related[1].subclass_prop, "sub")
 
-        te = TestEntity()
-        te = from_mapping(doc, te, excludes=["interval"])
+        # Loading into a persisted entity, with 'interval' excluded from the
+        # update; the related child carries a pk ("id": 3) and so should be
+        # matched/merged rather than created anew.
+        doc = {'date': {'date': '2012-01-01'},
+               'time': {'time': '00:00:00'},
+               'interval': {'interval': 3600},
+               'id': 1,
+               'datetime': {'datetime': '2012-01-01T00:00:00'},
+               'geo': {'type': 'Point', 'coordinates': (45.0, 45.0)},
+               'related': [{'key': u'key1', 'parent_id': 1, 'discriminator': u'related', "id": 3}]}
+
+        session = Session()
+        te = session.query(TestEntity).get(2)
+        te = from_collection(doc, te, excludes=["interval"])
+        # 'interval' is excluded, so the doc's 3600 must NOT be applied; the
+        # persisted value survives (presumably 3601s on fixture entity 2 —
+        # TODO confirm against the test fixture).
         self.assertEqual(te.date, date(2012, 1, 1))
         self.assertEqual(te.time, time(0, 0, 0))
-        self.assertIsNone(te.interval)
+        self.assertEqual(te.interval, timedelta(seconds=3601))
         self.assertEqual(te.datetime, datetime(2012, 1, 1, 0, 0, 0))
+        # NOTE(review): the doc's id (1) overwrites the fetched entity's pk
+        # (2) — confirm this pk rewrite is intended behavior.
         self.assertEqual(te.id, 1)
         self.assertEqual(te.geo.geom_wkt, "POINT (45.0000000000000000 45.0000000000000000)")
+        self.assertEqual(te.related[0].parent_id, 1)
+        self.assertEqual(te.related[0].key, u"key1")
+        self.assertEqual(te.related[0].id, 3)
+        self.assertEqual(te.related[0].discriminator, u"related")
+        self.assertEqual(len(te.related), 1)
+        # The matched child must end up bound to a session, not detached.
+        self.assertIsNotNone(Session.object_session(te.related[0]))
+        
+        # collection_handling="append" adds to, rather than replaces, the
+        # existing 'related' collection.
+        doc = {'related': [{'key': u'hello', 'parent_id': 1, 'discriminator': u'related'}]}
+        te = from_collection(doc, te, collection_handling="append")
+        self.assertEqual(len(te.related), 2)
+        self.assertEqual(te.related[-1].key, u"hello")
+        self.assertEqual(te.related[-1].parent_id, 1)
+        self.assertEqual(te.related[-1].discriminator, "related")
 
-        te = TestEntity()
-        json_doc = '{"date": {"date": "2012-01-01"}, "time": {"time": "00:00:00"}, "interval": {"interval": 3600}, "id": 1, "datetime": {"datetime": "2012-01-01T00:00:00"}, "geo": {"coordinates": [45.0, 45.0], "type": "Point"}}'
-        te = from_mapping(json_doc, te, format="json")
+        # Same hydration as above, but from a JSON string via format="json".
+        te = DerivedTestEntity()
+        json_doc = '{"time": {"time": "00:00:00"}, "date": {"date": "2012-01-01"}, "geo": {"type": "Point", "coordinates": [45.0, 45.0]}, "interval": {"interval": 3600}, "datetime": {"datetime": "2012-01-01T00:00:00"}, "id": 1, "related": [{"parent_id": 1, "key": "key1", "discriminator": "related"}, {"parent_id": 1, "subclass_prop": "sub", "key": "key2", "discriminator": "relatedsubclass"}], "derivedprop": 2}'
+        te = from_collection(json_doc, te, format="json")
         self.assertEqual(te.date, date(2012, 1, 1))
         self.assertEqual(te.time, time(0, 0, 0))
         self.assertEqual(te.interval, timedelta(seconds=3600))
         self.assertEqual(te.datetime, datetime(2012, 1, 1, 0, 0, 0))
         self.assertEqual(te.id, 1)
+        self.assertEqual(te.derivedprop, 2)
         self.assertEqual(te.geo.geom_wkt, "POINT (45.0000000000000000 45.0000000000000000)")
+        self.assertIsNone(te.related[0].id)
+        self.assertEqual(te.related[0].parent_id, 1)
+        self.assertEqual(te.related[0].key, "key1")
+        self.assertEqual(te.related[0].discriminator, "related")
+        self.assertIsNone(te.related[1].id)
+        self.assertEqual(te.related[1].parent_id, 1)
+        self.assertEqual(te.related[1].key, "key2")
+        self.assertEqual(te.related[1].discriminator, "relatedsubclass")
+        self.assertEqual(te.related[1].subclass_prop, "sub")
 
 
 class BlockCipherPaddingTest(unittest.TestCase):