Commits

Mike Bayer committed f602dc5 Merge

merge tip

  • Parent commits 13cdc68, 4c575eb

Files changed (12)

     on ColumnProperty, CompositeProperty, RelationshipProperty
     have been underscored, signature has changed.
     
+- engines
+  - The C extension now also works with DBAPIs which use custom
+    sequences as rows (and not only tuples). [ticket:1757]
+
 - sql
   - Restored some bind-labeling logic from 0.5 which ensures
     that tables with column names that overlap another column
     - STRING/FIXED_CHAR now convert to unicode natively.
       SQLAlchemy's String types then don't need to 
       apply any kind of conversions.
-    
+
+- firebird
+   - The functionality of result.rowcount can be disabled on a
+     per-engine basis by setting 'enable_rowcount=False' 
+     on create_engine().  Normally, cursor.rowcount is called
+     after any UPDATE or DELETE statement unconditionally,
+     because the cursor is then closed and Firebird requires
+     an open cursor in order to get a rowcount.  This 
+     call is slightly expensive, however, so it can be disabled.
+     To re-enable on a per-execution basis, the 
+     'enable_rowcount=True' execution option may be used.
+     
 - examples
   - Updated attribute_shard.py example to use a more robust
     method of searching a Query for binary expressions which

doc/build/conf.py

 #today_fmt = '%B %d, %Y'
 
 # List of documents that shouldn't be included in the build.
-unused_docs = ['output.txt']
+unused_docs = ['copyright']
 
 # List of directories, relative to source directory, that shouldn't be searched
 # for source files.

doc/build/reference/ext/declarative.rst

 
 .. autofunction:: _declarative_constructor
 
+.. autofunction:: has_inherited_table
+
 .. autofunction:: synonym_for
 
 .. autofunction:: comparable_using

doc/build/reference/sqlalchemy/index.rst

     schema
     types
     interfaces
+    util
 
-

doc/build/reference/sqlalchemy/util.rst

+Utilities
+=========
+
+.. automodule:: sqlalchemy.util
+   :members:

lib/sqlalchemy/cextension/resultproxy.c

     PyObject **valueptr, **funcptr, **resultptr;
     PyObject *func, *result, *processed_value, *values_fastseq;
 
-    num_values = Py_SIZE(values);
-    num_processors = Py_SIZE(processors);
+    num_values = PySequence_Length(values);
+    num_processors = PyList_Size(processors);
     if (num_values != num_processors) {
         PyErr_Format(PyExc_RuntimeError,
             "number of values in row (%d) differ from number of column "
 static Py_ssize_t
 BaseRowProxy_length(BaseRowProxy *self)
 {
-    return Py_SIZE(self->row);
+    return PySequence_Length(self->row);
 }
 
 static PyObject *
 {
     PyObject *processors, *values;
     PyObject *processor, *value;
-    PyObject *record, *result, *indexobject;
+    PyObject *row, *record, *result, *indexobject;
     PyObject *exc_module, *exception;
     char *cstr_key;
     long index;
     if (processor == NULL)
         return NULL;
 
-    value = PyTuple_GetItem(self->row, index);
+    row = self->row;
+    if (PyTuple_CheckExact(row))
+        value = PyTuple_GetItem(row, index);
+    else
+        value = PySequence_GetItem(row, index);
     if (value == NULL)
         return NULL;
 
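
The hunk above replaces the hard-coded PyTuple_GetItem call with a
PySequence_GetItem fallback, so DBAPIs that return duck-typed sequence
objects as rows now work with the C extension.  A minimal sketch of such
a row object (the MyRow name is illustrative; test_execute.py further
down exercises the same idea with a list wrapper)::

    class MyRow(object):
        """A non-tuple row: any object supporting len() and indexing."""

        def __init__(self, values):
            self.values = values

        def __len__(self):
            return len(self.values)

        def __getitem__(self, index):
            return self.values[index]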

lib/sqlalchemy/dialects/firebird/kinterbasdb.py

 
 Kinterbasedb backend specific keyword arguments are:
 
-type_conv
-  select the kind of mapping done on the types: by default SQLAlchemy
+* type_conv - select the kind of mapping done on the types: by default SQLAlchemy
   uses 200 with Unicode, datetime and decimal support (see details__).
 
-concurrency_level
-  set the backend policy with regards to threading issues: by default
+* concurrency_level - set the backend policy with regards to threading issues: by default
   SQLAlchemy uses policy 1 (see details__).
 
+* enable_rowcount - True by default; setting this to False disables
+  the usage of "cursor.rowcount" with the 
+  Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically
+  after any UPDATE or DELETE statement.   When disabled, SQLAlchemy's 
+  ResultProxy will return -1 for result.rowcount.   The rationale here is 
+  that Kinterbasdb requires a second round trip to the database when 
+  .rowcount is called -  since SQLA's resultproxy automatically closes 
+  the cursor after a non-result-returning statement, rowcount must be 
+  called, if at all, before the result object is returned.   Additionally,
+  cursor.rowcount may not return correct results with older versions
+  of Firebird, and setting this flag to False will also cause the SQLAlchemy ORM
+  to ignore its usage. The behavior can also be controlled on a per-execution 
+  basis using the `enable_rowcount` option with :meth:`execution_options()`::
+  
+      conn = engine.connect().execution_options(enable_rowcount=True)
+      r = conn.execute(stmt)
+      print r.rowcount
+  
 __ http://sourceforge.net/projects/kinterbasdb
 __ http://firebirdsql.org/index.php?op=devel&sub=python
 __ http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation
 __ http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurrency
 """
 
-from sqlalchemy.dialects.firebird.base import FBDialect, FBCompiler
+from sqlalchemy.dialects.firebird.base import FBDialect, \
+                                    FBCompiler, FBExecutionContext
 from sqlalchemy import util, types as sqltypes
 
 class _FBNumeric_kinterbasdb(sqltypes.Numeric):
             else:
                 return value
         return process
-        
+
+class FBExecutionContext_kinterbasdb(FBExecutionContext):
+    @property
+    def rowcount(self):
+        if self.execution_options.get('enable_rowcount', 
+                                        self.dialect.enable_rowcount):
+            return self.cursor.rowcount
+        else:
+            return -1
+            
 class FBDialect_kinterbasdb(FBDialect):
     driver = 'kinterbasdb'
     supports_sane_rowcount = False
     supports_sane_multi_rowcount = False
+    execution_ctx_cls = FBExecutionContext_kinterbasdb
     
     supports_native_decimal = True
     
         
     )
     
-    def __init__(self, type_conv=200, concurrency_level=1, **kwargs):
+    def __init__(self, type_conv=200, concurrency_level=1, enable_rowcount=True, **kwargs):
         super(FBDialect_kinterbasdb, self).__init__(**kwargs)
-
+        self.enable_rowcount = enable_rowcount
         self.type_conv = type_conv
         self.concurrency_level = concurrency_level
-
+        if enable_rowcount:
+            self.supports_sane_rowcount = True
+        
     @classmethod
     def dbapi(cls):
         k = __import__('kinterbasdb')
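
For the engine-wide form of the same switch, the flag is passed to
create_engine() as described in the changelog entry above; a brief
sketch (the connection URL is illustrative only)::

    from sqlalchemy import create_engine

    # skip the extra cursor.rowcount round trip on every UPDATE/DELETE;
    # result.rowcount then reports -1 unless re-enabled per execution
    engine = create_engine('firebird+kinterbasdb://user:pass@host/db',
                           enable_rowcount=False)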

lib/sqlalchemy/ext/declarative.py

 
         id =  Column(Integer, primary_key=True)
 
+Controlling table inheritance with mix-ins
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``__tablename__`` attribute in conjunction with the hierarchy of
+the classes involved controls what type of table inheritance, if any,
+is configured by the declarative extension.
+
+If the ``__tablename__`` is computed by a mix-in, you may need to
+control which classes get the computed attribute in order to get the
+type of table inheritance you require.
+
+For example, if you have a mix-in that computes ``__tablename__`` but
+you want to use that mix-in within a single table inheritance
+hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to
+indicate that the class should not have a table mapped::
+
+    from sqlalchemy.util import classproperty
+
+    class Tablename:
+        @classproperty
+        def __tablename__(cls):
+            return cls.__name__.lower()
+
+    class Person(Base,Tablename):
+        id = Column(Integer, primary_key=True)
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        __tablename__ = None
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        primary_language = Column(String(50))
+
+Alternatively, you can make the mix-in intelligent enough to only
+return a ``__tablename__`` in the event that no table is already
+mapped in the inheritance hierarchy. To help with this, a
+:func:`~sqlalchemy.ext.declarative.has_inherited_table` helper
+function is provided that returns ``True`` if a parent class already
+has a mapped table. 
+
+As an example, here's a mix-in that will only allow single table
+inheritance::
+
+    from sqlalchemy.util import classproperty
+    from sqlalchemy.ext.declarative import has_inherited_table
+
+    class Tablename:
+        @classproperty
+        def __tablename__(cls):
+            if has_inherited_table(cls):
+                return None
+            return cls.__name__.lower()
+
+    class Person(Base,Tablename):
+        id = Column(Integer, primary_key=True)
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        __tablename__ = None
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        primary_language = Column(String(50))
+
+If you want to use a similar pattern with a mix of single and joined
+table inheritance, you would need a slightly different mix-in and use
+it on any joined table child classes in addition to their parent
+classes::
+
+    from sqlalchemy.util import classproperty
+    from sqlalchemy.ext.declarative import has_inherited_table
+
+    class Tablename:
+        @classproperty
+        def __tablename__(cls):
+            if (has_inherited_table(cls) and
+                Tablename not in cls.__bases__):
+                return None
+            return cls.__name__.lower()
+
+    class Person(Base,Tablename):
+        id = Column(Integer, primary_key=True)
+        discriminator = Column('type', String(50))
+        __mapper_args__ = {'polymorphic_on': discriminator}
+
+    class Engineer(Person):
+        # This is single table inheritance
+        __tablename__ = None
+        __mapper_args__ = {'polymorphic_identity': 'engineer'}
+        primary_language = Column(String(50))
+
+    class Manager(Person,Tablename):
+        # This is joined table inheritance
+        __mapper_args__ = {'polymorphic_identity': 'manager'}
+        id = Column(Integer, ForeignKey('person.id'), primary_key=True)
+        preferred_recreation = Column(String(50))
+
 Class Constructor
 =================
 
     cls._decl_class_registry = registry
     cls.metadata = metadata
     _as_declarative(cls, cls.__name__, cls.__dict__)
-    
+
+def has_inherited_table(cls):
+    """Given a class, return True if any of the classes it inherits from has a mapped
+    table, otherwise return False.
+    """
+    for class_ in cls.__mro__:
+        if getattr(class_,'__table__',None) is not None:
+            return True
+    return False
+
 def _as_declarative(cls, classname, dict_):
 
     # dict_ will be a dictproxy, which we can't write to, and we need to!
     dict_ = dict(dict_)
 
     column_copies = dict()
-    mixin_table_args = None
-    mapper_args = {}
-    table_args = None
+    potential_columns = dict()
     
-    def _is_mixin(klass):
-        return not _is_mapped_class(klass) and klass is not cls
+    mapper_args = {}
+    table_args = inherited_table_args = None
+    tablename = None
+    parent_columns = None
     
     for base in cls.__mro__:
-        if _is_mixin(base):
-            for name in dir(base):
+        if _is_mapped_class(base):
+            parent_columns = base.__table__.c.keys()
+        else:
+            for name,obj in vars(base).items():
                 if name == '__mapper_args__':
                     if not mapper_args:
                         mapper_args = cls.__mapper_args__
+                elif name == '__tablename__':
+                    if not tablename:
+                        tablename = cls.__tablename__
                 elif name == '__table_args__':
-                    if not table_args:
-                        table_args = mixin_table_args = cls.__table_args__
-                elif name == '__tablename__':
-                    if '__tablename__' not in dict_:
-                        dict_['__tablename__'] = cls.__tablename__
-                else:
-                    obj = getattr(base,name, None)
+                    if not table_args:                        
+                        table_args = cls.__table_args__
+                        if base is not cls:
+                            inherited_table_args = True
+                elif base is not cls:
                     if isinstance(obj, Column):
                         if obj.foreign_keys:
                             raise exceptions.InvalidRequestError(
                                 "Columns with foreign keys to other columns "
                                 "are not allowed on declarative mixins at this time."
                             )
-                        dict_[name]=column_copies[obj]=obj.copy()
+                        if name not in dict_:
+                            potential_columns[name]=column_copies[obj]=obj.copy()
                     elif isinstance(obj, RelationshipProperty):
                         raise exceptions.InvalidRequestError(
                                             "relationships are not allowed on "
                                             "declarative mixins at this time.")
-        elif base is cls:
-            if '__mapper_args__' in dict_:
-                mapper_args = cls.__mapper_args__
-            if '__table_args__' in dict_:
-                table_args = cls.__table_args__
-            if '__tablename__' in dict_:
-                dict_['__tablename__'] = cls.__tablename__
-                
+    # add columns gathered from mixins, but only if this class maps its
+    # own table or the column isn't already present on the inherited table
+    for k, v in potential_columns.items():
+        if tablename or k not in parent_columns:
+            dict_[k]=v
+    if inherited_table_args and not tablename:
+        table_args = None
+
     # make sure that column copies are used rather than the original columns
     # from any mixins
     for k, v in mapper_args.iteritems():
 
     table = None
     if '__table__' not in dict_:
-        if '__tablename__' in dict_:
-            # see above: if __tablename__ is a descriptor, this
-            # means we get the right value used!
-            tablename = cls.__tablename__
+        if tablename is not None:
             
             if isinstance(table_args, dict):
                 args, table_kw = (), table_args
         if table is None:
             # single table inheritance.
             # ensure no table args
-            if table_args is not None and table_args is not mixin_table_args:
+            if table_args:
                 raise exceptions.ArgumentError(
                     "Can't place __table_args__ on an inherited class with no table."
                     )

lib/sqlalchemy/sql/compiler.py

                         )
                 elif getattr(existing, '_is_crud', False):
                     raise exc.CompileError(
-                            "Bind parameter name '%s' is reserved "
-                            "for the VALUES or SET clause of this insert/update statement." 
-                            % bindparam.key
+                            "bindparam() name '%s' is reserved "
+                            "for automatic usage in the VALUES or SET clause of this "
+                            "insert/update statement.   Please use a " 
+                            "name other than column name when using bindparam() "
+                            "with insert() or update() (for example, 'b_%s')."
+                            % (bindparam.key, bindparam.key)
                         )
                     
         self.binds[bindparam.key] = self.binds[name] = bindparam
         bindparam._is_crud = True
         if col.key in self.binds:
             raise exc.CompileError(
-                    "Bind parameter name '%s' is reserved "
-                    "for the VALUES or SET clause of this insert/update statement." 
-                    % col.key
+                    "bindparam() name '%s' is reserved "
+                    "for automatic usage in the VALUES or SET clause of this "
+                    "insert/update statement.   Please use a " 
+                    "name other than column name when using bindparam() "
+                    "with insert() or update() (for example, 'b_%s')."
+                    % (col.key, col.key)
                 )
             
         self.binds[col.key] = bindparam
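
The reworded messages point at the workaround directly; a minimal sketch
of the rename they suggest, assuming an existing ``users`` table and an
open ``conn`` connection::

    from sqlalchemy import bindparam

    # "data" would collide with the bind name the compiler reserves for
    # the SET clause, so the WHERE criterion uses a prefixed bind name
    stmt = users.update().\
                where(users.c.data == bindparam('b_data')).\
                values(data='new value')
    conn.execute(stmt, b_data='old value')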

test/dialect/test_firebird.py

-from sqlalchemy.test.testing import eq_
+from sqlalchemy.test.testing import eq_, assert_raises
 from sqlalchemy import *
 from sqlalchemy.databases import firebird
 from sqlalchemy.exc import ProgrammingError
 class MiscTest(TestBase):
     __only_on__ = 'firebird'
 
+    @testing.provide_metadata
     def test_strlen(self):
         # On FB the length() function is implemented by an external
         # UDF, strlen().  Various SA tests fail because they pass a
         # the maximum string length the UDF was declared to accept).
         # This test checks that at least it works ok in other cases.
 
-        meta = MetaData(testing.db)
-        t = Table('t1', meta,
+        t = Table('t1', metadata,
             Column('id', Integer, Sequence('t1idseq'), primary_key=True),
             Column('name', String(10))
         )
-        meta.create_all()
-        try:
-            t.insert(values=dict(name='dante')).execute()
-            t.insert(values=dict(name='alighieri')).execute()
-            select([func.count(t.c.id)],func.length(t.c.name)==5).execute().first()[0] == 1
-        finally:
-            meta.drop_all()
+        metadata.create_all()
+        t.insert(values=dict(name='dante')).execute()
+        t.insert(values=dict(name='alighieri')).execute()
+        eq_(select([func.count(t.c.id)], func.length(t.c.name)==5).execute().first()[0], 1)
 
     def test_server_version_info(self):
         version = testing.db.dialect.server_version_info
         assert len(version) == 3, "Got strange version info: %s" % repr(version)
 
+    @testing.provide_metadata
+    def test_rowcount_flag(self):
+        engine = engines.testing_engine(options={'enable_rowcount':True})
+        assert engine.dialect.supports_sane_rowcount
+        metadata.bind = engine
+        t = Table('t1', metadata,
+            Column('data', String(10))
+        )
+        metadata.create_all()
+        r = t.insert().execute({'data':'d1'}, {'data':'d2'}, {'data': 'd3'})
+        r = t.update().where(t.c.data=='d2').values(data='d3').execute()
+        eq_(r.rowcount, 1)
+        r = t.delete().where(t.c.data == 'd3').execute()
+        eq_(r.rowcount, 2)
+        
+        r = t.delete().execution_options(enable_rowcount=False).execute()
+        eq_(r.rowcount, -1)
+        
+        engine = engines.testing_engine(options={'enable_rowcount':False})
+        assert not engine.dialect.supports_sane_rowcount
+        metadata.bind = engine
+        r = t.insert().execute({'data':'d1'}, {'data':'d2'}, {'data':'d3'})
+        r = t.update().where(t.c.data=='d2').values(data='d3').execute()
+        eq_(r.rowcount, -1)
+        r = t.delete().where(t.c.data == 'd3').execute()
+        eq_(r.rowcount, -1)
+        r = t.delete().execution_options(enable_rowcount=True).execute()
+        eq_(r.rowcount, 1)
+
     def test_percents_in_text(self):
         for expr, result in (
             (text("select '%' from rdb$database"), '%'),

test/engine/test_execute.py

-from sqlalchemy.test.testing import eq_
+from sqlalchemy.test.testing import eq_, assert_raises
 import re
 from sqlalchemy.interfaces import ConnectionProxy
 from sqlalchemy import MetaData, Integer, String, INT, VARCHAR, func, bindparam, select
-from sqlalchemy.test.schema import Table
-from sqlalchemy.test.schema import Column
+from sqlalchemy.test.schema import Table, Column
 import sqlalchemy as tsa
 from sqlalchemy.test import TestBase, testing, engines
 import logging
             "0x...%s" % hex(id(eng.pool))[-4:],
         )
         
-    
+class ResultProxyTest(TestBase):
+    def test_nontuple_row(self):
+        """ensure the C version of BaseRowProxy handles 
+        duck-type-dependent rows."""
+        
+        from sqlalchemy.engine import RowProxy
+
+        class MyList(object):
+            def __init__(self, l):
+                self.l = l
+
+            def __len__(self):
+                return len(self.l)
+
+            def __getitem__(self, i):
+                return list.__getitem__(self.l, i)
+
+        proxy = RowProxy(object(), MyList(['value']), [None], {'key': (None, 0), 0: (None, 0)})
+        eq_(list(proxy), ['value'])
+        eq_(proxy[0], 'value')
+        eq_(proxy['key'], 'value')
+
+    @testing.provide_metadata
+    def test_no_rowcount_on_selects_inserts(self):
+        """assert that rowcount is only called on deletes and updates.
+
+        This because cursor.rowcount can be expensive on some dialects
+        such as Firebird.
+
+        """
+
+        engine = engines.testing_engine()
+        metadata.bind = engine
+        
+        t = Table('t1', metadata,
+            Column('data', String(10))
+        )
+        metadata.create_all()
+
+        class BreakRowcountMixin(object):
+            @property
+            def rowcount(self):
+                assert False
+        
+        execution_ctx_cls = engine.dialect.execution_ctx_cls
+        engine.dialect.execution_ctx_cls = type("FakeCtx", 
+                                            (BreakRowcountMixin, 
+                                            execution_ctx_cls), 
+                                            {})
+
+        try:
+            r = t.insert().execute({'data':'d1'}, {'data':'d2'}, {'data': 'd3'})
+            eq_(
+                t.select().execute().fetchall(),
+                [('d1', ), ('d2',), ('d3', )]
+            )
+            assert_raises(AssertionError, t.update().execute, {'data':'d4'})
+            assert_raises(AssertionError, t.delete().execute)
+        finally:
+            engine.dialect.execution_ctx_cls = execution_ctx_cls
+        
 class ProxyConnectionTest(TestBase):
 
     @testing.fails_on('firebird', 'Data type unknown')

test/ext/test_declarative.py

         class Specific(General):
             __mapper_args__ = {'polymorphic_identity':'specific'}
 
+        assert Specific.__table__ is General.__table__
         eq_(General.__table__.kwargs,{'mysql_engine': 'InnoDB'})
-        eq_(Specific.__table__.kwargs,{'mysql_engine': 'InnoDB'})
     
     def test_table_args_overridden(self):
         
         assert col.table is not None
         
         eq_(MyModel.__mapper__.always_refresh,True)
+
+    def test_single_table_no_propagation(self):
+
+        class IdColumn:
+            id = Column(Integer, primary_key=True)
+
+        class Generic(Base, IdColumn):
+            __tablename__ = 'base'
+            discriminator = Column('type', String(50))
+            __mapper_args__= dict(polymorphic_on=discriminator)
+            value = Column(Integer())
+
+        class Specific(Generic):
+            __mapper_args__ = dict(polymorphic_identity='specific')
+
+        assert Specific.__table__ is Generic.__table__
+        eq_(Generic.__table__.c.keys(),['type', 'value', 'id'])
+        assert class_mapper(Specific).polymorphic_on is Generic.__table__.c.type
+        eq_(class_mapper(Specific).polymorphic_identity, 'specific')
+
+    def test_joined_table_propagation(self):
+
+        class CommonMixin:
+            
+            @classproperty
+            def __tablename__(cls):
+                return cls.__name__.lower()
+            
+            __table_args__ = {'mysql_engine':'InnoDB'}
+            
+            timestamp = Column(Integer) 
+            id = Column(Integer, primary_key=True)
+    
+        class Generic(Base, CommonMixin):
+            discriminator = Column('python_type', String(50))
+            __mapper_args__= dict(polymorphic_on=discriminator)
+
+        class Specific(Generic):
+            __mapper_args__ = dict(polymorphic_identity='specific')
+            id = Column(Integer, ForeignKey('generic.id'), primary_key=True)
+        eq_(Generic.__table__.name,'generic')
+        eq_(Specific.__table__.name,'specific')
+        eq_(Generic.__table__.c.keys(),['python_type', 'timestamp', 'id'])
+        eq_(Specific.__table__.c.keys(),['id', 'timestamp'])
+        eq_(Generic.__table__.kwargs,{'mysql_engine': 'InnoDB'})
+        eq_(Specific.__table__.kwargs,{'mysql_engine': 'InnoDB'})
+            
+    def test_some_propagation(self):
+        
+        class CommonMixin:
+            @classproperty
+            def __tablename__(cls):
+                return cls.__name__.lower()
+            __table_args__ = {'mysql_engine':'InnoDB'}
+            timestamp = Column(Integer) 
+
+        class BaseType(Base, CommonMixin):
+            discriminator = Column('type', String(50))
+            __mapper_args__= dict(polymorphic_on=discriminator)
+            id = Column(Integer, primary_key=True) 
+            value = Column(Integer())  
+
+        class Single(BaseType):
+            __tablename__ = None
+            __mapper_args__ = dict(polymorphic_identity='type1')
+
+        class Joined(BaseType):
+            __mapper_args__ = dict(polymorphic_identity='type2')
+            id = Column(Integer, ForeignKey('basetype.id'), primary_key=True)
+
+        eq_(BaseType.__table__.name,'basetype')
+        eq_(BaseType.__table__.c.keys(),['type', 'id', 'value', 'timestamp'])
+        eq_(BaseType.__table__.kwargs,{'mysql_engine': 'InnoDB'})
+
+        assert Single.__table__ is BaseType.__table__
+
+        eq_(Joined.__table__.name,'joined')
+        eq_(Joined.__table__.c.keys(),['id','timestamp'])
+        eq_(Joined.__table__.kwargs,{'mysql_engine': 'InnoDB'})
+            
+    def test_non_propagating_mixin(self):
+
+        class NoJoinedTableNameMixin:
+            @classproperty
+            def __tablename__(cls):
+                if decl.has_inherited_table(cls):
+                    return None
+                return cls.__name__.lower()
+
+        class BaseType(Base, NoJoinedTableNameMixin):
+            discriminator = Column('type', String(50))
+            __mapper_args__= dict(polymorphic_on=discriminator)
+            id = Column(Integer, primary_key=True) 
+            value = Column(Integer())  
+
+        class Specific(BaseType):
+            __mapper_args__ = dict(polymorphic_identity='specific')
+
+        eq_(BaseType.__table__.name,'basetype')
+        eq_(BaseType.__table__.c.keys(),['type', 'id', 'value'])
+
+        assert Specific.__table__ is BaseType.__table__
+        assert class_mapper(Specific).polymorphic_on is BaseType.__table__.c.type
+        eq_(class_mapper(Specific).polymorphic_identity, 'specific')
+
+    def test_non_propagating_mixin_used_for_joined(self):
+
+        class TableNameMixin:
+            @classproperty
+            def __tablename__(cls):
+                if (decl.has_inherited_table(cls) and
+                    TableNameMixin not in cls.__bases__):
+                    return None
+                return cls.__name__.lower()
+
+        class BaseType(Base, TableNameMixin):
+            discriminator = Column('type', String(50))
+            __mapper_args__= dict(polymorphic_on=discriminator)
+            id = Column(Integer, primary_key=True) 
+            value = Column(Integer())  
+
+        class Specific(BaseType, TableNameMixin):
+            __mapper_args__ = dict(polymorphic_identity='specific')
+            id = Column(Integer, ForeignKey('basetype.id'), primary_key=True)
+            
+        eq_(BaseType.__table__.name,'basetype')
+        eq_(BaseType.__table__.c.keys(),['type', 'id', 'value'])
+        eq_(Specific.__table__.name,'specific')
+        eq_(Specific.__table__.c.keys(),['id'])
+
+    def test_single_back_propagate(self):
+
+        class ColumnMixin:
+            timestamp = Column(Integer) 
+
+        class BaseType(Base):
+            __tablename__ = 'foo'
+            discriminator = Column('type', String(50))
+            __mapper_args__= dict(polymorphic_on=discriminator)
+            id = Column(Integer, primary_key=True) 
+
+        class Specific(BaseType,ColumnMixin):
+            __mapper_args__ = dict(polymorphic_identity='specific')
+            
+        eq_(BaseType.__table__.c.keys(),['type', 'id', 'timestamp'])