Commits

Mike Bayer committed 61acefe Merge

Files changed (6)

alembic/autogenerate.py

 from alembic import util
 from sqlalchemy.engine.reflection import Inspector
 from sqlalchemy.util import OrderedSet
-from sqlalchemy import schema, types as sqltypes
+from sqlalchemy import schema as sa_schema, types as sqltypes
 import re
 
 import logging
             Table(u'bar', MetaData(bind=None),
                 Column(u'data', VARCHAR(), table=<bar>), schema=None)),
           ( 'add_column',
+            None,
             'foo',
             Column('data', Integer(), table=<foo>)),
           ( 'remove_column',
+            None,
             'foo',
             Column(u'old_data', VARCHAR(), table=None)),
           [ ( 'modify_nullable',
+              None,
               'foo',
               u'x',
               { 'existing_server_default': None,
                             include_symbol=None):
     inspector = Inspector.from_engine(connection)
     # TODO: not hardcode alembic_version here ?
-    conn_table_names = set(inspector.get_table_names()).\
-                            difference(['alembic_version'])
-
-
-    metadata_table_names = OrderedSet([table.name
+    conn_table_names = set()
+    schemas = inspector.get_schema_names() or [None]
+    for s in schemas:
+        if s == 'information_schema':
+            # ignore postgres own information_schema
+            continue
+        tables = set(inspector.get_table_names(schema=s)).\
+                difference(['alembic_version'])
+        conn_table_names.update(zip([s] * len(tables), tables))
+
+    metadata_table_names = OrderedSet([(table.schema, table.name)
                                 for table in metadata.sorted_tables])
 
     if include_symbol:
-        conn_table_names = set(name for name in conn_table_names
-                            if include_symbol(name))
-        metadata_table_names = OrderedSet(name for name in metadata_table_names
-                                if include_symbol(name))
+        conn_table_names = set((s, name)
+                                for s, name in conn_table_names
+                                if include_symbol(name, schema=s))
+        metadata_table_names = OrderedSet((s, name)
+                                for s, name in metadata_table_names
+                                if include_symbol(name, schema=s))
 
     _compare_tables(conn_table_names, metadata_table_names,
                     inspector, metadata, diffs, autogen_context)
 
 def _compare_tables(conn_table_names, metadata_table_names,
                     inspector, metadata, diffs, autogen_context):
-    for tname in metadata_table_names.difference(conn_table_names):
-        diffs.append(("add_table", metadata.tables[tname]))
-        log.info("Detected added table %r", tname)
-
-    removal_metadata = schema.MetaData()
-    for tname in conn_table_names.difference(metadata_table_names):
-        exists = tname in removal_metadata.tables
-        t = schema.Table(tname, removal_metadata)
+    for s, tname in metadata_table_names.difference(conn_table_names):
+        name = '%s.%s' % (s, tname) if s else tname
+        diffs.append(("add_table", metadata.tables[name]))
+        log.info("Detected added table %r", name)
+
+    removal_metadata = sa_schema.MetaData()
+    for s, tname in conn_table_names.difference(metadata_table_names):
+        name = '%s.%s' % (s, tname) if s else tname
+        exists = name in removal_metadata.tables
+        t = sa_schema.Table(tname, removal_metadata, schema=s)
         if not exists:
             inspector.reflecttable(t, None)
         diffs.append(("remove_table", t))
-        log.info("Detected removed table %r", tname)
+        log.info("Detected removed table %r", name)
 
     existing_tables = conn_table_names.intersection(metadata_table_names)
 
     conn_column_info = dict(
-        (tname,
+        ((s, tname),
             dict(
                 (rec["name"], rec)
-                for rec in inspector.get_columns(tname)
+                for rec in inspector.get_columns(tname, schema=s)
             )
         )
-        for tname in existing_tables
+        for s, tname in existing_tables
     )
 
-    for tname in sorted(existing_tables):
-        _compare_columns(tname,
-                conn_column_info[tname],
-                metadata.tables[tname],
+    for s, tname in sorted(existing_tables):
+        name = '%s.%s' % (s, tname) if s else tname
+        _compare_columns(s, tname,
+                conn_column_info[(s, tname)],
+                metadata.tables[name],
                 diffs, autogen_context)
 
     # TODO:
 ###################################################
 # element comparison
 
-def _compare_columns(tname, conn_table, metadata_table,
+def _compare_columns(schema, tname, conn_table, metadata_table,
                                 diffs, autogen_context):
+    name = '%s.%s' % (schema, tname) if schema else tname
     metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
     conn_col_names = set(conn_table)
     metadata_col_names = set(metadata_cols_by_name)
 
     for cname in metadata_col_names.difference(conn_col_names):
         diffs.append(
-            ("add_column", tname, metadata_cols_by_name[cname])
+            ("add_column", schema, tname, metadata_cols_by_name[cname])
         )
-        log.info("Detected added column '%s.%s'", tname, cname)
+        log.info("Detected added column '%s.%s'", name, cname)
 
     for cname in conn_col_names.difference(metadata_col_names):
         diffs.append(
-            ("remove_column", tname, schema.Column(
+            ("remove_column", schema, tname, sa_schema.Column(
                 cname,
                 conn_table[cname]['type'],
                 nullable=conn_table[cname]['nullable'],
                 server_default=conn_table[cname]['default']
             ))
         )
-        log.info("Detected removed column '%s.%s'", tname, cname)
+        log.info("Detected removed column '%s.%s'", name, cname)
 
     for colname in metadata_col_names.intersection(conn_col_names):
         metadata_col = metadata_table.c[colname]
         conn_col = conn_table[colname]
         col_diff = []
-        _compare_type(tname, colname,
+        _compare_type(schema, tname, colname,
             conn_col,
             metadata_col,
             col_diff, autogen_context
         )
-        _compare_nullable(tname, colname,
+        _compare_nullable(schema, tname, colname,
             conn_col,
             metadata_col.nullable,
             col_diff, autogen_context
         )
-        _compare_server_default(tname, colname,
+        _compare_server_default(schema, tname, colname,
             conn_col,
             metadata_col,
             col_diff, autogen_context
         if col_diff:
             diffs.append(col_diff)
 
-def _compare_nullable(tname, cname, conn_col,
+def _compare_nullable(schema, tname, cname, conn_col,
                             metadata_col_nullable, diffs,
                             autogen_context):
     conn_col_nullable = conn_col['nullable']
     if conn_col_nullable is not metadata_col_nullable:
         diffs.append(
-            ("modify_nullable", tname, cname,
+            ("modify_nullable", schema, tname, cname,
                 {
                     "existing_type": conn_col['type'],
                     "existing_server_default": conn_col['default'],
             cname
         )
 
-def _compare_type(tname, cname, conn_col,
+def _compare_type(schema, tname, cname, conn_col,
                             metadata_col, diffs,
                             autogen_context):
 
     if isdiff:
 
         diffs.append(
-            ("modify_type", tname, cname,
+            ("modify_type", schema, tname, cname,
                     {
                         "existing_nullable": conn_col['nullable'],
                         "existing_server_default": conn_col['default'],
             conn_type, metadata_type, tname, cname
         )
 
-def _compare_server_default(tname, cname, conn_col, metadata_col,
+def _compare_server_default(schema, tname, cname, conn_col, metadata_col,
                                 diffs, autogen_context):
 
     metadata_default = metadata_col.server_default
     if isdiff:
         conn_col_default = conn_col['default']
         diffs.append(
-            ("modify_default", tname, cname,
+            ("modify_default", schema, tname, cname,
                 {
                     "existing_nullable": conn_col['nullable'],
                     "existing_type": conn_col['type'],
         return cmd_callables[0](*cmd_args)
 
 def _invoke_modify_command(updown, args, autogen_context):
-    tname, cname = args[0][1:3]
+    sname, tname, cname = args[0][1:4]
     kw = {}
 
     _arg_struct = {
         "modify_default": ("existing_server_default", "server_default"),
     }
     for diff in args:
-        diff_kw = diff[3]
+        diff_kw = diff[4]
         for arg in ("existing_type", \
                 "existing_nullable", \
                 "existing_server_default"):
         kw.pop("existing_nullable", None)
     if "server_default" in kw:
         kw.pop("existing_server_default", None)
-    return _modify_col(tname, cname, autogen_context, **kw)
+    return _modify_col(tname, cname, autogen_context, schema=sname, **kw)
 
 ###################################################
 # render python
 
 def _add_table(table, autogen_context):
-    return "%(prefix)screate_table(%(tablename)r,\n%(args)s\n)" % {
+    text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
         'tablename': table.name,
         'prefix': _alembic_autogenerate_prefix(autogen_context),
         'args': ',\n'.join(
                     table.constraints]
                 if rcons is not None
             ])
-        ),
+        )
     }
+    if table.schema:
+        text += ",\nschema=%r" % table.schema
+    text += "\n)"
+    return text
 
 def _drop_table(table, autogen_context):
-    return "%(prefix)sdrop_table(%(tname)r)" % {
+    text = "%(prefix)sdrop_table(%(tname)r" % {
             "prefix": _alembic_autogenerate_prefix(autogen_context),
             "tname": table.name
         }
+    if table.schema:
+        text += ", schema=%r" % table.schema
+    text += ")"
+    return text
 
-def _add_column(tname, column, autogen_context):
-    return "%(prefix)sadd_column(%(tname)r, %(column)s)" % {
+def _add_column(schema, tname, column, autogen_context):
+    text = "%(prefix)sadd_column(%(tname)r, %(column)s" % {
             "prefix": _alembic_autogenerate_prefix(autogen_context),
             "tname": tname,
             "column": _render_column(column, autogen_context)
             }
+    if schema:
+        text += ", schema=%r" % schema
+    text += ")"
+    return text
 
-def _drop_column(tname, column, autogen_context):
-    return "%(prefix)sdrop_column(%(tname)r, %(cname)r)" % {
+def _drop_column(schema, tname, column, autogen_context):
+    text = "%(prefix)sdrop_column(%(tname)r, %(cname)r" % {
             "prefix": _alembic_autogenerate_prefix(autogen_context),
             "tname": tname,
             "cname": column.name
             }
+    if schema:
+        text += ", schema=%r" % schema
+    text += ")"
+    return text
 
 def _modify_col(tname, cname,
                 autogen_context,
                 nullable=None,
                 existing_type=None,
                 existing_nullable=None,
-                existing_server_default=False):
+                existing_server_default=False,
+                schema=None):
     sqla_prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
     indent = " " * 11
     text = "%(prefix)salter_column(%(tname)r, %(cname)r" % {
                             existing_server_default,
                             autogen_context),
                     )
+    if schema:
+        text += ",\n%sschema=%r" % (indent, schema)
     text += ")"
     return text
 
     }
 
 def _render_server_default(default, autogen_context):
-    if isinstance(default, schema.DefaultClause):
+    if isinstance(default, sa_schema.DefaultClause):
         if isinstance(default.arg, basestring):
             default = default.arg
         else:
         "prefix": _sqlalchemy_autogenerate_prefix(autogen_context)
         }
 _constraint_renderers = {
-    schema.PrimaryKeyConstraint: _render_primary_key,
-    schema.ForeignKeyConstraint: _render_foreign_key,
-    schema.UniqueConstraint: _render_unique_constraint,
-    schema.CheckConstraint: _render_check_constraint
+    sa_schema.PrimaryKeyConstraint: _render_primary_key,
+    sa_schema.ForeignKeyConstraint: _render_foreign_key,
+    sa_schema.UniqueConstraint: _render_unique_constraint,
+    sa_schema.CheckConstraint: _render_check_constraint
 }
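
Since autogenerate now keys tables by (schema, name) tuples, the include_symbol hook above is called with the schema as a keyword argument. A minimal sketch of a schema-aware callable for env.py, assuming a hypothetical application schema named "foo":

    def include_symbol(tablename, schema=None):
        # consider the default schema plus the illustrative "foo" schema;
        # objects in any other schema are ignored by autogenerate
        return schema in (None, "foo")
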

alembic/ddl/impl.py

                                 existing_nullable=existing_nullable,
                             ))
 
-    def add_column(self, table_name, column):
-        self._exec(base.AddColumn(table_name, column))
+    def add_column(self, table_name, column, schema=None):
+        self._exec(base.AddColumn(table_name, column, schema=schema))
 
-    def drop_column(self, table_name, column, **kw):
-        self._exec(base.DropColumn(table_name, column))
+    def drop_column(self, table_name, column, schema=None, **kw):
+        self._exec(base.DropColumn(table_name, column, schema=schema))
 
     def add_constraint(self, const):
         if const._create_rule is None or \

alembic/ddl/postgresql.py

 
         conn_col_default = inspector_column['default']
 
+        if None in (conn_col_default, rendered_metadata_default):
+            return conn_col_default != rendered_metadata_default
+
         if metadata_column.type._type_affinity is not sqltypes.String:
             rendered_metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default)
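
The added guard returns before the string-normalization step whenever either side has no default at all; previously a None rendered_metadata_default could reach the re.sub() call above and raise a TypeError. A comment-style illustration of the guard in isolation (the default values are hypothetical):

    # conn_col_default=None, rendered_metadata_default="'0'::numeric"  -> difference reported
    # conn_col_default="'0'::numeric", rendered_metadata_default=None  -> difference reported
    # conn_col_default=None, rendered_metadata_default=None            -> no difference
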
 

alembic/operations.py

 from alembic import util
 from alembic.ddl import impl
 from sqlalchemy.types import NULLTYPE, Integer
-from sqlalchemy import schema, sql
+from sqlalchemy import schema as sa_schema, sql
 from contextlib import contextmanager
 import alembic
 
 
     def _foreign_key_constraint(self, name, source, referent,
                                     local_cols, remote_cols,
-                                    onupdate=None, ondelete=None):
-        m = schema.MetaData()
+                                    onupdate=None, ondelete=None,
+                                    source_schema=None, referent_schema=None):
+        m = sa_schema.MetaData()
         if source == referent:
             t1_cols = local_cols + remote_cols
         else:
             t1_cols = local_cols
-            schema.Table(referent, m,
-                *[schema.Column(n, NULLTYPE) for n in remote_cols])
-
-        t1 = schema.Table(source, m,
-                *[schema.Column(n, NULLTYPE) for n in t1_cols])
-
-        f = schema.ForeignKeyConstraint(local_cols,
-                                            ["%s.%s" % (referent, n)
+            sa_schema.Table(referent, m,
+                    *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
+                    schema=referent_schema)
+
+        t1 = sa_schema.Table(source, m,
+                *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
+                schema=source_schema)
+
+        tname = "%s.%s" % (referent_schema, referent) if referent_schema \
+                else referent
+        f = sa_schema.ForeignKeyConstraint(local_cols,
+                                            ["%s.%s" % (tname, n)
                                             for n in remote_cols],
                                             name=name,
                                             onupdate=onupdate,
 
         return f
 
-    def _unique_constraint(self, name, source, local_cols, **kw):
-        t = schema.Table(source, schema.MetaData(),
-                    *[schema.Column(n, NULLTYPE) for n in local_cols])
+    def _unique_constraint(self, name, source, local_cols, schema=None, **kw):
+        t = sa_schema.Table(source, sa_schema.MetaData(),
+                    *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
+                    schema=schema)
         kw['name'] = name
-        uq = schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
+        uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
         # TODO: need event tests to ensure the event
         # is fired off here
         t.append_constraint(uq)
         return uq
 
-    def _check_constraint(self, name, source, condition, **kw):
-        t = schema.Table(source, schema.MetaData(),
-                    schema.Column('x', Integer))
-        ck = schema.CheckConstraint(condition, name=name, **kw)
+    def _check_constraint(self, name, source, condition, schema=None, **kw):
+        t = sa_schema.Table(source, sa_schema.MetaData(),
+                    sa_schema.Column('x', Integer), schema=schema)
+        ck = sa_schema.CheckConstraint(condition, name=name, **kw)
         t.append_constraint(ck)
         return ck
 
     def _table(self, name, *columns, **kw):
-        m = schema.MetaData()
-        t = schema.Table(name, m, *columns, **kw)
+        m = sa_schema.MetaData()
+        t = sa_schema.Table(name, m, *columns, **kw)
         for f in t.foreign_keys:
             self._ensure_table_for_fk(m, f)
         return t
 
     def _column(self, name, type_, **kw):
-        return schema.Column(name, type_, **kw)
+        return sa_schema.Column(name, type_, **kw)
 
-    def _index(self, name, tablename, columns, **kw):
-        t = schema.Table(tablename or 'no_table', schema.MetaData(),
-            *[schema.Column(n, NULLTYPE) for n in columns]
+    def _index(self, name, tablename, columns, schema=None, **kw):
+        t = sa_schema.Table(tablename or 'no_table', sa_schema.MetaData(),
+            *[sa_schema.Column(n, NULLTYPE) for n in columns],
+            schema=schema
         )
-        return schema.Index(name, *list(t.c), **kw)
+        return sa_schema.Index(name, *list(t.c), **kw)
+
+    def _parse_table_key(self, table_key):
+        if '.' in table_key:
+            tokens = table_key.split('.')
+            sname = ".".join(tokens[0:-1])
+            tname = tokens[-1]
+        else:
+            tname = table_key
+            sname = None
+        return (sname, tname)
 
     def _ensure_table_for_fk(self, metadata, fk):
         """create a placeholder Table object for the referent of a
         """
         if isinstance(fk._colspec, basestring):
             table_key, cname = fk._colspec.rsplit('.', 1)
-            if '.' in table_key:
-                tokens = table_key.split('.')
-                sname = ".".join(tokens[0:-1])
-                tname = tokens[-1]
-            else:
-                tname = table_key
-                sname = None
+            sname, tname = self._parse_table_key(table_key)
             if table_key not in metadata.tables:
-                rel_t = schema.Table(tname, metadata, schema=sname)
+                rel_t = sa_schema.Table(tname, metadata, schema=sname)
             else:
                 rel_t = metadata.tables[table_key]
             if cname not in rel_t.c:
-                rel_t.append_column(schema.Column(cname, NULLTYPE))
+                rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
 
     def get_context(self):
         """Return the :class:`.MigrationContext` object that's
                         existing_type=None,
                         existing_server_default=False,
                         existing_nullable=None,
-                        existing_autoincrement=None
+                        existing_autoincrement=None,
+                        schema=None
     ):
         """Issue an "alter column" instruction using the
         current migration context.
         :param existing_autoincrement: Optional; the existing autoincrement
          of the column.  Used for MySQL's system of altering a column
          that specifies ``AUTO_INCREMENT``.
+        :param schema: Optional, name of schema to operate within.
         """
 
         compiler = self.impl.dialect.statement_compiler(
                             None
                         )
         def _count_constraint(constraint):
-            return not isinstance(constraint, schema.PrimaryKeyConstraint) and \
+            return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and \
                 (not constraint._create_rule or
                     constraint._create_rule(compiler))
 
         if existing_type and type_:
             t = self._table(table_name,
-                        schema.Column(column_name, existing_type)
+                        sa_schema.Column(column_name, existing_type),
+                        schema=schema
                     )
             for constraint in t.constraints:
                 if _count_constraint(constraint):
             server_default=server_default,
             name=name,
             type_=type_,
+            schema=schema,
             autoincrement=autoincrement,
             existing_type=existing_type,
             existing_server_default=existing_server_default,
         )
 
         if type_:
-            t = self._table(table_name, schema.Column(column_name, type_))
+            t = self._table(table_name,
+                        sa_schema.Column(column_name, type_),
+                        schema=schema
+                    )
             for constraint in t.constraints:
                 if _count_constraint(constraint):
                     self.impl.add_constraint(constraint)
 
-    def add_column(self, table_name, column):
+    def add_column(self, table_name, column, schema=None):
         """Issue an "add column" instruction using the current
         migration context.
 
         :param table_name: String name of the parent table.
         :param column: a :class:`sqlalchemy.schema.Column` object
          representing the new column.
+        :param schema: Optional, name of schema to operate within.
 
         """
 
-        t = self._table(table_name, column)
+        t = self._table(table_name, column, schema=schema)
         self.impl.add_column(
             table_name,
-            column
+            column,
+            schema=schema
         )
         for constraint in t.constraints:
-            if not isinstance(constraint, schema.PrimaryKeyConstraint):
+            if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
                 self.impl.add_constraint(constraint)
 
     def drop_column(self, table_name, column_name, **kw):
 
 
     def create_foreign_key(self, name, source, referent, local_cols,
-                           remote_cols, onupdate=None, ondelete=None):
+                           remote_cols, onupdate=None, ondelete=None,
+                           source_schema=None, referent_schema=None):
         """Issue a "create foreign key" instruction using the
         current migration context.
 
          ``name`` here can be ``None``, as the event listener will
          apply the name to the constraint object when it is associated
          with the table.
-        :param source: String name of the source table.  Currently
-         there is no support for dotted schema names.
-        :param referent: String name of the destination table. Currently
-         there is no support for dotted schema names.
+        :param source: String name of the source table.
+        :param referent: String name of the destination table.
         :param local_cols: a list of string column names in the
          source table.
         :param remote_cols: a list of string column names in the
         :param ondelete: Optional string. If set, emit ON DELETE <value> when
          issuing DDL for this constraint. Typical values include CASCADE,
          DELETE and RESTRICT.
+        :param source_schema: Optional schema name of the source table.
+        :param referent_schema: Optional schema name of the destination table.
 
         """
 
         self.impl.add_constraint(
                     self._foreign_key_constraint(name, source, referent,
                             local_cols, remote_cols,
-                            onupdate=onupdate, ondelete=ondelete)
+                            onupdate=onupdate, ondelete=ondelete,
+                            source_schema=source_schema,
+                            referent_schema=referent_schema)
                 )
 
-    def create_unique_constraint(self, name, source, local_cols, **kw):
+    def create_unique_constraint(self, name, source, local_cols,
+                                 schema=None, **kw):
         """Issue a "create unique constraint" instruction using the
         current migration context.
 
          ``name`` here can be ``None``, as the event listener will
          apply the name to the constraint object when it is associated
          with the table.
-        :param source: String name of the source table.  Currently
-         there is no support for dotted schema names.
+        :param source: String name of the source table.
         :param local_cols: a list of string column names in the
          source table.
         :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.
         :param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.
+        :param schema: Optional schema name of the source table.
 
         """
 
         self.impl.add_constraint(
                     self._unique_constraint(name, source, local_cols,
-                        **kw)
+                        schema=schema, **kw)
                 )
 
-    def create_check_constraint(self, name, source, condition, **kw):
+    def create_check_constraint(self, name, source, condition,
+                                schema=None, **kw):
         """Issue a "create check constraint" instruction using the
         current migration context.
 
          ``name`` here can be ``None``, as the event listener will
          apply the name to the constraint object when it is associated
          with the table.
-        :param source: String name of the source table.  Currently
-         there is no support for dotted schema names.
+        :param source: String name of the source table.
         :param condition: SQL expression that's the condition of the constraint.
          Can be a string or SQLAlchemy expression language structure.
         :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
          issuing DDL for this constraint.
         :param initially: optional string. If set, emit INITIALLY <value> when issuing DDL
          for this constraint.
+        :param schema: Optional schema name of the source table.
 
         """
         self.impl.add_constraint(
-            self._check_constraint(name, source, condition, **kw)
+            self._check_constraint(name, source, condition, schema=schema, **kw)
         )
 
     def create_table(self, name, *columns, **kw):
             self._table(name, *columns, **kw)
         )
 
-    def drop_table(self, name):
+    def drop_table(self, name, **kw):
         """Issue a "drop table" instruction using the current
         migration context.
 
 
             drop_table("accounts")
 
+        :param name: Name of the table
+        :param \**kw: Other keyword arguments are passed to the underlying
+         :class:`.Table` object created for the command.
+
         """
         self.impl.drop_table(
-            self._table(name)
+            self._table(name, **kw)
         )
 
-    def create_index(self, name, tablename, *columns, **kw):
+    def create_index(self, name, tablename, columns, schema=None, **kw):
         """Issue a "create index" instruction using the current
         migration context.
 
             from alembic import op
             op.create_index('ik_test', 't1', ['foo', 'bar'])
 
+        :param name: name of the index.
+        :param tablename: name of the owning table.
+        :param columns: a list of string column names in the
+         table.
+        :param schema: Optional, name of schema to operate within.
+
         """
 
         self.impl.create_index(
-            self._index(name, tablename, *columns, **kw)
+            self._index(name, tablename, columns, schema=schema, **kw)
         )
 
-    def drop_index(self, name, tablename=None):
+    def drop_index(self, name, tablename=None, schema=None):
         """Issue a "drop index" instruction using the current
         migration context.
 
 
             drop_index("accounts")
 
+        :param name: name of the index.
         :param tablename: name of the owning table.  Some
          backends such as Microsoft SQL Server require this.
+        :param schema: Optional, name of schema to operate within.
 
         """
         # need a dummy column name here since SQLAlchemy
         # 0.7.6 and further raises on Index with no columns
-        self.impl.drop_index(self._index(name, tablename, ['x']))
+        self.impl.drop_index(
+            self._index(name, tablename, ['x'], schema=schema)
+        )
 
-    def drop_constraint(self, name, tablename, type=None):
+    def drop_constraint(self, name, tablename, type=None, schema=None):
         """Drop a constraint of the given name, typically via DROP CONSTRAINT.
 
         :param name: name of the constraint.
         .. versionadded:: 0.3.6 'primary' qualifier to enable
            dropping of MySQL primary key constraints.
 
+        :param schema: Optional, name of schema to operate within.
+
         """
-        t = self._table(tablename)
+        t = self._table(tablename, schema=schema)
         types = {
-            'foreignkey':lambda name:schema.ForeignKeyConstraint(
+            'foreignkey':lambda name:sa_schema.ForeignKeyConstraint(
                                 [], [], name=name),
-            'primary':schema.PrimaryKeyConstraint,
-            'unique':schema.UniqueConstraint,
-            'check':lambda name:schema.CheckConstraint("", name=name),
-            None:schema.Constraint
+            'primary':sa_schema.PrimaryKeyConstraint,
+            'unique':sa_schema.UniqueConstraint,
+            'check':lambda name:sa_schema.CheckConstraint("", name=name),
+            None:sa_schema.Constraint
         }
         try:
             const = types[type]
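
Taken together, the operations above now accept an optional schema (or source_schema/referent_schema) argument that is threaded through to the underlying Table constructs. A brief usage sketch for a migration script; the table, column, and schema names here are illustrative and not part of this commit:

    from alembic import op
    import sqlalchemy as sa

    def upgrade():
        op.add_column('account',
                sa.Column('last_seen', sa.DateTime(), nullable=True),
                schema='billing')
        op.create_index('ix_account_last_seen', 'account', ['last_seen'],
                schema='billing')
        op.alter_column('account', 'name',
                existing_type=sa.VARCHAR(length=50),
                nullable=False,
                schema='billing')
        op.drop_table('account_audit', schema='billing')
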

tests/test_autogenerate.py

 import sys
 py3k = sys.version_info >= (3, )
 
-def _model_one():
-    m = MetaData()
+def _model_one(schema=None):
+    m = MetaData(schema=schema)
 
     Table('user', m,
         Column('id', Integer, primary_key=True),
 
     return m
 
-def _model_two():
-    m = MetaData()
+def _model_two(schema=None):
+    m = MetaData(schema=schema)
 
     Table('user', m,
         Column('id', Integer, primary_key=True),
         template_args = {}
         autogenerate._produce_migration_diffs(self.context,
             template_args, set(),
-            include_symbol=lambda name: name == 'sometable')
+            include_symbol=lambda name, schema=None: name == 'sometable')
         eq_(
             re.sub(r"u'", "'", template_args['upgrades']),
             "### commands auto generated by Alembic - please adjust! ###\n"
 
 
 
+class AutogenerateDiffTestWSchema(AutogenTest, TestCase):
+
+    @classmethod
+    def _get_bind(cls):
+        return db_for_dialect('postgresql')
+
+    @classmethod
+    def _get_db_schema(cls):
+        return _model_one(schema='foo')
+
+    @classmethod
+    def _get_model_schema(cls):
+        return _model_two(schema='foo')
+
+    def test_diffs(self):
+        """test generation of diff rules"""
+
+        metadata = self.m2
+        connection = self.context.bind
+        diffs = []
+        autogenerate._produce_net_changes(connection, metadata, diffs,
+                                          self.autogen_context)
+
+        eq_(
+            diffs[0],
+            ('add_table', metadata.tables['foo.item'])
+        )
+
+        eq_(diffs[1][0], 'remove_table')
+        eq_(diffs[1][1].name, "extra")
+
+        eq_(diffs[2][0], "add_column")
+        eq_(diffs[2][1], "foo")
+        eq_(diffs[2][2], "address")
+        eq_(diffs[2][3], metadata.tables['foo.address'].c.street)
+
+        eq_(diffs[3][0], "add_column")
+        eq_(diffs[3][1], "foo")
+        eq_(diffs[3][2], "order")
+        eq_(diffs[3][3], metadata.tables['foo.order'].c.user_id)
+
+        eq_(diffs[4][0][0], "modify_type")
+        eq_(diffs[4][0][1], "foo")
+        eq_(diffs[4][0][2], "order")
+        eq_(diffs[4][0][3], "amount")
+        eq_(repr(diffs[4][0][5]), "NUMERIC(precision=8, scale=2)")
+        eq_(repr(diffs[4][0][6]), "Numeric(precision=10, scale=2)")
+
+
+        eq_(diffs[5][0], 'remove_column')
+        eq_(diffs[5][3].name, 'pw')
+
+        eq_(diffs[6][0][0], "modify_default")
+        eq_(diffs[6][0][1], "foo")
+        eq_(diffs[6][0][2], "user")
+        eq_(diffs[6][0][3], "a1")
+        eq_(diffs[6][0][6].arg, "x")
+
+        eq_(diffs[7][0][0], 'modify_nullable')
+        eq_(diffs[7][0][5], True)
+        eq_(diffs[7][0][6], False)
+
+    def test_render_nothing(self):
+        context = MigrationContext.configure(
+            connection = self.bind.connect(),
+            opts = {
+                'compare_type' : True,
+                'compare_server_default' : True,
+                'target_metadata' : self.m1,
+                'upgrade_token':"upgrades",
+                'downgrade_token':"downgrades",
+                'alembic_module_prefix': 'op.',
+                'sqlalchemy_module_prefix': 'sa.',
+            }
+        )
+        template_args = {}
+        autogenerate._produce_migration_diffs(context, template_args, set())
+        eq_(re.sub(r"u'", "'", template_args['upgrades']),
+"""### commands auto generated by Alembic - please adjust! ###
+    pass
+    ### end Alembic commands ###""")
+        eq_(re.sub(r"u'", "'", template_args['downgrades']),
+"""### commands auto generated by Alembic - please adjust! ###
+    pass
+    ### end Alembic commands ###""")
+
+    def test_render_diffs(self):
+        """test a full render including indentation"""
+
+        template_args = {}
+        autogenerate._produce_migration_diffs(self.context, template_args, set())
+        eq_(re.sub(r"u'", "'", template_args['upgrades']),
+"""### commands auto generated by Alembic - please adjust! ###
+    op.create_table('item',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('description', sa.String(length=100), nullable=True),
+    sa.Column('order_id', sa.Integer(), nullable=True),
+    sa.CheckConstraint('len(description) > 5'),
+    sa.ForeignKeyConstraint(['order_id'], ['foo.order.order_id'], ),
+    sa.PrimaryKeyConstraint('id'),
+    schema='foo'
+    )
+    op.drop_table('extra', schema='foo')
+    op.add_column('address', sa.Column('street', sa.String(length=50), nullable=True), schema='foo')
+    op.add_column('order', sa.Column('user_id', sa.Integer(), nullable=True), schema='foo')
+    op.alter_column('order', 'amount',
+               existing_type=sa.NUMERIC(precision=8, scale=2),
+               type_=sa.Numeric(precision=10, scale=2),
+               nullable=True,
+               existing_server_default='0::numeric',
+               schema='foo')
+    op.drop_column('user', 'pw', schema='foo')
+    op.alter_column('user', 'a1',
+               existing_type=sa.TEXT(),
+               server_default='x',
+               existing_nullable=True,
+               schema='foo')
+    op.alter_column('user', 'name',
+               existing_type=sa.VARCHAR(length=50),
+               nullable=False,
+               schema='foo')
+    ### end Alembic commands ###""")
+        eq_(re.sub(r"u'", "'", template_args['downgrades']),
+"""### commands auto generated by Alembic - please adjust! ###
+    op.alter_column('user', 'name',
+               existing_type=sa.VARCHAR(length=50),
+               nullable=True,
+               schema='foo')
+    op.alter_column('user', 'a1',
+               existing_type=sa.TEXT(),
+               server_default=None,
+               existing_nullable=True,
+               schema='foo')
+    op.add_column('user', sa.Column('pw', sa.VARCHAR(length=50), nullable=True), schema='foo')
+    op.alter_column('order', 'amount',
+               existing_type=sa.Numeric(precision=10, scale=2),
+               type_=sa.NUMERIC(precision=8, scale=2),
+               nullable=False,
+               existing_server_default='0::numeric',
+               schema='foo')
+    op.drop_column('order', 'user_id', schema='foo')
+    op.drop_column('address', 'street', schema='foo')
+    op.create_table('extra',
+    sa.Column('x', sa.CHAR(length=1), nullable=True),
+    sa.Column('uid', sa.INTEGER(), nullable=True),
+    sa.ForeignKeyConstraint(['uid'], ['foo.user.id'], ),
+    sa.PrimaryKeyConstraint(),
+    schema='foo'
+    )
+    op.drop_table('item', schema='foo')
+    ### end Alembic commands ###""")
+
+
 class AutogenerateDiffTest(AutogenTest, TestCase):
     @classmethod
     def _get_db_schema(cls):
         eq_(diffs[1][1].name, "extra")
 
         eq_(diffs[2][0], "add_column")
-        eq_(diffs[2][1], "address")
-        eq_(diffs[2][2], metadata.tables['address'].c.street)
+        eq_(diffs[2][1], None)
+        eq_(diffs[2][2], "address")
+        eq_(diffs[2][3], metadata.tables['address'].c.street)
 
         eq_(diffs[3][0], "add_column")
-        eq_(diffs[3][1], "order")
-        eq_(diffs[3][2], metadata.tables['order'].c.user_id)
+        eq_(diffs[3][1], None)
+        eq_(diffs[3][2], "order")
+        eq_(diffs[3][3], metadata.tables['order'].c.user_id)
 
         eq_(diffs[4][0][0], "modify_type")
-        eq_(diffs[4][0][1], "order")
-        eq_(diffs[4][0][2], "amount")
-        eq_(repr(diffs[4][0][4]), "NUMERIC(precision=8, scale=2)")
-        eq_(repr(diffs[4][0][5]), "Numeric(precision=10, scale=2)")
+        eq_(diffs[4][0][1], None)
+        eq_(diffs[4][0][2], "order")
+        eq_(diffs[4][0][3], "amount")
+        eq_(repr(diffs[4][0][5]), "NUMERIC(precision=8, scale=2)")
+        eq_(repr(diffs[4][0][6]), "Numeric(precision=10, scale=2)")
 
 
         eq_(diffs[5][0], 'remove_column')
-        eq_(diffs[5][2].name, 'pw')
+        eq_(diffs[5][3].name, 'pw')
 
         eq_(diffs[6][0][0], "modify_default")
-        eq_(diffs[6][0][1], "user")
-        eq_(diffs[6][0][2], "a1")
-        eq_(diffs[6][0][5].arg, "x")
+        eq_(diffs[6][0][1], None)
+        eq_(diffs[6][0][2], "user")
+        eq_(diffs[6][0][3], "a1")
+        eq_(diffs[6][0][6].arg, "x")
 
         eq_(diffs[7][0][0], 'modify_nullable')
-        eq_(diffs[7][0][4], True)
-        eq_(diffs[7][0][5], False)
+        eq_(diffs[7][0][5], True)
+        eq_(diffs[7][0][6], False)
 
     def test_render_nothing(self):
         context = MigrationContext.configure(
 
     def test_skip_null_type_comparison_reflected(self):
         diff = []
-        autogenerate._compare_type("sometable", "somecol",
+        autogenerate._compare_type(None, "sometable", "somecol",
             {"name":"somecol", "type":NULLTYPE,
             "nullable":True, "default":None},
             Column("somecol", Integer()),
 
     def test_skip_null_type_comparison_local(self):
         diff = []
-        autogenerate._compare_type("sometable", "somecol",
+        autogenerate._compare_type(None, "sometable", "somecol",
             {"name":"somecol", "type":Integer(),
             "nullable":True, "default":None},
             Column("somecol", NULLTYPE),
                     return dialect.type_descriptor(CHAR(32))
 
         diff = []
-        autogenerate._compare_type("sometable", "somecol",
+        autogenerate._compare_type(None, "sometable", "somecol",
             {"name":"somecol", "type":Integer(),
             "nullable":True, "default":None},
             Column("somecol", MyType()),
         from sqlalchemy.util import OrderedSet
         inspector = Inspector.from_engine(self.bind)
         autogenerate._compare_tables(
-            OrderedSet(['extra', 'user']), OrderedSet(), inspector,
+            OrderedSet([(None, 'extra'), (None, 'user')]), OrderedSet(), inspector,
                 MetaData(), diffs, self.autogen_context
         )
         eq_(
             ")"
         )
 
+    def test_render_table_w_schema(self):
+        m = MetaData()
+        t = Table('test', m,
+            Column('id', Integer, primary_key=True),
+            schema='foo'
+        )
+        eq_ignore_whitespace(
+            autogenerate._add_table(t, self.autogen_context),
+            "op.create_table('test',"
+            "sa.Column('id', sa.Integer(), nullable=False),"
+            "sa.PrimaryKeyConstraint('id'),"
+            "schema='foo'"
+            ")"
+        )
+
     def test_render_drop_table(self):
         eq_(
             autogenerate._drop_table(Table("sometable", MetaData()),
             "op.drop_table('sometable')"
         )
 
+    def test_render_drop_table_w_schema(self):
+        eq_(
+            autogenerate._drop_table(
+                Table("sometable", MetaData(), schema='foo'),
+                self.autogen_context),
+            "op.drop_table('sometable', schema='foo')"
+        )
+
     def test_render_add_column(self):
         eq_(
             autogenerate._add_column(
-                    "foo", Column("x", Integer, server_default="5"),
+                    None, "foo", Column("x", Integer, server_default="5"),
                         self.autogen_context),
             "op.add_column('foo', sa.Column('x', sa.Integer(), "
                 "server_default='5', nullable=True))"
         )
 
+    def test_render_add_column_w_schema(self):
+        eq_(
+            autogenerate._add_column(
+                    "foo", "bar", Column("x", Integer, server_default="5"),
+                        self.autogen_context),
+            "op.add_column('bar', sa.Column('x', sa.Integer(), "
+                "server_default='5', nullable=True), schema='foo')"
+        )
+
     def test_render_drop_column(self):
         eq_(
             autogenerate._drop_column(
-                    "foo", Column("x", Integer, server_default="5"),
+                    None, "foo", Column("x", Integer, server_default="5"),
                         self.autogen_context),
 
             "op.drop_column('foo', 'x')"
         )
 
+    def test_render_drop_column_w_schema(self):
+        eq_(
+            autogenerate._drop_column(
+                    "foo", "bar", Column("x", Integer, server_default="5"),
+                        self.autogen_context),
+
+            "op.drop_column('bar', 'x', schema='foo')"
+        )
+
     def test_render_quoted_server_default(self):
         eq_(
             autogenerate._render_server_default(
                 "existing_type=sa.CHAR(length=20), type_=sa.CHAR(length=10))"
         )
 
+    def test_render_modify_type_w_schema(self):
+        eq_ignore_whitespace(
+            autogenerate._modify_col(
+                        "sometable", "somecolumn",
+                        self.autogen_context,
+                        type_=CHAR(10), existing_type=CHAR(20),
+                        schema='foo'),
+            "op.alter_column('sometable', 'somecolumn', "
+                "existing_type=sa.CHAR(length=20), type_=sa.CHAR(length=10), "
+                "schema='foo')"
+        )
+
     def test_render_modify_nullable(self):
         eq_ignore_whitespace(
             autogenerate._modify_col(
             "existing_type=sa.Integer(), nullable=True)"
         )
 
+    def test_render_modify_nullable_w_schema(self):
+        eq_ignore_whitespace(
+            autogenerate._modify_col(
+                        "sometable", "somecolumn",
+                        self.autogen_context,
+                        existing_type=Integer(),
+                        nullable=True, schema='foo'),
+            "op.alter_column('sometable', 'somecolumn', "
+            "existing_type=sa.Integer(), nullable=True, schema='foo')"
+        )
+
     def test_render_check_constraint_literal(self):
         eq_ignore_whitespace(
             autogenerate._render_check_constraint(
     op.add_column('t1', Column('c1', Integer, nullable=False))
     context.assert_("ALTER TABLE t1 ADD COLUMN c1 INTEGER NOT NULL")
 
+def test_add_column_schema():
+    context = op_fixture()
+    op.add_column('t1', Column('c1', Integer, nullable=False), schema="foo")
+    context.assert_("ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL")
+
 def test_add_column_with_default():
     context = op_fixture()
     op.add_column('t1', Column('c1', Integer, nullable=False, server_default="12"))
     context.assert_("ALTER TABLE t1 ADD COLUMN c1 INTEGER DEFAULT '12' NOT NULL")
 
+def test_add_column_schema_with_default():
+    context = op_fixture()
+    op.add_column('t1',
+            Column('c1', Integer, nullable=False, server_default="12"),
+            schema='foo')
+    context.assert_("ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER DEFAULT '12' NOT NULL")
+
 def test_add_column_fk():
     context = op_fixture()
     op.add_column('t1', Column('c1', Integer, ForeignKey('c2.id'), nullable=False))
         "ALTER TABLE t1 ADD FOREIGN KEY(c1) REFERENCES c2 (id)"
     )
 
+def test_add_column_schema_fk():
+    context = op_fixture()
+    op.add_column('t1',
+            Column('c1', Integer, ForeignKey('c2.id'), nullable=False),
+            schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL",
+        "ALTER TABLE foo.t1 ADD FOREIGN KEY(c1) REFERENCES c2 (id)"
+    )
+
 def test_add_column_schema_type():
     """Test that a schema type generates its constraints...."""
     context = op_fixture()
         'ALTER TABLE t1 ADD CHECK (c1 IN (0, 1))'
     )
 
+def test_add_column_schema_schema_type():
+    """Test that a schema type generates its constraints...."""
+    context = op_fixture()
+    op.add_column('t1', Column('c1', Boolean, nullable=False), schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t1 ADD COLUMN c1 BOOLEAN NOT NULL',
+        'ALTER TABLE foo.t1 ADD CHECK (c1 IN (0, 1))'
+    )
+
 def test_add_column_schema_type_checks_rule():
     """Test that a schema type doesn't generate a
     constraint based on check rule."""
         "ALTER TABLE t1 ADD FOREIGN KEY(c1) REFERENCES t1 (c2)"
     )
 
+def test_add_column_schema_fk_self_referential():
+    context = op_fixture()
+    op.add_column('t1',
+            Column('c1', Integer, ForeignKey('foo.t1.c2'), nullable=False),
+            schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL",
+        "ALTER TABLE foo.t1 ADD FOREIGN KEY(c1) REFERENCES foo.t1 (c2)"
+    )
+
 def test_add_column_fk_schema():
     context = op_fixture()
     op.add_column('t1', Column('c1', Integer, ForeignKey('remote.t2.c2'), nullable=False))
     'ALTER TABLE t1 ADD FOREIGN KEY(c1) REFERENCES remote.t2 (c2)'
     )
 
+def test_add_column_schema_fk_schema():
+    context = op_fixture()
+    op.add_column('t1',
+            Column('c1', Integer, ForeignKey('remote.t2.c2'), nullable=False),
+            schema='foo')
+    context.assert_(
+    'ALTER TABLE foo.t1 ADD COLUMN c1 INTEGER NOT NULL',
+    'ALTER TABLE foo.t1 ADD FOREIGN KEY(c1) REFERENCES remote.t2 (c2)'
+    )
+
 def test_drop_column():
     context = op_fixture()
     op.drop_column('t1', 'c1')
     context.assert_("ALTER TABLE t1 DROP COLUMN c1")
 
+def test_drop_column_schema():
+    context = op_fixture()
+    op.drop_column('t1', 'c1', schema='foo')
+    context.assert_("ALTER TABLE foo.t1 DROP COLUMN c1")
+
 def test_alter_column_nullable():
     context = op_fixture()
     op.alter_column("t", "c", nullable=True)
         "ALTER TABLE t ALTER COLUMN c DROP NOT NULL"
     )
 
+def test_alter_column_schema_nullable():
+    context = op_fixture()
+    op.alter_column("t", "c", nullable=True, schema='foo')
+    context.assert_(
+        # TODO: not sure if this is PG only or standard
+        # SQL
+        "ALTER TABLE foo.t ALTER COLUMN c DROP NOT NULL"
+    )
+
 def test_alter_column_not_nullable():
     context = op_fixture()
     op.alter_column("t", "c", nullable=False)
         "ALTER TABLE t ALTER COLUMN c SET NOT NULL"
     )
 
+def test_alter_column_schema_not_nullable():
+    context = op_fixture()
+    op.alter_column("t", "c", nullable=False, schema='foo')
+    context.assert_(
+        # TODO: not sure if this is PG only or standard
+        # SQL
+        "ALTER TABLE foo.t ALTER COLUMN c SET NOT NULL"
+    )
+
 def test_alter_column_rename():
     context = op_fixture()
     op.alter_column("t", "c", name="x")
         "ALTER TABLE t RENAME c TO x"
     )
 
+def test_alter_column_schema_rename():
+    context = op_fixture()
+    op.alter_column("t", "c", name="x", schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t RENAME c TO x"
+    )
+
 def test_alter_column_type():
     context = op_fixture()
     op.alter_column("t", "c", type_=String(50))
         'ALTER TABLE t ALTER COLUMN c TYPE VARCHAR(50)'
     )
 
+def test_alter_column_schema_type():
+    context = op_fixture()
+    op.alter_column("t", "c", type_=String(50), schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t ALTER COLUMN c TYPE VARCHAR(50)'
+    )
+
 def test_alter_column_set_default():
     context = op_fixture()
     op.alter_column("t", "c", server_default="q")
         "ALTER TABLE t ALTER COLUMN c SET DEFAULT 'q'"
     )
 
+def test_alter_column_schema_set_default():
+    context = op_fixture()
+    op.alter_column("t", "c", server_default="q", schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t ALTER COLUMN c SET DEFAULT 'q'"
+    )
+
 def test_alter_column_set_compiled_default():
     context = op_fixture()
-    op.alter_column("t", "c", server_default=func.utc_thing(func.current_timestamp()))
+    op.alter_column("t", "c",
+            server_default=func.utc_thing(func.current_timestamp()))
     context.assert_(
         "ALTER TABLE t ALTER COLUMN c SET DEFAULT utc_thing(CURRENT_TIMESTAMP)"
     )
 
+def test_alter_column_schema_set_compiled_default():
+    context = op_fixture()
+    op.alter_column("t", "c",
+            server_default=func.utc_thing(func.current_timestamp()),
+            schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t ALTER COLUMN c SET DEFAULT utc_thing(CURRENT_TIMESTAMP)"
+    )
+
 def test_alter_column_drop_default():
     context = op_fixture()
     op.alter_column("t", "c", server_default=None)
         'ALTER TABLE t ALTER COLUMN c DROP DEFAULT'
     )
 
+def test_alter_column_schema_drop_default():
+    context = op_fixture()
+    op.alter_column("t", "c", server_default=None, schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t ALTER COLUMN c DROP DEFAULT'
+    )
+
 
 def test_alter_column_schema_type_unnamed():
     context = op_fixture('mssql')
         'ALTER TABLE t ADD CHECK (c IN (0, 1))'
     )
 
+def test_alter_column_schema_schema_type_unnamed():
+    context = op_fixture('mssql')
+    op.alter_column("t", "c", type_=Boolean(), schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t ALTER COLUMN c BIT',
+        'ALTER TABLE foo.t ADD CHECK (c IN (0, 1))'
+    )
+
 def test_alter_column_schema_type_named():
     context = op_fixture('mssql')
     op.alter_column("t", "c", type_=Boolean(name="xyz"))
         'ALTER TABLE t ADD CONSTRAINT xyz CHECK (c IN (0, 1))'
     )
 
+def test_alter_column_schema_schema_type_named():
+    context = op_fixture('mssql')
+    op.alter_column("t", "c", type_=Boolean(name="xyz"), schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t ALTER COLUMN c BIT',
+        'ALTER TABLE foo.t ADD CONSTRAINT xyz CHECK (c IN (0, 1))'
+    )
+
 def test_alter_column_schema_type_existing_type():
     context = op_fixture('mssql')
     op.alter_column("t", "c", type_=String(10), existing_type=Boolean(name="xyz"))
         'ALTER TABLE t ALTER COLUMN c VARCHAR(10)'
     )
 
+def test_alter_column_schema_schema_type_existing_type():
+    context = op_fixture('mssql')
+    op.alter_column("t", "c", type_=String(10),
+            existing_type=Boolean(name="xyz"), schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t DROP CONSTRAINT xyz',
+        'ALTER TABLE foo.t ALTER COLUMN c VARCHAR(10)'
+    )
+
 def test_alter_column_schema_type_existing_type_no_const():
     context = op_fixture('postgresql')
     op.alter_column("t", "c", type_=String(10), existing_type=Boolean())
         'ALTER TABLE t ALTER COLUMN c TYPE VARCHAR(10)'
     )
 
+def test_alter_column_schema_schema_type_existing_type_no_const():
+    context = op_fixture('postgresql')
+    op.alter_column("t", "c", type_=String(10), existing_type=Boolean(),
+            schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t ALTER COLUMN c TYPE VARCHAR(10)'
+    )
+
 def test_alter_column_schema_type_existing_type_no_new_type():
     context = op_fixture('postgresql')
     op.alter_column("t", "c", nullable=False, existing_type=Boolean())
         'ALTER TABLE t ALTER COLUMN c SET NOT NULL'
     )
 
+def test_alter_column_schema_schema_type_existing_type_no_new_type():
+    context = op_fixture('postgresql')
+    op.alter_column("t", "c", nullable=False, existing_type=Boolean(),
+            schema='foo')
+    context.assert_(
+        'ALTER TABLE foo.t ALTER COLUMN c SET NOT NULL'
+    )
+
 def test_add_foreign_key():
     context = op_fixture()
     op.create_foreign_key('fk_test', 't1', 't2',
             "REFERENCES t2 (bat, hoho)"
     )
 
+def test_add_foreign_key_schema():
+    context = op_fixture()
+    op.create_foreign_key('fk_test', 't1', 't2',
+                    ['foo', 'bar'], ['bat', 'hoho'],
+                   source_schema='foo2', referent_schema='bar2')
+    context.assert_(
+        "ALTER TABLE foo2.t1 ADD CONSTRAINT fk_test FOREIGN KEY(foo, bar) "
+            "REFERENCES bar2.t2 (bat, hoho)"
+    )
+
 def test_add_foreign_key_onupdate():
     context = op_fixture()
     op.create_foreign_key('fk_test', 't1', 't2',
         "CHECK (len(name) > 5)"
     )
 
+def test_add_check_constraint_schema():
+    context = op_fixture()
+    op.create_check_constraint(
+        "ck_user_name_len",
+        "user_table",
+        func.len(column('name')) > 5,
+        schema='foo'
+    )
+    context.assert_(
+        "ALTER TABLE foo.user_table ADD CONSTRAINT ck_user_name_len "
+        "CHECK (len(name) > 5)"
+    )
+
 def test_add_unique_constraint():
     context = op_fixture()
     op.create_unique_constraint('uk_test', 't1', ['foo', 'bar'])
         "ALTER TABLE t1 ADD CONSTRAINT uk_test UNIQUE (foo, bar)"
     )
 
+def test_add_unique_constraint_schema():
+    context = op_fixture()
+    op.create_unique_constraint('uk_test', 't1', ['foo', 'bar'], schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t1 ADD CONSTRAINT uk_test UNIQUE (foo, bar)"
+    )
+
 def test_add_unique_constraint_auto_cols():
     context = op_fixture()
     from sqlalchemy import event, DateTime
         "ALTER TABLE t1 DROP CONSTRAINT foo_bar_bat"
     )
 
+def test_drop_constraint_schema():
+    context = op_fixture()
+    op.drop_constraint('foo_bar_bat', 't1', schema='foo')
+    context.assert_(
+        "ALTER TABLE foo.t1 DROP CONSTRAINT foo_bar_bat"
+    )
+
 def test_create_index():
     context = op_fixture()
     op.create_index('ik_test', 't1', ['foo', 'bar'])
         "CREATE INDEX ik_test ON t1 (foo, bar)"
     )
 
+def test_create_index_schema():
+    context = op_fixture()
+    op.create_index('ik_test', 't1', ['foo', 'bar'], schema='foo')
+    context.assert_(
+        "CREATE INDEX ik_test ON foo.t1 (foo, bar)"
+    )
 
 def test_drop_index():
     context = op_fixture()
         "DROP INDEX ik_test"
     )
 
+def test_drop_index_schema():
+    context = op_fixture()
+    op.drop_index('ik_test', schema='foo')
+    context.assert_(
+        "DROP INDEX foo.ik_test"
+    )
+
 def test_drop_table():
     context = op_fixture()
     op.drop_table('tb_test')
         "DROP TABLE tb_test"
     )
 
+def test_drop_table_schema():
+    context = op_fixture()
+    op.drop_table('tb_test', schema='foo')
+    context.assert_(
+        "DROP TABLE foo.tb_test"
+    )
+
 def test_create_table_selfref():
     context = op_fixture()
     op.create_table(
         "for the Alembic 'Operations' class.  "
         "Try placing this code inside a callable.",
         op.inline_literal, "asdf"
-    )
+    )