Commits

Mike Bayer  committed 011aec5

- AbstractType.__str__() produces the string version of the type with default dialect
- 79 chars

  • Participants
  • Parent commits 2ee784a

Comments (0)

Files changed (2)

File lib/sqlalchemy/types.py

             'FLOAT', 'NUMERIC', 'DECIMAL', 'TIMESTAMP', 'DATETIME', 'CLOB',
             'BLOB', 'BOOLEAN', 'SMALLINT', 'INTEGER', 'DATE', 'TIME',
             'String', 'Integer', 'SmallInteger', 'BigInteger', 'Numeric',
-            'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean',
-            'Unicode', 'MutableType', 'Concatenable', 'UnicodeText',
-            'PickleType', 'Interval', 'type_map', 'Enum' ]
+            'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary',
+            'Boolean', 'Unicode', 'MutableType', 'Concatenable',
+            'UnicodeText','PickleType', 'Interval', 'type_map', 'Enum' ]
 
 import inspect
 import datetime as dt
 from sqlalchemy import processors
 import collections
 
+DefaultDialect = None
 NoneType = type(None)
 if util.jython:
     import array
 
 class AbstractType(Visitable):
     
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def compile(self, dialect):
-        return dialect.type_compiler.process(self)
-
     def copy_value(self, value):
         return value
 
         
     @util.memoized_property
     def _type_affinity(self):
-        """Return a rudimental 'affinity' value expressing the general class of type."""
+        """Return a rudimental 'affinity' value expressing the general class
+        of type."""
 
         typ = None
         for t in self.__class__.__mro__:
     
     def _coerce_compared_value(self, op, value):
         _coerced_type = type_map.get(type(value), NULLTYPE)
-        if _coerced_type is NULLTYPE or _coerced_type._type_affinity is self._type_affinity:
+        if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
+            is self._type_affinity:
             return self
         else:
             return _coerced_type
     def _compare_type_affinity(self, other):
         return self._type_affinity is other._type_affinity
 
+    def compile(self, dialect=None):
+        # arg, return value is inconsistent with
+        # ClauseElement.compile()....this is a mistake.
+        
+        if not dialect:
+            global DefaultDialect
+            if DefaultDialect is None:
+                from sqlalchemy.engine.default import DefaultDialect
+            dialect = DefaultDialect()
+        
+        return dialect.type_compiler.process(self)
+
+    def __str__(self):
+        # Py3K
+        #return unicode(self.compile())
+        # Py2K
+        return unicode(self.compile()).encode('ascii', 'backslashreplace')
+        # end Py2K
+
+    def __init__(self, *args, **kwargs):
+        # supports getargspec of the __init__ method
+        # used by generic __repr__
+        pass
+
     def __repr__(self):
         return "%s(%s)" % (
             self.__class__.__name__,
         return {}
 
     def dialect_impl(self, dialect, **kwargs):
-        key = (dialect.__class__, dialect.server_version_info)
-        
+        key = dialect.__class__, dialect.server_version_info
         try:
             return self._impl_dict[key]
         except KeyError:
-            return self._impl_dict.setdefault(key, dialect.type_descriptor(self))
+            return self._impl_dict.setdefault(key,
+                    dialect.type_descriptor(self))
 
     def __getstate__(self):
         d = self.__dict__.copy()
     given; in this case, the "impl" variable can reference
     ``TypeEngine`` as a placeholder.
 
-    Types that receive a Python type that isn't similar to the 
-    ultimate type used may want to define the :meth:`TypeDecorator.coerce_compared_value`
-    method.  This is used to give the expression system a hint 
-    when coercing Python objects into bind parameters within expressions.  
-    Consider this expression::
+    Types that receive a Python type that isn't similar to the ultimate type
+    used may want to define the :meth:`TypeDecorator.coerce_compared_value`
+    method. This is used to give the expression system a hint when coercing
+    Python objects into bind parameters within expressions. Consider this
+    expression::
     
         mytable.c.somecol + datetime.date(2009, 5, 15)
         
     The expression system does the right thing by not attempting to
     coerce the "date()" value into an integer-oriented bind parameter.
     
-    However, in the case of ``TypeDecorator``, we are usually changing
-    an incoming Python type to something new - ``TypeDecorator`` by 
-    default will "coerce" the non-typed side to be the same type as itself.
-    Such as below, we define an "epoch" type that stores a date value as an integer::
+    However, in the case of ``TypeDecorator``, we are usually changing an
+    incoming Python type to something new - ``TypeDecorator`` by default will
+    "coerce" the non-typed side to be the same type as itself. Such as below,
+    we define an "epoch" type that stores a date value as an integer::
     
         class MyEpochType(types.TypeDecorator):
             impl = types.Integer
     Our expression of ``somecol + date`` with the above type will coerce the
     "date" on the right side to also be treated as ``MyEpochType``.  
     
-    This behavior can be overridden via the :meth:`~TypeDecorator.coerce_compared_value`
-    method, which returns a type that should be used for the value of the expression.
-    Below we set it such that an integer value will be treated as an ``Integer``,
-    and any other value is assumed to be a date and will be treated as a ``MyEpochType``::
+    This behavior can be overridden via the
+    :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
+    that should be used for the value of the expression. Below we set it such
+    that an integer value will be treated as an ``Integer``, and any other
+    value is assumed to be a date and will be treated as a ``MyEpochType``::
     
         def coerce_compared_value(self, op, value):
             if isinstance(value, int):
 
     def __init__(self, *args, **kwargs):
         if not hasattr(self.__class__, 'impl'):
-            raise AssertionError("TypeDecorator implementations require a class-level "
-                        "variable 'impl' which refers to the class of type being decorated")
+            raise AssertionError("TypeDecorator implementations "
+                                 "require a class-level variable "
+                                 "'impl' which refers to the class of "
+                                 "type being decorated")
         self.impl = self.__class__.impl(*args, **kwargs)
     
     def adapt(self, cls):
         typedesc = self.load_dialect_impl(dialect)
         tt = self.copy()
         if not isinstance(tt, self.__class__):
-            raise AssertionError("Type object %s does not properly implement the copy() "
-                    "method, it must return an object of type %s" % (self, self.__class__))
+            raise AssertionError('Type object %s does not properly '
+                                 'implement the copy() method, it must '
+                                 'return an object of type %s' % (self,
+                                 self.__class__))
         tt.impl = typedesc
         self._impl_dict[key] = tt
         return tt
             return dialect.type_descriptor(self.impl)
 
     def __getattr__(self, key):
-        """Proxy all other undefined accessors to the underlying implementation."""
+        """Proxy all other undefined accessors to the underlying
+        implementation."""
 
         return getattr(self.impl, key)
 
         raise NotImplementedError()
 
     def bind_processor(self, dialect):
-        if self.__class__.process_bind_param.func_code is not TypeDecorator.process_bind_param.func_code:
+        if self.__class__.process_bind_param.func_code \
+            is not TypeDecorator.process_bind_param.func_code:
             process_param = self.process_bind_param
             impl_processor = self.impl.bind_processor(dialect)
             if impl_processor:
                 def process(value):
                     return impl_processor(process_param(value, dialect))
+
             else:
                 def process(value):
                     return process_param(value, dialect)
+
             return process
         else:
             return self.impl.bind_processor(dialect)
 
     def result_processor(self, dialect, coltype):
-        if self.__class__.process_result_value.func_code is not TypeDecorator.process_result_value.func_code:
+        if self.__class__.process_result_value.func_code \
+            is not TypeDecorator.process_result_value.func_code:
             process_value = self.process_result_value
-            impl_processor = self.impl.result_processor(dialect, coltype)
+            impl_processor = self.impl.result_processor(dialect,
+                    coltype)
             if impl_processor:
                 def process(value):
                     return process_value(impl_processor(value), dialect)
+
             else:
                 def process(value):
                     return process_value(value, dialect)
+
             return process
         else:
             return self.impl.result_processor(dialect, coltype)
 NullTypeEngine = NullType
 
 class Concatenable(object):
-    """A mixin that marks a type as supporting 'concatenation', typically strings."""
+    """A mixin that marks a type as supporting 'concatenation',
+    typically strings."""
 
     def _adapt_expression(self, op, othertype):
-        if op is operators.add and issubclass(othertype._type_affinity, (Concatenable, NullType)):
+        if op is operators.add and issubclass(othertype._type_affinity,
+                (Concatenable, NullType)):
             return operators.concat_op, self
         else:
             return op, self
 class _DateAffinity(object):
     """Mixin date/time specific expression adaptations.
     
-    Rules are implemented within Date,Time,Interval,DateTime, Numeric, Integer.
-    Based on http://www.postgresql.org/docs/current/static/functions-datetime.html.
+    Rules are implemented within Date, Time, Interval, DateTime, Numeric,
+    Integer. Based on
+    http://www.postgresql.org/docs/current/static/functions-datetime.html.
     
     """
     
           set convert_unicode='force'.  This will incur significant
           performance overhead when fetching unicode result columns.
           
-        :param assert_unicode: Deprecated.  A warning is raised in all cases when a non-Unicode
-          object is passed when SQLAlchemy would coerce into an encoding
-          (note: but **not** when the DBAPI handles unicode objects natively).
-          To suppress or raise this warning to an 
-          error, use the Python warnings filter documented at:
+        :param assert_unicode: Deprecated.  A warning is raised in all cases
+          when a non-Unicode object is passed when SQLAlchemy would coerce
+          into an encoding (note: but **not** when the DBAPI handles unicode
+          objects natively). To suppress or raise this warning to an error,
+          use the Python warnings filter documented at:
           http://docs.python.org/library/warnings.html
 
         :param unicode_error: Optional, a method to use to handle Unicode
                                         "when unicode_error is set.")
         
         if assert_unicode:
-            util.warn_deprecated("assert_unicode is deprecated. "
-                                "SQLAlchemy emits a warning in all cases where it "
-                                "would otherwise like to encode a Python unicode object "
-                                "into a specific encoding but a plain bytestring is received. "
-                                "This does *not* apply to DBAPIs that coerce Unicode natively."
-                                )
+            util.warn_deprecated('assert_unicode is deprecated. '
+                                 'SQLAlchemy emits a warning in all '
+                                 'cases where it would otherwise like '
+                                 'to encode a Python unicode object '
+                                 'into a specific encoding but a plain '
+                                 'bytestring is received. This does '
+                                 '*not* apply to DBAPIs that coerce '
+                                 'Unicode natively.')
         self.length = length
         self.convert_unicode = convert_unicode
         self.unicode_error = unicode_error
 
     def bind_processor(self, dialect):
         if self.convert_unicode or dialect.convert_unicode:
-            if dialect.supports_unicode_binds and self.convert_unicode != 'force':
+            if dialect.supports_unicode_binds and \
+                self.convert_unicode != 'force':
                 if self._warn_on_bytestring:
                     def process(value):
                         # Py3K
         """
         Construct a Numeric.
 
-        :param precision: the numeric precision for use in DDL ``CREATE TABLE``.
+        :param precision: the numeric precision for use in DDL ``CREATE
+          TABLE``.
 
         :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
 
                 # we're a "numeric", DBAPI will give us Decimal directly
                 return None
             else:
-                util.warn("Dialect %s+%s does *not* support Decimal objects natively, "
-                            "and SQLAlchemy must convert from floating point - "
-                            "rounding errors and other issues may occur. "
-                            "Please consider storing Decimal numbers as strings or "
-                            "integers on this platform for lossless storage." % 
-                            (dialect.name, dialect.driver))
+                util.warn('Dialect %s+%s does *not* support Decimal '
+                          'objects natively, and SQLAlchemy must '
+                          'convert from floating point - rounding '
+                          'errors and other issues may occur. Please '
+                          'consider storing Decimal numbers as strings '
+                          'or integers on this platform for lossless '
+                          'storage.' % (dialect.name, dialect.driver))
                 
                 # we're a "numeric", DBAPI returns floats, convert.
                 if self.scale is not None:
-                    return processors.to_decimal_processor_factory(_python_Decimal, self.scale)
+                    return processors.to_decimal_processor_factory(
+                                _python_Decimal, self.scale)
                 else:
-                    return processors.to_decimal_processor_factory(_python_Decimal)
+                    return processors.to_decimal_processor_factory(
+                                _python_Decimal)
         else:
             if dialect.supports_native_decimal:
                 return processors.to_float
         """
         Construct a Float.
 
-        :param precision: the numeric precision for use in DDL ``CREATE TABLE``.
+        :param precision: the numeric precision for use in DDL ``CREATE
+           TABLE``.
         
         :param asdecimal: the same flag as that of :class:`Numeric`, but
           defaults to ``False``.   Note that setting this flag to ``True``
     """Deprecated.  Renamed to LargeBinary."""
     
     def __init__(self, *arg, **kw):
-        util.warn_deprecated("The Binary type has been renamed to LargeBinary.")
+        util.warn_deprecated('The Binary type has been renamed to '
+                             'LargeBinary.')
         LargeBinary.__init__(self, *arg, **kw)
 
 class SchemaType(object):
         self.schema = kw.pop('schema', None)
         self.metadata = kw.pop('metadata', None)
         if self.metadata:
-            self.metadata.append_ddl_listener(
-                                    'before-create',
-                                    util.portable_instancemethod(self._on_metadata_create)
-                                    )
-            self.metadata.append_ddl_listener(
-                                    'after-drop',
-                                    util.portable_instancemethod(self._on_metadata_drop)
-                                    )
+            self.metadata.append_ddl_listener('before-create',
+                    util.portable_instancemethod(self._on_metadata_create))
+            self.metadata.append_ddl_listener('after-drop',
+                    util.portable_instancemethod(self._on_metadata_drop))
             
     def _set_parent(self, column):
         column._on_table_attach(util.portable_instancemethod(self._set_table))
         
     def _set_table(self, table, column):
-        table.append_ddl_listener(
-                            'before-create', 
-                            util.portable_instancemethod(self._on_table_create)
-                            )
-        table.append_ddl_listener(
-                            'after-drop', 
-                            util.portable_instancemethod(self._on_table_drop)
-                            )
+        table.append_ddl_listener('before-create',
+                                  util.portable_instancemethod(
+                                        self._on_table_create))
+        table.append_ddl_listener('after-drop',
+                                  util.portable_instancemethod(
+                                        self._on_table_drop))
         if self.metadata is None:
-            table.metadata.append_ddl_listener(
-                            'before-create',
-                            util.portable_instancemethod(self._on_metadata_create)
-                            )
-            table.metadata.append_ddl_listener(
-                            'after-drop',
-                            util.portable_instancemethod(self._on_metadata_drop)
-                            )
+            table.metadata.append_ddl_listener('before-create',
+                    util.portable_instancemethod(self._on_metadata_create))
+            table.metadata.append_ddl_listener('after-drop',
+                    util.portable_instancemethod(self._on_metadata_drop))
     
     @property
     def bind(self):
         Keyword arguments which don't apply to a specific backend are ignored
         by that backend.
 
-        :param \*enums: string or unicode enumeration labels. If unicode labels
-            are present, the `convert_unicode` flag is auto-enabled.
+        :param \*enums: string or unicode enumeration labels. If unicode
+           labels are present, the `convert_unicode` flag is auto-enabled.
 
-        :param convert_unicode: Enable unicode-aware bind parameter and result-set
-            processing for this Enum's data. This is set automatically based on
-            the presence of unicode label strings.
+        :param convert_unicode: Enable unicode-aware bind parameter and
+           result-set processing for this Enum's data. This is set
+           automatically based on the presence of unicode label strings.
 
-        :param metadata: Associate this type directly with a ``MetaData`` object.
-            For types that exist on the target database as an independent schema
-            construct (Postgresql), this type will be created and dropped within
-            ``create_all()`` and ``drop_all()`` operations. If the type is not
-            associated with any ``MetaData`` object, it will associate itself with
-            each ``Table`` in which it is used, and will be created when any of
-            those individual tables are created, after a check is performed for
-            it's existence. The type is only dropped when ``drop_all()`` is called
-            for that ``Table`` object's metadata, however.
+        :param metadata: Associate this type directly with a ``MetaData``
+           object. For types that exist on the target database as an
+           independent schema construct (Postgresql), this type will be
+           created and dropped within ``create_all()`` and ``drop_all()``
+           operations. If the type is not associated with any ``MetaData``
+           object, it will associate itself with each ``Table`` in which it is
+           used, and will be created when any of those individual tables are
+           created, after a check is performed for its existence. The type is
+           only dropped when ``drop_all()`` is called for that ``Table``
+           object's metadata, however.
 
-        :param name: The name of this type. This is required for Postgresql and
-            any future supported database which requires an explicitly named type,
-            or an explicitly named constraint in order to generate the type and/or
-            a table that uses it.
+        :param name: The name of this type. This is required for Postgresql
+           and any future supported database which requires an explicitly
+           named type, or an explicitly named constraint in order to generate
+           the type and/or a table that uses it.
 
-        :param native_enum: Use the database's native ENUM type when available.
-            Defaults to True.  When False, uses VARCHAR + check constraint
-            for all backends.
+        :param native_enum: Use the database's native ENUM type when
+           available. Defaults to True. When False, uses VARCHAR + check
+           constraint for all backends.
 
-        :param schema: Schemaname of this type. For types that exist on the target
-            database as an independent schema construct (Postgresql), this
-            parameter specifies the named schema in which the type is present.
+        :param schema: Schema name of this type. For types that exist on the
+           target database as an independent schema construct (Postgresql),
+           this parameter specifies the named schema in which the type is
+           present.
 
-        :param quote: Force quoting to be on or off on the type's name. If left as
-            the default of `None`, the usual schema-level "case
-            sensitive"/"reserved name" rules are used to determine if this type's
-            name should be quoted.
+        :param quote: Force quoting to be on or off on the type's name. If
+           left as the default of `None`, the usual schema-level "case
+           sensitive"/"reserved name" rules are used to determine if this
+           type's name should be quoted.
 
         """
         self.enums = enums
         e = schema.CheckConstraint(
                         column.in_(self.enums),
                         name=self.name,
-                        _create_rule=util.portable_instancemethod(self._should_create_constraint)
+                        _create_rule=util.portable_instancemethod(
+                                        self._should_create_constraint)
                     )
         table.append_constraint(e)
         
 
     impl = LargeBinary
 
-    def __init__(self, protocol=pickle.HIGHEST_PROTOCOL, pickler=None, mutable=True, comparator=None):
+    def __init__(self, protocol=pickle.HIGHEST_PROTOCOL, 
+                    pickler=None, mutable=True, comparator=None):
         """
         Construct a PickleType.
 
 
     def copy_value(self, value):
         if self.mutable:
-            return self.pickler.loads(self.pickler.dumps(value, self.protocol))
+            return self.pickler.loads(
+                        self.pickler.dumps(value, self.protocol))
         else:
             return value
 
         e = schema.CheckConstraint(
                         column.in_([0, 1]),
                         name=self.name,
-                        _create_rule=util.portable_instancemethod(self._should_create_constraint)
+                        _create_rule=util.portable_instancemethod(
+                                    self._should_create_constraint)
                     )
         table.append_constraint(e)
     
     value is stored as a date which is relative to the "epoch"
     (Jan. 1, 1970).
 
-    Note that the ``Interval`` type does not currently provide 
-    date arithmetic operations on platforms which do not support 
-    interval types natively.   Such operations usually require
-    transformation of both sides of the expression (such as, conversion
-    of both sides into integer epoch values first) which currently
-    is a manual procedure (such as via :attr:`~sqlalchemy.sql.expression.func`).
+    Note that the ``Interval`` type does not currently provide date arithmetic
+    operations on platforms which do not support interval types natively. Such
+    operations usually require transformation of both sides of the expression
+    (such as, conversion of both sides into integer epoch values first) which
+    currently is a manual procedure (such as via
+    :attr:`~sqlalchemy.sql.expression.func`).
     
     """
 
 
 # using VARCHAR/NCHAR so that we dont get the genericized "String"
 # type which usually resolves to TEXT/CLOB
+# NOTE: this dict is not meant to be public and will be underscored
+# in 0.7, see [ticket:1870]. 
+
 type_map = {
     str: String(),
     # Py3K

File test/sql/test_types.py

 
 class AdaptTest(TestBase):
     def test_uppercase_rendering(self):
-        """Test that uppercase types from types.py always render as their type.
+        """Test that uppercase types from types.py always render as their
+        type.
         
-        As of SQLA 0.6, using an uppercase type means you want specifically that
-        type.  If the database in use doesn't support that DDL, it (the DB backend) 
-        should raise an error - it means you should be using a lowercased (genericized) type.
+        As of SQLA 0.6, using an uppercase type means you want specifically
+        that type. If the database in use doesn't support that DDL, it (the DB
+        backend) should raise an error - it means you should be using a
+        lowercased (genericized) type.
         
         """
         
                 mysql.dialect(), 
                 postgresql.dialect(), 
                 sqlite.dialect(), 
-                mssql.dialect()]: # TODO when dialects are complete:  engines.all_dialects():
+                mssql.dialect()]: 
             for type_, expected in (
                 (FLOAT, "FLOAT"),
                 (NUMERIC, "NUMERIC"),
                 (DECIMAL, "DECIMAL"),
                 (INTEGER, "INTEGER"),
                 (SMALLINT, "SMALLINT"),
-                (TIMESTAMP, "TIMESTAMP"),
+                (TIMESTAMP, ("TIMESTAMP", "TIMESTAMP WITHOUT TIME ZONE")),
                 (DATETIME, "DATETIME"),
                 (DATE, "DATE"),
-                (TIME, "TIME"),
+                (TIME, ("TIME", "TIME WITHOUT TIME ZONE")),
                 (CLOB, "CLOB"),
                 (VARCHAR(10), ("VARCHAR(10)","VARCHAR(10 CHAR)")),
-                (NVARCHAR(10), ("NVARCHAR(10)", "NATIONAL VARCHAR(10)", "NVARCHAR2(10)")),
+                (NVARCHAR(10), ("NVARCHAR(10)", "NATIONAL VARCHAR(10)",
+                                    "NVARCHAR2(10)")),
                 (CHAR, "CHAR"),
                 (NCHAR, ("NCHAR", "NATIONAL CHAR")),
                 (BLOB, "BLOB"),
             ):
                 if isinstance(expected, str):
                     expected = (expected, )
-                for exp in expected:
-                    compiled = types.to_instance(type_).compile(dialect=dialect)
-                    if exp in compiled:
-                        break
-                else:
-                    assert False, "%r matches none of %r for dialect %s" % \
-                                            (compiled, expected, dialect.name)
-            
+
+                compiled = types.to_instance(type_).\
+                            compile(dialect=dialect)
+                    
+                assert compiled in expected, \
+                    "%r matches none of %r for dialect %s" % \
+                    (compiled, expected, dialect.name)
+                
+                assert str(types.to_instance(type_)) in expected, \
+                    "default str() of type %r not expected, %r" % \
+                    (type_, expected)
+                
 class TypeAffinityTest(TestBase):
     def test_type_affinity(self):
         for type_, affin in [