Kirill Simonov committed 988f014

Subclass all base classes from `object`.

Hold references to the objects being represented (should fix #22).

The value of a mapping node is now represented as a list of `(key, value)` pairs.
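
For illustration, a minimal sketch of the new layout (assuming the `yaml` package from this checkout is importable and that the top-level `compose` helper used by the tests is available):

    import yaml

    node = yaml.compose("a: 1\nb: 2\n")
    # node.value used to be a dict mapping key nodes to value nodes; it is now
    # a list of (key_node, value_node) tuples, preserving document order.
    for key_node, value_node in node.value:
        print key_node.value, value_node.value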

Sort dictionary items (fix #23).
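
A hedged example of the effect (the exact output also depends on the chosen flow style):

    import yaml

    # Dictionary items are sorted before they are represented, so the keys
    # come out in a deterministic order, e.g. "{a: 1, b: 2}" rather than
    # whatever order dict iteration happens to produce.
    print yaml.dump({'b': 2, 'a': 1})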

Recursive structures are now loaded and dumped correctly, including complex
structures like recursive tuples (fix #5). Thanks to Peter Murphy for the patches.
To make this possible, constructor functions are allowed to be generators:
the first value yielded by the generator is the constructed object, and any
further values it produces are ignored (the remaining iterations only populate
the object).
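
A round-trip sketch of what this enables, modelled on tests/test_recursive.py and the new *.recursive data files:

    import yaml

    value = []
    value.append(value)           # a list that contains itself
    text = yaml.dump(value)       # dumped via an anchor/alias, e.g. "&id001\n- *id001\n"
    copy = yaml.load(text)
    assert copy[0] is copy        # the alias resolves back to the list itself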

The Representer no longer tries to guess `!!pairs` when a list is represented;
you now need to construct a `!!pairs` node explicitly.
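
One way to do that, sketched with hypothetical names (`Pairs` and `represent_pairs` are illustrative, not part of this change; the body mirrors the logic `represent_list` used to apply automatically):

    from yaml.nodes import SequenceNode

    class Pairs(list):
        """A list of (key, value) tuples to be dumped as !!pairs."""

    def represent_pairs(representer, data):
        value = [representer.represent_mapping(u'tag:yaml.org,2002:map', [(key, val)])
                for key, val in data]
        return SequenceNode(u'tag:yaml.org,2002:pairs', value)

    # Register it with the usual hook, e.g.:
    #     Representer.add_representer(Pairs, represent_pairs)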

Do not check for duplicate mapping keys, as the check didn't work correctly anyway.
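
Grounded in the tests/data/duplicate-key.former-loader-error.* files below: a document with a repeated key now loads, and the last occurrence wins:

    import yaml

    assert yaml.load("---\nfoo: bar\nfoo: baz\n") == {'foo': 'baz'}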

Files changed (38)

lib/yaml/composer.py

 class ComposerError(MarkedYAMLError):
     pass
 
-class Composer:
+class Composer(object):
 
     def __init__(self):
         self.anchors = {}
         tag = start_event.tag
         if tag is None or tag == u'!':
             tag = self.resolve(MappingNode, None, start_event.implicit)
-        node = MappingNode(tag, {},
+        node = MappingNode(tag, [],
                 start_event.start_mark, None,
                 flow_style=start_event.flow_style)
         if anchor is not None:
             self.anchors[anchor] = node
         while not self.check_event(MappingEndEvent):
-            key_event = self.peek_event()
+            #key_event = self.peek_event()
             item_key = self.compose_node(node, None)
-            if item_key in node.value:
-                raise ComposerError("while composing a mapping", start_event.start_mark,
-                        "found duplicate key", key_event.start_mark)
+            #if item_key in node.value:
+            #    raise ComposerError("while composing a mapping", start_event.start_mark,
+            #            "found duplicate key", key_event.start_mark)
             item_value = self.compose_node(node, item_key)
-            node.value[item_key] = item_value
+            #node.value[item_key] = item_value
+            node.value.append((item_key, item_value))
         end_event = self.get_event()
         node.end_mark = end_event.end_mark
         return node

lib/yaml/constructor.py

 class ConstructorError(MarkedYAMLError):
     pass
 
-class BaseConstructor:
+class BaseConstructor(object):
 
     yaml_constructors = {}
     yaml_multi_constructors = {}
     def __init__(self):
         self.constructed_objects = {}
         self.recursive_objects = {}
+        self.state_generators = []
+        self.deep_construct = False
 
     def check_data(self):
         # Are there more documents available?
         if self.check_node():
             return self.construct_document(self.get_node())
 
+    def g(): yield None
+    generator_type = type(g())
+    del g
+
     def construct_document(self, node):
         data = self.construct_object(node)
+        while self.state_generators:
+            state_generators = self.state_generators
+            self.state_generators = []
+            for generator in state_generators:
+                for dummy in generator:
+                    pass
         self.constructed_objects = {}
         self.recursive_objects = {}
+        self.deep_construct = False
         return data
 
-    def construct_object(self, node):
+    def construct_object(self, node, deep=False):
+        if deep:
+            old_deep = self.deep_construct
+            self.deep_construct = True
         if node in self.constructed_objects:
             return self.constructed_objects[node]
         if node in self.recursive_objects:
             raise ConstructorError(None, None,
-                    "found recursive node", node.start_mark)
+                    "found unconstructable recursive node", node.start_mark)
         self.recursive_objects[node] = None
         constructor = None
+        state_constructor = None
+        tag_suffix = None
         if node.tag in self.yaml_constructors:
-            constructor = lambda node: self.yaml_constructors[node.tag](self, node)
+            constructor = self.yaml_constructors[node.tag]
         else:
             for tag_prefix in self.yaml_multi_constructors:
                 if node.tag.startswith(tag_prefix):
                     tag_suffix = node.tag[len(tag_prefix):]
-                    constructor = lambda node:  \
-                            self.yaml_multi_constructors[tag_prefix](self, tag_suffix, node)
+                    constructor = self.yaml_multi_constructors[tag_prefix]
                     break
             else:
                 if None in self.yaml_multi_constructors:
-                    constructor = lambda node:  \
-                            self.yaml_multi_constructors[None](self, node.tag, node)
+                    tag_suffix = node.tag
+                    constructor = self.yaml_multi_constructors[None]
                 elif None in self.yaml_constructors:
-                    constructor = lambda node:  \
-                            self.yaml_constructors[None](self, node)
+                    constructor = self.yaml_constructors[None]
                 elif isinstance(node, ScalarNode):
-                    constructor = self.construct_scalar
+                    constructor = self.__class__.construct_scalar
                 elif isinstance(node, SequenceNode):
-                    constructor = self.construct_sequence
+                    constructor = self.__class__.construct_sequence
                 elif isinstance(node, MappingNode):
-                    constructor = self.construct_mapping
-                else:
-                    print node.tag
-        data = constructor(node)
+                    constructor = self.__class__.construct_mapping
+        if tag_suffix is None:
+            data = constructor(self, node)
+        else:
+            data = constructor(self, tag_suffix, node)
+        if isinstance(data, self.generator_type):
+            generator = data
+            data = generator.next()
+            if self.deep_construct:
+                for dummy in generator:
+                    pass
+            else:
+                self.state_generators.append(generator)
         self.constructed_objects[node] = data
         del self.recursive_objects[node]
+        if deep:
+            self.deep_construct = old_deep
         return data
 
     def construct_scalar(self, node):
         if not isinstance(node, ScalarNode):
-            if isinstance(node, MappingNode):
-                for key_node in node.value:
-                    if key_node.tag == u'tag:yaml.org,2002:value':
-                        return self.construct_scalar(node.value[key_node])
             raise ConstructorError(None, None,
                     "expected a scalar node, but found %s" % node.id,
                     node.start_mark)
         return node.value
 
-    def construct_sequence(self, node):
+    def construct_sequence(self, node, deep=False):
         if not isinstance(node, SequenceNode):
             raise ConstructorError(None, None,
                     "expected a sequence node, but found %s" % node.id,
                     node.start_mark)
-        return [self.construct_object(child) for child in node.value]
+        return [self.construct_object(child, deep=deep)
+                for child in node.value]
 
-    def construct_mapping(self, node):
+    def construct_mapping(self, node, deep=False):
         if not isinstance(node, MappingNode):
             raise ConstructorError(None, None,
                     "expected a mapping node, but found %s" % node.id,
                     node.start_mark)
         mapping = {}
-        merge = None
-        for key_node in node.value:
-            if key_node.tag == u'tag:yaml.org,2002:merge':
-                if merge is not None:
-                    raise ConstructorError("while constructing a mapping", node.start_mark,
-                            "found duplicate merge key", key_node.start_mark)
-                value_node = node.value[key_node]
-                if isinstance(value_node, MappingNode):
-                    merge = [self.construct_mapping(value_node)]
-                elif isinstance(value_node, SequenceNode):
-                    merge = []
-                    for subnode in value_node.value:
-                        if not isinstance(subnode, MappingNode):
-                            raise ConstructorError("while constructing a mapping",
-                                    node.start_mark,
-                                    "expected a mapping for merging, but found %s"
-                                    % subnode.id, subnode.start_mark)
-                        merge.append(self.construct_mapping(subnode))
-                    merge.reverse()
-                else:
-                    raise ConstructorError("while constructing a mapping", node.start_mark,
-                            "expected a mapping or list of mappings for merging, but found %s"
-                            % value_node.id, value_node.start_mark)
-            elif key_node.tag == u'tag:yaml.org,2002:value':
-                if '=' in mapping:
-                    raise ConstructorError("while construction a mapping", node.start_mark,
-                            "found duplicate value key", key_node.start_mark)
-                value = self.construct_object(node.value[key_node])
-                mapping['='] = value
-            else:
-                key = self.construct_object(key_node)
-                try:
-                    duplicate_key = key in mapping
-                except TypeError, exc:
-                    raise ConstructorError("while constructing a mapping", node.start_mark,
-                            "found unacceptable key (%s)" % exc, key_node.start_mark)
-                if duplicate_key:
-                    raise ConstructorError("while constructing a mapping", node.start_mark,
-                            "found duplicate key", key_node.start_mark)
-                value = self.construct_object(node.value[key_node])
-                mapping[key] = value
-        if merge is not None:
-            merge.append(mapping)
-            mapping = {}
-            for submapping in merge:
-                mapping.update(submapping)
+        for key_node, value_node in node.value:
+            key = self.construct_object(key_node, deep=deep)
+            try:
+                hash(key)
+            except TypeError, exc:
+                raise ConstructorError("while constructing a mapping", node.start_mark,
+                        "found unacceptable key (%s)" % exc, key_node.start_mark)
+            value = self.construct_object(value_node, deep=deep)
+            mapping[key] = value
         return mapping
 
-    def construct_pairs(self, node):
+    def construct_pairs(self, node, deep=False):
         if not isinstance(node, MappingNode):
             raise ConstructorError(None, None,
                     "expected a mapping node, but found %s" % node.id,
                     node.start_mark)
         pairs = []
-        for key_node in node.value:
-            key = self.construct_object(key_node)
-            value = self.construct_object(node.value[key_node])
+        for key_node, value_node in node.value:
+            key = self.construct_object(key_node, deep=deep)
+            value = self.construct_object(value_node, deep=deep)
             pairs.append((key, value))
         return pairs
 
 
 class SafeConstructor(BaseConstructor):
 
+    def construct_scalar(self, node):
+        if isinstance(node, MappingNode):
+            for key_node, value_node in node.value:
+                if key_node.tag == u'tag:yaml.org,2002:value':
+                    return self.construct_scalar(value_node)
+        return BaseConstructor.construct_scalar(self, node)
+
+    def flatten_mapping(self, node):
+        merge = []
+        index = 0
+        while index < len(node.value):
+            key_node, value_node = node.value[index]
+            if key_node.tag == u'tag:yaml.org,2002:merge':
+                del node.value[index]
+                if isinstance(value_node, MappingNode):
+                    self.flatten_mapping(value_node)
+                    merge.extend(value_node.value)
+                elif isinstance(value_node, SequenceNode):
+                    submerge = []
+                    for subnode in value_node.value:
+                        if not isinstance(subnode, MappingNode):
+                            raise ConstructorError("while constructing a mapping",
+                                    node.start_mark,
+                                    "expected a mapping for merging, but found %s"
+                                    % subnode.id, subnode.start_mark)
+                        self.flatten_mapping(subnode)
+                        submerge.append(subnode.value)
+                    submerge.reverse()
+                    for value in submerge:
+                        merge.extend(value)
+                else:
+                    raise ConstructorError("while constructing a mapping", node.start_mark,
+                            "expected a mapping or list of mappings for merging, but found %s"
+                            % value_node.id, value_node.start_mark)
+            elif key_node.tag == u'tag:yaml.org,2002:value':
+                key_node.tag = u'tag:yaml.org,2002:str'
+                index += 1
+            else:
+                index += 1
+        if merge:
+            node.value = merge + node.value
+
+    def construct_mapping(self, node, deep=False):
+        if isinstance(node, MappingNode):
+            self.flatten_mapping(node)
+        return BaseConstructor.construct_mapping(self, node, deep=deep)
+
     def construct_yaml_null(self, node):
         self.construct_scalar(node)
         return None
     def construct_yaml_omap(self, node):
         # Note: we do not check for duplicate keys, because it's too
         # CPU-expensive.
+        omap = []
+        yield omap
         if not isinstance(node, SequenceNode):
             raise ConstructorError("while constructing an ordered map", node.start_mark,
                     "expected a sequence, but found %s" % node.id, node.start_mark)
-        omap = []
         for subnode in node.value:
             if not isinstance(subnode, MappingNode):
                 raise ConstructorError("while constructing an ordered map", node.start_mark,
                 raise ConstructorError("while constructing an ordered map", node.start_mark,
                         "expected a single mapping item, but found %d items" % len(subnode.value),
                         subnode.start_mark)
-            key_node = subnode.value.keys()[0]
+            key_node, value_node = subnode.value[0]
             key = self.construct_object(key_node)
-            value = self.construct_object(subnode.value[key_node])
+            value = self.construct_object(value_node)
             omap.append((key, value))
-        return omap
 
     def construct_yaml_pairs(self, node):
         # Note: the same code as `construct_yaml_omap`.
+        pairs = []
+        yield pairs
         if not isinstance(node, SequenceNode):
             raise ConstructorError("while constructing pairs", node.start_mark,
                     "expected a sequence, but found %s" % node.id, node.start_mark)
-        pairs = []
         for subnode in node.value:
             if not isinstance(subnode, MappingNode):
                 raise ConstructorError("while constructing pairs", node.start_mark,
                 raise ConstructorError("while constructing pairs", node.start_mark,
                         "expected a single mapping item, but found %d items" % len(subnode.value),
                         subnode.start_mark)
-            key_node = subnode.value.keys()[0]
+            key_node, value_node = subnode.value[0]
             key = self.construct_object(key_node)
-            value = self.construct_object(subnode.value[key_node])
+            value = self.construct_object(value_node)
             pairs.append((key, value))
-        return pairs
 
     def construct_yaml_set(self, node):
+        data = set()
+        yield data
         value = self.construct_mapping(node)
-        return set(value)
+        data.update(value)
 
     def construct_yaml_str(self, node):
         value = self.construct_scalar(node)
             return value
 
     def construct_yaml_seq(self, node):
-        return self.construct_sequence(node)
+        data = []
+        yield data
+        data.extend(self.construct_sequence(node))
 
     def construct_yaml_map(self, node):
-        return self.construct_mapping(node)
+        data = {}
+        yield data
+        value = self.construct_mapping(node)
+        data.update(value)
 
     def construct_yaml_object(self, node, cls):
-        state = self.construct_mapping(node)
         data = cls.__new__(cls)
+        yield data
         if hasattr(data, '__setstate__'):
+            state = self.construct_mapping(node, deep=True)
             data.__setstate__(state)
         else:
+            state = self.construct_mapping(node)
             data.__dict__.update(state)
-        return data
 
     def construct_undefined(self, node):
         raise ConstructorError(None, None,
        return complex(self.construct_scalar(node))
 
     def construct_python_tuple(self, node):
-        return tuple(self.construct_yaml_seq(node))
+        return tuple(self.construct_sequence(node))
 
     def find_python_module(self, name, mark):
         if not name:
         # Format:
         #   !!python/object:module.name { ... state ... }
         instance = self.make_python_instance(suffix, node, newobj=True)
-        state = self.construct_mapping(node)
+        yield instance
+        deep = hasattr(instance, '__setstate__')
+        state = self.construct_mapping(node, deep=deep)
         self.set_python_instance_state(instance, state)
-        return instance
 
     def construct_python_object_apply(self, suffix, node, newobj=False):
         # Format:
         # The difference between !!python/object/apply and !!python/object/new
         # is how an object is created, check make_python_instance for details.
         if isinstance(node, SequenceNode):
-            args = self.construct_sequence(node)
+            args = self.construct_sequence(node, deep=True)
             kwds = {}
             state = {}
             listitems = []
             dictitems = {}
         else:
-            value = self.construct_mapping(node)
+            value = self.construct_mapping(node, deep=True)
             args = value.get('args', [])
             kwds = value.get('kwds', {})
             state = value.get('state', {})
     def construct_python_object_new(self, suffix, node):
         return self.construct_python_object_apply(suffix, node, newobj=True)
 
-
 Constructor.add_constructor(
     u'tag:yaml.org,2002:python/none',
     Constructor.construct_yaml_null)

lib/yaml/emitter.py

 class EmitterError(YAMLError):
     pass
 
-class ScalarAnalysis:
+class ScalarAnalysis(object):
     def __init__(self, scalar, empty, multiline,
             allow_flow_plain, allow_block_plain,
             allow_single_quoted, allow_double_quoted,
         self.allow_double_quoted = allow_double_quoted
         self.allow_block = allow_block
 
-class Emitter:
+class Emitter(object):
 
     DEFAULT_TAG_PREFIXES = {
         u'!' : u'!',

lib/yaml/error.py

 
 __all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
 
-class Mark:
+class Mark(object):
 
     def __init__(self, name, index, line, column, buffer, pointer):
         self.name = name

lib/yaml/events.py

 
 # Abstract classes.
 
-class Event:
+class Event(object):
     def __init__(self, start_mark=None, end_mark=None):
         self.start_mark = start_mark
         self.end_mark = end_mark

lib/yaml/nodes.py

 
-class Node:
+class Node(object):
     def __init__(self, tag, value, start_mark, end_mark):
         self.tag = tag
         self.value = value

lib/yaml/parser.py

 class ParserError(MarkedYAMLError):
     pass
 
-class Parser:
+class Parser(object):
     # Since writing a recursive-descendant parser is a straightforward task, we
     # do not give many comments here.
     # Note that we use Python generators. If you rewrite the parser in another

lib/yaml/reader.py

                     % (ord(self.character), self.reason,
                             self.name, self.position)
 
-class Reader:
+class Reader(object):
     # Reader:
     # - determines the data encoding and converts it to unicode,
     # - checks if characters are in allowed range,

lib/yaml/representer.py

 class RepresenterError(YAMLError):
     pass
 
-class BaseRepresenter:
+class BaseRepresenter(object):
 
     yaml_representers = {}
     yaml_multi_representers = {}
         self.default_style = default_style
         self.default_flow_style = default_flow_style
         self.represented_objects = {}
+        self.object_keeper = []
+        self.alias_key = None
 
     def represent(self, data):
         node = self.represent_data(data)
         self.serialize(node)
         self.represented_objects = {}
+        self.object_keeper = []
+        self.alias_key = None
 
     class C: pass
     c = C()
     def f(): pass
+    def g(): yield None
     classobj_type = type(C)
     instance_type = type(c)
     function_type = type(f)
+    generator_type = type(g())
     builtin_function_type = type(abs)
     module_type = type(sys)
-    del C, c, f
+    del C, c, f, g
 
     def get_classobj_bases(self, cls):
         bases = [cls]
 
     def represent_data(self, data):
         if self.ignore_aliases(data):
-            alias_key = None
+            self.alias_key = None
         else:
-            alias_key = id(data)
-        if alias_key is not None:
-            if alias_key in self.represented_objects:
-                node = self.represented_objects[alias_key]
-                if node is None:
-                    raise RepresenterError("recursive objects are not allowed: %r" % data)
+            self.alias_key = id(data)
+        if self.alias_key is not None:
+            if self.alias_key in self.represented_objects:
+                node = self.represented_objects[self.alias_key]
+                #if node is None:
+                #    raise RepresenterError("recursive objects are not allowed: %r" % data)
                 return node
-            self.represented_objects[alias_key] = None
+            #self.represented_objects[alias_key] = None
+            self.object_keeper.append(data)
         data_types = type(data).__mro__
         if type(data) is self.instance_type:
             data_types = self.get_classobj_bases(data.__class__)+list(data_types)
                     node = self.yaml_representers[None](self, data)
                 else:
                     node = ScalarNode(None, unicode(data))
-        if alias_key is not None:
-            self.represented_objects[alias_key] = node
+        #if alias_key is not None:
+        #    self.represented_objects[alias_key] = node
         return node
 
     def add_representer(cls, data_type, representer):
     def represent_scalar(self, tag, value, style=None):
         if style is None:
             style = self.default_style
-        return ScalarNode(tag, value, style=style)
+        node = ScalarNode(tag, value, style=style)
+        if self.alias_key is not None:
+            self.represented_objects[self.alias_key] = node
+        return node
 
     def represent_sequence(self, tag, sequence, flow_style=None):
+        value = []
+        node = SequenceNode(tag, value, flow_style=flow_style)
+        if self.alias_key is not None:
+            self.represented_objects[self.alias_key] = node
         best_style = True
-        value = []
         for item in sequence:
             node_item = self.represent_data(item)
             if not (isinstance(node_item, ScalarNode) and not node_item.style):
                 best_style = False
-            value.append(self.represent_data(item))
+            value.append(node_item)
         if flow_style is None:
-            flow_style = self.default_flow_style
-        if flow_style is None:
-            flow_style = best_style
-        return SequenceNode(tag, value, flow_style=flow_style)
+            if self.default_flow_style is not None:
+                node.flow_style = self.default_flow_style
+            else:
+                node.flow_style = best_style
+        return node
 
     def represent_mapping(self, tag, mapping, flow_style=None):
+        value = []
+        node = MappingNode(tag, value, flow_style=flow_style)
+        if self.alias_key is not None:
+            self.represented_objects[self.alias_key] = node
         best_style = True
-        if hasattr(mapping, 'keys'):
-            value = {}
-            for item_key in mapping.keys():
-                item_value = mapping[item_key]
-                node_key = self.represent_data(item_key)
-                node_value = self.represent_data(item_value)
-                if not (isinstance(node_key, ScalarNode) and not node_key.style):
-                    best_style = False
-                if not (isinstance(node_value, ScalarNode) and not node_value.style):
-                    best_style = False
-                value[node_key] = node_value
-        else:
-            value = []
-            for item_key, item_value in mapping:
-                node_key = self.represent_data(item_key)
-                node_value = self.represent_data(item_value)
-                if not (isinstance(node_key, ScalarNode) and not node_key.style):
-                    best_style = False
-                if not (isinstance(node_value, ScalarNode) and not node_value.style):
-                    best_style = False
-                value.append((node_key, node_value))
+        if hasattr(mapping, 'items'):
+            mapping = mapping.items()
+            mapping.sort()
+        for item_key, item_value in mapping:
+            node_key = self.represent_data(item_key)
+            node_value = self.represent_data(item_value)
+            if not (isinstance(node_key, ScalarNode) and not node_key.style):
+                best_style = False
+            if not (isinstance(node_value, ScalarNode) and not node_value.style):
+                best_style = False
+            value.append((node_key, node_value))
         if flow_style is None:
-            flow_style = self.default_flow_style
-        if flow_style is None:
-            flow_style = best_style
-        return MappingNode(tag, value, flow_style=flow_style)
+            if self.default_flow_style is not None:
+                node.flow_style = self.default_flow_style
+            else:
+                node.flow_style = best_style
+        return node
 
     def ignore_aliases(self, data):
         return False
         return self.represent_scalar(u'tag:yaml.org,2002:float', value)
 
     def represent_list(self, data):
-        pairs = (len(data) > 0 and isinstance(data, list))
-        if pairs:
-            for item in data:
-                if not isinstance(item, tuple) or len(item) != 2:
-                    pairs = False
-                    break
-        if not pairs:
+        #pairs = (len(data) > 0 and isinstance(data, list))
+        #if pairs:
+        #    for item in data:
+        #        if not isinstance(item, tuple) or len(item) != 2:
+        #            pairs = False
+        #            break
+        #if not pairs:
             return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
-        value = []
-        for item_key, item_value in data:
-            value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
-                [(item_key, item_value)]))
-        return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+        #value = []
+        #for item_key, item_value in data:
+        #    value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+        #        [(item_key, item_value)]))
+        #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
 
     def represent_dict(self, data):
         return self.represent_mapping(u'tag:yaml.org,2002:map', data)
             state = data.__getstate__()
         else:
             state = data.__dict__.copy()
-        if isinstance(state, dict):
-            state = state.items()
-            state.sort()
         return self.represent_mapping(tag, state, flow_style=flow_style)
 
     def represent_undefined(self, data):
         else:
             state = data.__dict__
         if args is None and isinstance(state, dict):
-            state = state.items()
-            state.sort()
             return self.represent_mapping(
                     u'tag:yaml.org,2002:python/object:'+class_name, state)
         if isinstance(state, dict) and not state:
         function_name = u'%s.%s' % (function.__module__, function.__name__)
         if not args and not listitems and not dictitems \
                 and isinstance(state, dict) and newobj:
-            state = state.items()
-            state.sort()
             return self.represent_mapping(
                     u'tag:yaml.org,2002:python/object:'+function_name, state)
         if not listitems and not dictitems  \

lib/yaml/resolver.py

 class ResolverError(YAMLError):
     pass
 
-class BaseResolver:
+class BaseResolver(object):
 
     DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
     DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'

lib/yaml/scanner.py

 # ALIAS(value)
 # ANCHOR(value)
 # TAG(value)
-# SCALAR(value, plain)
+# SCALAR(value, plain, style)
 #
 # Read comments in the Scanner code for more details.
 #
 class ScannerError(MarkedYAMLError):
     pass
 
-class SimpleKey:
+class SimpleKey(object):
     # See below simple keys treatment.
 
     def __init__(self, token_number, required, index, line, column, mark):
         self.column = column
         self.mark = mark
 
-class Scanner:
+class Scanner(object):
 
     def __init__(self):
         """Initialize the scanner."""

lib/yaml/serializer.py

 class SerializerError(YAMLError):
     pass
 
-class Serializer:
+class Serializer(object):
 
     ANCHOR_TEMPLATE = u'id%03d'
 
                 for item in node.value:
                     self.anchor_node(item)
             elif isinstance(node, MappingNode):
-                if hasattr(node.value, 'keys'):
-                    for key in node.value.keys():
-                        self.anchor_node(key)
-                        self.anchor_node(node.value[key])
-                else:
-                    for key, value in node.value:
-                        self.anchor_node(key)
-                        self.anchor_node(value)
+                for key, value in node.value:
+                    self.anchor_node(key)
+                    self.anchor_node(value)
 
     def generate_anchor(self, node):
         self.last_anchor_id += 1
                             == self.resolve(MappingNode, node.value, True))
                 self.emit(MappingStartEvent(alias, node.tag, implicit,
                     flow_style=node.flow_style))
-                if hasattr(node.value, 'keys'):
-                    for key in node.value.keys():
-                        self.serialize_node(key, node, None)
-                        self.serialize_node(node.value[key], node, key)
-                else:
-                    for key, value in node.value:
-                        self.serialize_node(key, node, None)
-                        self.serialize_node(value, node, key)
+                for key, value in node.value:
+                    self.serialize_node(key, node, None)
+                    self.serialize_node(value, node, key)
                 self.emit(MappingEndEvent())
             self.ascend_resolver()
 

lib/yaml/tokens.py

 
-class Token:
+class Token(object):
     def __init__(self, start_mark, end_mark):
         self.start_mark = start_mark
         self.end_mark = end_mark

tests/data/duplicate-key.former-loader-error.code

+{ 'foo': 'baz' }

tests/data/duplicate-key.former-loader-error.data

+---
+foo: bar
+foo: baz

tests/data/duplicate-key.loader-error

----
-foo: bar
-foo: baz

tests/data/duplicate-mapping-key.former-loader-error.code

+{ 'foo': { 'baz': 'bat', 'foo': 'duplicate key' } }

tests/data/duplicate-mapping-key.former-loader-error.data

+---
+&anchor foo:
+    foo: bar
+    *anchor: duplicate key
+    baz: bat
+    *anchor: duplicate key

tests/data/duplicate-mapping-key.loader-error

----
-&anchor foo:
-    foo: bar
-    *anchor: duplicate key
-    baz: bat
-    *anchor: duplicate key

tests/data/duplicate-merge-key.former-loader-error.code

+{ 'x': 1, 'y': 2, 'foo': 'bar', 'z': 3, 't': 4 }

tests/data/duplicate-merge-key.former-loader-error.data

+---
+<<: {x: 1, y: 2}
+foo: bar
+<<: {z: 3, t: 4}

tests/data/duplicate-merge-key.loader-error

----
-<<: {x: 1, y: 2}
-foo: bar
-<<: {z: 3, t: 4}

tests/data/duplicate-value-key.former-loader-error.code

+{ 'foo': 'bar', '=': 2 }

tests/data/duplicate-value-key.former-loader-error.data

+---
+=: 1
+foo: bar
+=: 2

tests/data/duplicate-value-key.loader-error

----
-=: 1
-foo: bar
-=: 2

tests/data/recurive-list.recursive

+value = []
+value.append(value)

tests/data/recursive-anchor.former-loader-error

+- &foo [1
+    2,
+    3,
+    *foo]

tests/data/recursive-anchor.loader-error

-- &foo [1
-    2,
-    3,
-    *foo]

tests/data/recursive-dict.recursive

+value = {}
+instance = AnInstance(value, value)
+value[instance] = instance

tests/data/recursive-set.recursive

+value = set()
+value.add(AnInstance(foo=value, bar=value))
+value.add(AnInstance(foo=value, bar=value))

tests/data/recursive-state.recursive

+value = []
+value.append(AnInstanceWithState(value, value))

tests/data/recursive-tuple.recursive

+value = ([], [])
+value[0].append(value)
+value[1].append(value[0])

tests/data/recursive.dumper-error

-data = []
-data.append(data)
-dump(data)

tests/data/recursive.former-dumper-error

+data = []
+data.append(data)
+dump(data)

tests/test_constructor.py

     def __eq__(self, other):
         return type(self) is type(other) and dict(self) == dict(other)
 
+def execute(code):
+    exec code
+    return value
+
 class TestConstructorTypes(test_appliance.TestAppliance):
 
     def _testTypes(self, test_name, data_filename, code_filename):

tests/test_recursive.py

 
-import unittest
+import test_appliance
+
 from yaml import *
 
-RECURSIVE = """
---- &A
-- *A: *A
-"""
+class AnInstance:
 
-class TestRecursive(unittest.TestCase):
+    def __init__(self, foo, bar):
+        self.foo = foo
+        self.bar = bar
 
-    def testRecursive(self):
-        node = compose(RECURSIVE)
-        self._check(node)
-        document = serialize(node)
-        node = compose(document)
-        self._check(node)
+    def __repr__(self):
+        try:
+            return "%s(foo=%r, bar=%r)" % (self.__class__.__name__,
+                    self.foo, self.bar)
+        except RuntimeError:
+            return "%s(foo=..., bar=...)" % self.__class__.__name__
 
-    def _check(self, node):
-        self.failUnless(node in node.value[0].value)
-        self.failUnless(node.value[0].value[node] is node)
+class AnInstanceWithState(AnInstance):
 
+    def __getstate__(self):
+        return {'attributes': [self.foo, self.bar]}
+
+    def __setstate__(self, state):
+        self.foo, self.bar = state['attributes']
+
+class TestRecursive(test_appliance.TestAppliance):
+
+    def _testRecursive(self, test_name, recursive_filename):
+        exec file(recursive_filename, 'r').read()
+        value1 = value
+        output1 = None
+        value2 = None
+        output2 = None
+        try:
+            output1 = dump(value1)
+            #print "OUTPUT %s:" % test_name
+            #print output1
+            value2 = load(output1)
+            output2 = dump(value2)
+            self.failUnlessEqual(output1, output2)
+        except:
+            print "VALUE1:", value1
+            print "VALUE2:", value2
+            print "OUTPUT1:"
+            print output1
+            print "OUTPUT2:"
+            print output2
+            raise
+
+TestRecursive.add_tests('testRecursive', '.recursive')
+

tests/test_resolver.py

             return node.tag, value
         elif isinstance(node, MappingNode):
             value = []
-            for key in node.value:
-                item = node.value[key]
+            for key, item in node.value:
                 value.append((self._convert(key), self._convert(item)))
             value.sort()
             return node.tag, value

tests/test_structure.py

     def construct_undefined(self, node):
         return self.construct_scalar(node)
 
+MyLoader.add_constructor(u'tag:yaml.org,2002:map', MyLoader.construct_mapping)
 MyLoader.add_constructor(None, MyLoader.construct_undefined)
 
 class MyCanonicalLoader(test_appliance.CanonicalLoader):
     def construct_undefined(self, node):
         return self.construct_scalar(node)
 
+MyCanonicalLoader.add_constructor(u'tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping)
 MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined)
 
 class TestConstructor(test_appliance.TestAppliance):