Kirill Simonov committed b5044b7

Share data files between Py2 and Py3 test suites.

Comments (0)

Files changed (666)

 include README LICENSE CHANGES setup.py
 recursive-include examples *.py *.cfg *.yaml
-recursive-include tests *.py
 recursive-include tests/data *
+recursive-include tests/lib *.py
+recursive-include tests/lib3 *.py
 	${PYTHON} setup.py --with-libyaml install ${PARAMETERS}
 
 test: build
-	${PYTHON} tests/test_build.py ${TEST}
+	${PYTHON} tests/lib/test_build.py ${TEST}
 
 testext: buildext
-	${PYTHON} tests/test_build_ext.py ${TEST}
+	${PYTHON} tests/lib/test_build_ext.py ${TEST}
 
 testall:
 	${PYTHON} setup.py test
         build_cmd.run()
         sys.path.insert(0, build_cmd.build_lib)
         if sys.version_info[0] < 3:
-            sys.path.insert(0, 'tests')
+            sys.path.insert(0, 'tests/lib')
         else:
-            sys.path.insert(0, 'tests3')
+            sys.path.insert(0, 'tests/lib3')
         import test_all
         test_all.main([])
 

tests/canonical.py

-
-import yaml, yaml.composer, yaml.constructor, yaml.resolver
-
-class CanonicalError(yaml.YAMLError):
-    pass
-
-class CanonicalScanner:
-
-    def __init__(self, data):
-        try:
-            self.data = unicode(data, 'utf-8')+u'\0'
-        except UnicodeDecodeError:
-            raise CanonicalError("utf-8 stream is expected")
-        self.index = 0
-        self.tokens = []
-        self.scanned = False
-
-    def check_token(self, *choices):
-        if not self.scanned:
-            self.scan()
-        if self.tokens:
-            if not choices:
-                return True
-            for choice in choices:
-                if isinstance(self.tokens[0], choice):
-                    return True
-        return False
-
-    def peek_token(self):
-        if not self.scanned:
-            self.scan()
-        if self.tokens:
-            return self.tokens[0]
-
-    def get_token(self, choice=None):
-        if not self.scanned:
-            self.scan()
-        token = self.tokens.pop(0)
-        if choice and not isinstance(token, choice):
-            raise CanonicalError("unexpected token "+repr(token))
-        return token
-
-    def get_token_value(self):
-        token = self.get_token()
-        return token.value
-
-    def scan(self):
-        self.tokens.append(yaml.StreamStartToken(None, None))
-        while True:
-            self.find_token()
-            ch = self.data[self.index]
-            if ch == u'\0':
-                self.tokens.append(yaml.StreamEndToken(None, None))
-                break
-            elif ch == u'%':
-                self.tokens.append(self.scan_directive())
-            elif ch == u'-' and self.data[self.index:self.index+3] == u'---':
-                self.index += 3
-                self.tokens.append(yaml.DocumentStartToken(None, None))
-            elif ch == u'[':
-                self.index += 1
-                self.tokens.append(yaml.FlowSequenceStartToken(None, None))
-            elif ch == u'{':
-                self.index += 1
-                self.tokens.append(yaml.FlowMappingStartToken(None, None))
-            elif ch == u']':
-                self.index += 1
-                self.tokens.append(yaml.FlowSequenceEndToken(None, None))
-            elif ch == u'}':
-                self.index += 1
-                self.tokens.append(yaml.FlowMappingEndToken(None, None))
-            elif ch == u'?':
-                self.index += 1
-                self.tokens.append(yaml.KeyToken(None, None))
-            elif ch == u':':
-                self.index += 1
-                self.tokens.append(yaml.ValueToken(None, None))
-            elif ch == u',':
-                self.index += 1
-                self.tokens.append(yaml.FlowEntryToken(None, None))
-            elif ch == u'*' or ch == u'&':
-                self.tokens.append(self.scan_alias())
-            elif ch == u'!':
-                self.tokens.append(self.scan_tag())
-            elif ch == u'"':
-                self.tokens.append(self.scan_scalar())
-            else:
-                raise CanonicalError("invalid token")
-        self.scanned = True
-
-    DIRECTIVE = u'%YAML 1.1'
-
-    def scan_directive(self):
-        if self.data[self.index:self.index+len(self.DIRECTIVE)] == self.DIRECTIVE and \
-                self.data[self.index+len(self.DIRECTIVE)] in u' \n\0':
-            self.index += len(self.DIRECTIVE)
-            return yaml.DirectiveToken('YAML', (1, 1), None, None)
-        else:
-            raise CanonicalError("invalid directive")
-
-    def scan_alias(self):
-        if self.data[self.index] == u'*':
-            TokenClass = yaml.AliasToken
-        else:
-            TokenClass = yaml.AnchorToken
-        self.index += 1
-        start = self.index
-        while self.data[self.index] not in u', \n\0':
-            self.index += 1
-        value = self.data[start:self.index]
-        return TokenClass(value, None, None)
-
-    def scan_tag(self):
-        self.index += 1
-        start = self.index
-        while self.data[self.index] not in u' \n\0':
-            self.index += 1
-        value = self.data[start:self.index]
-        if not value:
-            value = u'!'
-        elif value[0] == u'!':
-            value = 'tag:yaml.org,2002:'+value[1:]
-        elif value[0] == u'<' and value[-1] == u'>':
-            value = value[1:-1]
-        else:
-            value = u'!'+value
-        return yaml.TagToken(value, None, None)
-
-    QUOTE_CODES = {
-        'x': 2,
-        'u': 4,
-        'U': 8,
-    }
-
-    QUOTE_REPLACES = {
-        u'\\': u'\\',
-        u'\"': u'\"',
-        u' ': u' ',
-        u'a': u'\x07',
-        u'b': u'\x08',
-        u'e': u'\x1B',
-        u'f': u'\x0C',
-        u'n': u'\x0A',
-        u'r': u'\x0D',
-        u't': u'\x09',
-        u'v': u'\x0B',
-        u'N': u'\u0085',
-        u'L': u'\u2028',
-        u'P': u'\u2029',
-        u'_': u'_',
-        u'0': u'\x00',
-
-    }
-
-    def scan_scalar(self):
-        self.index += 1
-        chunks = []
-        start = self.index
-        ignore_spaces = False
-        while self.data[self.index] != u'"':
-            if self.data[self.index] == u'\\':
-                ignore_spaces = False
-                chunks.append(self.data[start:self.index])
-                self.index += 1
-                ch = self.data[self.index]
-                self.index += 1
-                if ch == u'\n':
-                    ignore_spaces = True
-                elif ch in self.QUOTE_CODES:
-                    length = self.QUOTE_CODES[ch]
-                    code = int(self.data[self.index:self.index+length], 16)
-                    chunks.append(unichr(code))
-                    self.index += length
-                else:
-                    if ch not in self.QUOTE_REPLACES:
-                        raise CanonicalError("invalid escape code")
-                    chunks.append(self.QUOTE_REPLACES[ch])
-                start = self.index
-            elif self.data[self.index] == u'\n':
-                chunks.append(self.data[start:self.index])
-                chunks.append(u' ')
-                self.index += 1
-                start = self.index
-                ignore_spaces = True
-            elif ignore_spaces and self.data[self.index] == u' ':
-                self.index += 1
-                start = self.index
-            else:
-                ignore_spaces = False
-                self.index += 1
-        chunks.append(self.data[start:self.index])
-        self.index += 1
-        return yaml.ScalarToken(u''.join(chunks), False, None, None)
-
-    def find_token(self):
-        found = False
-        while not found:
-            while self.data[self.index] in u' \t':
-                self.index += 1
-            if self.data[self.index] == u'#':
-                while self.data[self.index] != u'\n':
-                    self.index += 1
-            if self.data[self.index] == u'\n':
-                self.index += 1
-            else:
-                found = True
-
-class CanonicalParser:
-
-    def __init__(self):
-        self.events = []
-        self.parsed = False
-
-    # stream: STREAM-START document* STREAM-END
-    def parse_stream(self):
-        self.get_token(yaml.StreamStartToken)
-        self.events.append(yaml.StreamStartEvent(None, None))
-        while not self.check_token(yaml.StreamEndToken):
-            if self.check_token(yaml.DirectiveToken, yaml.DocumentStartToken):
-                self.parse_document()
-            else:
-                raise CanonicalError("document is expected, got "+repr(self.tokens[0]))
-        self.get_token(yaml.StreamEndToken)
-        self.events.append(yaml.StreamEndEvent(None, None))
-
-    # document: DIRECTIVE? DOCUMENT-START node
-    def parse_document(self):
-        node = None
-        if self.check_token(yaml.DirectiveToken):
-            self.get_token(yaml.DirectiveToken)
-        self.get_token(yaml.DocumentStartToken)
-        self.events.append(yaml.DocumentStartEvent(None, None))
-        self.parse_node()
-        self.events.append(yaml.DocumentEndEvent(None, None))
-
-    # node: ALIAS | ANCHOR? TAG? (SCALAR|sequence|mapping)
-    def parse_node(self):
-        if self.check_token(yaml.AliasToken):
-            self.events.append(yaml.AliasEvent(self.get_token_value(), None, None))
-        else:
-            anchor = None
-            if self.check_token(yaml.AnchorToken):
-                anchor = self.get_token_value()
-            tag = None
-            if self.check_token(yaml.TagToken):
-                tag = self.get_token_value()
-            if self.check_token(yaml.ScalarToken):
-                self.events.append(yaml.ScalarEvent(anchor, tag, (False, False), self.get_token_value(), None, None))
-            elif self.check_token(yaml.FlowSequenceStartToken):
-                self.events.append(yaml.SequenceStartEvent(anchor, tag, None, None))
-                self.parse_sequence()
-            elif self.check_token(yaml.FlowMappingStartToken):
-                self.events.append(yaml.MappingStartEvent(anchor, tag, None, None))
-                self.parse_mapping()
-            else:
-                raise CanonicalError("SCALAR, '[', or '{' is expected, got "+repr(self.tokens[0]))
-
-    # sequence: SEQUENCE-START (node (ENTRY node)*)? ENTRY? SEQUENCE-END
-    def parse_sequence(self):
-        self.get_token(yaml.FlowSequenceStartToken)
-        if not self.check_token(yaml.FlowSequenceEndToken):
-            self.parse_node()
-            while not self.check_token(yaml.FlowSequenceEndToken):
-                self.get_token(yaml.FlowEntryToken)
-                if not self.check_token(yaml.FlowSequenceEndToken):
-                    self.parse_node()
-        self.get_token(yaml.FlowSequenceEndToken)
-        self.events.append(yaml.SequenceEndEvent(None, None))
-
-    # mapping: MAPPING-START (map_entry (ENTRY map_entry)*)? ENTRY? MAPPING-END
-    def parse_mapping(self):
-        self.get_token(yaml.FlowMappingStartToken)
-        if not self.check_token(yaml.FlowMappingEndToken):
-            self.parse_map_entry()
-            while not self.check_token(yaml.FlowMappingEndToken):
-                self.get_token(yaml.FlowEntryToken)
-                if not self.check_token(yaml.FlowMappingEndToken):
-                    self.parse_map_entry()
-        self.get_token(yaml.FlowMappingEndToken)
-        self.events.append(yaml.MappingEndEvent(None, None))
-
-    # map_entry: KEY node VALUE node
-    def parse_map_entry(self):
-        self.get_token(yaml.KeyToken)
-        self.parse_node()
-        self.get_token(yaml.ValueToken)
-        self.parse_node()
-
-    def parse(self):
-        self.parse_stream()
-        self.parsed = True
-
-    def get_event(self):
-        if not self.parsed:
-            self.parse()
-        return self.events.pop(0)
-
-    def check_event(self, *choices):
-        if not self.parsed:
-            self.parse()
-        if self.events:
-            if not choices:
-                return True
-            for choice in choices:
-                if isinstance(self.events[0], choice):
-                    return True
-        return False
-
-    def peek_event(self):
-        if not self.parsed:
-            self.parse()
-        return self.events[0]
-
-class CanonicalLoader(CanonicalScanner, CanonicalParser,
-        yaml.composer.Composer, yaml.constructor.Constructor, yaml.resolver.Resolver):
-
-    def __init__(self, stream):
-        if hasattr(stream, 'read'):
-            stream = stream.read()
-        CanonicalScanner.__init__(self, stream)
-        CanonicalParser.__init__(self)
-        yaml.composer.Composer.__init__(self)
-        yaml.constructor.Constructor.__init__(self)
-        yaml.resolver.Resolver.__init__(self)
-
-yaml.CanonicalLoader = CanonicalLoader
-
-def canonical_scan(stream):
-    return yaml.scan(stream, Loader=CanonicalLoader)
-
-yaml.canonical_scan = canonical_scan
-
-def canonical_parse(stream):
-    return yaml.parse(stream, Loader=CanonicalLoader)
-
-yaml.canonical_parse = canonical_parse
-
-def canonical_compose(stream):
-    return yaml.compose(stream, Loader=CanonicalLoader)
-
-yaml.canonical_compose = canonical_compose
-
-def canonical_compose_all(stream):
-    return yaml.compose_all(stream, Loader=CanonicalLoader)
-
-yaml.canonical_compose_all = canonical_compose_all
-
-def canonical_load(stream):
-    return yaml.load(stream, Loader=CanonicalLoader)
-
-yaml.canonical_load = canonical_load
-
-def canonical_load_all(stream):
-    return yaml.load_all(stream, Loader=CanonicalLoader)
-
-yaml.canonical_load_all = canonical_load_all
-

tests/data/construct-binary-py2.code

+{
+    "canonical":
+        "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05,  \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
+    "generic":
+        "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05,  \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
+    "description": "The binary value above is a tiny arrow encoded as a gif image.",
+}

tests/data/construct-binary-py2.data

+canonical: !!binary "\
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
+generic: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
+description:
+ The binary value above is a tiny arrow encoded as a gif image.

tests/data/construct-binary-py3.code

+{
+    "canonical":
+        b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05,  \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
+    "generic":
+        b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05,  \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
+    "description": "The binary value above is a tiny arrow encoded as a gif image.",
+}

tests/data/construct-binary-py3.data

+canonical: !!binary "\
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
+generic: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
+description:
+ The binary value above is a tiny arrow encoded as a gif image.

tests/data/construct-binary.code

-{
-    "canonical":
-        "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05,  \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
-    "generic":
-        "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05,  \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;",
-    "description": "The binary value above is a tiny arrow encoded as a gif image.",
-}

tests/data/construct-binary.data

-canonical: !!binary "\
- R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
- OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
- +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
- AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
-generic: !!binary |
- R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
- OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
- +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
- AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
-description:
- The binary value above is a tiny arrow encoded as a gif image.

tests/data/construct-python-bytes-py3.code

+b'some binary data'

tests/data/construct-python-bytes-py3.data

+--- !!python/bytes 'c29tZSBiaW5hcnkgZGF0YQ=='

tests/data/construct-python-long-short-py2.code

+123L

tests/data/construct-python-long-short-py2.data

+!!python/long 123

tests/data/construct-python-long-short-py3.code

+123

tests/data/construct-python-long-short-py3.data

+!!python/long 123

tests/data/construct-python-long-short.code

-123L

tests/data/construct-python-long-short.data

-!!python/long 123

tests/data/construct-python-name-module.code

-[file, yaml.Loader, yaml.dump, abs, yaml.tokens]
+[str, yaml.Loader, yaml.dump, abs, yaml.tokens]

tests/data/construct-python-name-module.data

-- !!python/name:file
+- !!python/name:str
 - !!python/name:yaml.Loader
 - !!python/name:yaml.dump
 - !!python/name:abs

tests/data/construct-python-str-utf8-py2.code

+u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'.encode('utf-8')

tests/data/construct-python-str-utf8-py2.data

+--- !!python/str "Это уникодная строка"

tests/data/construct-python-str-utf8-py3.code

+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-python-str-utf8-py3.data

+--- !!python/str "Это уникодная строка"

tests/data/construct-python-str-utf8.code

-u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'.encode('utf-8')

tests/data/construct-python-str-utf8.data

---- !!python/str "Это уникодная строка"

tests/data/construct-python-unicode-ascii-py2.code

+u"ascii string"

tests/data/construct-python-unicode-ascii-py2.data

+--- !!python/unicode "ascii string"

tests/data/construct-python-unicode-ascii-py3.code

+"ascii string"

tests/data/construct-python-unicode-ascii-py3.data

+--- !!python/unicode "ascii string"

tests/data/construct-python-unicode-ascii.code

-u"ascii string"

tests/data/construct-python-unicode-ascii.data

---- !!python/unicode "ascii string"

tests/data/construct-python-unicode-utf8-py2.code

+u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-python-unicode-utf8-py2.data

+--- !!python/unicode "Это уникодная строка"

tests/data/construct-python-unicode-utf8-py3.code

+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-python-unicode-utf8-py3.data

+--- !!python/unicode "Это уникодная строка"

tests/data/construct-python-unicode-utf8.code

-u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-python-unicode-utf8.data

---- !!python/unicode "Это уникодная строка"

tests/data/construct-str-utf8-py2.code

+u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-str-utf8-py2.data

+--- !!str "Это уникодная строка"

tests/data/construct-str-utf8-py3.code

+'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-str-utf8-py3.data

+--- !!str "Это уникодная строка"

tests/data/construct-str-utf8.code

-u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'

tests/data/construct-str-utf8.data

---- !!str "Это уникодная строка"

tests/data/emitting-unacceptable-unicode-character-bug-py2.code

+u"\udd00"

tests/data/emitting-unacceptable-unicode-character-bug-py2.data

+"\udd00"
Add a comment to this file

tests/data/emitting-unacceptable-unicode-character-bug-py2.skip-ext

Empty file added.

tests/data/emitting-unacceptable-unicode-character-bug-py3.code

+"\udd00"

tests/data/emitting-unacceptable-unicode-character-bug-py3.data

+"\udd00"
Add a comment to this file

tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext

Empty file added.

tests/data/emitting-unacceptable-unicode-character-bug.code

-u"\udd00"

tests/data/emitting-unacceptable-unicode-character-bug.data

-"\udd00"
Add a comment to this file

tests/data/emitting-unacceptable-unicode-character-bug.skip-ext

Empty file removed.

tests/data/serializer-is-already-opened.dumper-error

-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
 dumper.open()
 dumper.open()

tests/data/serializer-is-closed-1.dumper-error

-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
 dumper.open()
 dumper.close()
 dumper.open()

tests/data/serializer-is-closed-2.dumper-error

-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
 dumper.open()
 dumper.close()
 dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))

tests/data/serializer-is-not-opened-1.dumper-error

-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
 dumper.close()

tests/data/serializer-is-not-opened-2.dumper-error

-dumper = yaml.Dumper(StringIO.StringIO())
+dumper = yaml.Dumper(StringIO())
 dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))

tests/lib/canonical.py

+
+import yaml, yaml.composer, yaml.constructor, yaml.resolver
+
+class CanonicalError(yaml.YAMLError):
+    pass
+
+class CanonicalScanner:
+
+    def __init__(self, data):
+        try:
+            self.data = unicode(data, 'utf-8')+u'\0'
+        except UnicodeDecodeError:
+            raise CanonicalError("utf-8 stream is expected")
+        self.index = 0
+        self.tokens = []
+        self.scanned = False
+
+    def check_token(self, *choices):
+        if not self.scanned:
+            self.scan()
+        if self.tokens:
+            if not choices:
+                return True
+            for choice in choices:
+                if isinstance(self.tokens[0], choice):
+                    return True
+        return False
+
+    def peek_token(self):
+        if not self.scanned:
+            self.scan()
+        if self.tokens:
+            return self.tokens[0]
+
+    def get_token(self, choice=None):
+        if not self.scanned:
+            self.scan()
+        token = self.tokens.pop(0)
+        if choice and not isinstance(token, choice):
+            raise CanonicalError("unexpected token "+repr(token))
+        return token
+
+    def get_token_value(self):
+        token = self.get_token()
+        return token.value
+
+    def scan(self):
+        self.tokens.append(yaml.StreamStartToken(None, None))
+        while True:
+            self.find_token()
+            ch = self.data[self.index]
+            if ch == u'\0':
+                self.tokens.append(yaml.StreamEndToken(None, None))
+                break
+            elif ch == u'%':
+                self.tokens.append(self.scan_directive())
+            elif ch == u'-' and self.data[self.index:self.index+3] == u'---':
+                self.index += 3
+                self.tokens.append(yaml.DocumentStartToken(None, None))
+            elif ch == u'[':
+                self.index += 1
+                self.tokens.append(yaml.FlowSequenceStartToken(None, None))
+            elif ch == u'{':
+                self.index += 1
+                self.tokens.append(yaml.FlowMappingStartToken(None, None))
+            elif ch == u']':
+                self.index += 1
+                self.tokens.append(yaml.FlowSequenceEndToken(None, None))
+            elif ch == u'}':
+                self.index += 1
+                self.tokens.append(yaml.FlowMappingEndToken(None, None))
+            elif ch == u'?':
+                self.index += 1
+                self.tokens.append(yaml.KeyToken(None, None))
+            elif ch == u':':
+                self.index += 1
+                self.tokens.append(yaml.ValueToken(None, None))
+            elif ch == u',':
+                self.index += 1
+                self.tokens.append(yaml.FlowEntryToken(None, None))
+            elif ch == u'*' or ch == u'&':
+                self.tokens.append(self.scan_alias())
+            elif ch == u'!':
+                self.tokens.append(self.scan_tag())
+            elif ch == u'"':
+                self.tokens.append(self.scan_scalar())
+            else:
+                raise CanonicalError("invalid token")
+        self.scanned = True
+
+    DIRECTIVE = u'%YAML 1.1'
+
+    def scan_directive(self):
+        if self.data[self.index:self.index+len(self.DIRECTIVE)] == self.DIRECTIVE and \
+                self.data[self.index+len(self.DIRECTIVE)] in u' \n\0':
+            self.index += len(self.DIRECTIVE)
+            return yaml.DirectiveToken('YAML', (1, 1), None, None)
+        else:
+            raise CanonicalError("invalid directive")
+
+    def scan_alias(self):
+        if self.data[self.index] == u'*':
+            TokenClass = yaml.AliasToken
+        else:
+            TokenClass = yaml.AnchorToken
+        self.index += 1
+        start = self.index
+        while self.data[self.index] not in u', \n\0':
+            self.index += 1
+        value = self.data[start:self.index]
+        return TokenClass(value, None, None)
+
+    def scan_tag(self):
+        self.index += 1
+        start = self.index
+        while self.data[self.index] not in u' \n\0':
+            self.index += 1
+        value = self.data[start:self.index]
+        if not value:
+            value = u'!'
+        elif value[0] == u'!':
+            value = 'tag:yaml.org,2002:'+value[1:]
+        elif value[0] == u'<' and value[-1] == u'>':
+            value = value[1:-1]
+        else:
+            value = u'!'+value
+        return yaml.TagToken(value, None, None)
+
+    QUOTE_CODES = {
+        'x': 2,
+        'u': 4,
+        'U': 8,
+    }
+
+    QUOTE_REPLACES = {
+        u'\\': u'\\',
+        u'\"': u'\"',
+        u' ': u' ',
+        u'a': u'\x07',
+        u'b': u'\x08',
+        u'e': u'\x1B',
+        u'f': u'\x0C',
+        u'n': u'\x0A',
+        u'r': u'\x0D',
+        u't': u'\x09',
+        u'v': u'\x0B',
+        u'N': u'\u0085',
+        u'L': u'\u2028',
+        u'P': u'\u2029',
+        u'_': u'_',
+        u'0': u'\x00',
+
+    }
+
+    def scan_scalar(self):
+        self.index += 1
+        chunks = []
+        start = self.index
+        ignore_spaces = False
+        while self.data[self.index] != u'"':
+            if self.data[self.index] == u'\\':
+                ignore_spaces = False
+                chunks.append(self.data[start:self.index])
+                self.index += 1
+                ch = self.data[self.index]
+                self.index += 1
+                if ch == u'\n':
+                    ignore_spaces = True
+                elif ch in self.QUOTE_CODES:
+                    length = self.QUOTE_CODES[ch]
+                    code = int(self.data[self.index:self.index+length], 16)
+                    chunks.append(unichr(code))
+                    self.index += length
+                else:
+                    if ch not in self.QUOTE_REPLACES:
+                        raise CanonicalError("invalid escape code")
+                    chunks.append(self.QUOTE_REPLACES[ch])
+                start = self.index
+            elif self.data[self.index] == u'\n':
+                chunks.append(self.data[start:self.index])
+                chunks.append(u' ')
+                self.index += 1
+                start = self.index
+                ignore_spaces = True
+            elif ignore_spaces and self.data[self.index] == u' ':
+                self.index += 1
+                start = self.index
+            else:
+                ignore_spaces = False
+                self.index += 1
+        chunks.append(self.data[start:self.index])
+        self.index += 1
+        return yaml.ScalarToken(u''.join(chunks), False, None, None)
+
+    def find_token(self):
+        found = False
+        while not found:
+            while self.data[self.index] in u' \t':
+                self.index += 1
+            if self.data[self.index] == u'#':
+                while self.data[self.index] != u'\n':
+                    self.index += 1
+            if self.data[self.index] == u'\n':
+                self.index += 1
+            else:
+                found = True
+
class CanonicalParser:
    """Parser for the canonical YAML form.

    Turns the token stream produced by CanonicalScanner into a flat list
    of parser events.  Grammar (also repeated per method):

        stream   ::= STREAM-START document* STREAM-END
        document ::= DIRECTIVE? DOCUMENT-START node
        node     ::= ALIAS | ANCHOR? TAG? (SCALAR | sequence | mapping)

    Designed as a mixin: token access (check_token, get_token,
    get_token_value, self.tokens) is provided by CanonicalScanner.
    """

    def __init__(self):
        self.events = []     # parsed events, consumed from the front
        self.parsed = False  # parsing is lazy: done on first event access

    # stream: STREAM-START document* STREAM-END
    def parse_stream(self):
        self.get_token(yaml.StreamStartToken)
        self.events.append(yaml.StreamStartEvent(None, None))
        while not self.check_token(yaml.StreamEndToken):
            if self.check_token(yaml.DirectiveToken, yaml.DocumentStartToken):
                self.parse_document()
            else:
                raise CanonicalError("document is expected, got "+repr(self.tokens[0]))
        self.get_token(yaml.StreamEndToken)
        self.events.append(yaml.StreamEndEvent(None, None))

    # document: DIRECTIVE? DOCUMENT-START node
    def parse_document(self):
        # The directive token (e.g. %YAML) is consumed but not reflected
        # in the event stream.
        if self.check_token(yaml.DirectiveToken):
            self.get_token(yaml.DirectiveToken)
        self.get_token(yaml.DocumentStartToken)
        self.events.append(yaml.DocumentStartEvent(None, None))
        self.parse_node()
        self.events.append(yaml.DocumentEndEvent(None, None))

    # node: ALIAS | ANCHOR? TAG? (SCALAR|sequence|mapping)
    def parse_node(self):
        if self.check_token(yaml.AliasToken):
            self.events.append(yaml.AliasEvent(self.get_token_value(), None, None))
        else:
            anchor = None
            if self.check_token(yaml.AnchorToken):
                anchor = self.get_token_value()
            tag = None
            if self.check_token(yaml.TagToken):
                tag = self.get_token_value()
            if self.check_token(yaml.ScalarToken):
                self.events.append(yaml.ScalarEvent(anchor, tag, (False, False), self.get_token_value(), None, None))
            elif self.check_token(yaml.FlowSequenceStartToken):
                self.events.append(yaml.SequenceStartEvent(anchor, tag, None, None))
                self.parse_sequence()
            elif self.check_token(yaml.FlowMappingStartToken):
                self.events.append(yaml.MappingStartEvent(anchor, tag, None, None))
                self.parse_mapping()
            else:
                raise CanonicalError("SCALAR, '[', or '{' is expected, got "+repr(self.tokens[0]))

    # sequence: SEQUENCE-START (node (ENTRY node)*)? ENTRY? SEQUENCE-END
    def parse_sequence(self):
        self.get_token(yaml.FlowSequenceStartToken)
        if not self.check_token(yaml.FlowSequenceEndToken):
            self.parse_node()
            while not self.check_token(yaml.FlowSequenceEndToken):
                self.get_token(yaml.FlowEntryToken)
                # A trailing ',' before ']' is allowed.
                if not self.check_token(yaml.FlowSequenceEndToken):
                    self.parse_node()
        self.get_token(yaml.FlowSequenceEndToken)
        self.events.append(yaml.SequenceEndEvent(None, None))

    # mapping: MAPPING-START (map_entry (ENTRY map_entry)*)? ENTRY? MAPPING-END
    def parse_mapping(self):
        self.get_token(yaml.FlowMappingStartToken)
        if not self.check_token(yaml.FlowMappingEndToken):
            self.parse_map_entry()
            while not self.check_token(yaml.FlowMappingEndToken):
                self.get_token(yaml.FlowEntryToken)
                # A trailing ',' before '}' is allowed.
                if not self.check_token(yaml.FlowMappingEndToken):
                    self.parse_map_entry()
        self.get_token(yaml.FlowMappingEndToken)
        self.events.append(yaml.MappingEndEvent(None, None))

    # map_entry: KEY node VALUE node
    def parse_map_entry(self):
        self.get_token(yaml.KeyToken)
        self.parse_node()
        self.get_token(yaml.ValueToken)
        self.parse_node()

    def parse(self):
        # Parse the whole stream eagerly into self.events.
        self.parse_stream()
        self.parsed = True

    def get_event(self):
        """Pop and return the next event, parsing on first use."""
        if not self.parsed:
            self.parse()
        return self.events.pop(0)

    def check_event(self, *choices):
        """True if the next event exists and matches one of *choices*
        (or if any event exists when no choices are given)."""
        if not self.parsed:
            self.parse()
        if self.events:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.events[0], choice):
                    return True
        return False

    def peek_event(self):
        """Return the next event without consuming it."""
        if not self.parsed:
            self.parse()
        return self.events[0]
+
class CanonicalLoader(CanonicalScanner, CanonicalParser,
        yaml.composer.Composer, yaml.constructor.Constructor, yaml.resolver.Resolver):
    """Loader for the canonical YAML form.

    Combines the canonical scanner/parser with PyYAML's standard
    composer, constructor and resolver, so the generic yaml entry points
    (scan/parse/compose/load) can run against canonical test data.
    """

    def __init__(self, stream):
        # Accept either a file-like object or a raw string buffer.
        if hasattr(stream, 'read'):
            stream = stream.read()
        # Each base is initialized explicitly rather than via super():
        # only the scanner takes the input data, the rest take no args.
        CanonicalScanner.__init__(self, stream)
        CanonicalParser.__init__(self)
        yaml.composer.Composer.__init__(self)
        yaml.constructor.Constructor.__init__(self)
        yaml.resolver.Resolver.__init__(self)
+
yaml.CanonicalLoader = CanonicalLoader

# Convenience wrappers that mirror the generic yaml entry points but
# force CanonicalLoader, for use by the test suite.

def canonical_scan(stream):
    """Scan *stream* with the canonical loader."""
    return yaml.scan(stream, Loader=CanonicalLoader)

def canonical_parse(stream):
    """Parse *stream* with the canonical loader."""
    return yaml.parse(stream, Loader=CanonicalLoader)

def canonical_compose(stream):
    """Compose a single node from *stream* with the canonical loader."""
    return yaml.compose(stream, Loader=CanonicalLoader)

def canonical_compose_all(stream):
    """Compose all documents in *stream* with the canonical loader."""
    return yaml.compose_all(stream, Loader=CanonicalLoader)

def canonical_load(stream):
    """Load a single document from *stream* with the canonical loader."""
    return yaml.load(stream, Loader=CanonicalLoader)

def canonical_load_all(stream):
    """Load all documents in *stream* with the canonical loader."""
    return yaml.load_all(stream, Loader=CanonicalLoader)

# Publish the helpers on the yaml package itself so tests can call
# yaml.canonical_*(...) uniformly.
yaml.canonical_scan = canonical_scan
yaml.canonical_parse = canonical_parse
yaml.canonical_compose = canonical_compose
yaml.canonical_compose_all = canonical_compose_all
yaml.canonical_load = canonical_load
yaml.canonical_load_all = canonical_load_all
+

tests/lib/test_all.py

+
+import sys, yaml, test_appliance
+
def main(args=None):
    """Collect the test modules and hand them to test_appliance.run()."""
    import test_yaml
    suites = [test_yaml]
    # The libyaml binding tests only apply when the extension is built.
    if yaml.__with_libyaml__:
        import test_yaml_ext
        suites.append(test_yaml_ext)
    test_appliance.run(suites, args)

if __name__ == '__main__':
    main()
+

tests/lib/test_appliance.py

+
+import sys, os, os.path, types, traceback, pprint
+
+DATA = 'tests/data'
+
def find_test_functions(collections):
    """Collect test functions from *collections*.

    *collections* is a module, a dict, or a list of either.  A function
    qualifies if it carries a ``unittest`` attribute (set by the test
    decorators).  Results are ordered by name for stable test runs.
    """
    if not isinstance(collections, list):
        collections = [collections]
    functions = []
    for collection in collections:
        if not isinstance(collection, dict):
            collection = vars(collection)
        # sorted() works on both Py2 and Py3; the previous
        # keys = collection.keys(); keys.sort() breaks on Py3 views.
        for key in sorted(collection):
            value = collection[key]
            if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'):
                functions.append(value)
    return functions
+
def find_test_filenames(directory):
    """Group the data files in *directory* by base name.

    Returns a name-sorted list of (base, [extensions]) pairs.  Files whose
    base name ends in '-py3' are reserved for the Python 3 suite and are
    skipped here.
    """
    filenames = {}
    for filename in os.listdir(directory):
        if os.path.isfile(os.path.join(directory, filename)):
            base, ext = os.path.splitext(filename)
            if base.endswith('-py3'):
                continue
            filenames.setdefault(base, []).append(ext)
    # sorted() replaces the Py2-only items()+list.sort() combination and
    # behaves identically on Py2.
    return sorted(filenames.items())
+
def parse_arguments(args):
    """Split command-line *args* into (functions, filenames, verbose).

    The first non-flag argument selects a test function; the rest select
    data-file base names.  YAML_TEST_VERBOSE / YAML_TEST_FUNCTIONS /
    YAML_TEST_FILENAMES environment variables extend the selection.
    """
    if args is None:
        args = sys.argv[1:]
    verbose = False
    if '-v' in args:
        verbose = True
        args.remove('-v')
    if '--verbose' in args:
        verbose = True
        # Bug fix: '--verbose' was previously left in args, so it leaked
        # into include_functions via args.pop(0) below.
        args.remove('--verbose')
    if 'YAML_TEST_VERBOSE' in os.environ:
        verbose = True
    include_functions = []
    if args:
        # Only the first positional argument names a function.
        include_functions.append(args.pop(0))
    if 'YAML_TEST_FUNCTIONS' in os.environ:
        include_functions.extend(os.environ['YAML_TEST_FUNCTIONS'].split())
    include_filenames = []
    include_filenames.extend(args)
    if 'YAML_TEST_FILENAMES' in os.environ:
        include_filenames.extend(os.environ['YAML_TEST_FILENAMES'].split())
    return include_functions, include_filenames, verbose
+
def execute(function, filenames, verbose):
    # Run one test function against a group of data files and return a
    # result tuple (name, filenames, kind, info) where kind is 'SUCCESS',
    # 'FAILURE' (AssertionError) or 'ERROR' (anything else) and info is
    # the sys.exc_info() triple (None on success).
    if hasattr(function, 'unittest_name'):
        name = function.unittest_name
    else:
        # Py2-only attribute (would be __name__ on Py3; this is tests/lib).
        name = function.func_name
    if verbose:
        sys.stdout.write('='*75+'\n')
        sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames)))
    try:
        function(verbose=verbose, *filenames)
    except Exception, exc:
        # Capture exc_info while the exception is still the current one.
        info = sys.exc_info()
        if isinstance(exc, AssertionError):
            kind = 'FAILURE'
        else:
            kind = 'ERROR'
        if verbose:
            traceback.print_exc(limit=1, file=sys.stdout)
        else:
            # Compact progress output: a single 'F' or 'E'.
            sys.stdout.write(kind[0])
            sys.stdout.flush()
    else:
        kind = 'SUCCESS'
        info = None
        if not verbose:
            sys.stdout.write('.')
    sys.stdout.flush()
    return (name, filenames, kind, info)
+
def display(results, verbose):
    """Print a summary of *results* (tuples produced by execute()).

    Successful tests are skipped; failures and errors get a traceback
    plus a dump of the data files involved, followed by overall counts.
    """
    if results and not verbose:
        sys.stdout.write('\n')
    total = len(results)
    failures = 0
    errors = 0
    for name, filenames, kind, info in results:
        if kind == 'SUCCESS':
            continue
        if kind == 'FAILURE':
            failures += 1
        if kind == 'ERROR':
            errors += 1
        sys.stdout.write('='*75+'\n')
        sys.stdout.write('%s(%s): %s\n' % (name, ', '.join(filenames), kind))
        if kind == 'ERROR':
            traceback.print_exception(file=sys.stdout, *info)
        else:
            # For failures, pretty-print the assertion arguments instead
            # of the (usually truncated) exception text.
            sys.stdout.write('Traceback (most recent call last):\n')
            traceback.print_tb(info[2], file=sys.stdout)
            sys.stdout.write('%s: see below\n' % info[0].__name__)
            sys.stdout.write('~'*75+'\n')
            for arg in info[1].args:
                pprint.pprint(arg, stream=sys.stdout)
        for filename in filenames:
            sys.stdout.write('-'*75+'\n')
            sys.stdout.write('%s:\n' % filename)
            # Close the data file explicitly; the previous
            # open(filename, 'rb').read() leaked the file handle.
            stream = open(filename, 'rb')
            data = stream.read()
            stream.close()
            sys.stdout.write(data)
            if data and data[-1] != '\n':
                sys.stdout.write('\n')
    sys.stdout.write('='*75+'\n')
    sys.stdout.write('TESTS: %s\n' % total)
    if failures:
        sys.stdout.write('FAILURES: %s\n' % failures)
    if errors:
        sys.stdout.write('ERRORS: %s\n' % errors)
+
+def run(collections, args=None):
+    test_functions = find_test_functions(collections)
+    test_filenames = find_test_filenames(DATA)
+    include_functions, include_filenames, verbose = parse_arguments(args)
+    results = []
+    for function in test_functions:
+        if include_functions and function.func_name not in include_functions:
+            continue
+        if function.unittest:
+            for base, exts in test_filenames:
+                if include_filenames and base not in include_filenames:
+                    continue
+                filenames = []
+                for ext in function.unittest: