Commits

Kirill Simonov committed 6639058

Added a YAML lexer for Pygments.

Comments (0)

Files changed (3)

examples/pygments-lexer/example.raw

+Token.Comment.Single	u'#'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# Examples from the Preview section of the YAML specification'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# (http://yaml.org/spec/1.2/#Preview)'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'#'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Sequence of scalars'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Ken'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Griffey'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Mapping scalars to scalars'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u'  '
+Token.Literal.Scalar.Plain	u'65'
+Token.Text.Blank	u'    '
+Token.Comment.Single	u'# Home runs'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'avg'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0.278'
+Token.Text.Blank	u' '
+Token.Comment.Single	u'# Batting average'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'rbi'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'147'
+Token.Text.Blank	u'   '
+Token.Comment.Single	u'# Runs Batted In'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Mapping scalars to sequences'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'american'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Boston'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Red'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sox'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Detroit'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Tigers'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'New'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'York'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Yankees'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'national'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'New'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'York'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Mets'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Chicago'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Cubs'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Atlanta'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Braves'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Sequence of mappings'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'name'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u'   '
+Token.Literal.Scalar.Plain	u'65'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'avg'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u'  '
+Token.Literal.Scalar.Plain	u'0.278'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'name'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u'   '
+Token.Literal.Scalar.Plain	u'63'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'avg'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u'  '
+Token.Literal.Scalar.Plain	u'0.288'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Sequence of sequences'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Punctuation.Indicator	u'['
+Token.Literal.Scalar.Plain	u'name'
+Token.Text.Blank	u'        '
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'avg'
+Token.Text.Blank	u'  '
+Token.Punctuation.Indicator	u']'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Punctuation.Indicator	u'['
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'65'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0.278'
+Token.Punctuation.Indicator	u']'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Punctuation.Indicator	u'['
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Blank	u'  '
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'63'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0.288'
+Token.Punctuation.Indicator	u']'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Mapping of mappings'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'{'
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'65'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'avg'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0.278'
+Token.Punctuation.Indicator	u'}'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'{'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'    '
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'63'
+Token.Punctuation.Indicator	u','
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'    '
+Token.Literal.Scalar.Plain	u'avg'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0.288'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'  '
+Token.Punctuation.Indicator	u'}'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Two documents in a stream'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Comment.Single	u'# Ranking of 1998 home runs'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Ken'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Griffey'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Comment.Single	u'# Team ranking'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Chicago'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Cubs'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'St'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Louis'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Cardinals'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Documents with the end indicator'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'time'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'20:03:20'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'player'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'action'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'strike'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'(miss)'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'...'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'time'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'20:03:47'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'player'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'action'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'grand'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'slam'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'...'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Comments'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Comment.Single	u'# 1998 hr ranking'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'rbi'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'  '
+Token.Comment.Single	u'# 1998 rbi ranking'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Ken'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Griffey'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Anchors and aliases'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'hr'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'  '
+Token.Comment.Single	u'# Following node labeled SS'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Name.Anchor	u'&SS'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'rbi'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Name.Alias	u'*SS'
+Token.Text.Blank	u' '
+Token.Comment.Single	u'# Subsequent occurrence'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Ken'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Griffey'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Mapping between sequences'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'?'
+Token.Text.Indent	u' '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Detroit'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Tigers'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Chicago'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'cubs'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'2001-07-23'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'?'
+Token.Text.Indent	u' '
+Token.Punctuation.Indicator	u'['
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'New'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'York'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Yankees'
+Token.Punctuation.Indicator	u','
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'    '
+Token.Literal.Scalar.Plain	u'Atlanta'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Braves'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u']'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u':'
+Token.Text.Indent	u' '
+Token.Punctuation.Indicator	u'['
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-07-02'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-08-12'
+Token.Punctuation.Indicator	u','
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'    '
+Token.Literal.Scalar.Plain	u'2001-08-14'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u']'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Inline nested mapping'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# products purchased'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'item'
+Token.Text.Blank	u'    '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Super'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Hoop'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'quantity'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'1'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'item'
+Token.Text.Blank	u'    '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Basketball'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'quantity'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'4'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'item'
+Token.Text.Blank	u'    '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Big'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Shoes'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'quantity'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'1'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Literal scalars'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Text.Blank	u' '
+Token.Comment.Single	u'# ASCII art'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'\\//||\\/||'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'// ||  ||__'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# Folded scalars'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'>'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u"Mark McGwire's"
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'year was crippled'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'by a knee injury.'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# Preserved indented block in a folded scalar'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'>'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'Sammy Sosa completed another'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'fine season with great stats.'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'   '
+Token.Literal.Scalar.Block	u'63 Home Runs'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'   '
+Token.Literal.Scalar.Block	u'0.288 Batting Average'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'What a year!'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# Indentation determines scope'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'name'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'accomplishment'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'>'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'Mark set a major league'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'home run record in 1998.'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'stats'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'65 Home Runs'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Block	u'0.278 Batting Average'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# Quoted scalars'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'unicode'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Literal.Scalar.Flow	u'Sosa'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'did'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'fine.'
+Token.Literal.Scalar.Flow.Escape	u'\\u263A'
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'control'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Literal.Scalar.Flow.Escape	u'\\b'
+Token.Literal.Scalar.Flow	u'1998'
+Token.Literal.Scalar.Flow.Escape	u'\\t'
+Token.Literal.Scalar.Flow	u'1999'
+Token.Literal.Scalar.Flow.Escape	u'\\t'
+Token.Literal.Scalar.Flow	u'2000'
+Token.Literal.Scalar.Flow.Escape	u'\\n'
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'hex'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'esc'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Literal.Scalar.Flow.Escape	u'\\x0d'
+Token.Literal.Scalar.Flow.Escape	u'\\x0a'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'is'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow.Escape	u'\\r'
+Token.Literal.Scalar.Flow.Escape	u'\\n'
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'single'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Literal.Scalar.Flow	u'"Howdy!"'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'he'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'cried.'
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'quoted'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'#'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'not'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'a'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow.Escape	u"''"
+Token.Literal.Scalar.Flow	u'comment'
+Token.Literal.Scalar.Flow.Escape	u"''"
+Token.Literal.Scalar.Flow	u'.'
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'tie-fighter'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Literal.Scalar.Flow	u'|\\-*-/|'
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Multi-line flow scalars'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'plain'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'This'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'unquoted'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'scalar'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'spans'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'many'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'lines.'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'quoted'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Literal.Scalar.Flow	u'So'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'does'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'this'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'  '
+Token.Literal.Scalar.Flow	u'quoted'
+Token.Literal.Scalar.Flow	u' '
+Token.Literal.Scalar.Flow	u'scalar.'
+Token.Literal.Scalar.Flow.Escape	u'\\n'
+Token.Literal.Scalar.Flow.Quote	u'"'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Integers'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'canonical'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'12345'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'decimal'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'+12_345'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'sexagesimal'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'3:25:45'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'octal'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'014'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'hexadecimal'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0xC'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Floating point'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'canonical'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'1.23015e+3'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'exponential'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'12.3015e+02'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'sexagesimal'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'20:30.15'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'fixed'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'1_230.15'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'negative'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'infinity'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'-.inf'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'not'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'a'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'number'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'.NaN'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Miscellaneous'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'null'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'~'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'true'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'boolean'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'false'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'boolean'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'string'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Literal.Scalar.Flow	u'12345'
+Token.Literal.Scalar.Flow.Quote	u"'"
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Timestamps'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'canonical'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-12-15T02:59:43.1Z'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'iso8601'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-12-14t21:59:43.10-05:00'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'spaced'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-12-14'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'21:59:43.10'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'-5'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'date'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2002-12-14'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Various explicit tags'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'not-date'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!!str'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2002-04-28'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'picture'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!!binary'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'R0lGODlhDAAMAIQAAP//9/X'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'17unp5WZmZgAAAOfn515eXv'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'Pz7Y6OjuDg4J+fn5OTk6enp'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'56enmleECcgggoBADs='
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'application'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'specific'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'tag'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!something'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'The semantics of the tag'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'above may be different for'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Block	u'different documents.'
+Token.Text.Break	u'\n'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# Global tags'
+Token.Text.Break	u'\n'
+Token.Name.Directive	u'%TAG'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!'
+Token.Text.Blank	u' '
+Token.Name.Type	u'tag:clarkevans.com,2002:'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!shape'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'  '
+Token.Comment.Single	u'# Use the ! handle for presenting'
+Token.Text.Break	u'\n'
+Token.Text.Blank	u'  '
+Token.Comment.Single	u'# tag:clarkevans.com,2002:circle'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Name.Type	u'!circle'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'center'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Anchor	u'&ORIGIN'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'{'
+Token.Literal.Scalar.Plain	u'x'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'73'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'y'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'129'
+Token.Punctuation.Indicator	u'}'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'radius'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'7'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Name.Type	u'!line'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'start'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Alias	u'*ORIGIN'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'finish'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'{'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'x'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'89'
+Token.Punctuation.Indicator	u','
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'y'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'102'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'}'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Name.Type	u'!label'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'start'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Alias	u'*ORIGIN'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'color'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'0xFFEEBB'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'text'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Pretty'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'vector'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'drawing.'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Unordered sets'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!!set'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# sets are represented as a'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# mapping where each key is'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# associated with the empty string'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'?'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'?'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'?'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Ken'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Griff'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Ordered mappings'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!!omap'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# ordered maps are represented as'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# a sequence of mappings, with'
+Token.Text.Break	u'\n'
+Token.Comment.Single	u'# each mapping having one key'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Mark'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'McGwire'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'65'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Sammy'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Sosa'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'63'
+Token.Text.Break	u'\n'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'Ken'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Griffy'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'58'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Full length example'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Blank	u' '
+Token.Name.Type	u'!<tag:clarkevans.com,2002:invoice>'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'invoice'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'34843'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'date'
+Token.Text.Blank	u'   '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-01-23'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'bill-to'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Anchor	u'&id001'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'given'
+Token.Text.Blank	u'  '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Chris'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'family'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Dumars'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'address'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'        '
+Token.Literal.Scalar.Plain	u'lines'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'            '
+Token.Literal.Scalar.Block	u'458 Walkman Dr.'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'            '
+Token.Literal.Scalar.Block	u'Suite #292'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'        '
+Token.Literal.Scalar.Plain	u'city'
+Token.Text.Blank	u'    '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Royal'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Oak'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'        '
+Token.Literal.Scalar.Plain	u'state'
+Token.Text.Blank	u'   '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'MI'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'        '
+Token.Literal.Scalar.Plain	u'postal'
+Token.Text.Blank	u'  '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'48046'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'ship-to'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Name.Alias	u'*id001'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'product'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'sku'
+Token.Text.Blank	u'         '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'BL394D'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Plain	u'quantity'
+Token.Text.Blank	u'    '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'4'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Plain	u'description'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Basketball'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Plain	u'price'
+Token.Text.Blank	u'       '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'450.00'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'sku'
+Token.Text.Blank	u'         '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'BL4438H'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Plain	u'quantity'
+Token.Text.Blank	u'    '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'1'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Plain	u'description'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'Super'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Hoop'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Plain	u'price'
+Token.Text.Blank	u'       '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2392.00'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'tax'
+Token.Text.Blank	u'  '
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'251.42'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'total'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'4443.52'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'comments'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'Late'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'afternoon'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'is'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'best.'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'Backup'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'contact'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'is'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'Nancy'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'Billsmer'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'@'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'338-4338.'
+Token.Text.Break	u'\n\n'
+Token.Comment.Single	u'# Another full-length example'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Time'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-11-23'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'15:01:42'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'-5'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'User'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'ed'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Warning'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'This'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'is'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'an'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'error'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'message'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'for'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'the'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'log'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'file'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Time'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-11-23'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'15:02:31'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'-5'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'User'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'ed'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Warning'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'A'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'slightly'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'different'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'error'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'message.'
+Token.Text.Break	u'\n'
+Token.Punctuation.Document	u'---'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Date'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'2001-11-23'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'15:03:17'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'-5'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'User'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'ed'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Fatal'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Literal.Scalar.Plain	u'Unknown'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'variable'
+Token.Literal.Scalar.Plain	u' '
+Token.Literal.Scalar.Plain	u'"bar"'
+Token.Text.Break	u'\n'
+Token.Literal.Scalar.Plain	u'Stack'
+Token.Punctuation.Indicator	u':'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'file'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'TopClass.py'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'line'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'23'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'code'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Block	u'x = MoreObject("345\\n")'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'  '
+Token.Punctuation.Indicator	u'-'
+Token.Text.Indent	u' '
+Token.Literal.Scalar.Plain	u'file'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'MoreClass.py'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'line'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Literal.Scalar.Plain	u'58'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'    '
+Token.Literal.Scalar.Plain	u'code'
+Token.Punctuation.Indicator	u':'
+Token.Text.Blank	u' '
+Token.Punctuation.Indicator	u'|'
+Token.Punctuation.Indicator	u'-'
+Token.Text.Break	u'\n'
+Token.Text.Indent	u'      '
+Token.Literal.Scalar.Block	u'foo = bar'
+Token.Text.Break	u'\n'

examples/pygments-lexer/example.yaml

+
+#
+# Examples from the Preview section of the YAML specification
+# (http://yaml.org/spec/1.2/#Preview)
+#
+
+# Sequence of scalars
+---
+- Mark McGwire
+- Sammy Sosa
+- Ken Griffey
+
+# Mapping scalars to scalars
+---
+hr:  65    # Home runs
+avg: 0.278 # Batting average
+rbi: 147   # Runs Batted In
+
+# Mapping scalars to sequences
+---
+american:
+  - Boston Red Sox
+  - Detroit Tigers
+  - New York Yankees
+national:
+  - New York Mets
+  - Chicago Cubs
+  - Atlanta Braves
+
+# Sequence of mappings
+---
+-
+  name: Mark McGwire
+  hr:   65
+  avg:  0.278
+-
+  name: Sammy Sosa
+  hr:   63
+  avg:  0.288
+
+# Sequence of sequences
+---
+- [name        , hr, avg  ]
+- [Mark McGwire, 65, 0.278]
+- [Sammy Sosa  , 63, 0.288]
+
+# Mapping of mappings
+---
+Mark McGwire: {hr: 65, avg: 0.278}
+Sammy Sosa: {
+    hr: 63,
+    avg: 0.288
+  }
+
+# Two documents in a stream
+--- # Ranking of 1998 home runs
+- Mark McGwire
+- Sammy Sosa
+- Ken Griffey
+--- # Team ranking
+- Chicago Cubs
+- St Louis Cardinals
+
+# Documents with the end indicator
+---
+time: 20:03:20
+player: Sammy Sosa
+action: strike (miss)
+...
+---
+time: 20:03:47
+player: Sammy Sosa
+action: grand slam
+...
+
+# Comments
+---
+hr: # 1998 hr ranking
+  - Mark McGwire
+  - Sammy Sosa
+rbi:
+  # 1998 rbi ranking
+  - Sammy Sosa
+  - Ken Griffey
+
+# Anchors and aliases
+---
+hr:
+  - Mark McGwire
+  # Following node labeled SS
+  - &SS Sammy Sosa
+rbi:
+  - *SS # Subsequent occurrence
+  - Ken Griffey
+
+# Mapping between sequences
+---
+? - Detroit Tigers
+  - Chicago cubs
+:
+  - 2001-07-23
+? [ New York Yankees,
+    Atlanta Braves ]
+: [ 2001-07-02, 2001-08-12,
+    2001-08-14 ]
+
+# Inline nested mapping
+---
+# products purchased
+- item    : Super Hoop
+  quantity: 1
+- item    : Basketball
+  quantity: 4
+- item    : Big Shoes
+  quantity: 1
+
+# Literal scalars
+--- | # ASCII art
+  \//||\/||
+  // ||  ||__
+
+# Folded scalars
+--- >
+  Mark McGwire's
+  year was crippled
+  by a knee injury.
+
+# Preserved indented block in a folded scalar
+---
+>
+ Sammy Sosa completed another
+ fine season with great stats.
+
+   63 Home Runs
+   0.288 Batting Average
+
+ What a year!
+
+# Indentation determines scope
+---
+name: Mark McGwire
+accomplishment: >
+  Mark set a major league
+  home run record in 1998.
+stats: |
+  65 Home Runs
+  0.278 Batting Average
+
+# Quoted scalars
+---
+unicode: "Sosa did fine.\u263A"
+control: "\b1998\t1999\t2000\n"
+hex esc: "\x0d\x0a is \r\n"
+single: '"Howdy!" he cried.'
+quoted: ' # not a ''comment''.'
+tie-fighter: '|\-*-/|'
+
+# Multi-line flow scalars
+---
+plain:
+  This unquoted scalar
+  spans many lines.
+quoted: "So does this
+  quoted scalar.\n"
+
+# Integers
+---
+canonical: 12345
+decimal: +12_345
+sexagesimal: 3:25:45
+octal: 014
+hexadecimal: 0xC
+
+# Floating point
+---
+canonical: 1.23015e+3
+exponential: 12.3015e+02
+sexagesimal: 20:30.15
+fixed: 1_230.15
+negative infinity: -.inf
+not a number: .NaN
+
+# Miscellaneous
+---
+null: ~
+true: boolean
+false: boolean
+string: '12345'
+
+# Timestamps
+---
+canonical: 2001-12-15T02:59:43.1Z
+iso8601: 2001-12-14t21:59:43.10-05:00
+spaced: 2001-12-14 21:59:43.10 -5
+date: 2002-12-14
+
+# Various explicit tags
+---
+not-date: !!str 2002-04-28
+picture: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X
+ 17unp5WZmZgAAAOfn515eXv
+ Pz7Y6OjuDg4J+fn5OTk6enp
+ 56enmleECcgggoBADs=
+application specific tag: !something |
+ The semantics of the tag
+ above may be different for
+ different documents.
+
+# Global tags
+%TAG ! tag:clarkevans.com,2002:
+--- !shape
+  # Use the ! handle for presenting
+  # tag:clarkevans.com,2002:circle
+- !circle
+  center: &ORIGIN {x: 73, y: 129}
+  radius: 7
+- !line
+  start: *ORIGIN
+  finish: { x: 89, y: 102 }
+- !label
+  start: *ORIGIN
+  color: 0xFFEEBB
+  text: Pretty vector drawing.
+
+# Unordered sets
+--- !!set
+# sets are represented as a
+# mapping where each key is
+# associated with the empty string
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+
+# Ordered mappings
+--- !!omap
+# ordered maps are represented as
+# a sequence of mappings, with
+# each mapping having one key
+- Mark McGwire: 65
+- Sammy Sosa: 63
+- Ken Griffy: 58
+
+# Full length example
+--- !<tag:clarkevans.com,2002:invoice>
+invoice: 34843
+date   : 2001-01-23
+bill-to: &id001
+    given  : Chris
+    family : Dumars
+    address:
+        lines: |
+            458 Walkman Dr.
+            Suite #292
+        city    : Royal Oak
+        state   : MI
+        postal  : 48046
+ship-to: *id001
+product:
+    - sku         : BL394D
+      quantity    : 4
+      description : Basketball
+      price       : 450.00
+    - sku         : BL4438H
+      quantity    : 1
+      description : Super Hoop
+      price       : 2392.00
+tax  : 251.42
+total: 4443.52
+comments:
+    Late afternoon is best.
+    Backup contact is Nancy
+    Billsmer @ 338-4338.
+
+# Another full-length example
+---
+Time: 2001-11-23 15:01:42 -5
+User: ed
+Warning:
+  This is an error message
+  for the log file
+---
+Time: 2001-11-23 15:02:31 -5
+User: ed
+Warning:
+  A slightly different error
+  message.
+---
+Date: 2001-11-23 15:03:17 -5
+User: ed
+Fatal:
+  Unknown variable "bar"
+Stack:
+  - file: TopClass.py
+    line: 23
+    code: |
+      x = MoreObject("345\n")
+  - file: MoreClass.py
+    line: 58
+    code: |-
+      foo = bar
+

examples/pygments-lexer/yaml.py

+
+"""
+yaml.py
+
+Lexer for YAML, a human-friendly data serialization language
+(http://yaml.org/).
+
+Written by Kirill Simonov <xi@resolvent.net>.
+
+License: Whatever suitable for inclusion into the Pygments package.
+"""
+
+from pygments.lexer import  \
+        ExtendedRegexLexer, LexerContext, include, bygroups
+from pygments.token import  \
+        Text, Comment, Punctuation, Name, Literal
+
+__all__ = ['YAMLLexer']
+
+
class YAMLLexerContext(LexerContext):
    """Indentation context for the YAML lexer."""

    def __init__(self, *args, **kwds):
        super(YAMLLexerContext, self).__init__(*args, **kwds)
        # Stack of enclosing block indentation levels (innermost last).
        self.indent_stack = []
        # Indentation of the current block context; -1 before any block.
        self.indent = -1
        # Candidate indentation for the next, possibly deeper, block.
        self.next_indent = 0
        # Explicit indentation of a block scalar, or None if not declared.
        self.block_scalar_indent = None
+
+
def something(TokenClass):
    """Emit the matched text as TokenClass, skipping zero-length matches."""
    def callback(lexer, match, context):
        value = match.group()
        if value:
            yield match.start(), TokenClass, value
            # Advance only when something was actually consumed.
            context.pos = match.end()
    return callback
+
def reset_indent(TokenClass):
    """Forget all remembered indentation levels, then emit the match.

    Used for tokens that start a fresh document (directives, '---', '...').
    """
    def callback(lexer, match, context):
        context.indent_stack = []
        context.indent = -1
        context.next_indent = 0
        context.block_scalar_indent = None
        yield match.start(), TokenClass, match.group()
        context.pos = match.end()
    return callback
+
def save_indent(TokenClass, start=False):
    """Record a possible indentation level and emit the matched spaces.

    With start=True the match opens a line: on a dedent the indent stack
    is unwound, and spaces beyond the recovered level are emitted as
    TokenClass.Error.
    """
    def callback(lexer, match, context):
        spaces = match.group()
        overflow = ''
        if not start:
            context.next_indent += len(spaces)
        else:
            context.next_indent = len(spaces)
            if context.next_indent < context.indent:
                # Dedent: unwind the stack until a level fits.
                while context.next_indent < context.indent:
                    context.indent = context.indent_stack.pop()
                if context.next_indent > context.indent:
                    # Landed between two known levels: the excess is bogus.
                    overflow = spaces[context.indent:]
                    spaces = spaces[:context.indent]
        if spaces:
            yield match.start(), TokenClass, spaces
        if overflow:
            yield match.start() + len(spaces), TokenClass.Error, overflow
        context.pos = match.end()
    return callback
+
def set_indent(TokenClass, implicit=False):
    """Commit the previously saved indentation level and emit the match."""
    def callback(lexer, match, context):
        if context.indent < context.next_indent:
            # Entering a deeper block: remember the enclosing level.
            context.indent_stack.append(context.indent)
            context.indent = context.next_indent
        token = match.group()
        if not implicit:
            # The indicator itself widens the indent of what follows it.
            context.next_indent += len(token)
        yield match.start(), TokenClass, token
        context.pos = match.end()
    return callback
+
def set_block_scalar_indent(TokenClass):
    """Record an explicit indentation indicator from a block scalar header."""
    def callback(lexer, match, context):
        header = match.group()
        context.block_scalar_indent = None
        if not header:
            return
        digit = match.group(1)
        if digit:
            # The indicator is relative to the current block indentation.
            context.block_scalar_indent = max(context.indent, 0) + int(digit)
        yield match.start(), TokenClass, header
        context.pos = match.end()
    return callback
+
def parse_block_scalar_empty_line(IndentTokenClass, ContentTokenClass):
    """Split an all-blank block-scalar line into indentation and content."""
    def callback(lexer, match, context):
        line = match.group()
        cut = context.block_scalar_indent
        if cut is None or len(line) <= cut:
            # The whole line fits within the scalar's indentation.
            if line:
                yield match.start(), IndentTokenClass, line
        else:
            # Spaces past the declared indent belong to the scalar value.
            yield match.start(), IndentTokenClass, line[:cut]
            yield match.start() + cut, ContentTokenClass, line[cut:]
        context.pos = match.end()
    return callback
+
def parse_block_scalar_indent(TokenClass):
    """Consume block-scalar indentation, leaving the scalar states on dedent."""
    def callback(lexer, match, context):
        spaces = match.group()
        if context.block_scalar_indent is None:
            # The first sufficiently indented line fixes the scalar's indent.
            if len(spaces) <= max(context.indent, 0):
                # Dedented out of the scalar: drop both scalar states.
                context.stack.pop()
                context.stack.pop()
                return
            context.block_scalar_indent = len(spaces)
        elif len(spaces) < context.block_scalar_indent:
            # Below the established indent: the scalar has ended.
            context.stack.pop()
            context.stack.pop()
            return
        if spaces:
            yield match.start(), TokenClass, spaces
            context.pos = match.end()
    return callback
+
def parse_plain_scalar_indent(TokenClass):
    """Consume plain-scalar indentation, popping out of the scalar on dedent."""
    def callback(lexer, match, context):
        spaces = match.group()
        if len(spaces) <= context.indent:
            # The line dedents out of the plain scalar and its block line.
            context.stack.pop()
            context.stack.pop()
            return
        if spaces:
            yield match.start(), TokenClass, spaces
            context.pos = match.end()
    return callback
+
+
+class YAMLLexer(ExtendedRegexLexer):
+    """Lexer for the YAML language."""
+
+    name = 'YAML'
+    aliases = ['yaml']
+    filenames = ['*.yaml', '*.yml']
+    mimetypes = ['text/x-yaml']
+
+    tokens = {
+
+        # the root rules
+        'root': [
+            # ignored whitespaces
+            (r'[ ]+(?=#|$)', Text.Blank),
+            # line breaks
+            (r'\n+', Text.Break),
+            # a comment
+            (r'#[^\n]*', Comment.Single),
+            # the '%YAML' directive
+            (r'^%YAML(?=[ ]|$)', reset_indent(Name.Directive),
+                'yaml-directive'),
+            # the %TAG directive
+            (r'^%TAG(?=[ ]|$)', reset_indent(Name.Directive),
+                'tag-directive'),
+            # document start and document end indicators
+            (r'^(?:---|\.\.\.)(?=[ ]|$)',
+                reset_indent(Punctuation.Document), 'block-line'),
+            # indentation spaces
+            (r'[ ]*(?![ \t\n\r\f\v]|$)',
+                save_indent(Text.Indent, start=True),
+                ('block-line', 'indentation')),
+        ],
+
+        # trailing whitespaces after directives or a block scalar indicator
+        'ignored-line': [
+            # ignored whitespaces
+            (r'[ ]+(?=#|$)', Text.Blank),
+            # a comment
+            (r'#[^\n]*', Comment.Single),
+            # line break
+            (r'\n', Text.Break, '#pop:2'),
+        ],
+
+        # the %YAML directive
+        'yaml-directive': [
+            # the version number
+            (r'([ ]+)([0-9]+\.[0-9]+)',
+                bygroups(Text.Blank, Literal.Version), 'ignored-line'),
+        ],
+
+        # the %TAG directive
+        'tag-directive': [
+            # a tag handle and the corresponding prefix
+            (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
+                r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
+                bygroups(Text.Blank, Name.Type, Text.Blank, Name.Type),
+                'ignored-line'),
+        ],
+
+        # block scalar indicators and indentation spaces
+        'indentation': [
+            # trailing whitespaces are ignored
+            (r'[ ]*$', something(Text.Blank), '#pop:2'),
+            # whitespaces preceding block collection indicators
+            (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text.Indent)),
+            # block collection indicators
+            (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+            # the beginning a block line
+            (r'[ ]*', save_indent(Text.Indent), '#pop'),
+        ],
+
+        # an indented line in the block context
+        'block-line': [
+            # the line end
+            (r'[ ]*(?=#|$)', something(Text.Blank), '#pop'),
+            # whitespaces separating tokens
+            (r'[ ]+', Text.Blank),
+            # tags, anchors and aliases,
+            include('descriptors'),
+            # block collections and scalars
+            include('block-nodes'),
+            # flow collections and quoted scalars
+            include('flow-nodes'),
+            # a plain scalar
+            (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
+                something(Literal.Scalar.Plain),
+                'plain-scalar-in-block-context'),
+        ],
+
+        # tags, anchors, aliases
+        'descriptors' : [
+            # a full-form tag
+            (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Name.Type),
+            # a tag in the form '!', '!suffix' or '!handle!suffix'
+            (r'!(?:[0-9A-Za-z_-]+)?'
+                r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Name.Type),
+            # an anchor
+            (r'&[0-9A-Za-z_-]+', Name.Anchor),
+            # an alias
+            (r'\*[0-9A-Za-z_-]+', Name.Alias),
+        ],
+
+        # block collections and scalars
+        'block-nodes': [
+            # implicit key
+            (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+            # literal and folded scalars
+            (r'[|>]', Punctuation.Indicator,
+                ('block-scalar-content', 'block-scalar-header')),
+        ],
+
+        # flow collections and quoted scalars
+        'flow-nodes': [
+            # a flow sequence
+            (r'\[', Punctuation.Indicator, 'flow-sequence'),
+            # a flow mapping
+            (r'\{', Punctuation.Indicator, 'flow-mapping'),
+            # a single-quoted scalar
+            (r'\'', Literal.Scalar.Flow.Quote, 'single-quoted-scalar'),
+            # a double-quoted scalar
+            (r'\"', Literal.Scalar.Flow.Quote, 'double-quoted-scalar'),
+        ],
+
+        # the content of a flow collection
+        'flow-collection': [
+            # whitespaces
+            (r'[ ]+', Text.Blank),
+            # line breaks
+            (r'\n+', Text.Break),
+            # a comment
+            (r'#[^\n]*', Comment.Single),
+            # simple indicators
+            (r'[?:,]', Punctuation.Indicator),
+            # tags, anchors and aliases
+            include('descriptors'),
+            # nested collections and quoted scalars
+            include('flow-nodes'),
+            # a plain scalar
+            (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
+                something(Literal.Scalar.Plain),
+                'plain-scalar-in-flow-context'),
+        ],
+
+        # a flow sequence indicated by '[' and ']'
+        'flow-sequence': [
+            # include flow collection rules
+            include('flow-collection'),
+            # the closing indicator
+            (r'\]', Punctuation.Indicator, '#pop'),
+        ],
+
+        # a flow mapping indicated by '{' and '}'
+        'flow-mapping': [
+            # include flow collection rules
+            include('flow-collection'),
+            # the closing indicator
+            (r'\}', Punctuation.Indicator, '#pop'),
+        ],
+
+        # block scalar lines
+        'block-scalar-content': [
+            # line break
+            (r'\n', Text.Break),
+            # empty line
+            (r'^[ ]+$',
+                parse_block_scalar_empty_line(Text.Indent,
+                    Literal.Scalar.Block)),
+            # indentation spaces (we may leave the state here)
+            (r'^[ ]*', parse_block_scalar_indent(Text.Indent)),
+            # line content
+            (r'[^\n\r\f\v]+', Literal.Scalar.Block),
+        ],
+
+        # the content of a literal or folded scalar
+        'block-scalar-header': [
+            # indentation indicator followed by chomping flag
+            (r'([1-9])?[+-]?(?=[ ]|$)',
+                set_block_scalar_indent(Punctuation.Indicator),
+                'ignored-line'),
+            # chomping flag followed by indentation indicator
+            (r'[+-]?([1-9])?(?=[ ]|$)',
+                set_block_scalar_indent(Punctuation.Indicator),
+                'ignored-line'),
+        ],
+
+        # ignored and regular whitespaces in quoted scalars
+        'quoted-scalar-whitespaces': [
+            # leading and trailing whitespaces are ignored
+            (r'^[ ]+|[ ]+$', Text.Blank),
+            # line breaks are ignored
+            (r'\n+', Text.Break),
+            # other whitespaces are a part of the value
+            (r'[ ]+', Literal.Scalar.Flow),
+        ],
+
+        # single-quoted scalars
+        'single-quoted-scalar': [
+            # include whitespace and line break rules
+            include('quoted-scalar-whitespaces'),
+            # escaping of the quote character
+            (r'\'\'', Literal.Scalar.Flow.Escape),
+            # regular non-whitespace characters
+            (r'[^ \t\n\r\f\v\']+', Literal.Scalar.Flow),
+            # the closing quote
+            (r'\'', Literal.Scalar.Flow.Quote, '#pop'),
+        ],
+
+        # double-quoted scalars
+        'double-quoted-scalar': [
+            # include whitespace and line break rules
+            include('quoted-scalar-whitespaces'),
+            # escaping of special characters
+            (r'\\[0abt\tn\nvfre "\\N_LP]', Literal.Scalar.Flow.Escape),
+            # escape codes
+            (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+                Literal.Scalar.Flow.Escape),
+            # regular non-whitespace characters
+            (r'[^ \t\n\r\f\v\"\\]+', Literal.Scalar.Flow),
+            # the closing quote
+            (r'"', Literal.Scalar.Flow.Quote, '#pop'),
+        ],
+
+        # the beginning of a new line while scanning a plain scalar
+        'plain-scalar-in-block-context-new-line': [
+            # empty lines
+            (r'^[ ]+$', Text.Blank),
+            # line breaks
+            (r'\n+', Text.Break),
+            # document start and document end indicators
+            (r'^(?=---|\.\.\.)', something(Punctuation.Document), '#pop:3'),
+            # indentation spaces (we may leave the block line state here)
+            (r'^[ ]*', parse_plain_scalar_indent(Text.Indent), '#pop'),
+        ],
+
+        # a plain scalar in the block context
+        'plain-scalar-in-block-context': [
+            # the scalar ends with the ':' indicator
+            (r'[ ]*(?=:[ ]|:$)', something(Text.Blank), '#pop'),
+            # the scalar ends with whitespaces followed by a comment
+            (r'[ ]+(?=#)', Text.Blank, '#pop'),
+            # trailing whitespaces are ignored
+            (r'[ ]+$', Text.Blank),
+            # line breaks are ignored