Commits

Anonymous committed e628e4a Merge

Merge from Tim's repo.

Files changed (15)

CHANGES

   * ActionScript 3, thanks to Pierre Bourdon
   * Cheetah/Spitfire templates, thanks to Matt Good
   * Lighttpd config files
-  * Nginix config files
+  * Nginx config files
   * Gnuplot plotting scripts
   * Clojure
+  * POV-Ray scene files
+  * Sqlite3 interactive console sessions
+  * Scala source files, thanks to Krzysiek Goj
+
+- Lexers improved:
+  * C lexer highlights standard library functions now and supports
+    C99 types.
+  * Bash lexer now correctly highlights heredocs without preceding
+    whitespace.
+  * Vim lexer now highlights hex colors properly and knows a couple
+    more keywords.
+  * IRC logs lexer now handles XChat's default time format (#340)
+    and correctly highlights lines ending in ``>``.
+  * Perl lexer now supports more regular expression delimiters (#258).
+  * Objective-C lexer now supports Objective-C 2.0 features such as
+    properties and fast enumeration.
 
 - Added "Visual Studio" style.
 
-- C lexer highlights standard library functions now and supports
-  C99 types.
-
 - Updated markdown processor to Markdown 1.7.
 
 - Support roman/sans/mono style defs and use them in the LaTeX
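
For a quick check of the lexers added in this merge, the usual Pygments API can drive them directly once these changes are installed; a minimal sketch (the snippets are trimmed from the example files added below):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.compiled import ScalaLexer
    from pygments.lexers.other import PovrayLexer

    # Highlight a small Scala snippet with the new ScalaLexer.
    scala_code = 'object HelloWorld { def main(args: Array[String]) { println("hi") } }'
    print(highlight(scala_code, ScalaLexer(), TerminalFormatter()))

    # The POV-Ray lexer is used the same way for scene files.
    pov_code = 'sphere { <0, 1, 2>, 2 texture { pigment { color Yellow } } }'
    print(highlight(pov_code, PovrayLexer(), TerminalFormatter()))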

pygments/lexers/_mapping.py

     'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
     'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
     'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]'), ('text/x-php',)),
+    'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
     'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
     'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
     'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript'), ('text/x-python', 'application/x-python')),
     'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
     'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx'), ('text/x-ruby', 'application/x-ruby')),
     'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R'), ('text/S-plus', 'text/S', 'text/R')),
+    'ScalaLexer': ('pygments.lexers.compiled', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
     'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm',), ('text/x-scheme', 'application/x-scheme')),
     'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
     'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
     'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
     'SqlLexer': ('pygments.lexers.other', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
+    'SqliteConsoleLexer': ('pygments.lexers.other', 'sqlite3con', (), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
     'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
     'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
     'TcshLexer': ('pygments.lexers.other', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
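
Each entry in the mapping is a tuple of (module, display name, aliases, filename globs, mimetypes), which is what the lookup helpers in pygments.lexers consult. A rough sketch of how the new entries resolve, assuming this mapping is in place:

    from pygments.lexers import (get_lexer_by_name, get_lexer_for_filename,
                                 get_lexer_for_mimetype)

    get_lexer_by_name('scala')                      # -> ScalaLexer (alias)
    get_lexer_by_name('pov')                        # -> PovrayLexer (alias)
    get_lexer_for_filename('sphere.pov')            # -> PovrayLexer (*.pov glob)
    get_lexer_for_filename('log.sqlite3-console')   # -> SqliteConsoleLexer
    get_lexer_for_mimetype('text/x-scala')          # -> ScalaLexer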

pygments/lexers/agile.py

     flags = re.DOTALL | re.MULTILINE
     # TODO: give this a perl guy who knows how to parse perl...
     tokens = {
+        'balanced-regex': [
+            (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
+            (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+            (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
+            (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
+            (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
+            (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
+            (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
+            (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
+            (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
+        ],
         'root': [
             (r'\#.*?$', Comment.Single),
             (r'=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
             (r'(format)(\s+)([a-zA-Z0-9_]+)(\s*)(=)(\s*\n)',
              bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
             (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+            # common delimiters
             (r's/(\\\\|\\/|[^/])*/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex),
+            (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+            (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+            (r's@(\\\\|\\@|[^@])*@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex),
+            (r's%(\\\\|\\%|[^%])*%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex),
+            # balanced delimiters
+            (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
+            (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
+            (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
+            (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
+
             (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
             (r'((?<==~)|(?<=\())\s*/(\\\\|\\/|[^/])*/[gcimosx]*', String.Regex),
             (r'\s+', Text),
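
The new Perl rules are meant to catch substitutions that use alternative or balanced delimiters; a rough way to verify is to tokenize a few snippets and look for String.Regex tokens (illustrative only, see tests/examplefiles/regex-delims.pl for the full set):

    from pygments.lexers.agile import PerlLexer
    from pygments.token import String

    for snippet in ('$a =~ s!foo!bar!;', '$b =~ s{foo} {bar};', '$c =~ s<foo><bar>;'):
        tokens = list(PerlLexer().get_tokens(snippet))
        # The delimited parts should now come out as String.Regex.
        print([value for ttype, value in tokens if ttype is String.Regex])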

pygments/lexers/compiled.py

     Lexers for compiled languages.
 
     :copyright: 2006-2008 by Georg Brandl, Armin Ronacher, Christoph Hack,
-                Whitney Young, Kirk McDonald, Stou Sandalski.
+                Whitney Young, Kirk McDonald, Stou Sandalski, Krzysiek Goj.
     :license: BSD, see LICENSE for more details.
 """
 
 # backwards compatibility
 from pygments.lexers.functional import OcamlLexer
 
-__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'JavaLexer',
+__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'JavaLexer', 'ScalaLexer',
            'DylanLexer', 'OcamlLexer', 'ObjectiveCLexer', 'FortranLexer']
 
 
              r'([a-zA-Z_][a-zA-Z0-9_]*)'             # method name
              r'(\s*\([^;]*?\))'                      # signature
              r'(' + _ws + r')({)',
-             bygroups(using(this), Name.Function, using(this), using(this), Punctuation),
+             bygroups(using(this), Name.Function, using(this), using(this),
+                      Punctuation),
              'function'),
             # function declarations
             (r'((?:[a-zA-Z0-9_*\s])+?(?:\s|[*]))'    # return arguments
              r'([a-zA-Z_][a-zA-Z0-9_]*)'             # method name
              r'(\s*\([^;]*?\))'                      # signature
              r'(' + _ws + r')(;)',
-             bygroups(using(this), Name.Function, using(this), using(this), Punctuation)),
+             bygroups(using(this), Name.Function, using(this), using(this),
+                      Punctuation)),
             ('', Text, 'statement'),
         ],
         'statement' : [
         ],
     }
 
+class ScalaLexer(RegexLexer):
+    """
+    For `Scala <http://www.scala-lang.org>`_ source code.
+    """
+
+    name = 'Scala'
+    aliases = ['scala']
+    filenames = ['*.scala']
+    mimetypes = ['text/x-scala']
+
+    flags = re.MULTILINE | re.DOTALL
+
+    #: optional Comment or Whitespace
+    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+    tokens = {
+        'root': [
+            # method names
+            (r'(class|interface|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
+            (r'^(\s*def\s+)'
+             r'([a-zA-Z_][a-zA-Z0-9_]*)'                    # method name
+             r'(\s*)(\()',                                  # signature start
+             bygroups(using(this), Name.Function, Text, Operator)),
+            (r"'([a-zA-Z_][a-zA-Z0-9_]*)", Text.Symbol),
+            (r'[^\S\n]+', Text),
+            (r'//.*?\n', Comment),
+            (r'/\*.*?\*/', Comment),
+            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
+            (r'(abstract|case|catch|do|else|extends|final|finally|for|forSome'
+             r'|if|implicit|lazy|match|new|null|override|private|protected'
+             r'|requires|return|sealed|super|this|throw|try|type|while|with'
+             r'|yield|let|def|var|println|=>|<-|_)\b', Keyword),
+            (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+             Keyword.Type),
+            (r'(String|Int|Array|HashMap)\b', Keyword.Type),
+            (r'(true|false|null)\b', Keyword.Constant),
+            (r'(import)(\s+)', bygroups(Keyword, Text), 'import'),
+            (r'"(\\\\|\\"|[^"])*"', String),
+            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
+            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
+            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
+            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+            (r'0x[0-9a-f]+', Number.Hex),
+            (r'[0-9]+L?', Number.Integer),
+            (r'\n', Text)
+        ],
+        'class': [
+            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
+            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\()',
+             bygroups(Name.Class, Text, Operator), '#pop'),
+        ],
+        'import': [
+            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
+        ],
+    }
+
 
 class DylanLexer(RegexLexer):
     """
 
     tokens = {
         'whitespace': [
-            (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
-            (r'^\s*#', Comment.Preproc, 'macro'),
+            (r'^(\s*)(#if\s+0)', bygroups(Text, Comment.Preproc), 'if0'),
+            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'macro'),
             (r'\n', Text),
             (r'\s+', Text),
             (r'\\\n', Text), # line continuation
             (r'[()\[\],.]', Punctuation),
             (r'(auto|break|case|const|continue|default|do|else|enum|extern|'
              r'for|goto|if|register|restricted|return|sizeof|static|struct|'
-             r'switch|typedef|union|volatile|virtual|while|@selector|'
+             r'switch|typedef|union|volatile|virtual|while|in|@selector|'
              r'@private|@protected|@public|@encode|'
-             r'@synchronized|@try|@throw|@catch|@finally|@end)\b', Keyword),
+             r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
+             r'@synthesize|@dynamic)\b', Keyword),
             (r'(int|long|float|short|double|char|unsigned|signed|void|'
              r'id|BOOL|IBOutlet|IBAction|SEL)\b', Keyword.Type),
             (r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
             ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*:\s*)([a-zA-Z_][a-zA-Z0-9_]*)?',
              bygroups(Name.Class, Text, Name.Class), '#pop'),
             # interface definition for a category
-            ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\([a-zA-Z_][a-zA-Z0-9_]\)*)',
+            ('([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(\([a-zA-Z_][a-zA-Z0-9_]*\))',
              bygroups(Name.Class, Text, Name.Label), '#pop'),
             # simple interface / implementation
             ('([a-zA-Z_][a-zA-Z0-9_]*)', Name.Class, '#pop')

pygments/lexers/functional.py

             (r'[~?][a-z][\w\']*:', Name.Variable),
         ],
         'comment': [
-            (r'[^(*)]', Comment),
+            (r'[^(*)]+', Comment),
             (r'\(\*', Comment, '#push'),
             (r'\*\)', Comment, '#pop'),
             (r'[(*)]', Comment),
         ],
         'string': [
-            (r'[^\\"]', String.Double),
+            (r'[^\\"]+', String.Double),
             include('escape-sequence'),
             (r'\\\n', String.Double),
             (r'"', String.Double, '#pop'),

pygments/lexers/math.py

     #
     # for f in elfun specfun elmat; do
     #   echo -n "$f = "
-    #   matlab -nojvm -r "help $f;exit;"|perl -ne 'push(@c,$1) if /^    (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
+    #   matlab -nojvm -r "help $f;exit;" | perl -ne \
+    #   'push(@c,$1) if /^    (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
     # done
     #
     # elfun: Elementary math functions

pygments/lexers/other.py

 
 import re
 
-from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, this, \
+                           do_insertions
 from pygments.token import Error, Punctuation, \
-     Text, Comment, Operator, Keyword, Name, String, Number
+     Text, Comment, Operator, Keyword, Name, String, Number, Generic
 from pygments.util import shebang_matches
 
 
-__all__ = ['SqlLexer', 'MySqlLexer', 'BrainfuckLexer', 'BashLexer',
-           'BatchLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
-           'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer', 'GnuplotLexer']
+__all__ = ['SqlLexer', 'MySqlLexer', 'SqliteConsoleLexer', 'BrainfuckLexer',
+           'BashLexer', 'BatchLexer', 'BefungeLexer', 'RedcodeLexer',
+           'MOOCodeLexer', 'SmalltalkLexer', 'TcshLexer', 'LogtalkLexer',
+           'GnuplotLexer', 'PovrayLexer']
+
+line_re  = re.compile('.*?\n')
 
 
 class SqlLexer(RegexLexer):
     }
 
 
+class SqliteConsoleLexer(Lexer):
+    """
+    Lexer for example sessions using sqlite3.
+
+    *New in Pygments 0.11.*
+    """
+
+    name = 'sqlite3con'
+    aliases = []
+    filenames = ['*.sqlite3-console']
+    mimetypes = ['text/x-sqlite3-console']
+
+    def get_tokens_unprocessed(self, data):
+        sql = SqlLexer(**self.options)
+
+        curcode = ''
+        insertions = []
+        for match in line_re.finditer(data):
+            line = match.group()
+            if line.startswith('sqlite> ') or line.startswith('   ...> '):
+                insertions.append((len(curcode),
+                                   [(0, Generic.Prompt, line[:8])]))
+                curcode += line[8:]
+            else:
+                if curcode:
+                    for item in do_insertions(insertions,
+                                              sql.get_tokens_unprocessed(curcode)):
+                        yield item
+                    curcode = ''
+                    insertions = []
+                if line.startswith('SQL error: '):
+                    yield (match.start(), Generic.Traceback, line)
+                else:
+                    yield (match.start(), Generic.Output, line)
+        if curcode:
+            for item in do_insertions(insertions,
+                                      sql.get_tokens_unprocessed(curcode)):
+                yield item
+
+
 class BrainfuckLexer(RegexLexer):
     """
     Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
             (r"\$?'(\\\\|\\[0-7]+|\\.|[^'])*'", String.Single),
             (r';', Text),
             (r'\s+', Text),
-            (r'[^=\s\n\[\]{}()$"\'`\\]+', Text),
+            (r'[^=\s\n\[\]{}()$"\'`\\<]+', Text),
             (r'\d+(?= |\Z)', Number),
             (r'\$#?(\w+|.)', Name.Variable),
+            (r'<', Text),
         ],
         'curly': [
             (r'}', Keyword, '#pop'),
             include('genericargs'),
         ],
     }
+
+
+class PovrayLexer(RegexLexer):
+    """
+    For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
+
+    *New in Pygments 0.11.*
+    """
+    name = 'POVRay'
+    aliases = ['pov']
+    filenames = ['*.pov', '*.inc']
+    mimetypes = ['text/x-povray']
+
+    tokens = {
+        'root': [
+            (r'/\*[\w\W]*?\*/', Comment.Multiline),
+            (r'//.*\n', Comment.Single),
+            (r'"(?:\\.|[^"])+"', String.Double),
+            (r'#(debug|default|else|end|error|fclose|fopen|if|ifdef|ifndef|'
+             r'include|range|read|render|statistics|switch|undef|version|'
+             r'warning|while|write|define|macro|local|declare)\b',
+             Comment.Preproc),
+            (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
+             r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|'
+             r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|'
+             r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|'
+             r'attenuating|average|background|black_hole|blue|blur_samples|'
+             r'bounded_by|box_mapping|bozo|break|brick|brick_size|'
+             r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|'
+             r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|'
+             r'color|color_map|colour|colour_map|component|composite|concat|'
+             r'confidence|conic_sweep|constant|control0|control1|cos|cosh|'
+             r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|'
+             r'debug|declare|default|degrees|dents|diffuse|direction|'
+             r'distance|distance_maximum|div|dust|dust_type|eccentricity|'
+             r'else|emitting|end|error|error_bound|exp|exponent|'
+             r'fade_distance|fade_power|falloff|falloff_angle|false|'
+             r'file_exists|filter|finish|fisheye|flatness|flip|floor|'
+             r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|'
+             r'global_settings|glowing|gradient|granite|gray_threshold|'
+             r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|'
+             r'if|ifdef|iff|image_map|incidence|include|int|interpolate|'
+             r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|'
+             r'linear|linear_spline|linear_sweep|location|log|looks_like|'
+             r'look_at|low_error_factor|mandel|map_type|marble|material_map|'
+             r'matrix|max|max_intersections|max_iteration|max_trace_level|'
+             r'max_value|metallic|min|minimum_reuse|mod|mortar|'
+             r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|'
+             r'octaves|off|offset|omega|omnimax|on|once|onion|open|'
+             r'orthographic|panoramic|pattern1|pattern2|pattern3|'
+             r'perspective|pgm|phase|phong|phong_size|pi|pigment|'
+             r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|'
+             r'precision|pwr|quadratic_spline|quaternion|quick_color|'
+             r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|'
+             r'ramp_wave|rand|range|reciprocal|recursion_limit|red|'
+             r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|'
+             r'right|ripples|rotate|roughness|samples|scale|scallop_wave|'
+             r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|'
+             r'slice|slope_map|smooth|specular|spherical_mapping|spiral|'
+             r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|'
+             r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|'
+             r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|'
+             r'test_camera_4|texture|texture_map|tga|thickness|threshold|'
+             r'tightness|tile2|tiles|track|transform|translate|transmit|'
+             r'triangle_wave|true|ttf|turbulence|turb_depth|type|'
+             r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|'
+             r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|'
+             r'vnormalize|volume_object|volume_rendered|vol_with_light|'
+             r'vrotate|v_steps|warning|warp|water_level|waves|while|width|'
+             r'wood|wrinkles|yes)\b', Keyword),
+            (r'\b(bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|'
+             r'disc|height_field|intersection|julia_fractal|lathe|'
+             r'light_source|merge|mesh|object|plane|poly|polygon|prism|'
+             r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|'
+             r'text|torus|triangle|union)\b', Name.Builtin),
+            #TODO: <=, etc
+            (r'[\[\](){}<>;,]', Punctuation),
+            (r'[-+*/=]', Operator),
+            (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
+            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+            (r'[0-9]+\.[0-9]*', Number.Float),
+            (r'\.[0-9]+', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r'\s+', Text),
+        ]
+    }
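
The sqlite3 console lexer splits each line on the "sqlite> " / "   ...> " prompts and uses do_insertions to splice SqlLexer tokens in behind the Generic.Prompt tokens; result rows become Generic.Output and "SQL error:" lines Generic.Traceback. A small sketch of the resulting token stream (transcript shortened from the example file added below):

    from pygments.lexers.other import SqliteConsoleLexer

    session = (
        'sqlite> select count(*) from paste;\n'
        '144\n'
        'sqlite> %;\n'
        'SQL error: near "%": syntax error\n'
    )
    for ttype, value in SqliteConsoleLexer().get_tokens(session):
        print(ttype, repr(value))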

pygments/lexers/text.py

           # weechat
           \d{4}\s\w{3}\s\d{2}\s        # Date
           \d{2}:\d{2}:\d{2}\s+         # Time + Whitespace
+        |
+          # xchat
+          \w{3}\s\d{2}\s               # Date
+          \d{2}:\d{2}:\d{2}\s+         # Time + Whitespace
         )?
     """
     tokens = {
                 # log start/end
             (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
             # hack
-            ("^" + timestamp + r'(\s*<.*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
+            ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
             # normal msgs
             ("^" + timestamp + r"""
                 (\s*<.*?>\s*)          # Nick """,
             (r'"(\\\\|\\"|[^\n"])*"', String.Double),
             (r"'(\\\\|\\'|[^\n'])*'", String.Single),
             (r'-?\d+', Number),
+            (r'#[0-9a-f]{6}', Number.Hex),
             (r'^:', Punctuation),
             (r'[()<>+=!|,~-]', Punctuation), # Inexact list.  Looks decent.
             (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
              Keyword),
+            (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
             (r'\b\w+\b', Name.Other), # These are postprocessed below
             (r'.', Text),
         ],

tests/examplefiles/hello.scala

+  object HelloWorld {
+    def main(args: Array[String]) {
+      println("Hello, world! " + args.toList)
+    }
+  }
+  HelloWorld.main(args)

tests/examplefiles/objc_example.m

+#import "Somefile.h"
+
+NSDictionary *dictionary = [NSDictionary dictionaryWithObjectsAndKeys:
+    @"quattuor", @"four", @"quinque", @"five", @"sex", @"six", nil];
+
+
+NSString *key;
+for (key in dictionary) {
+    NSLog(@"English: %@, Latin: %@", key, [dictionary valueForKey:key]);
+}
+

tests/examplefiles/objc_example2.m

+// MyClass.h
+@interface MyClass : NSObject
+{
+    NSString *value;
+    NSTextField *textField;
+@private
+    NSDate *lastModifiedDate;
+}
+@property(copy, readwrite) NSString *value;
+@property(retain) IBOutlet NSTextField *textField;
+@end
+
+// MyClass.m
+// Class extension to declare private property
+@interface MyClass ()
+@property(retain) NSDate *lastModifiedDate;
+@end
+
+@implementation MyClass
+@synthesize value;
+@synthesize textField;
+@synthesize lastModifiedDate;
+// implementation continues
+@end

tests/examplefiles/regex-delims.pl

+#! /usr/bin/env perl
+
+use strict;
+use warnings;
+
+# common delimiters
+print "a: ";
+my $a = "foo";
+print $a, " - ";
+$a =~ s/foo/bar/;
+print $a, "\n";
+
+print "b: ";
+my $b = "foo";
+print $b, " - ";
+$b =~ s!foo!bar!;
+print $b, "\n";
+
+print "c: ";
+my $c = "foo";
+print $c, " - ";
+$c =~ s@foo@bar@;
+print $c, "\n";
+
+print "d: ";
+my $d = "foo";
+print $d, " - ";
+$d =~ s\foo\bar\;
+print $d, "\n";
+
+print "\n";
+
+# balanced delimiters
+print "e: ";
+my $e = "foo";
+print $e, " - ";
+$e =~ s{foo}{bar};
+print $e, "\n";
+
+print "f: ";
+my $f = "foo";
+print $f, " - ";
+$f =~ s(foo)(bar);
+print $f, "\n";
+
+print "g: ";
+my $g = "foo";
+print $g, " - ";
+$g =~ s<foo><bar>;
+print $g, "\n";
+
+print "h: ";
+my $h = "foo";
+print $h, " - ";
+$h =~ s[foo][bar];
+print $h, "\n";
+
+print "\n";
+
+# balanced delimiters with whitespace
+print "i: ";
+my $i = "foo";
+print $i, " - ";
+$i =~ s{foo} {bar};
+print $i, "\n";
+
+print "j: ";
+my $j = "foo";
+print $j, " - ";
+$j =~ s<foo>		<bar>;
+print $j, "\n";
+
+print "k: ";
+my $k = "foo";
+print $k, " - ";
+$k =~
+	s(foo)
+
+	(bar);
+print $k, "\n";
+
+print "\n";
+
+# mixed delimiters
+print "l: ";
+my $l = "foo";
+print $l, " - ";
+$l =~ s{foo} <bar>;
+print $l, "\n";
+
+print "m: ";
+my $m = "foo";
+print $m, " - ";
+$m =~ s(foo) !bar!;
+print $m, "\n";
+
+print "n: ";
+my $n = "foo";
+print $n, " - ";
+$n =~ s[foo] $bar$;
+print $n, "\n";
+
+print "\n";
+
+# /x modifier
+print "o: ";
+my $o = "foo";
+print $o, " - ";
+$o =~ s{
+				foo
+			 } {bar}x;
+print $o, "\n";
+
+print "p: ";
+my $p = "foo";
+print $p, " - ";
+$p =~ s%
+  foo
+  %bar%x;
+print $p, "\n";

tests/examplefiles/sphere.pov

+#include "colors.inc"    
+
+background { color Cyan }
+
+camera {
+  location <0, 2, -3>
+  look_at <0, 1, 2>
+}
+
+sphere {
+  <0, 1, 2>, 2
+  texture {
+    pigment { color Yellow }
+  }
+}
+
+light_source { <2, 4, -3> color White}
+

tests/examplefiles/sqlite3.sqlite3-console

+SQLite version 3.4.2
+Enter ".help" for instructions
+sqlite> .schema
+CREATE TABLE paste (paste_id integer, code text, parsed_code text, pub_date
+varchar(24), language varchar(64), parent_id integer, url varchar(128));
+CREATE TABLE vars (key varchar(24), value varchar(128));
+sqlite> a '
+   ...> '
+   ...> ;
+SQL error: near "a": syntax error
+sqlite> %;
+SQL error: near "%": syntax error
+sqlite> select count(language), language from paste group by language order
+   ...> by count(language) desc;
+144|python
+76|text
+22|pycon
+9|ruby
+7|c
+7|js
+6|html+django
+4|html
+4|tex
+2|html+php
+1|cpp
+1|scheme
+sqlite> 

tests/examplefiles/while.pov

+#declare Index1 = 0;
+#while(Index1 <= 9)
+
+   #declare Index2 = 0;
+   #while(Index2 <= 19)
+
+      sphere { <Index1, Index2, 0>, .5 }
+
+      #declare Index2 = Index2 + 1;
+   #end
+
+   #declare Index1 = Index1 + 1;
+#end