Commits

Anonymous committed 43b8efd Merge

Merge main development.

Files changed (7)

pygments/lexer.py

                 try:
                     if text[pos] == '\n':
                         # at EOL, reset state to "root"
-                        pos += 1
                         statestack = ['root']
                         statetokens = tokendefs['root']
                         yield pos, Text, u'\n'
+                        pos += 1
                         continue
                     yield pos, Error, text[pos]
                     pos += 1
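The change above moves the yield ahead of the position increment, so the synthesized newline token in the error-recovery path is reported at the offset of the '\n' itself rather than one character past it. A minimal sketch of the visible difference (the OneCharLexer helper below is hypothetical, but mirrors the TestLexer used by the new test at the bottom of this commit):

    # Minimal sketch, not part of the commit: a throwaway lexer whose 'root'
    # state cannot match '\n', which forces the error-recovery branch patched
    # above to run.
    from pygments.lexer import RegexLexer
    from pygments.token import Text

    class OneCharLexer(RegexLexer):          # hypothetical helper
        tokens = {'root': [(r'[ae]', Text.Root)]}

    toks = list(OneCharLexer().get_tokens_unprocessed('a\ne'))
    # with the fix:   [(0, Text.Root, 'a'), (1, Text, '\n'), (2, Text.Root, 'e')]
    # before the fix: the '\n' token was yielded at offset 2 instead of 1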

pygments/lexers/functional.py

     filenames = ['*.ex', '*.exs']
     mimetypes = ['text/x-elixir']
 
+    def gen_elixir_sigil_rules():
+        states = {}
+
+        states['strings'] = [
+            (r'(%[A-Ba-z])?"""(?:.|\n)*?"""', String.Doc),
+            (r"'''(?:.|\n)*?'''", String.Doc),
+            (r'"', String.Double, 'dqs'),
+            (r"'.*'", String.Single),
+            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
+             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
+        ]
+
+        for lbrace, rbrace, name in ('\\{', '\\}', 'cb'), \
+                                     ('\\[', '\\]', 'sb'), \
+                                     ('\\(', '\\)', 'pa'), \
+                                     ('\\<', '\\>', 'lt'):
+
+            states['strings'] += [
+                (r'%[a-z]' + lbrace, String.Double, name + 'intp'),
+                (r'%[A-Z]' + lbrace, String.Double, name + 'no-intp')
+            ]
+
+            states[name + 'intp'] = [
+                (r'' + rbrace + '[a-z]*', String.Double, "#pop"),
+                include('enddoublestr')
+            ]
+
+            states[name + 'no-intp'] = [
+                (r'.*' + rbrace + '[a-z]*', String.Double, "#pop")
+            ]
+
+        return states
+
     tokens = {
         'root': [
             (r'\s+', Text),
             (r'#.*$', Comment.Single),
-            (r'\b(case|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
-             r'defp|def|defprotocol|defimpl|defrecord|defmacro|defdelegate|'
-             r'defexception|exit|raise|throw)\b(?![?!])|'
+            (r'\b(case|cond|end|bc|lc|if|unless|try|loop|receive|fn|defmodule|'
+             r'defp?|defprotocol|defimpl|defrecord|defmacrop?|defdelegate|'
+             r'defexception|exit|raise|throw|after|rescue|catch|else)\b(?![?!])|'
              r'(?<!\.)\b(do|\-\>)\b\s*', Keyword),
             (r'\b(import|require|use|recur|quote|unquote|super|refer)\b(?![?!])',
                 Keyword.Namespace),
              r'\*\*?|=?~|<\-)|([a-zA-Z_]\w*([?!])?)(:)(?!:)', String.Symbol),
             (r':"', String.Symbol, 'interpoling_symbol'),
             (r'\b(nil|true|false)\b(?![?!])|\b[A-Z]\w*\b', Name.Constant),
-            (r'\b(__(FILE|LINE|MODULE|LOCAL|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
+            (r'\b(__(FILE|LINE|MODULE|MAIN|FUNCTION)__)\b(?![?!])', Name.Builtin.Pseudo),
             (r'[a-zA-Z_!][\w_]*[!\?]?', Name),
             (r'[(){};,/\|:\\\[\]]', Punctuation),
             (r'@[a-zA-Z_]\w*|&\d', Name.Variable),
             (r'\b(0[xX][0-9A-Fa-f]+|\d(_?\d)*(\.(?![^\d\s])'
              r'(_?\d)*)?([eE][-+]?\d(_?\d)*)?|0[bB][01]+)\b', Number),
+            (r'%r\/.*\/', String.Regex),
             include('strings'),
         ],
-        'strings': [
-            (r'"""(?:.|\n)*?"""', String.Doc),
-            (r"'''(?:.|\n)*?'''", String.Doc),
-            (r'"', String.Double, 'dqs'),
-            (r"'.*'", String.Single),
-            (r'(?<!\w)\?(\\(x\d{1,2}|\h{1,2}(?!\h)\b|0[0-7]{0,2}(?![0-7])\b|'
-             r'[^x0MC])|(\\[MC]-)+\w|[^\s\\])', String.Other)
-        ],
         'dqs': [
             (r'"', String.Double, "#pop"),
-            include('interpoling'),
-            (r'[^#"]+', String.Double),
+            include('enddoublestr')
         ],
         'interpoling': [
             (r'#{', String.Interpol, 'interpoling_string'),
         ],
         'interpoling_symbol': [
             (r'"', String.Symbol, "#pop"),
             include('interpoling'),
             (r'[^#"]+', String.Symbol),
         ],
+        'enddoublestr': [
+            include('interpoling'),
+            (r'[^#"]+', String.Double),
+        ]
     }
+    tokens.update(gen_elixir_sigil_rules())
 
 
 class ElixirConsoleLexer(Lexer):
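gen_elixir_sigil_rules() builds one interpolating and one non-interpolating state per delimiter pair ({}, [], (), <>), so lowercase sigils such as %b{...} enter a string state that honors #{...}, while uppercase sigils are consumed verbatim. A rough usage sketch, assuming a pygments checkout that contains this revision of the Elixir lexer:

    # Rough sketch, not part of the commit: lexes an Elixir sigil so the
    # generated curly-brace interpolating state ('cbintp') is exercised.
    from pygments.lexers.functional import ElixirLexer

    code = 'x = %b{hello #{name}}\n'
    for index, token, value in ElixirLexer().get_tokens_unprocessed(code):
        print(index, token, value)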

pygments/lexers/math.py

             # keywords
             (r'(begin|while|for|in|return|break|continue|'
              r'macro|quote|let|if|elseif|else|try|catch|end|'
-             r'bitstype|ccall)\b', Keyword),
+             r'bitstype|ccall|do)\b', Keyword),
             (r'(local|global|const)\b', Keyword.Declaration),
             (r'(module|import|export)\b', Keyword.Reserved),
             (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
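Julia's block-argument form (for example, open("file") do io ... end) uses do as a keyword, which the previous list missed. A quick sketch to check the addition, assuming this revision's module layout where JuliaLexer lives in pygments/lexers/math.py:

    # Quick sketch, not part of the commit: checks that 'do' in Julia's
    # block-argument form now comes out as a Keyword token.
    from pygments.lexers.math import JuliaLexer
    from pygments.token import Keyword

    julia_code = 'open("data.txt") do io\n    readline(io)\nend\n'
    tokens = list(JuliaLexer().get_tokens(julia_code))
    assert (Keyword, 'do') in tokens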

pygments/lexers/templates.py

         ],
     }
 
+
 class ColdfusionMarkupLexer(RegexLexer):
     """
     Coldfusion markup only

pygments/lexers/web.py

     filenames = ['*.json']
     mimetypes = [ 'application/json', ]
 
+    # integer part of a number
+    int_part = r'-?(0|[1-9]\d*)'
+
+    # fractional part of a number
+    frac_part = r'\.\d+'
+
+    # exponential part of a number
+    exp_part = r'[eE](\+|-)?\d+'
+
     flags = re.DOTALL
     tokens = {
         'whitespace': [
         ],
 
         # represents a simple terminal value
-        'simplevalue':[
+        'simplevalue': [
             (r'(true|false|null)\b', Keyword.Constant),
-            (r'-?[0-9]+', Number.Integer),
+            (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+              '%(exp_part)s|%(frac_part)s)') % vars(),
+             Number.Float),
+            (int_part, Number.Integer),
             (r'"(\\\\|\\"|[^"])*"', String.Double),
         ],
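With the integer, fractional and exponent parts factored into named pieces, values such as 1.5e3, 2e10 or 0.25 are now reported as Number.Float, while plain integers keep matching the stricter int_part (which also rejects leading zeros). A small sketch of the intended behaviour, assuming this revision's layout where JsonLexer lives in pygments/lexers/web.py:

    # Small sketch, not part of the commit: shows how the reworked number
    # rules split integers and floats in JSON value positions.
    from pygments.lexers.web import JsonLexer
    from pygments.token import Number

    snippet = '{"a": -42, "b": 1.5e3, "c": 0.25}'
    numbers = [(tok, val) for tok, val in JsonLexer().get_tokens(snippet)
               if tok in Number]
    # expected: [(Number.Integer, '-42'), (Number.Float, '1.5e3'),
    #            (Number.Float, '0.25')]
    print(numbers)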
 

pygments/style.py

                 if len(col) == 6:
                     return col
                 elif len(col) == 3:
-                    return col[0]+'0'+col[1]+'0'+col[2]+'0'
+                    return col[0]*2 + col[1]*2 + col[2]*2
             elif text == '':
                 return ''
             assert False, "wrong color format %r" % text

tests/test_regexlexer.py

 
 from pygments.token import Text
 from pygments.lexer import RegexLexer
+from pygments.lexer import bygroups
 
 
 class TestLexer(RegexLexer):
         self.assertEqual(toks,
            [(0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
             (3, Text.Beer, 'd'), (4, Text.Root, 'e')])
+
+    def test_multiline(self):
+        lx = TestLexer()
+        toks = list(lx.get_tokens_unprocessed('a\ne'))
+        self.assertEqual(toks,
+           [(0, Text.Root, 'a'), (1, Text, u'\n'),
+            (2, Text.Root, 'e')])