Andriy Kornatskyy committed ded2d0a

Improved test cases to cover preprocessors, postprocessors and extension lexer scan.

  • Parent commits 7db2ef1

Files changed (1)

File src/wheezy/template/tests/test_lexer.py

         """
         import re
         from wheezy.template.lexer import Lexer
+        from wheezy.template.lexer import lexer_scan
 
         def word_token(m):
             return m.end(), 'w', m.group()
         def blank_token(m):
             return m.end(), 'b', m.group()
 
-        lexer = Lexer([
-            (re.compile(r'\w+'), word_token),
-            (re.compile(r'\s+'), blank_token),
-        ])
-        assert [(1, 'w', 'hello'),
-                (1, 'b', '\n '),
-                (2, 'w', 'world')] == lexer.tokenize('hello\n world')
+        def to_upper(s):
+            return s.upper()
+
+        def cleanup(tokens):
+            # Normalize every blank token to a single space, in place.
+            for i, t in enumerate(tokens):
+                if t[1] == 'b':  # token tuple is (lineno, name, value)
+                    tokens[i] = (t[0], 'b', ' ')
+
+        class Extension(object):
+            lexer_rules = {
+                100: (re.compile(r'\w+'), word_token),
+                200: (re.compile(r'\s+'), blank_token)
+            }
+            preprocessors = [to_upper]
+            postprocessors = [cleanup]
+
+        lexer = Lexer(**lexer_scan([Extension]))
+        assert [(1, 'w', 'HELLO'),
+                (1, 'b', ' '),
+                (2, 'w', 'WORLD')] == lexer.tokenize('hello\n world')
 
     def test_trivial(self):
         """ Empty rules and source