Anonymous committed 80f6ba3

[svn] Add tests for filters.

Files changed (4)

pygments/filters/__init__.py

         )
 
     This would highlight the names "foo", "bar" and "baz"
-    as functions. `Name.Function` is the token default.
+    as functions. `Name.Function` is the default token type.
     """
 
     def __init__(self, **options):
         Filter.__init__(self, **options)
         self.names = set(get_list_opt(options, 'names', []))
         tokentype = options.get('tokentype')
         if tokentype:
             self.tokentype = string_to_tokentype(tokentype)
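
For readers unfamiliar with this filter, a minimal usage sketch follows. The class name `NameHighlightFilter` is assumed here, since this hunk only shows the docstring and constructor; `Name.Function` is the default token type mentioned above.

    from pygments.lexers import PythonLexer
    from pygments.filters import NameHighlightFilter  # assumed class name, not shown in this hunk

    lexer = PythonLexer()
    # highlight the names "foo", "bar" and "baz" as functions;
    # passing tokentype= would override the Name.Function default
    lexer.add_filter(NameHighlightFilter(names=['foo', 'bar', 'baz']))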

pygments/lexer.py

         Add a new stream filter to this lexer.
         """
         if not isinstance(filter_, Filter):
-            filter = get_filter_by_name(filter_, **options)
+            filter_ = get_filter_by_name(filter_, **options)
         self.filters.append(filter_)
 
     def analyse_text(text):
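
With the fix above, `add_filter` accepts either a `Filter` instance or a registered filter name, which `get_filter_by_name` resolves and instantiates with the given options. A minimal sketch, reusing the 'highlight' filter name exercised by the command-line test in this commit:

    from pygments.lexers import PythonLexer

    lexer = PythonLexer()
    # a string is looked up by registered name and the keyword options are
    # passed to the resolved filter's constructor; a Filter instance is appended as-is
    lexer.add_filter('highlight', names=['testfile', 'testdir'])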

tests/test_basic_api.py

     :license: BSD, see LICENSE for more details.
 """
 
+import os
 import unittest
 import StringIO
 import random
 
-from pygments import lexers, formatters, format
+from pygments import lexers, formatters, filters, format
 from pygments.token import _TokenType, Text
 from pygments.lexer import RegexLexer
 
             a(isinstance(x, lexers.PythonLexer))
             ae(x.options["opt"], "val")
 
+    def test_filters(self):
+        for x in filters.FILTERS.keys():
+            lx = lexers.PythonLexer()
+            lx.add_filter(x)
+            text = file(os.path.join(testdir, testfile)).read().decode('utf-8')
+            tokens = list(lx.get_tokens(text))
+            roundtext = ''.join([t[1] for t in tokens])
+            self.assertEquals(roundtext, text,
+                              "lexer roundtrip with %s filter failed" % x) 
+
 
 class FormattersTest(unittest.TestCase):
 

tests/test_cmdline.py

 
     def test_L_opt(self):
         c, o, e = run_cmdline("-L")
-        self.assert_(c == 0)
-        self.assert_(o.find("Lexers") and o.find("Formatters"))
+        self.assertEquals(c, 0)
+        self.assert_("Lexers" in o and "Formatters" in o and
+                     "Filters" in o and "Styles" in o) 
+        c, o, e = run_cmdline("-L", "lexer")
+        self.assertEquals(c, 0)
+        self.assert_("Lexers" in o and not "Formatters" in o)
+        c, o, e = run_cmdline("-L", "lexers")
+        self.assertEquals(c, 0)
 
     def test_O_opt(self):
         filename = os.path.join(testdir, testfile)
         self.assert_("<html" in o)
         self.assert_('class="linenos"' in o)
 
+    def test_F_opt(self):
+        filename = os.path.join(testdir, testfile)
+        c, o, e = run_cmdline("-Fhighlight:tokentype=Name.Blubb,names=testfile testdir",
+                              "-fhtml", filename)
+        self.assertEquals(c, 0)
+        self.assert_('<span class="n-Blubb' in o)
+
     def test_invalid_opts(self):
         for opts in [("-L", "-lpy"), ("-L", "-fhtml"), ("-L", "-Ox"),
                      ("-a",), ("-Sst", "-lpy")]: