Commits

Andriy Kornatskyy committed c2aef26

Fixed PEP8 warnings.
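
The hunks below change layout only, not behavior. Two patterns account for
nearly every line: continuation lines re-indented to align with the opening
bracket (pep8's continuation-line checks, E127/E128), and comparisons to
None rewritten from '== None' to 'is None' (E711). A minimal before/after
sketch, using a hypothetical helper that is not part of this codebase:

    # Before: continuation line under-indented (E128) and an equality
    # comparison to None (E711).
    def find_template(name, loader=None):
        if loader == None:
            raise ValueError('No loader configured for %s' %
                name)
        return loader(name)

    # After: continuation aligned under the opening parenthesis and the
    # identity test spelled 'is None'.
    def find_template(name, loader=None):
        if loader is None:
            raise ValueError('No loader configured for %s' %
                             name)
        return loader(name)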

Files changed (11)

 dist/
 env/
 src/*.egg-info/
+.cache
 .coverage
 *.pyc
 *.pyo

 clean:
 	find src/ -type d -name __pycache__ | xargs rm -rf
 	find src/ -name '*.py[co]' -delete
-	rm -rf dist/ build/ MANIFEST src/*.egg-info
+	rm -rf dist/ build/ MANIFEST src/*.egg-info .cache .coverage
 
 release:
 	$(PYTHON) setup.py -q bdist_egg

 upload-dir = doc/_build
 
 [pytest]
+pep8ignore =
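
The empty pep8ignore option added to the [pytest] section is read by the
pytest-pep8 plugin: with no value, no checks are exempted and the test run
reports every pep8 warning. Should a file ever need an exemption, the
option takes glob/code pairs, one per line, for example (hypothetical
values):

    [pytest]
    pep8ignore =
        doc/conf.py ALL
        src/wheezy/template/ext/*.py E501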

src/wheezy/template/builder.py

     def add(self, lineno, code):
         if lineno < self.lineno:
             raise ValueError('Inconsistency at %s : %s' %
-                    (self.lineno, lineno))
+                             (self.lineno, lineno))
         if lineno == self.lineno:
             line = self.buf[-1]
             if line:
                     break
         else:
             raise ValueError('No rule to build "%s" token at line %d.' %
-                    (token, lineno))
+                             (token, lineno))
 
     def to_string(self):
         return '\n'.join(self.buf)
     def build_render(self, nodes):
         builder = BlockBuilder(self.rules, lineno=self.lineno)
         builder.add(self.lineno + 1,
-                'def render(ctx, local_defs, super_defs):')
+                    'def render(ctx, local_defs, super_defs):')
         builder.start_block()
         builder.build_token(self.lineno + 2, 'render', nodes)
         return builder.to_string()
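
For reference, build_render wraps the generated statements in a
module-level render function. For a template like 'Welcome, @username!'
(with a @require(username) directive) the emitted source looks roughly
like the sketch below; the middle line is taken from the build_source
test further down, while the surrounding buffer boilerplate is an
assumption about the generator, not a quote of it:

    def render(ctx, local_defs, super_defs):
        username = ctx['username']  # emitted for @require(username)
        _b = []; w = _b.append
        w('Welcome, '); w(username); w('!')
        return ''.join(_b)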

src/wheezy/template/engine.py

 class Engine(object):
 
     def __init__(self, loader, extensions, template_class=None,
-            compiler_class=None):
+                 compiler_class=None):
         self.lock = allocate_lock()
         self.templates = {}
         self.renders = {}
         try:
             if name not in self.renders:
                 template_source = self.loader.load(name)
-                if template_source == None:
+                if template_source is None:
                     raise IOError('Template "%s" not found.' % name)
                 tokens = self.lexer.tokenize(template_source)
                 nodes = list(self.parser.parse(tokens))
                 #self.print_debug(name, tokens, nodes, source)
 
                 render_template = self.compiler.compile_source(
-                        source, name)['render']
+                    source, name)['render']
                 self.renders[name] = render_template
                 self.templates[name] = self.template_class(
-                        name, render_template)
+                    name, render_template)
         finally:
             self.lock.release()
 
         try:
             if name not in self.modules:
                 template_source = self.loader.load(name)
-                if template_source == None:
+                if template_source is None:
                     raise IOError('Import "%s" not found.' % name)
                 tokens = self.lexer.tokenize(template_source)
                 nodes = list(self.parser.parse(tokens))
                 #self.print_debug(name, tokens, nodes, source)
 
                 self.modules[name] = self.compiler.compile_module(
-                        source, name)
+                    source, name)
         finally:
             self.lock.release()
 

src/wheezy/template/ext/core.py

 all_tokens = end_tokens + compound_tokens + reserved_tokens
 out_tokens = ['markup', 'var', 'include']
 known_var_filters = {
-        's': PY3 and 'str' or 'unicode'
+    's': PY3 and 'str' or 'unicode'
 }
 
 # region: preprocessors
 def clean_source(source):
     """ Cleans leading whitespace before @. Ignores escaped (@@).
     """
-    return RE_CLEAN2.sub('\n@', RE_CLEAN1.sub('@',
-        source.replace('\r\n', '\n')))
+    return RE_CLEAN2.sub('\n@', RE_CLEAN1.sub(
+        '@', source.replace('\r\n', '\n')))
 
 
 # region: lexer extensions
             builder.build_token(lineno, token, value)
     lineno = builder.lineno
     builder.add(lineno + 1, 'return _r(' + extends +
-            ', ctx, local_defs, super_defs)')
+                ', ctx, local_defs, super_defs)')
     return True
 
 
     assert token == 'from '
     name, var, alias = value
     builder.add(lineno, alias + ' = ' + '_i(' + name
-            + ').local_defs[\'' + var + '\']')
+                + ').local_defs[\'' + var + '\']')
     return True
 
 
     for lineno, token, value in nodes:
         if token == 'include':
             builder.add(lineno, 'w(' + '_r(' + value +
-                ', ctx, local_defs, super_defs)' + ')')
+                        ', ctx, local_defs, super_defs)' + ')')
         elif token == 'var':
             var, var_filters = value
             if var_filters:
     """
 
     lexer_rules = {
-            100: (re.compile(r'@((%s).*?(?<!\\))(\n|$)'
-                    % '|'.join(all_tokens), re.S),
-                stmt_token),
-            200: (re.compile(r'@(\w+(\.\w+)*)'),
-                var_token),
-            999: (re.compile(r'.+?(?=(?<!@)@(?!@))|.+', re.S),
-                markup_token),
+        100: (re.compile(r'@((%s).*?(?<!\\))(\n|$)'
+                         % '|'.join(all_tokens), re.S),
+              stmt_token),
+        200: (re.compile(r'@(\w+(\.\w+)*)'),
+              var_token),
+        999: (re.compile(r'.+?(?=(?<!@)@(?!@))|.+', re.S),
+              markup_token),
     }
 
     preprocessors = [clean_source]
 
     parser_rules = {
-            'require': parse_require,
-            'extends': parse_extends,
-            'include': parse_include,
-            'import ': parse_import,
-            'from ': parse_from,
-            'var': parse_var,
-            'markup': parse_markup,
+        'require': parse_require,
+        'extends': parse_extends,
+        'include': parse_include,
+        'import ': parse_import,
+        'from ': parse_from,
+        'var': parse_var,
+        'markup': parse_markup,
     }
 
     parser_configs = [configure_parser]
 
     builder_rules = [
-            ('render', build_extends),
-            ('render', build_render),
-            ('module', build_module),
-            ('import ', build_import),
-            ('from ', build_from),
-            ('require', build_require),
-            ('out', build_out),
-            ('def ', build_def_empty),
-            ('def ', build_def),
-            ('if ', build_compound),
-            ('elif ', build_compound),
-            ('else:', build_compound),
-            ('for ', build_compound),
-            ('#', build_comment),
+        ('render', build_extends),
+        ('render', build_render),
+        ('module', build_module),
+        ('import ', build_import),
+        ('from ', build_from),
+        ('require', build_require),
+        ('out', build_out),
+        ('def ', build_def_empty),
+        ('def ', build_def),
+        ('if ', build_compound),
+        ('elif ', build_compound),
+        ('else:', build_compound),
+        ('for ', build_compound),
+        ('#', build_comment),
     ]

src/wheezy/template/ext/tests/test_core.py

         from wheezy.template.ext.core import CoreExtension
         from wheezy.template.loader import DictLoader
         self.engine = Engine(
-                loader=DictLoader({}),
-                extensions=[CoreExtension()])
+            loader=DictLoader({}),
+            extensions=[CoreExtension()])
 
     def tokenize(self, source):
         return self.engine.lexer.tokenize(source)
         from wheezy.template.ext.core import CoreExtension
         from wheezy.template.loader import DictLoader
         self.engine = Engine(
-                loader=DictLoader({}),
-                extensions=[CoreExtension()])
+            loader=DictLoader({}),
+            extensions=[CoreExtension()])
 
     def parse(self, source):
         return list(self.engine.parser.parse(
         """
         nodes = self.parse('@include("shared/scripts.html")\n')
         assert [(1, 'out', [
-                    (1, 'include', '"shared/scripts.html"')
-                ])] == nodes
+            (1, 'include', '"shared/scripts.html"')
+        ])] == nodes
 
     def test_markup(self):
         """ Test parse_markup.
  Welcome, @name!
 """)
         assert [(1, 'out', [
-                    (1, 'markup', "'\\n Welcome, '"),
-                    (2, 'var', ('name', None)),
-                    (2, 'markup', "'!\\n'")
-                ])] == nodes
+            (1, 'markup', "'\\n Welcome, '"),
+            (2, 'var', ('name', None)),
+            (2, 'markup', "'!\\n'")
+        ])] == nodes
 
     def test_var(self):
         """ Test parse_markup.
         """
         nodes = self.parse("""@name!h!""")
         assert [(1, 'out', [
-                    (1, 'var', ('name', ['h'])),
-                    (1, 'markup', "'!'")
-                ])] == nodes
+            (1, 'var', ('name', ['h'])),
+            (1, 'markup', "'!'")
+        ])] == nodes
         nodes = self.parse("""@name!s!h!""")
         assert [(1, 'out', [
-                    (1, 'var', ('name', ['s', 'h'])),
-                    (1, 'markup', "'!'")
-                ])] == nodes
+            (1, 'var', ('name', ['s', 'h'])),
+            (1, 'markup', "'!'")
+        ])] == nodes
 
 
 class BuilderTestCase(unittest.TestCase):
         from wheezy.template.ext.core import CoreExtension
         from wheezy.template.loader import DictLoader
         self.engine = Engine(
-                loader=DictLoader({}),
-                extensions=[CoreExtension()])
+            loader=DictLoader({}),
+            extensions=[CoreExtension()])
 
     def build_source(self, source):
         nodes = list(self.engine.parser.parse(
-                    self.engine.lexer.tokenize(source)))
+            self.engine.lexer.tokenize(source)))
         return self.engine.builder.build_source(nodes)
 
     def build_render(self, source):
         nodes = list(self.engine.parser.parse(
-                    self.engine.lexer.tokenize(source)))
+            self.engine.lexer.tokenize(source)))
         return self.engine.builder.build_render(nodes)
 
     def build_extends(self, name, source):
         nodes = list(self.engine.parser.parse(
-                    self.engine.lexer.tokenize(source)))
+            self.engine.lexer.tokenize(source)))
         return self.engine.builder.build_extends(name, nodes)
 
     def test_markup(self):
         """ Test build_out.
         """
         assert "w('Welcome, '); w(username); w('!')" == self.build_source(
-                'Welcome, @username!')
+            'Welcome, @username!')
         assert """\
 w('\\n<i>\\n')
 
         from wheezy.template.loader import DictLoader
         self.templates = {}
         self.engine = Engine(
-                loader=DictLoader(templates=self.templates),
-                extensions=[CoreExtension()])
+            loader=DictLoader(templates=self.templates),
+            extensions=[CoreExtension()])
 
     def render(self, ctx, source):
         self.templates['test.html'] = source
 
     def test_var(self):
         ctx = {
-                'username': 'John'
+            'username': 'John'
         }
         assert 'Welcome, John!' == self.render(ctx, """\
 @require(username)
 
     def test_for(self):
         ctx = {
-                'colors': ['red', 'yellow']
+            'colors': ['red', 'yellow']
         }
         assert 'red\nyellow\n' == self.render(ctx, """\
 @require(colors)
         from wheezy.template.loader import DictLoader
         self.templates = {}
         self.engine = Engine(
-                loader=DictLoader(templates=self.templates),
-                extensions=[CoreExtension()])
+            loader=DictLoader(templates=self.templates),
+            extensions=[CoreExtension()])
 
     def render(self, name, ctx):
         template = self.engine.get_template(name)
 
     def test_extends(self):
         self.templates.update({
-                'master.html': """\
+            'master.html': """\
 @def say_hi(name):
     Hello, @name!
 @end
 @say_hi('John')""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @extends('master.html')
 @def say_hi(name):
     Hi, @name!
 
     def test_super(self):
         self.templates.update({
-                'master.html': """\
+            'master.html': """\
 @def say_hi(name):
     Hello, @name!\
 @end
 @say_hi('John')""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @extends('master.html')
 @def say_hi(name):
     @super_defs['say_hi'](name)!!\
 
     def test_include(self):
         self.templates.update({
-                'footer.html': """\
+            'footer.html': """\
 @require(name)
 Thanks, @name""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 Welcome to my site.
 @include('footer.html')
 """
 
     def test_import(self):
         self.templates.update({
-                'helpers.html': """\
+            'helpers.html': """\
 @def say_hi(name):
 Hi, @name\
 @end""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @import 'helpers.html' as helpers
 @helpers.say_hi('John')"""
         })
 
     def test_import_dynamic(self):
         self.templates.update({
-                'helpers.html': """\
+            'helpers.html': """\
 @def say_hi(name):
 Hi, @name\
 @end""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @require(helpers_impl)
 @import helpers_impl as helpers
 @helpers.say_hi('John')"""
 
     def test_from_import(self):
         self.templates.update({
-                'helpers.html': """\
+            'helpers.html': """\
 @def say_hi(name):
 Hi, @name\
 @end""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @from 'helpers.html' import say_hi
 @say_hi('John')"""
         })
 
     def test_from_import_dynamic(self):
         self.templates.update({
-                'helpers.html': """\
+            'helpers.html': """\
 @def say_hi(name):
 Hi, @name\
 @end""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @require(helpers_impl)
 @from helpers_impl import say_hi
 @say_hi('John')"""
 
     def test_from_import_as(self):
         self.templates.update({
-                'share/helpers.html': """\
+            'share/helpers.html': """\
 @def say_hi(name):
 Hi, @name\
 @end""",
-                'tmpl.html': """\
+
+            'tmpl.html': """\
 @from 'share/helpers.html' import say_hi as hi
 @hi('John')"""
         })

src/wheezy/template/parser.py

                     operands = []
                 if token in self.compound_tokens:
                     yield lineno, token, (
-                            value, list(self.parse_iter(tokens)))
+                        value, list(self.parse_iter(tokens)))
                 else:
                     if token in self.end_tokens:
                         break

src/wheezy/template/tests/test_lexer.py

         ])
         assert [(1, 'w', 'hello'),
                 (1, 'b', '\n '),
-                (2, 'w', 'world')
-            ] == lexer.tokenize('hello\n world')
+                (2, 'w', 'world')] == lexer.tokenize('hello\n world')
 
     def test_trivial(self):
         """ Empty rules and source

src/wheezy/template/tests/test_loader.py

     def test_list_names(self):
         """ Tests list_names.
         """
-        assert ['tmpl1.html',
-                'shared/master.html',
-                'shared/snippet/script.html'
+        assert [
+            'tmpl1.html',
+            'shared/master.html',
+            'shared/snippet/script.html'
         ] == self.loader.list_names()
 
     def test_load_existing(self):
     def setUp(self):
         from wheezy.template.loader import DictLoader
         self.loader = DictLoader(templates={
-                'tmpl1.html': 'x',
-                'shared/master.html': 'x'
+            'tmpl1.html': 'x',
+            'shared/master.html': 'x'
         })
 
     def test_list_names(self):
         """ Tests list_names.
         """
-        assert ['tmpl1.html',
-                'shared/master.html'
+        assert [
+            'tmpl1.html',
+            'shared/master.html'
         ] == self.loader.list_names()
 
     def test_load_existing(self):

src/wheezy/template/tests/test_parser.py

         from wheezy.template.parser import Parser
         self.parser = Parser({})
         self.tokens = [
-                (1, 'a', 11),
-                (2, 'b', 12),
-                (3, 'c', 13),
-                (4, 'b', 14),
-                (5, 'c', 15),
+            (1, 'a', 11),
+            (2, 'b', 12),
+            (3, 'c', 13),
+            (4, 'b', 14),
+            (5, 'c', 15),
         ]
 
     def test_end_continue(self):