Commits

Anonymous committed 356fe02

Leysin sprint work (mwh around):

refactoring and fixes to make it translatable (not yet there though)


Files changed (3)

pypy/interpreter/pycompiler.py

         from pypy.interpreter.astcompiler.ast import Node
         from pyparser.astbuilder import AstBuilder
         from pypy.interpreter.pycode import PyCode
+        from pypy.interpreter.function import Function
 
         flags |= stdlib___future__.generators.compiler_flag   # always on (2.2 compat)
         space = self.space
         try:
             builder = AstBuilder(self.parser, space=space)
             for rulename, buildfunc in self.additional_rules.iteritems():
-                builder.build_rules[rulename] = buildfunc
+                assert isinstance(buildfunc, Function)
+                builder.user_build_rules[rulename] = buildfunc
             self.parser.parse_source(source, mode, builder, flags)
             ast_tree = builder.rule_stack[-1]
             encoding = builder.source_encoding
 def install_compiler_hook(space, w_callable):
 #       if not space.get( w_callable ):
 #           raise OperationError( space.w_TypeError( space.wrap( "must have a callable" ) )
-        space.default_compiler.w_compile_hook = w_callable
+    space.default_compiler.w_compile_hook = w_callable
 
 def insert_grammar_rule(space, w_rule, w_buildfuncs):
     """inserts new grammar rules to the default compiler"""
+    from pypy.interpreter import function
     rule = space.str_w(w_rule)
-    buildfuncs_w = w_buildfuncs.content
+    #buildfuncs_w = w_buildfuncs.content
     buildfuncs = {}
-    for w_name, w_func in buildfuncs_w.iteritems():
-        buildfuncs[space.str_w(w_name)] = space.unwrap(w_func)
+    #for w_name, w_func in buildfuncs_w.iteritems():
+    #    buildfuncs[space.str_w(w_name)] = space.unwrap(w_func)
+    w_iter = space.iter(w_buildfuncs)
+    while 1:
+        try:
+            w_key = space.next(w_iter)
+            w_func = space.getitem(w_buildfuncs, w_key)
+            buildfuncs[space.str_w(w_key)] = space.interp_w(function.Function, w_func)
+        except OperationError, e:
+            if not e.match(space, space.w_StopIteration):
+                raise
+            break
     space.default_compiler.additional_rules = buildfuncs
     space.default_compiler.parser.insert_rule(rule)
-
+    
+# XXX cyclic import
+#from pypy.interpreter.baseobjspace import ObjSpace
+#insert_grammar_rule.unwrap_spec = [ObjSpace, str, dict]
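
The new loop above walks the wrapped dict purely through object-space calls, presumably because reaching into w_buildfuncs.content touches the std objspace dict implementation directly, which the translator cannot type. A minimal standalone sketch of the same pattern (collect_build_functions is an illustrative name, not part of the commit):

    # Sketch of the space-API iteration used in insert_grammar_rule():
    # only object-space calls, so the result keeps interp-level types.
    from pypy.interpreter.error import OperationError
    from pypy.interpreter.function import Function

    def collect_build_functions(space, w_dict):
        result = {}                        # interp-level dict: rule name -> Function
        w_iter = space.iter(w_dict)
        while 1:
            try:
                w_key = space.next(w_iter)
            except OperationError, e:
                if not e.match(space, space.w_StopIteration):
                    raise
                break
            w_value = space.getitem(w_dict, w_key)
            # interp_w() unwraps and type-checks in one step
            result[space.str_w(w_key)] = space.interp_w(Function, w_value)
        return result
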

pypy/interpreter/pyparser/astbuilder.py

 #import pypy.interpreter.pyparser.pytoken as tok
 from pypy.interpreter.pyparser.error import SyntaxError
 from pypy.interpreter.pyparser.parsestring import parsestr
-
+from pypy.interpreter.gateway import interp2app
 from asthelper import *
 
 ## building functions helpers
     if l == 1 or l==2:
         atom0 = atoms[0]
         if isinstance(atom0, TokenObject) and atom0.name == builder.parser.tokens['NEWLINE']:
-            atom0 = ast.Pass(atom0.lineno)
+            #atom0 = ast.Pass(atom0.lineno) # break test_astcompiler
+            atom0 = ast.Stmt([], atom0.lineno) # break test_astbuilder
         elif not isinstance(atom0, ast.Stmt):
             atom0 = ast.Stmt([atom0], atom0.lineno)
-        builder.push(ast.Module(builder.wrap_none(), atom0, atom0.lineno))
+        doc = get_docstring(builder, atom0) # XXX consider docstring here ?
+        builder.push(ast.Module(doc, atom0, atom0.lineno))
     else:
         assert False, "Forbidden path"
 
     'eval_input' : build_eval_input,
     'with_stmt' : build_with_stmt,
     }
-
+    
 # Build two almost identical ASTRULES dictionaries
 #ASTRULES      = dict([(sym[key], value) for (key, value) in
 #                      ASTRULES_Template.iteritems()])
         #self.rule_stack = list(rule_stack)
         self.d = len(rule_stack)
 
-from pypy.interpreter.baseobjspace import Wrappable
-class AstBuilder(Wrappable, BaseGrammarBuilder):
+class AstBuilder(BaseGrammarBuilder):
     """A builder that directly produce the AST"""
 
     def __init__(self, parser, debug=0, space=None):
         self.space = space
         self.source_encoding = None
         self.with_enabled = False
-        self.build_rules = dict(ASTRULES_Template)
-        
+        self.build_rules = ASTRULES_Template
+        self.user_build_rules = {}
 
     def enable_with(self):
         if self.with_enabled:
         if rule.is_root():
             rulename = self.parser.sym_name[rule.codename]
             # builder_func = ASTRULES.get(rule.codename, None)
-            builder_func = self.build_rules.get(rulename, None)
+            w_func = self.user_build_rules.get(rulename, None)
             # user defined (applevel) function
-            if isinstance(builder_func, Function):
+            if w_func:
                 w_items = self.space.newlist( [self.space.wrap( it ) for it in get_atoms(self, 1)] )
-                w_astnode = self.space.call_function(builder_func, w_items)
+                w_astnode = self.space.call_function(w_func, w_items)
                 astnode = self.space.interp_w(ast.Node, w_astnode, can_be_None=False)
                 self.push(astnode)
-            elif builder_func:
-                builder_func(self, 1)
             else:
-                self.push_rule(rule.codename, 1, source)
+                builder_func = self.build_rules.get(rulename, None)
+                if builder_func:
+                    builder_func(self, 1)
+                else:
+                    self.push_rule(rule.codename, 1, source)
         else:
             self.push_rule(rule.codename, 1, source)
         return True
         if rule.is_root():
             rulename = self.parser.sym_name[rule.codename]
             # builder_func = ASTRULES.get(rule.codename, None)
-            builder_func = self.build_rules.get(rulename, None)
-            if isinstance(builder_func, Function):
+            w_func = self.user_build_rules.get(rulename, None)
+            # user defined (applevel) function
+            if w_func:
                 w_items = self.space.newlist( [self.space.wrap( it ) for it in get_atoms(self, elts_number)] )
-                w_astnode = self.space.call_function(builder_func, w_items)
+                w_astnode = self.space.call_function(w_func, w_items)
                 astnode = self.space.interp_w(ast.Node, w_astnode, can_be_None=False)
                 self.push(astnode)
-            elif builder_func:
-                builder_func(self, elts_number)
             else:
-                self.push_rule(rule.codename, elts_number, source)
+                builder_func = self.build_rules.get(rulename, None)
+                if builder_func:
+                    builder_func(self, elts_number)
+                else:
+                    self.push_rule(rule.codename, elts_number, source)
         else:
             self.push_rule(rule.codename, elts_number, source)
         return True
     def is_string_const(self, expr):
         if not isinstance(expr,ast.Const):
             return False
+        print 'IS STRING CONST', repr(expr.value)
         space = self.space
         return space.is_true(space.isinstance(expr.value,space.w_str))
 

pypy/interpreter/pyparser/pythonutil.py

         _ver = version
     else:
         raise ValueError('no such grammar version: %s' % version)
-    return os.path.join( os.path.dirname(__file__), "data", "Grammar" + _ver ), _ver
+    # two osp.join to avoid TyperError: can only iterate over tuples of length 1 for now
+    # generated by call to osp.join(a, *args)
+    return os.path.join( os.path.dirname(__file__),
+                         os.path.join("data", "Grammar" + _ver) ), _ver
 
 
 def build_parser(gramfile, parser=None):
         parser = Parser()
     setup_tokens(parser)
     # XXX: clean up object dependencies
-    grammardef = file(gramfile).read()
+    from pypy.rlib.streamio import open_file_as_stream
+    stream = open_file_as_stream(gramfile)
+    grammardef = stream.readall()
+    stream.close()
     assert isinstance(grammardef, str)
     source = GrammarSource(GRAMMAR_GRAMMAR, grammardef)
     builder = EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=parser)
     parser = make_pyparser(version)
     builder = AstBuilder(parser, space=space)
     parser.parse_source(source, mode, builder)
-    return builder
+    return builder.rule_stack[-1]
     
 
 ## convenience functions around CPython's parser functions
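
The switch to pypy.rlib.streamio in build_parser() replaces the builtin file() call, which is not usable in translated (RPython) code. The read-whole-file pattern in isolation, using only what the diff itself relies on (open_file_as_stream with the default mode, readall(), close(); read_whole_file is an illustrative name):

    # Standalone version of the RPython-friendly file read used in build_parser().
    from pypy.rlib.streamio import open_file_as_stream

    def read_whole_file(path):
        stream = open_file_as_stream(path)   # default (read) mode, as in the diff
        try:
            return stream.readall()          # whole file contents as a str
        finally:
            stream.close()
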