Commits

Alex Gaynor committed 691d6ce Merge

merged default in


Files changed (33)

lib_pypy/cffi/__init__.py

 from .api import FFI, CDefError, FFIError
 from .ffiplatform import VerificationError, VerificationMissing
 
-__version__ = "0.6"
-__version_info__ = (0, 6)
+__version__ = "0.7"
+__version_info__ = (0, 7)

lib_pypy/cffi/api.py

             if name.startswith('RTLD_'):
                 setattr(self, name, getattr(backend, name))
         #
-        BVoidP = self._get_cached_btype(model.voidp_type)
+        self.BVoidP = self._get_cached_btype(model.voidp_type)
         if isinstance(backend, types.ModuleType):
             # _cffi_backend: attach these constants to the class
             if not hasattr(FFI, 'NULL'):
-                FFI.NULL = self.cast(BVoidP, 0)
+                FFI.NULL = self.cast(self.BVoidP, 0)
                 FFI.CData, FFI.CType = backend._get_types()
         else:
             # ctypes backend: attach these constants to the instance
-            self.NULL = self.cast(BVoidP, 0)
+            self.NULL = self.cast(self.BVoidP, 0)
             self.CData, self.CType = backend._get_types()
 
     def cdef(self, csource, override=False):
         self._cdefsources.extend(ffi_to_include._cdefsources)
         self._cdefsources.append(']')
 
+    def new_handle(self, x):
+        return self._backend.newp_handle(self.BVoidP, x)
+
+    def from_handle(self, x):
+        return self._backend.from_handle(x)
+
 
 def _make_ffi_library(ffi, libname, flags):
     import os
             BType = ffi._get_cached_btype(tp)
             try:
                 value = backendlib.load_function(BType, name)
-            except KeyError:
-                raise AttributeError(name)
+            except KeyError as e:
+                raise AttributeError('%s: %s' % (name, e))
             library.__dict__[name] = value
             return
         #
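
A minimal usage sketch of the new handle API added above (assuming a cffi 0.7
installation whose backend provides newp_handle/from_handle, as in this
commit); it mirrors the new test in test_ffi_backend.py further down:

    import cffi

    ffi = cffi.FFI()
    obj = {"answer": 42}

    p = ffi.new_handle(obj)            # opaque 'void *' cdata referring to obj
    assert ffi.typeof(p) == ffi.typeof("void *")
    assert ffi.from_handle(p) is obj   # recover the original object
    # the handle is only valid while the cdata 'p' itself is kept alive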

lib_pypy/cffi/backend_ctypes.py

 class CTypesData(object):
     __metaclass__ = CTypesType
     __slots__ = ['__weakref__']
+    __name__ = '<cdata>'
 
     def __init__(self, *args):
         raise TypeError("cannot instantiate %r" % (self.__class__,))
         elif BItem in (getbtype(model.PrimitiveType('signed char')),
                        getbtype(model.PrimitiveType('unsigned char'))):
             kind = 'bytep'
+        elif BItem is getbtype(model.void_type):
+            kind = 'voidp'
         else:
             kind = 'generic'
         #
             def __setitem__(self, index, value):
                 self._as_ctype_ptr[index] = BItem._to_ctypes(value)
 
-            if kind == 'charp':
+            if kind == 'charp' or kind == 'voidp':
                 @classmethod
-                def _arg_to_ctypes(cls, value):
-                    if isinstance(value, bytes):
-                        return ctypes.c_char_p(value)
+                def _arg_to_ctypes(cls, *value):
+                    if value and isinstance(value[0], bytes):
+                        return ctypes.c_char_p(value[0])
                     else:
-                        return super(CTypesPtr, cls)._arg_to_ctypes(value)
+                        return super(CTypesPtr, cls)._arg_to_ctypes(*value)
 
             if kind == 'charp' or kind == 'bytep':
                 def _to_string(self, maxlen):

lib_pypy/cffi/vengine_cpy.py

     def patch_extension_kwds(self, kwds):
         pass
 
+    def find_module(self, module_name, path, so_suffix):
+        try:
+            f, filename, descr = imp.find_module(module_name, path)
+        except ImportError:
+            return None
+        if f is not None:
+            f.close()
+        # Note that after a setuptools installation, there are both .py
+        # and .so files with the same basename.  The code here relies on
+        # imp.find_module() locating the .so in priority.
+        if descr[0] != so_suffix:
+            return None
+        return filename
+
     def collect_types(self):
         self._typesdict = {}
         self._generate("collecttype")
         prnt('static void %s(%s *p)' % (checkfuncname, cname))
         prnt('{')
         prnt('  /* only to generate compile-time warnings or errors */')
-        for fname, ftype, _ in tp.enumfields():
+        for fname, ftype, fbitsize in tp.enumfields():
             if (isinstance(ftype, model.PrimitiveType)
-                and ftype.is_integer_type()):
+                and ftype.is_integer_type()) or fbitsize >= 0:
                 # accept all integers, but complain on float or double
                 prnt('  (void)((p->%s) << 1);' % fname)
             else:

lib_pypy/cffi/vengine_gen.py

-import sys
+import sys, os
 import types
 
 from . import model, ffiplatform
         # up in kwds['export_symbols'].
         kwds.setdefault('export_symbols', self.export_symbols)
 
+    def find_module(self, module_name, path, so_suffix):
+        basename = module_name + so_suffix
+        if path is None:
+            path = sys.path
+        for dirname in path:
+            filename = os.path.join(dirname, basename)
+            if os.path.isfile(filename):
+                return filename
+        return None
+
     def collect_types(self):
         pass      # not needed in the generic engine
 
         prnt('static void %s(%s *p)' % (checkfuncname, cname))
         prnt('{')
         prnt('  /* only to generate compile-time warnings or errors */')
-        for fname, ftype, _ in tp.enumfields():
+        for fname, ftype, fbitsize in tp.enumfields():
             if (isinstance(ftype, model.PrimitiveType)
-                and ftype.is_integer_type()):
+                and ftype.is_integer_type()) or fbitsize >= 0:
                 # accept all integers, but complain on float or double
                 prnt('  (void)((p->%s) << 1);' % fname)
             else:

lib_pypy/cffi/verifier.py

                 path = pkg.__path__
             else:
                 path = None
-            try:
-                f, filename, descr = imp.find_module(self.get_module_name(),
-                                                     path)
-            except ImportError:
+            filename = self._vengine.find_module(self.get_module_name(), path,
+                                                 _get_so_suffix())
+            if filename is None:
                 return
-            if f is not None:
-                f.close()
-            if filename.lower().endswith('.py'):
-                # on PyPy, if there are both .py and .pypy-19.so files in
-                # the same directory, the .py file is returned.  That's the
-                # case after a setuptools installation.  We never want to
-                # load the .py file here...
-                filename = filename[:-3] + _get_so_suffix()
-                if not os.path.isfile(filename):
-                    return
             self.modulefilename = filename
         self._vengine.collect_types()
         self._has_module = True
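
The lookup above calls a _get_so_suffix() helper that is outside this hunk.
As an assumption about its behaviour (a sketch, not cffi's verbatim code), it
can be thought of as returning the platform's C-extension suffix:

    import imp

    def _get_so_suffix():
        # hypothetical sketch: first C-extension suffix known to imp,
        # e.g. '.so' on Linux, '.pypy-20.so' on PyPy, '.pyd' on Windows
        for suffix, mode, typ in imp.get_suffixes():
            if typ == imp.C_EXTENSION:
                return suffix
        raise ValueError("no C_EXTENSION suffix found")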

pypy/config/pypyoption.py

     del working_modules["termios"]
     del working_modules["_minimal_curses"]
 
-    del working_modules["cppyy"]  # not tested on win32
+    if "cppyy" in working_modules:
+        del working_modules["cppyy"]  # not tested on win32
 
     # The _locale module is needed by site.py on Windows
     default_modules["_locale"] = None
     del working_modules["_minimal_curses"]
     del working_modules["termios"]
     del working_modules["_multiprocessing"]   # depends on rctime
-    del working_modules["cppyy"]  # depends on ctypes
+    if "cppyy" in working_modules:
+        del working_modules["cppyy"]  # depends on ctypes
 
 
 module_dependencies = {

pypy/doc/cppyy.rst

     $ genreflex MyClass.h
     $ g++ -fPIC -rdynamic -O2 -shared -I$REFLEXHOME/include MyClass_rflx.cpp -o libMyClassDict.so -L$REFLEXHOME/lib -lReflex
 
+Next, make sure that the library can be found through the dynamic lookup path
+(the ``LD_LIBRARY_PATH`` environment variable on Linux, ``PATH`` on Windows),
+for example by adding ".".
 Now you're ready to use the bindings.
 Since the bindings are designed to look pythonistic, it should be
 straightforward::

pypy/goal/targetpypystandalone.py

 
     @entrypoint('main', [], c_name='pypy_init_threads')
     def pypy_init_threads():
-        if space.config.objspace.usemodules.thread:
-            os_thread.setup_threads(space)
-            rffi.aroundstate.before()
+        if not space.config.objspace.usemodules.thread:
+            return
+        os_thread.setup_threads(space)
+        rffi.aroundstate.before()
 
     @entrypoint('main', [], c_name='pypy_thread_attach')
     def pypy_thread_attach():
-        if space.config.objspace.usemodules.thread:
-            rthread.gc_thread_start()
+        if not space.config.objspace.usemodules.thread:
+            return
+        os_thread.setup_threads(space)
+        os_thread.bootstrapper.acquire(space, None, None)
+        rthread.gc_thread_start()
+        os_thread.bootstrapper.nbthreads += 1
+        os_thread.bootstrapper.release()
+        rffi.aroundstate.before()
 
     w_globals = space.newdict()
     space.setitem(w_globals, space.wrap('__builtins__'),

pypy/interpreter/pyparser/future.py

-"""
-This automaton is designed to be invoked on a Python source string
-before the real parser starts working, in order to find all legal
-'from __future__ import blah'. As soon as something is encountered that
-would prevent more future imports, the analysis is aborted.
-The resulting legal futures are avaliable in self.flags after the
-pass has ended.
-
-Invocation is through get_futures(src), which returns a field of flags, one per
-found correct future import.
-
-The flags can then be used to set up the parser.
-All error detection is left to the parser.
-
-The reason we are not using the regular lexer/parser toolchain is that
-we do not want the overhead of generating tokens for entire files just
-to find information that resides in the first few lines of the file.
-Neither do we require sane error messages, as this job is handled by
-the parser.
-
-To make the parsing fast, especially when the module is translated to C,
-the code has been written in a very serial fashion, using an almost
-assembler like style. A further speedup could be achieved by replacing
-the "in" comparisons with explicit numeric comparisons.
-"""
-
-from pypy.interpreter.astcompiler.consts import CO_GENERATOR_ALLOWED, \
-    CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSOLUTE_IMPORT
-
-def get_futures(future_flags, source):
-    futures = FutureAutomaton(future_flags, source)
-    try:
-        futures.start()
-    except DoneException, e:
-        pass
-    return futures.flags, (futures.lineno, futures.col_offset)
-
-class DoneException(Exception):
-    pass
-
-whitespace = ' \t\f'
-whitespace_or_newline = whitespace + '\n\r'
-letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYabcdefghijklmnopqrstuvwxyz_'
-alphanumerics = letters + '1234567890'
-
-class FutureAutomaton(object):
-    """
-    A future statement must appear near the top of the module.
-    The only lines that can appear before a future statement are:
-
-        * the module docstring (if any),
-        * comments,
-        * blank lines, and
-        * other future statements.
-
-    The features recognized by Python 2.5 are "generators",
-    "division", "nested_scopes" and "with_statement", "absolute_import".
-    "generators", "division" and "nested_scopes" are redundant
-    in 2.5 because they are always enabled.
-
-    This module parses the input until it encounters something that is
-    not recognized as a valid future statement or something that may
-    precede a future statement.
-    """
-
-    def __init__(self, future_flags, string):
-        self.future_flags = future_flags
-        self.s = string
-        self.pos = 0
-        self.current_lineno = 1
-        self.lineno = -1
-        self.line_start_pos = 0
-        self.col_offset = 0
-        self.docstring_consumed = False
-        self.flags = 0
-        self.got_features = 0
-
-    def getc(self, offset=0):
-        try:
-            return self.s[self.pos + offset]
-        except IndexError:
-            raise DoneException
-
-    def start(self):
-        c = self.getc()
-        if c in ("'", '"', "r", "u") and not self.docstring_consumed:
-            self.consume_docstring()
-        elif c == '\\' or c in whitespace_or_newline:
-            self.consume_empty_line()
-        elif c == '#':
-            self.consume_comment()
-        elif c == 'f':
-            self.consume_from()
-        else:
-            return
-
-    def atbol(self):
-        self.current_lineno += 1
-        self.line_start_pos = self.pos
-
-    def consume_docstring(self):
-        self.docstring_consumed = True
-        if self.getc() == "r":
-            self.pos += 1
-        if self.getc() == "u":
-            self.pos += 1
-        endchar = self.getc()
-        if (self.getc() == self.getc(+1) and
-            self.getc() == self.getc(+2)):
-            self.pos += 3
-            while 1: # Deal with a triple quoted docstring
-                c = self.getc()
-                if c == '\\':
-                    self.pos += 1
-                    self._skip_next_char_from_docstring()
-                elif c != endchar:
-                    self._skip_next_char_from_docstring()
-                else:
-                    self.pos += 1
-                    if (self.getc() == endchar and
-                        self.getc(+1) == endchar):
-                        self.pos += 2
-                        self.consume_empty_line()
-                        break
-
-        else: # Deal with a single quoted docstring
-            self.pos += 1
-            while 1:
-                c = self.getc()
-                self.pos += 1
-                if c == endchar:
-                    self.consume_empty_line()
-                    return
-                elif c == '\\':
-                    self._skip_next_char_from_docstring()
-                elif c in '\r\n':
-                    # Syntax error
-                    return
-
-    def _skip_next_char_from_docstring(self):
-        c = self.getc()
-        self.pos += 1
-        if c == '\n':
-            self.atbol()
-        elif c == '\r':
-            if self.getc() == '\n':
-                self.pos += 1
-            self.atbol()
-
-    def consume_continuation(self):
-        c = self.getc()
-        if c in '\n\r':
-            self.pos += 1
-            self.atbol()
-
-    def consume_empty_line(self):
-        """
-        Called when the remainder of the line can only contain whitespace
-        and comments.
-        """
-        while self.getc() in whitespace:
-            self.pos += 1
-        if self.getc() == '#':
-            self.consume_comment()
-        elif self.getc() == ';':
-            self.pos += 1
-            self.consume_whitespace()
-            self.start()
-        elif self.getc() in '\\':
-            self.pos += 1
-            self.consume_continuation()
-            self.start()
-        elif self.getc() in '\r\n':
-            c = self.getc()
-            self.pos += 1
-            if c == '\r':
-                if self.getc() == '\n':
-                    self.pos += 1
-                self.atbol()
-            else:
-                self.atbol()
-            self.start()
-
-    def consume_comment(self):
-        self.pos += 1
-        while self.getc() not in '\r\n':
-            self.pos += 1
-        self.consume_empty_line()
-
-    def consume_from(self):
-        col_offset = self.pos - self.line_start_pos
-        line = self.current_lineno
-        self.pos += 1
-        if self.getc() == 'r' and self.getc(+1) == 'o' and self.getc(+2) == 'm':
-            self.docstring_consumed = True
-            self.pos += 3
-            self.consume_mandatory_whitespace()
-            if self.s[self.pos:self.pos+10] != '__future__':
-                raise DoneException
-            self.pos += 10
-            self.consume_mandatory_whitespace()
-            if self.s[self.pos:self.pos+6] != 'import':
-                raise DoneException
-            self.pos += 6
-            self.consume_whitespace()
-            old_got = self.got_features
-            try:
-                if self.getc() == '(':
-                    self.pos += 1
-                    self.consume_whitespace()
-                    self.set_flag(self.get_name())
-                    # Set flag corresponding to name
-                    self.get_more(paren_list=True)
-                else:
-                    self.set_flag(self.get_name())
-                    self.get_more()
-            finally:
-                if self.got_features > old_got:
-                    self.col_offset = col_offset
-                    self.lineno = line
-            self.consume_empty_line()
-
-    def consume_mandatory_whitespace(self):
-        if self.getc() not in whitespace + '\\':
-            raise DoneException
-        self.consume_whitespace()
-
-    def consume_whitespace(self, newline_ok=False):
-        while 1:
-            c = self.getc()
-            if c in whitespace:
-                self.pos += 1
-                continue
-            elif c == '\\' or newline_ok:
-                slash = c == '\\'
-                if slash:
-                    self.pos += 1
-                c = self.getc()
-                if c == '\n':
-                    self.pos += 1
-                    self.atbol()
-                    continue
-                elif c == '\r':
-                    self.pos += 1
-                    if self.getc() == '\n':
-                        self.pos += 1
-                        self.atbol()
-                elif slash:
-                    raise DoneException
-                else:
-                    return
-            else:
-                return
-
-    def get_name(self):
-        if self.getc() not in letters:
-            raise DoneException
-        p = self.pos
-        try:
-            while self.getc() in alphanumerics:
-                self.pos += 1
-        except DoneException:
-            # If there's any name at all, we want to call self.set_flag().
-            # Something else while get the DoneException again.
-            if self.pos == p:
-                raise
-            end = self.pos
-        else:
-            end = self.pos
-            self.consume_whitespace()
-        return self.s[p:end]
-
-    def get_more(self, paren_list=False):
-        if paren_list and self.getc() == ')':
-            self.pos += 1
-            return
-        if (self.getc() == 'a' and
-            self.getc(+1) == 's' and
-            self.getc(+2) in whitespace):
-            self.get_name()
-            self.get_name()
-            self.get_more(paren_list=paren_list)
-            return
-        elif self.getc() != ',':
-            return
-        else:
-            self.pos += 1
-            self.consume_whitespace(paren_list)
-            if paren_list and self.getc() == ')':
-                self.pos += 1
-                return # Handles trailing comma inside parenthesis
-            self.set_flag(self.get_name())
-            self.get_more(paren_list=paren_list)
-
-    def set_flag(self, feature):
-        self.got_features += 1
-        try:
-            self.flags |= self.future_flags.compiler_features[feature]
-        except KeyError:
-            pass
-
-from codeop import PyCF_DONT_IMPLY_DEDENT
-from pypy.interpreter.error import OperationError
-
 from pypy.tool import stdlib___future__ as future
 
 class FutureFlags(object):
                 flag_names.append(name)
         return flag_names
 
+    def get_compiler_feature(self, name):
+        return self.compiler_features.get(name, 0)
+
 futureFlags_2_4 = FutureFlags((2, 4, 4, 'final', 0))
 futureFlags_2_5 = FutureFlags((2, 5, 0, 'final', 0))
 futureFlags_2_7 = FutureFlags((2, 7, 0, 'final', 0))
+
+
+class TokenIterator:
+    def __init__(self, tokens):
+        self.tokens = tokens
+        self.index = 0
+        self.next()
+
+    def next(self):
+        index = self.index
+        self.index = index + 1
+        self.tok = self.tokens[index]
+
+    def skip(self, n):
+        if self.tok[0] == n:
+            self.next()
+            return True
+        else:
+            return False
+
+    def skip_name(self, name):
+        from pypy.interpreter.pyparser import pygram
+        if self.tok[0] == pygram.tokens.NAME and self.tok[1] == name:
+            self.next()
+            return True
+        else:
+            return False
+
+    def next_feature_name(self):
+        from pypy.interpreter.pyparser import pygram
+        if self.tok[0] == pygram.tokens.NAME:
+            name = self.tok[1]
+            self.next()
+            if self.skip_name("as"):
+                self.skip(pygram.tokens.NAME)
+            return name
+        else:
+            return ''
+
+    def skip_newlines(self):
+        from pypy.interpreter.pyparser import pygram
+        while self.skip(pygram.tokens.NEWLINE):
+            pass
+
+
+def add_future_flags(future_flags, tokens):
+    from pypy.interpreter.pyparser import pygram
+    it = TokenIterator(tokens)
+    result = 0
+    #
+    # The only things that can precede a future statement are another
+    # future statement and a doc string (only one).  This is a very
+    # permissive parsing of the given list of tokens; it relies on
+    # the real parsing done afterwards to give errors.
+    it.skip_newlines()
+    it.skip_name("r") or it.skip_name("u") or it.skip_name("ru")
+    if it.skip(pygram.tokens.STRING):
+        it.skip_newlines()
+
+    while (it.skip_name("from") and
+           it.skip_name("__future__") and
+           it.skip_name("import")):
+        it.skip(pygram.tokens.LPAR)    # optionally
+        result |= future_flags.get_compiler_feature(it.next_feature_name())
+        while it.skip(pygram.tokens.COMMA):
+            result |= future_flags.get_compiler_feature(it.next_feature_name())
+        it.skip(pygram.tokens.RPAR)    # optionally
+        it.skip(pygram.tokens.SEMI)    # optionally
+        it.skip_newlines()
+
+    position = (it.tok[2], it.tok[3])
+    return result, position
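
The token-based scan above replaces the hand-written character automaton
deleted from this file.  The same idea, sketched against CPython 3's stdlib
tokenize and __future__ modules purely for illustration (the real code uses
PyPy's pytokenizer and pygram tokens, and is deliberately just as permissive,
leaving error reporting to the parser):

    import io
    import tokenize
    import __future__ as future_mod

    def scan_future_flags(source):
        flags = 0
        toks = tokenize.generate_tokens(io.StringIO(source).readline)
        tok = next(toks)
        # skip blank lines, comments and an optional docstring
        while tok.type in (tokenize.NL, tokenize.NEWLINE,
                           tokenize.COMMENT, tokenize.STRING):
            tok = next(toks)
        while tok.type == tokenize.NAME and tok.string == 'from':
            names = []
            while tok.type != tokenize.NEWLINE:    # scan one logical line
                if tok.type == tokenize.NAME:
                    names.append(tok.string)
                tok = next(toks)
            if names[1:3] != ['__future__', 'import']:
                break                              # not a future import: stop
            for name in names[3:]:                 # 'as' aliases are ignored
                feature = getattr(future_mod, name, None)
                if feature is not None:
                    flags |= feature.compiler_flag
            tok = next(toks)
            while tok.type in (tokenize.NL, tokenize.NEWLINE):
                tok = next(toks)
        return flags

    assert (scan_future_flags('"doc"\nfrom __future__ import division\n')
            == future_mod.division.compiler_flag)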

pypy/interpreter/pyparser/pyparse.py

                         raise error.SyntaxError(space.str_w(w_message))
                     raise
 
-        f_flags, future_info = future.get_futures(self.future_flags, textsrc)
-        compile_info.last_future_import = future_info
-        compile_info.flags |= f_flags
-
         flags = compile_info.flags
 
-        if flags & consts.CO_FUTURE_PRINT_FUNCTION:
-            self.grammar = pygram.python_grammar_no_print
-        else:
-            self.grammar = pygram.python_grammar
-
         # The tokenizer is very picky about how it wants its input.
         source_lines = textsrc.splitlines(True)
         if source_lines and not source_lines[-1].endswith("\n"):
         tp = 0
         try:
             try:
+                # Note: we no longer pass the CO_FUTURE_* to the tokenizer,
+                # which is expected to work independently of them.  It's
+                # certainly the case for all futures in Python <= 2.7.
                 tokens = pytokenizer.generate_tokens(source_lines, flags)
+
+                newflags, last_future_import = (
+                    future.add_future_flags(self.future_flags, tokens))
+                compile_info.last_future_import = last_future_import
+                compile_info.flags |= newflags
+
+                if compile_info.flags & consts.CO_FUTURE_PRINT_FUNCTION:
+                    self.grammar = pygram.python_grammar_no_print
+                else:
+                    self.grammar = pygram.python_grammar
+
                 for tp, value, lineno, column, line in tokens:
                     if self.add_token(tp, value, lineno, column, line):
                         break
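
The user-visible effect of picking python_grammar_no_print above can be
checked with plain compile(), which behaves the same way on CPython: once the
future import is seen, 'print' parses as an ordinary name rather than as a
statement:

    src = "from __future__ import print_function\nx = print\n"
    ns = {}
    exec(compile(src, "<test>", "exec"), ns)
    assert callable(ns["x"])   # 'print' was looked up as a normal builtin name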

pypy/interpreter/pyparser/test/test_future.py

+import py
+from pypy.interpreter.pyparser import future, pytokenizer
+from pypy.tool import stdlib___future__ as fut
+
+def run(s, expected_last_future=None):
+    source_lines = s.splitlines(True)
+    tokens = pytokenizer.generate_tokens(source_lines, 0)
+    expected_last_future = expected_last_future or tokens[-1][2:4]
+    #
+    flags, last_future_import = future.add_future_flags(
+        future.futureFlags_2_7, tokens)
+    assert last_future_import == expected_last_future
+    return flags
+
+def test_docstring():
+    s = '"Docstring\\" "\nfrom  __future__ import division\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION
+
+def test_comment():
+    s = '# A comment about nothing ;\n'
+    f = run(s)
+    assert f == 0
+
+def test_tripledocstring():
+    s = '''""" This is a
+docstring with line
+breaks in it. It even has a \n"""
+'''
+    f = run(s)
+    assert f == 0
+
+def test_escapedquote_in_tripledocstring():
+    s = '''""" This is a
+docstring with line
+breaks in it. \\"""It even has an escaped quote!"""
+'''
+    f = run(s)
+    assert f == 0
+
+def test_empty_line():
+    s = ' \t   \f \n   \n'
+    f = run(s)
+    assert f == 0
+
+def test_from():
+    s = 'from  __future__ import division\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION
+
+def test_froms():
+    s = 'from  __future__ import division, generators, with_statement\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED |
+                 fut.CO_FUTURE_WITH_STATEMENT)
+
+def test_from_as():
+    s = 'from  __future__ import division as b\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION
+    
+def test_froms_as():
+    s = 'from  __future__ import division as b, generators as c\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED)
+
+def test_from_paren():
+    s = 'from  __future__ import (division)\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION
+
+def test_froms_paren():
+    s = 'from  __future__ import (division, generators)\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED)
+
+def test_froms_paren_as():
+    s = 'from  __future__ import (division as b, generators,)\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED)
+
+def test_paren_with_newline():
+    s = 'from __future__ import (division,\nabsolute_import)\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION | fut.CO_FUTURE_ABSOLUTE_IMPORT)
+
+def test_paren_with_newline_2():
+    s = 'from __future__ import (\ndivision,\nabsolute_import)\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION | fut.CO_FUTURE_ABSOLUTE_IMPORT)
+
+def test_multiline():
+    s = '"abc" #def\n  #ghi\nfrom  __future__ import (division as b, generators,)\nfrom __future__ import with_statement\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED |
+                 fut.CO_FUTURE_WITH_STATEMENT)
+
+def test_windows_style_lineendings():
+    s = '"abc" #def\r\n  #ghi\r\nfrom  __future__ import (division as b, generators,)\r\nfrom __future__ import with_statement\r\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED |
+                 fut.CO_FUTURE_WITH_STATEMENT)
+
+def test_mac_style_lineendings():
+    s = '"abc" #def\r  #ghi\rfrom  __future__ import (division as b, generators,)\rfrom __future__ import with_statement\r'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED |
+                 fut.CO_FUTURE_WITH_STATEMENT)
+
+def test_semicolon():
+    s = '"abc" #def\n  #ghi\nfrom  __future__ import (division as b, generators,);  from __future__ import with_statement\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED |
+                 fut.CO_FUTURE_WITH_STATEMENT)
+
+def test_semicolon_2():
+    s = 'from  __future__ import division; from foo import bar'
+    f = run(s, expected_last_future=(1, 39))
+    assert f == fut.CO_FUTURE_DIVISION
+
+def test_full_chain():
+    s = '"abc" #def\n  #ghi\nfrom  __future__ import (division as b, generators,);  from __future__ import with_statement\n'
+    f = run(s)
+    assert f == (fut.CO_FUTURE_DIVISION |
+                 fut.CO_GENERATOR_ALLOWED |
+                 fut.CO_FUTURE_WITH_STATEMENT)
+
+def test_intervening_code():
+    s = 'from  __future__ import (division as b, generators,)\nfrom sys import modules\nfrom __future__ import with_statement\n'
+    f = run(s, expected_last_future=(2, 5))
+    assert f == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED)
+
+def test_nonexisting():
+    s = 'from  __future__ import non_existing_feature\n'
+    f = run(s)
+    assert f == 0
+
+def test_nonexisting_2():
+    s = 'from  __future__ import non_existing_feature, with_statement\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_from_import_abs_import():
+    s = 'from  __future__ import absolute_import\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_ABSOLUTE_IMPORT
+
+def test_raw_doc():
+    s = 'r"Doc"\nfrom __future__ import with_statement\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_unicode_doc():
+    s = 'u"Doc"\nfrom __future__ import with_statement\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_raw_unicode_doc():
+    s = 'ru"Doc"\nfrom __future__ import with_statement\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_continuation_line():
+    s = "\\\nfrom __future__ import with_statement\n"
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_continuation_lines():
+    s = "\\\n  \t\\\nfrom __future__ import with_statement\n"
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_lots_of_continuation_lines():
+    s = "\\\n\\\n\\\n\\\n\\\n\\\n\nfrom __future__ import with_statement\n"
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT
+
+def test_continuation_lines_raise():
+    s = "   \\\n  \t\\\nfrom __future__ import with_statement\n"
+    f = run(s, expected_last_future=(1, 0))
+    assert f == 0     # because of the INDENT
+
+def test_continuation_lines_in_docstring_single_quoted():
+    s = '"\\\n\\\n\\\n\\\n\\\n\\\n"\nfrom  __future__ import division\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION
+
+def test_continuation_lines_in_docstring_triple_quoted():
+    s = '"""\\\n\\\n\\\n\\\n\\\n\\\n"""\nfrom  __future__ import division\n'
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION
+
+def test_blank_lines():
+    s = ('\n\t\n\nfrom __future__ import with_statement'
+         '  \n  \n  \nfrom __future__ import division')
+    f = run(s)
+    assert f == fut.CO_FUTURE_WITH_STATEMENT | fut.CO_FUTURE_DIVISION
+
+def test_dummy_semicolons():
+    s = ('from __future__ import division;\n'
+         'from __future__ import with_statement;')
+    f = run(s)
+    assert f == fut.CO_FUTURE_DIVISION | fut.CO_FUTURE_WITH_STATEMENT

pypy/interpreter/pyparser/test/test_futureautomaton.py

-import py
-import pypy.interpreter.pyparser.future as future
-from pypy.tool import stdlib___future__ as fut
-
-def run(s):
-    f = future.FutureAutomaton(future.futureFlags_2_7, s)
-    try:
-        f.start()
-    except future.DoneException:
-        pass
-    return f
-
-def test_docstring():
-    s = '"Docstring\\" "\nfrom  __future__ import division\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_DIVISION
-    assert f.lineno == 2
-    assert f.col_offset == 0
-
-def test_comment():
-    s = '# A comment about nothing ;\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.lineno == -1
-    assert f.col_offset == 0
-
-def test_tripledocstring():
-    s = '''""" This is a
-docstring with line
-breaks in it. It even has a \n"""
-'''
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.lineno == -1
-    assert f.col_offset == 0
-
-def test_escapedquote_in_tripledocstring():
-    s = '''""" This is a
-docstring with line
-breaks in it. \\"""It even has an escaped quote!"""
-'''
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.lineno == -1
-    assert f.col_offset == 0
-
-def test_empty_line():
-    s = ' \t   \f \n   \n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.lineno == -1
-    assert f.col_offset == 0
-
-def test_from():
-    s = 'from  __future__ import division\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_DIVISION
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_froms():
-    s = 'from  __future__ import division, generators, with_statement\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED |
-                       fut.CO_FUTURE_WITH_STATEMENT)
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_from_as():
-    s = 'from  __future__ import division as b\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_DIVISION
-    assert f.lineno == 1
-    assert f.col_offset == 0
-    
-def test_froms_as():
-    s = 'from  __future__ import division as b, generators as c\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED)
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_from_paren():
-    s = 'from  __future__ import (division)\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_DIVISION
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_froms_paren():
-    s = 'from  __future__ import (division, generators)\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED)
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_froms_paren_as():
-    s = 'from  __future__ import (division as b, generators,)\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED)
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_paren_with_newline():
-    s = 'from __future__ import (division,\nabsolute_import)\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_FUTURE_ABSOLUTE_IMPORT)
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_multiline():
-    s = '"abc" #def\n  #ghi\nfrom  __future__ import (division as b, generators,)\nfrom __future__ import with_statement\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED |
-                       fut.CO_FUTURE_WITH_STATEMENT)
-    assert f.lineno == 4
-    assert f.col_offset == 0
-
-def test_windows_style_lineendings():
-    s = '"abc" #def\r\n  #ghi\r\nfrom  __future__ import (division as b, generators,)\r\nfrom __future__ import with_statement\r\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED |
-                       fut.CO_FUTURE_WITH_STATEMENT)
-    assert f.lineno == 4
-    assert f.col_offset == 0
-
-def test_mac_style_lineendings():
-    s = '"abc" #def\r  #ghi\rfrom  __future__ import (division as b, generators,)\rfrom __future__ import with_statement\r'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED |
-                       fut.CO_FUTURE_WITH_STATEMENT)
-    assert f.lineno == 4
-    assert f.col_offset == 0
-
-def test_semicolon():
-    s = '"abc" #def\n  #ghi\nfrom  __future__ import (division as b, generators,);  from __future__ import with_statement\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == (fut.CO_FUTURE_DIVISION |
-                       fut.CO_GENERATOR_ALLOWED |
-                       fut.CO_FUTURE_WITH_STATEMENT)
-    assert f.lineno == 3
-    assert f.col_offset == 55
-
-def test_full_chain():
-    s = '"abc" #def\n  #ghi\nfrom  __future__ import (division as b, generators,);  from __future__ import with_statement\n'
-    flags, pos = future.get_futures(future.futureFlags_2_5, s)
-    assert flags == (fut.CO_FUTURE_DIVISION |
-                     fut.CO_GENERATOR_ALLOWED |
-                     fut.CO_FUTURE_WITH_STATEMENT)
-    assert pos == (3, 55)
-
-def test_intervening_code():
-    s = 'from  __future__ import (division as b, generators,)\nfrom sys import modules\nfrom __future__ import with_statement\n'
-    flags, pos = future.get_futures(future.futureFlags_2_5, s)
-    assert flags & fut.CO_FUTURE_WITH_STATEMENT == 0
-    assert pos == (1, 0)
-
-def test_nonexisting():
-    s = 'from  __future__ import non_existing_feature\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == 0
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_from_import_abs_import():
-    s = 'from  __future__ import absolute_import\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_ABSOLUTE_IMPORT
-    assert f.lineno == 1
-    assert f.col_offset == 0
-
-def test_raw_doc():
-    s = 'r"Doc"\nfrom __future__ import with_statement\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_WITH_STATEMENT
-    assert f.lineno == 2
-    assert f.col_offset == 0
-
-def test_unicode_doc():
-    s = 'u"Doc"\nfrom __future__ import with_statement\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_WITH_STATEMENT
-    assert f.lineno == 2
-    assert f.col_offset == 0
-
-def test_raw_unicode_doc():
-    s = 'ru"Doc"\nfrom __future__ import with_statement\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_WITH_STATEMENT
-
-def test_continuation_line():
-    s = "\\\nfrom __future__ import with_statement\n"
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_WITH_STATEMENT
-    assert f.lineno == 2
-    assert f.col_offset == 0
-
-def test_continuation_lines():
-    s = "\\\n  \t\\\nfrom __future__ import with_statement\n"
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_WITH_STATEMENT
-    assert f.lineno == 3
-    assert f.col_offset == 0
-
-def test_lots_of_continuation_lines():
-    s = "\\\n\\\n\\\n\\\n\\\n\\\n\nfrom __future__ import with_statement\n"
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_WITH_STATEMENT
-    assert f.lineno == 8
-    assert f.col_offset == 0
-
-# This looks like a bug in cpython parser
-# and would require extensive modifications
-# to future.py in order to emulate the same behaviour
-def test_continuation_lines_raise():
-    py.test.skip("probably a CPython bug")
-    s = "   \\\n  \t\\\nfrom __future__ import with_statement\n"
-    try:
-        f = run(s)
-    except IndentationError, e:
-        assert e.args == 'unexpected indent'
-        assert f.pos == len(s)
-        assert f.flags == 0
-        assert f.lineno == -1
-        assert f.col_offset == 0
-    else:
-        raise AssertionError('IndentationError not raised')
-    assert f.lineno == 2
-    assert f.col_offset == 0
-
-def test_continuation_lines_in_docstring_single_quoted():
-    s = '"\\\n\\\n\\\n\\\n\\\n\\\n"\nfrom  __future__ import division\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_DIVISION
-    assert f.lineno == 8
-    assert f.col_offset == 0
-
-def test_continuation_lines_in_docstring_triple_quoted():
-    s = '"""\\\n\\\n\\\n\\\n\\\n\\\n"""\nfrom  __future__ import division\n'
-    f = run(s)
-    assert f.pos == len(s)
-    assert f.flags == fut.CO_FUTURE_DIVISION
-    assert f.lineno == 8
-    assert f.col_offset == 0

pypy/interpreter/pyparser/test/test_pyparse.py

         self.parse('0b1101')
         self.parse('0b0l')
         py.test.raises(SyntaxError, self.parse, "0b112")
+
+    def test_print_function(self):
+        self.parse("from __future__ import print_function\nx = print\n")

pypy/interpreter/test/test_compiler.py

             'from __future__ import nested_scopes, generators',
             'from __future__ import (nested_scopes,\ngenerators)',
             'from __future__ import (nested_scopes,\ngenerators,)',
+            'from __future__ import (\nnested_scopes,\ngenerators)',
+            'from __future__ import(\n\tnested_scopes,\n\tgenerators)',
+            'from __future__ import(\n\t\nnested_scopes)',
             'from sys import stdin, stderr, stdout',
             'from sys import (stdin, stderr,\nstdout)',
             'from sys import (stdin, stderr,\nstdout,)',

pypy/module/micronumpy/interp_dtype.py

         if w_fields == space.w_None:
             self.fields = None
         else:
+            self.fields = {}
             ofs_and_items = []
             size = 0
             for key in space.listview(w_fields):
     elif char == 'V':
         num = 20
         basename = 'void'
-        w_box_type = space.gettypefor(interp_boxes.W_VoidBox)
-        return dtype_from_list(space, space.newlist([]))
+        itemtype = types.VoidType(size)
+        return W_Dtype(itemtype, 20, VOIDLTR, "void" + str(size),
+                    "V", space.gettypefor(interp_boxes.W_VoidBox))
     else:
         assert char == 'U'
         basename = 'unicode'
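
For context on the 'V' branch fixed above: a record dtype is a void dtype
whose itemsize is the total size of its fields, which is what the pickle
tests below rely on ('V20' for three int32 fields plus a float64).  A quick
sketch with standard NumPy (numpypy in the PyPy tree gives the same result
for this case):

    import numpy as np    # 'numpypy' inside PyPy; plain NumPy shown here

    d = np.dtype([("x", "int32"), ("y", "int32"), ("z", "int32"),
                  ("value", float)])
    assert d.kind == 'V' and d.char == 'V'    # record dtypes are void dtypes
    assert d.itemsize == 4 + 4 + 4 + 8        # 20 bytes, hence 'V20'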

pypy/module/micronumpy/test/test_dtypes.py

         from numpypy import array, dtype
         from cPickle import loads, dumps
         a = array([1,2,3])
-         if self.ptr_size == 8:
+        if self.ptr_size == 8:
             assert a.dtype.__reduce__() == (dtype, ('i8', 0, 1), (3, '<', None, None, None, -1, -1, 0))
         else:
             assert a.dtype.__reduce__() == (dtype, ('i4', 0, 1), (3, '<', None, None, None, -1, -1, 0))
         assert loads(dumps(a.dtype)) == a.dtype
 
-    def test_pickle_record(self):
-        from numpypy import array, dtype
-        from cPickle import loads, dumps
-
-        d = dtype([("x", "int32"), ("y", "int32"), ("z", "int32"), ("value", float)])
-        assert d.__reduce__() == (dtype, ('V20', 0, 1), (3, '<', None, ('x', 'y', 'z', 'value'), {'y': (dtype('int32'), 4), 'x': (dtype('int32'), 0), 'z': (dtype('int32'), 8), 'value': (dtype('float64'), 12)}, 20, 1, 0))
-
-        new_d = loads(dumps(d))
-
-        assert new_d.__reduce__() == d.__reduce__()
-
 class AppTestTypes(BaseAppTestDtypes):
     def test_abstract_types(self):
         import numpypy as numpy
         assert isinstance(unicode_(3), unicode)
 
 class AppTestRecordDtypes(BaseNumpyAppTest):
+    spaceconfig = dict(usemodules=["micronumpy", "struct", "binascii"])
     def test_create(self):
         from numpypy import dtype, void
 
         assert dt.subdtype == (dtype(float), (10,))
         assert dt.base == dtype(float)
 
+    def test_pickle_record(self):
+        from numpypy import array, dtype
+        from cPickle import loads, dumps
+
+        d = dtype([("x", "int32"), ("y", "int32"), ("z", "int32"), ("value", float)])
+        assert d.__reduce__() == (dtype, ('V20', 0, 1), (3, '<', None, ('x', 'y', 'z', 'value'), {'y': (dtype('int32'), 4), 'x': (dtype('int32'), 0), 'z': (dtype('int32'), 8), 'value': (dtype('float64'), 12)}, 20, 1, 0))
+
+        new_d = loads(dumps(d))
+
+        assert new_d.__reduce__() == d.__reduce__()
+
+    def test_pickle_record_subarrays(self):
+        from numpypy import array, dtype
+        from cPickle import loads, dumps
+
+        d = dtype([("x", "int32", (3,)), ("y", "int32", (2,)), ("z", "int32", (4,)), ("value", float, (5,))])
+        new_d = loads(dumps(d))
+
+        keys = d.fields.keys()
+        keys.sort()
+        assert keys == ["value", "x", "y", "z"]
+
+        assert new_d.itemsize == d.itemsize == 76
+
 class AppTestNotDirect(BaseNumpyAppTest):
     def setup_class(cls):
         BaseNumpyAppTest.setup_class.im_func(cls)

pypy/module/micronumpy/types.py

     T = lltype.Char
 
     def _coerce(self, space, arr, ofs, dtype, w_items, shape):
-        items_w = space.fixedview(w_items)
-        for i in range(len(items_w)):
-            subdtype = dtype.subdtype
-            itemtype = subdtype.itemtype
-            if space.len_w(shape) <= 1:
-                w_box = itemtype.coerce(space, dtype.subdtype, items_w[i])
-                itemtype.store(arr, 0, ofs, w_box)
-                ofs += itemtype.get_element_size()
-            else:
-                size = 1
-                for dimension in shape[1:]:
-                    size *= dimension
-                size *= itemtype.get_element_size()
-                for w_item in items_w:
-                    self._coerce(space, arr, ofs, dtype, w_items, shape[1:])
-                    ofs += size
-        return arr
-
-    def _coerce(self, space, arr, ofs, dtype, w_items, shape):
         # TODO: Make sure the shape and the array match
         items_w = space.fixedview(w_items)
         subdtype = dtype.subdtype

pypy/module/pypyjit/test_pypy_c/test_array.py

         assert loop.match("""
             i10 = int_lt(i6, 1000)
             guard_true(i10, descr=...)
-            guard_not_invalidated(descr=...)
             i11 = int_lt(i6, i7)
             guard_true(i11, descr=...)
             f13 = getarrayitem_raw(i8, i6, descr=<ArrayF 8>)
         assert loop.match("""
             i10 = int_lt(i6, 1000)
             guard_true(i10, descr=...)
-            guard_not_invalidated(descr=...)
             i11 = int_lt(i6, i7)
             guard_true(i11, descr=...)
             i13 = getarrayitem_raw(i8, i6, descr=<Array. 4>)

pypy/module/pypyjit/test_pypy_c/test_call.py

         loop, = log.loops_by_filename(self.filepath)
         # the int strategy is used here
         assert loop.match_by_id('append', """
-            guard_not_invalidated(descr=...)
             i13 = getfield_gc(p8, descr=<FieldS list.length .*>)
             i15 = int_add(i13, 1)
             # Will be killed by the backend
         assert loop.match("""
             i2 = int_lt(i0, i1)
             guard_true(i2, descr=...)
-            guard_not_invalidated(descr=...)
             i3 = force_token()
             i4 = int_add(i0, 1)
             --TICK--
         """, [1000])
         loop, = log.loops_by_id('call')
         assert loop.match_by_id('call', '''
+        guard_not_invalidated(descr=...)
         i1 = force_token()
         ''')
 

pypy/module/test_lib_pypy/cffi_tests/backend_tests.py

         s.c = -4
         assert s.c == -4
 
+    def test_bitfield_enum(self):
+        ffi = FFI(backend=self.Backend())
+        ffi.cdef("""
+            typedef enum { AA, BB, CC } foo_e;
+            typedef struct { foo_e f:2; } foo_s;
+        """)
+        s = ffi.new("foo_s *")
+        s.f = 2
+        assert s.f == 2
+
     def test_anonymous_struct(self):
         ffi = FFI(backend=self.Backend())
         ffi.cdef("typedef struct { int a; } foo_t;")

pypy/module/test_lib_pypy/cffi_tests/test_ffi_backend.py

         assert ffi.typeof("long(*)(long, long**, ...)").cname == (
             "long(*)(long, long * *, ...)")
         assert ffi.typeof("long(*)(long, long**, ...)").ellipsis is True
+
+    def test_new_handle(self):
+        ffi = FFI(backend=self.Backend())
+        o = [2, 3, 4]
+        p = ffi.new_handle(o)
+        assert ffi.typeof(p) == ffi.typeof("void *")
+        assert ffi.from_handle(p) is o
+        assert ffi.from_handle(ffi.cast("char *", p)) is o
+        py.test.raises(RuntimeError, ffi.from_handle, ffi.NULL)

pypy/module/test_lib_pypy/cffi_tests/test_function.py

         assert lib.EE == -5
         assert lib.FF == -4
 
+    def test_void_star_accepts_string(self):
+        ffi = FFI(backend=self.Backend())
+        ffi.cdef("""int strlen(const void *);""")
+        lib = ffi.dlopen(None)
+        res = lib.strlen(b"hello")
+        assert res == 5
+
+    def test_signed_char_star_accepts_string(self):
+        if self.Backend is CTypesBackend:
+            py.test.skip("not supported by the ctypes backend")
+        ffi = FFI(backend=self.Backend())
+        ffi.cdef("""int strlen(signed char *);""")
+        lib = ffi.dlopen(None)
+        res = lib.strlen(b"hello")
+        assert res == 5
+
+    def test_unsigned_char_star_accepts_string(self):
+        if self.Backend is CTypesBackend:
+            py.test.skip("not supported by the ctypes backend")
+        ffi = FFI(backend=self.Backend())
+        ffi.cdef("""int strlen(unsigned char *);""")
+        lib = ffi.dlopen(None)
+        res = lib.strlen(b"hello")
+        assert res == 5
+
     def test_missing_function(self):
         ffi = FFI(backend=self.Backend())
         ffi.cdef("""
         """)
         m = ffi.dlopen("m")
         assert not hasattr(m, 'nonexistent')
+
+    def test_wraps_from_stdlib(self):
+        import functools
+        ffi = FFI(backend=self.Backend())
+        ffi.cdef("""
+            double sin(double x);
+        """)
+        def my_decorator(f):
+            @functools.wraps(f)
+            def wrapper(*args):
+                return f(*args) + 100
+            return wrapper
+        m = ffi.dlopen("m")
+        sin100 = my_decorator(m.sin)
+        x = sin100(1.23)
+        assert x == math.sin(1.23) + 100

pypy/module/test_lib_pypy/cffi_tests/test_verify.py

     py.test.raises(OverflowError, "s.b = 4")
     assert s.b == 3
 
+def test_struct_with_bitfield_enum():
+    ffi = FFI()
+    code = """
+        typedef enum { AA, BB, CC } foo_e;
+        typedef struct { foo_e f:2; } foo_s;
+    """
+    ffi.cdef(code)
+    ffi.verify(code)
+    s = ffi.new("foo_s *")
+    s.f = 2
+    assert s.f == 2
+
 def test_unsupported_struct_with_bitfield_ellipsis():
     ffi = FFI()
     py.test.raises(NotImplementedError, ffi.cdef,

pypy/module/test_lib_pypy/cffi_tests/test_zintegration.py

             modules = ('cffi', '_cffi_backend')
         except ImportError:
             modules = ('cffi', '_cffi_backend', 'pycparser')
+            try:
+                import ply
+            except ImportError:
+                pass
+            else:
+                modules += ('ply',)   # needed for older versions of pycparser
         for module in modules:
             target = imp.find_module(module)[1]
             os.symlink(target, os.path.join(site_packages,

pypy/objspace/std/iterobject.py

 """Generic iterator implementations"""
-from pypy.interpreter import gateway
+
 from pypy.interpreter.baseobjspace import W_Root
+from pypy.interpreter.gateway import interp2app, interpindirect2app
 from pypy.interpreter.error import OperationError
 from pypy.objspace.std.stdtypedef import StdTypeDef
 
 
 class W_AbstractSeqIterObject(W_Root):
-    def __init__(w_self, w_seq, index=0):
+    def __init__(self, w_seq, index=0):
         if index < 0:
             index = 0
-        w_self.w_seq = w_seq
-        w_self.index = index
+        self.w_seq = w_seq
+        self.index = index
 
     def getlength(self, space):
         if self.w_seq is None:
         XXX to do: remove this __reduce__ method and do
         a registration with copy_reg, instead.
         """
-        from pypy.objspace.std.iterobject import W_AbstractSeqIterObject
-        assert isinstance(self, W_AbstractSeqIterObject)
         from pypy.interpreter.mixedmodule import MixedModule
         w_mod = space.getbuiltinmodule('_pickle_support')
         mod = space.interp_w(MixedModule, w_mod)
         return space.newtuple([new_inst, space.newtuple(tup)])
 
     def descr_length_hint(self, space):
-        from pypy.objspace.std.iterobject import W_AbstractSeqIterObject
-        assert isinstance(self, W_AbstractSeqIterObject)
         return self.getlength(space)
 
 W_AbstractSeqIterObject.typedef = StdTypeDef(
 Get an iterator from an object.  In the first form, the argument must
 supply its own iterator, or be a sequence.
 In the second form, the callable is called until it returns the sentinel.''',
-
-    __iter__ = gateway.interp2app(W_AbstractSeqIterObject.descr_iter),
-    next = gateway.interpindirect2app(W_AbstractSeqIterObject.descr_next),
-    __reduce__ = gateway.interp2app(W_AbstractSeqIterObject.descr_reduce),
-    __length_hint__ = gateway.interp2app(
-        W_AbstractSeqIterObject.descr_length_hint),
+    __iter__ = interp2app(W_AbstractSeqIterObject.descr_iter),
+    next = interpindirect2app(W_AbstractSeqIterObject.descr_next),
+    __reduce__ = interp2app(W_AbstractSeqIterObject.descr_reduce),
+    __length_hint__ = interp2app(W_AbstractSeqIterObject.descr_length_hint),
 )
 W_AbstractSeqIterObject.typedef.acceptable_as_base_class = False
 
     """Sequence iterator specialized for tuples, accessing directly
     their RPython-level list of wrapped objects.
     """
-    def __init__(w_self, w_seq, wrappeditems):
-        W_AbstractSeqIterObject.__init__(w_self, w_seq)
-        w_self.tupleitems = wrappeditems
+    def __init__(self, w_seq, wrappeditems):
+        W_AbstractSeqIterObject.__init__(self, w_seq)
+        self.tupleitems = wrappeditems
 
     def descr_next(self, space):
         if self.tupleitems is None:
 
 
 class W_ReverseSeqIterObject(W_Root):
-    def __init__(w_self, space, w_seq, index=-1):
-        w_self.w_seq = w_seq
-        w_self.w_len = space.len(w_seq)
-        w_self.index = space.int_w(w_self.w_len) + index
+    def __init__(self, space, w_seq, index=-1):
+        self.w_seq = w_seq
+        self.w_len = space.len(w_seq)
+        self.index = space.int_w(self.w_len) + index
 
     def descr_reduce(self, space):
         """
         XXX to do: remove this __reduce__ method and do
         a registration with copy_reg, instead.
         """
-        from pypy.objspace.std.iterobject import W_ReverseSeqIterObject
-        assert isinstance(self, W_ReverseSeqIterObject)
         from pypy.interpreter.mixedmodule import MixedModule
         w_mod = space.getbuiltinmodule('_pickle_support')
         mod = space.interp_w(MixedModule, w_mod)
         return space.newtuple([new_inst, space.newtuple(tup)])
 
     def descr_length_hint(self, space):
-        from pypy.objspace.std.iterobject import W_ReverseSeqIterObject
-        assert isinstance(self, W_ReverseSeqIterObject)
         if self.w_seq is None:
             return space.wrap(0)
         index = self.index + 1
 
 W_ReverseSeqIterObject.typedef = StdTypeDef(
     "reversesequenceiterator",
-    __iter__ = gateway.interp2app(W_ReverseSeqIterObject.descr_iter),
-    next = gateway.interp2app(W_ReverseSeqIterObject.descr_next),
-    __reduce__ = gateway.interp2app(W_ReverseSeqIterObject.descr_reduce),
-    __length_hint__ = gateway.interp2app(
-        W_ReverseSeqIterObject.descr_length_hint),
+    __iter__ = interp2app(W_ReverseSeqIterObject.descr_iter),
+    next = interp2app(W_ReverseSeqIterObject.descr_next),
+    __reduce__ = interp2app(W_ReverseSeqIterObject.descr_reduce),
+    __length_hint__ = interp2app(W_ReverseSeqIterObject.descr_length_hint),
 )
 W_ReverseSeqIterObject.typedef.acceptable_as_base_class = False

pypy/objspace/std/listobject.py

+"""The builtin list implementation
+
+Lists optimize their storage by holding certain primitive datatypes in
+unwrapped form. For more information:
+
+http://morepypy.blogspot.com/2011/10/more-compact-lists-with-list-strategies.html
+
+"""
+
 import operator
-from sys import maxint
+import sys
 
 from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter.error import OperationError, operationerrfmt
 from pypy.objspace.std.stringobject import W_StringObject
 from pypy.objspace.std.tupleobject import W_AbstractTupleObject
 from pypy.objspace.std.unicodeobject import W_UnicodeObject
-from pypy.objspace.std.util import negate, get_positive_index
-from rpython.rlib import rerased, jit, debug
+from pypy.objspace.std.util import get_positive_index, negate
+from rpython.rlib import debug, jit, rerased
 from rpython.rlib.listsort import make_timsort_class
-from rpython.rlib.objectmodel import (instantiate, newlist_hint, specialize,
-    resizelist_hint)
+from rpython.rlib.objectmodel import (
+    instantiate, newlist_hint, resizelist_hint, specialize)
 from rpython.tool.sourcetools import func_with_new_name
 
 __all__ = ['W_ListObject', 'make_range_list', 'make_empty_list_with_size']
 
 
 class W_ListObject(W_Root):
-    def __init__(w_self, space, wrappeditems, sizehint=-1):
+
+    def __init__(self, space, wrappeditems, sizehint=-1):
         assert isinstance(wrappeditems, list)
-        w_self.space = space
+        self.space = space
         if space.config.objspace.std.withliststrategies:
-            w_self.strategy = get_strategy_from_list_objects(space,
-                                                             wrappeditems,
-                                                             sizehint)
+            self.strategy = get_strategy_from_list_objects(space, wrappeditems,
+                                                           sizehint)
         else:
-            w_self.strategy = space.fromcache(ObjectListStrategy)
-        w_self.init_from_list_w(wrappeditems)
+            self.strategy = space.fromcache(ObjectListStrategy)
+        self.init_from_list_w(wrappeditems)
 
     @staticmethod
     def from_storage_and_strategy(space, storage, strategy):
-        w_self = instantiate(W_ListObject)
-        w_self.space = space
-        w_self.strategy = strategy
-        w_self.lstorage = storage
+        self = instantiate(W_ListObject)
+        self.space = space
+        self.strategy = strategy
+        self.lstorage = storage
         if not space.config.objspace.std.withliststrategies:
-            w_self.switch_to_object_strategy()
-        return w_self
+            self.switch_to_object_strategy()
+        return self
 
     @staticmethod
     def newlist_str(space, list_s):
         storage = strategy.erase(list_s)
         return W_ListObject.from_storage_and_strategy(space, storage, strategy)
 
-    def __repr__(w_self):
+    def __repr__(self):
         """ representation for debugging purposes """
-        return "%s(%s, %s)" % (w_self.__class__.__name__, w_self.strategy,
-                               w_self.lstorage._x)
+        return "%s(%s, %s)" % (self.__class__.__name__, self.strategy,
+                               self.lstorage._x)
 
     def unwrap(w_list, space):
         # for tests only!
         strategy and storage according to the other W_List"""
         self.strategy.copy_into(self, other)
 
-    def find(self, w_item, start=0, end=maxint):
+    def find(self, w_item, start=0, end=sys.maxint):
         """Find w_item in list[start:end]. If not found, raise ValueError"""
         return self.strategy.find(self, w_item, start, end)
 
         'L.remove(value) -- remove first occurrence of value'
         # needs to be safe against eq_w() mutating the w_list behind our back
         try:
-            i = self.find(w_value, 0, maxint)
+            i = self.find(w_value, 0, sys.maxint)
         except ValueError:
             raise OperationError(space.w_ValueError,
                                  space.wrap("list.remove(x): x not in list"))
         if i < self.length():  # otherwise list was mutated
             self.pop(i)
 
-    @unwrap_spec(w_start=WrappedDefault(0), w_stop=WrappedDefault(maxint))
+    @unwrap_spec(w_start=WrappedDefault(0), w_stop=WrappedDefault(sys.maxint))
     def descr_index(self, space, w_value, w_start, w_stop):
         '''L.index(value, [start, [stop]]) -> integer -- return
         first index of value'''
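
The new module docstring above refers to list strategies. The following is a toy sketch of the idea with illustrative names only (StrategyList, IntStrategy, ObjectStrategy are not PyPy's classes, and the real W_ListObject also rewraps already-stored items when it switches strategy):

    class ObjectStrategy(object):
        # generic fallback: stores items as-is
        def append(self, lst, item):
            lst.storage.append(item)

    class IntStrategy(object):
        # keeps only ints (conceptually unwrapped); switches on anything else
        def append(self, lst, item):
            if isinstance(item, int):
                lst.storage.append(item)
            else:
                lst.strategy = ObjectStrategy()
                lst.strategy.append(lst, item)

    class StrategyList(object):
        def __init__(self):
            self.storage = []
            self.strategy = IntStrategy()
        def append(self, item):
            self.strategy.append(self, item)

    lst = StrategyList()
    lst.append(1)        # stays on IntStrategy
    lst.append("two")    # falls back to ObjectStrategy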

pypy/objspace/std/setobject.py

 class W_BaseSetObject(W_Root):
     typedef = None
 
-    def __init__(w_self, space, w_iterable=None):
+    def __init__(self, space, w_iterable=None):
         """Initialize the set by taking ownership of 'setdata'."""
-        w_self.space = space
-        set_strategy_and_setdata(space, w_self, w_iterable)
+        self.space = space
+        set_strategy_and_setdata(space, self, w_iterable)
 
-    def __repr__(w_self):
+    def __repr__(self):
         """representation for debugging purposes"""
-        reprlist = [repr(w_item) for w_item in w_self.getkeys()]
-        return "<%s(%s)>" % (w_self.__class__.__name__, ', '.join(reprlist))
+        reprlist = [repr(w_item) for w_item in self.getkeys()]
+        return "<%s(%s)>" % (self.__class__.__name__, ', '.join(reprlist))
 
-    def from_storage_and_strategy(w_self, storage, strategy):
-        obj = w_self._newobj(w_self.space, None)
+    def from_storage_and_strategy(self, storage, strategy):
+        obj = self._newobj(self.space, None)
         assert isinstance(obj, W_BaseSetObject)
         obj.strategy = strategy
         obj.sstorage = storage
 
 
 class W_SetObject(W_BaseSetObject):
-    def _newobj(w_self, space, w_iterable):
+    def _newobj(self, space, w_iterable):
         """Make a new set by taking ownership of 'w_iterable'."""
-        if type(w_self) is W_SetObject:
+        if type(self) is W_SetObject:
             return W_SetObject(space, w_iterable)
-        w_type = space.type(w_self)
+        w_type = space.type(self)
         w_obj = space.allocate_instance(W_SetObject, w_type)
         W_SetObject.__init__(w_obj, space, w_iterable)
         return w_obj
 class W_FrozensetObject(W_BaseSetObject):
     hash = 0
 
-    def _newobj(w_self, space, w_iterable):
+    def _newobj(self, space, w_iterable):
         """Make a new frozenset by taking ownership of 'w_iterable'."""
-        if type(w_self) is W_FrozensetObject:
+        if type(self) is W_FrozensetObject:
             return W_FrozensetObject(space, w_iterable)
-        w_type = space.type(w_self)
+        w_type = space.type(self)
         w_obj = space.allocate_instance(W_FrozensetObject, w_type)
         W_FrozensetObject.__init__(w_obj, space, w_iterable)
         return w_obj
 
 class W_SetIterObject(W_Root):
 
-    def __init__(w_self, space, iterimplementation):
-        w_self.space = space
-        w_self.iterimplementation = iterimplementation
+    def __init__(self, space, iterimplementation):
+        self.space = space
+        self.iterimplementation = iterimplementation
 
     def descr_length_hint(self, space):
         return space.wrap(self.iterimplementation.length())
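
The _newobj methods above dispatch on the concrete type so that app-level subclasses of set and frozenset get instances of the right class back. A minimal sketch of the same pattern in plain Python (BaseSet and MySet are illustrative names, not PyPy code):

    class BaseSet(object):
        def __init__(self, iterable=None):
            self.data = set(iterable or ())

        def _newobj(self, iterable):
            # exact built-in type: construct directly; subclass: allocate an
            # instance of the subclass and run the base __init__ on it
            if type(self) is BaseSet:
                return BaseSet(iterable)
            cls = type(self)
            obj = cls.__new__(cls)
            BaseSet.__init__(obj, iterable)
            return obj

    class MySet(BaseSet):
        pass

    assert type(MySet([1])._newobj([2])) is MySet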

pypy/objspace/std/specialisedtupleobject.py

 def make_specialised_class(typetuple):
     assert type(typetuple) == tuple
 
-    nValues = len(typetuple)
-    iter_n = unrolling_iterable(range(nValues))
+    typelen = len(typetuple)
+    iter_n = unrolling_iterable(range(typelen))
 
     class cls(W_AbstractTupleObject):
         def __init__(self, space, *values_w):
             self.space = space
-            assert len(values_w) == nValues
+            assert len(values_w) == typelen
             for i in iter_n:
                 w_obj = values_w[i]
                 val_type = typetuple[i]
                 setattr(self, 'value%s' % i, unwrapped)
 
         def length(self):
-            return nValues
+            return typelen
 
         def tolist(self):
-            list_w = [None] * nValues
+            list_w = [None] * typelen
             for i in iter_n:
                 value = getattr(self, 'value%s' % i)
                 if typetuple[i] != object:
         def descr_hash(self, space):
             mult = 1000003
             x = 0x345678
-            z = nValues
+            z = typelen
             for i in iter_n:
                 value = getattr(self, 'value%s' % i)
                 if typetuple[i] == object:
             if not isinstance(w_other, W_AbstractTupleObject):
                 return space.w_NotImplemented
             if not isinstance(w_other, cls):
-                if nValues != w_other.length():
+                if typelen != w_other.length():
                     return space.w_False
                 for i in iter_n:
                     myval = getattr(self, 'value%s' % i)
 
         def getitem(self, space, index):
             if index < 0:
-                index += nValues
+                index += typelen
             for i in iter_n:
                 if index == i:
                     value = getattr(self, 'value%s' % i)
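
make_specialised_class builds one class per tuple of field types, storing the items as attributes value0, value1, ... so the per-field loop can be unrolled. A plain-Python sketch of the factory, without the RPython unrolling machinery (IntIntTuple is a hypothetical name for the generated class):

    def make_specialised_class(typetuple):
        typelen = len(typetuple)

        class cls(object):
            def __init__(self, *values):
                assert len(values) == typelen
                for i in range(typelen):
                    assert isinstance(values[i], typetuple[i])
                    setattr(self, 'value%s' % i, values[i])

            def length(self):
                return typelen

            def tolist(self):
                return [getattr(self, 'value%s' % i) for i in range(typelen)]

        cls.__name__ = 'SpecialisedTuple' + ''.join(t.__name__ for t in typetuple)
        return cls

    IntIntTuple = make_specialised_class((int, int))
    t = IntIntTuple(1, 2)
    assert t.tolist() == [1, 2]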

pypy/objspace/std/tupleobject.py

+"""The builtin tuple implementation"""
+
 import sys
-from pypy.interpreter import gateway
+
 from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter.error import OperationError
-from pypy.interpreter.gateway import interp2app, interpindirect2app
+from pypy.interpreter.gateway import (
+    WrappedDefault, interp2app, interpindirect2app, unwrap_spec)
 from pypy.objspace.std import slicetype
 from pypy.objspace.std.inttype import wrapint
 from pypy.objspace.std.sliceobject import W_SliceObject, normalize_simple_slice
                 count += 1
         return space.wrap(count)
 
-    @gateway.unwrap_spec(w_start=gateway.WrappedDefault(0),
-                         w_stop=gateway.WrappedDefault(sys.maxint))
+    @unwrap_spec(w_start=WrappedDefault(0), w_stop=WrappedDefault(sys.maxint))
     @jit.look_inside_iff(lambda self, _1, _2, _3, _4: _unroll_condition(self))
     def descr_index(self, space, w_obj, w_start, w_stop):
         """index(obj, [start, [stop]]) -> first index that obj appears in the

pypy/tool/release/package.py

         rename_pypy_c += '.exe'
     binaries = [(pypy_c, rename_pypy_c)]
     #
+    builddir = udir.ensure("build", dir=True)
+    pypydir = builddir.ensure(name, dir=True)
+    includedir = basedir.join('include')
+    pypydir.ensure('include', dir=True)
+
     if sys.platform == 'win32':
         #Don't include a msvcrXX.dll, users should get their own.
         #Instructions are provided on the website.
             p = pypy_c.dirpath().join(extra)
             if not p.check():
                 p = py.path.local.sysfind(extra)
-                assert p, "%s not found" % (extra,)
+                if not p:
+                    print "%s not found, expect trouble if this is a shared build" % (extra,)
+                    continue
             print "Picking %s" % p
             binaries.append((p, p.basename))
-    #
-    builddir = udir.ensure("build", dir=True)
-    pypydir = builddir.ensure(name, dir=True)
+        if pypy_c.dirpath().join("libpypy-c.lib").check():
+            shutil.copyfile(str(pypy_c.dirpath().join("libpypy-c.lib")),
+                        str(pypydir.join('include/python27.lib')))
+            print "Picking %s as %s" % (pypy_c.dirpath().join("libpypy-c.lib"),
+                        pypydir.join('include/python27.lib'))
+        else:
+            pass
+            # XXX users will complain that they cannot compile cpyext
+            # modules for windows, has the lib moved or are there no
+            # exported functions in the dll so no import library is created?
+
     # Careful: to copy lib_pypy, copying just the svn-tracked files
     # would not be enough: there are also ctypes_config_cache/_*_cache.py.
     shutil.copytree(str(basedir.join('lib-python').join(STDLIB_VER)),
                                            '*.c', '*.o'))
     for file in ['LICENSE', 'README.rst']:
         shutil.copy(str(basedir.join(file)), str(pypydir))
-    pypydir.ensure('include', dir=True)
-    if sys.platform == 'win32':
-        shutil.copyfile(str(pypy_c.dirpath().join("libpypy-c.lib")),
-                        str(pypydir.join('include/python27.lib')))
-    # we want to put there all *.h and *.inl from trunk/include
-    # and from pypy/_interfaces
-    includedir = basedir.join('include')
     headers = includedir.listdir('*.h') + includedir.listdir('*.inl')
     for n in headers:
+        # we want to put there all *.h and *.inl from trunk/include
+        # and from pypy/_interfaces
         shutil.copy(str(n), str(pypydir.join('include')))
     #
     spdir = pypydir.ensure('site-packages', dir=True)
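
On Windows the packaging script now copies libpypy-c.lib to include/python27.lib only when the import library actually exists, instead of asserting, and the include directory is created up front so the copy has a destination. A self-contained sketch of that conditional copy using os.path instead of py.path (copy_import_library is a hypothetical helper name):

    import os
    import shutil

    def copy_import_library(build_dir, include_dir):
        # only ship an import library if the build actually produced one;
        # otherwise cpyext extension modules cannot be compiled on Windows
        lib = os.path.join(build_dir, 'libpypy-c.lib')
        if os.path.exists(lib):
            if not os.path.isdir(include_dir):
                os.makedirs(include_dir)
            shutil.copyfile(lib, os.path.join(include_dir, 'python27.lib'))
            return True
        return False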

rpython/config/translationoption.py

 from rpython.config.config import ChoiceOption, StrOption, Config
 from rpython.config.config import ConfigError
 from rpython.config.support import detect_number_of_processors
-from rpython.jit.backend.detect_cpu import autodetect
-from rpython.jit.backend.detect_cpu import MODEL_X86, MODEL_X86_NO_SSE2, MODEL_X86_64
 
 DEFL_INLINE_THRESHOLD = 32.4    # just enough to inline add__Int_Int()
 # and just small enough to prevend inlining of some rlist functions.
 
 DEFL_GC = "minimark"
 
-_is_x86 = autodetect() in (MODEL_X86, MODEL_X86_64, MODEL_X86_NO_SSE2)