Commits

Anonymous committed 2280949 Merge

merge with default

Files changed (78)

lib_pypy/_ctypes/array.py

-
+import _ffi
 import _rawffi
 
 from _ctypes.basics import _CData, cdata_from_address, _CDataMeta, sizeof
 from _ctypes.basics import keepalive_key, store_reference, ensure_objects
-from _ctypes.basics import CArgObject
+from _ctypes.basics import CArgObject, as_ffi_pointer
 
 class ArrayMeta(_CDataMeta):
     def __new__(self, name, cls, typedict):
     def _to_ffi_param(self):
         return self._get_buffer_value()
 
+    def _as_ffi_pointer_(self, ffitype):
+        return as_ffi_pointer(self, ffitype)
+
 ARRAY_CACHE = {}
 
 def create_array_type(base, length):
             _type_ = base
         )
         cls = ArrayMeta(name, (Array,), tpdict)
+        cls._ffiargtype = _ffi.types.Pointer(base.get_ffi_argtype())
         ARRAY_CACHE[key] = cls
         return cls

lib_pypy/_ctypes/basics.py

     }
 
 
+# called from primitive.py, pointer.py, array.py
+def as_ffi_pointer(value, ffitype):
+    my_ffitype = type(value).get_ffi_argtype()
+    # for now, we always allow types.pointer, else a lot of tests
+    # break. We need to rethink how pointers are represented, though
+    if my_ffitype is not ffitype and ffitype is not _ffi.types.void_p:
+        raise ArgumentError("expected %s instance, got %s" % (type(value),
+                                                              ffitype))
+    return value._get_buffer_value()
+
+
 # used by "byref"
 from _ctypes.pointer import pointer
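
Editor's note: the comment inside as_ffi_pointer above spells out why an array or pointer is accepted wherever a void pointer is expected. As a rough illustration of what this enables at the application level, here is a minimal ctypes sketch (it assumes a Unix-like libc can be located; memset and its prototype come from the C library and are not part of this change):

    import ctypes, ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library('c'))
    libc.memset.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_size_t]
    libc.memset.restype = ctypes.c_void_p

    buf = (ctypes.c_char * 8)()          # an Array instance, not a pointer
    libc.memset(buf, ord('x'), 8)        # accepted for the c_void_p argument
    assert buf.raw == 'x' * 8

This array-to-pointer conversion is what the new _as_ffi_pointer_ hook on Array provides on the _ffi-based call path.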

lib_pypy/_ctypes/pointer.py

 import _ffi
 from _ctypes.basics import _CData, _CDataMeta, cdata_from_address, ArgumentError
 from _ctypes.basics import keepalive_key, store_reference, ensure_objects
-from _ctypes.basics import sizeof, byref
+from _ctypes.basics import sizeof, byref, as_ffi_pointer
 from _ctypes.array import Array, array_get_slice_params, array_slice_getitem,\
      array_slice_setitem
 
     def _as_ffi_pointer_(self, ffitype):
         return as_ffi_pointer(self, ffitype)
 
-def as_ffi_pointer(value, ffitype):
-    my_ffitype = type(value).get_ffi_argtype()
-    # for now, we always allow types.pointer, else a lot of tests
-    # break. We need to rethink how pointers are represented, though
-    if my_ffitype is not ffitype and ffitype is not _ffi.types.void_p:
-        raise ArgumentError("expected %s instance, got %s" % (type(value),
-                                                              ffitype))
-    return value._get_buffer_value()
 
 def _cast_addr(obj, _, tp):
     if not (isinstance(tp, _CDataMeta) and tp._is_pointer_like()):

pypy/doc/cpython_differences.rst

   implementation detail that shows up because of internal C-level slots
   that PyPy does not have.
 
+* the ``__dict__`` attribute of new-style classes returns a normal dict, as
+  opposed to a dict proxy like in CPython. Mutating the dict will change the
+  type and vice versa. For builtin types, a dictionary will be returned that
+  cannot be changed (but still looks and behaves like a normal dictionary).
+
 
 .. include:: _ref.txt
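
Editor's note: a quick illustration of the paragraph added above. The sketch is written to run on PyPy, where the class __dict__ is a plain dict; on CPython the item assignment raises a TypeError:

    class A(object):
        pass

    d = A.__dict__
    A.x = 1
    assert d['x'] == 1      # attribute changes show up in the dict
    d['y'] = 2              # works on PyPy; TypeError on CPython
    assert A.y == 2         # ...and dict changes show up on the type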

pypy/interpreter/baseobjspace.py

                 raise
             modname = self.str_w(w_modname)
             mod = self.interpclass_w(w_mod)
-            if isinstance(mod, Module):
+            if isinstance(mod, Module) and not mod.startup_called:
                 self.timer.start("startup " + modname)
                 mod.init(self)
                 self.timer.stop("startup " + modname)

pypy/interpreter/test/test_objspace.py

             space.ALL_BUILTIN_MODULES.pop()
             del space._builtinmodule_list
             mods = space.get_builtinmodule_to_install()
+
+    def test_dont_reload_builtin_mods_on_startup(self):
+        from pypy.tool.option import make_config, make_objspace
+        config = make_config(None)
+        space = make_objspace(config)
+        w_executable = space.wrap('executable')
+        assert space.str_w(space.getattr(space.sys, w_executable)) == 'py.py'
+        space.setattr(space.sys, w_executable, space.wrap('foobar'))
+        assert space.str_w(space.getattr(space.sys, w_executable)) == 'foobar'
+        space.startup()
+        assert space.str_w(space.getattr(space.sys, w_executable)) == 'foobar'

pypy/interpreter/test/test_zpy.py

 def test_executable():
     """Ensures sys.executable points to the py.py script"""
     # TODO : watch out for spaces/special chars in pypypath
-    output = run(sys.executable, pypypath,
+    output = run(sys.executable, pypypath, '-S',
                  "-c", "import sys;print sys.executable")
     assert output.splitlines()[-1] == pypypath
 
 def test_special_names():
     """Test the __name__ and __file__ special global names"""
     cmd = "print __name__; print '__file__' in globals()"
-    output = run(sys.executable, pypypath, '-c', cmd)
+    output = run(sys.executable, pypypath, '-S', '-c', cmd)
     assert output.splitlines()[-2] == '__main__'
     assert output.splitlines()[-1] == 'False'
 
     tmpfile.write("print __name__; print __file__\n")
     tmpfile.close()
 
-    output = run(sys.executable, pypypath, tmpfilepath)
+    output = run(sys.executable, pypypath, '-S', tmpfilepath)
     assert output.splitlines()[-2] == '__main__'
     assert output.splitlines()[-1] == str(tmpfilepath)
 
 def test_argv_command():
     """Some tests on argv"""
     # test 1 : no arguments
-    output = run(sys.executable, pypypath,
+    output = run(sys.executable, pypypath, '-S',
                  "-c", "import sys;print sys.argv")
     assert output.splitlines()[-1] == str(['-c'])
 
     # test 2 : some arguments after
-    output = run(sys.executable, pypypath,
+    output = run(sys.executable, pypypath, '-S',
                  "-c", "import sys;print sys.argv", "hello")
     assert output.splitlines()[-1] == str(['-c','hello'])
     
     # test 3 : additional pypy parameters
-    output = run(sys.executable, pypypath,
+    output = run(sys.executable, pypypath, '-S',
                  "-O", "-c", "import sys;print sys.argv", "hello")
     assert output.splitlines()[-1] == str(['-c','hello'])
 
     tmpfile.close()
 
     # test 1 : no arguments
-    output = run(sys.executable, pypypath, tmpfilepath)
+    output = run(sys.executable, pypypath, '-S', tmpfilepath)
     assert output.splitlines()[-1] == str([tmpfilepath])
     
     # test 2 : some arguments after
-    output = run(sys.executable, pypypath, tmpfilepath, "hello")
+    output = run(sys.executable, pypypath, '-S', tmpfilepath, "hello")
     assert output.splitlines()[-1] == str([tmpfilepath,'hello'])
     
     # test 3 : additional pypy parameters
-    output = run(sys.executable, pypypath, "-O", tmpfilepath, "hello")
+    output = run(sys.executable, pypypath, '-S', "-O", tmpfilepath, "hello")
     assert output.splitlines()[-1] == str([tmpfilepath,'hello'])
     
 
     tmpfile.write(TB_NORMALIZATION_CHK)
     tmpfile.close()
 
-    popen = subprocess.Popen([sys.executable, str(pypypath), tmpfilepath],
+    popen = subprocess.Popen([sys.executable, str(pypypath), '-S', tmpfilepath],
                              stderr=subprocess.PIPE)
     _, stderr = popen.communicate()
     assert stderr.endswith('KeyError: <normalized>\n')

pypy/jit/backend/llsupport/gc.py

 import os
 from pypy.rlib import rgc
 from pypy.rlib.objectmodel import we_are_translated, specialize
-from pypy.rlib.debug import fatalerror
 from pypy.rlib.rarithmetic import ovfcheck
 from pypy.rpython.lltypesystem import lltype, llmemory, rffi, rclass, rstr
 from pypy.rpython.lltypesystem import llgroup

pypy/jit/backend/x86/assembler.py

File contents unchanged.

pypy/jit/backend/x86/detect_sse2.py

File contents unchanged.

pypy/jit/backend/x86/runner.py

File contents unchanged.

pypy/jit/backend/x86/support.py

 import sys
 from pypy.rpython.lltypesystem import lltype, rffi, llmemory
 from pypy.translator.tool.cbuild import ExternalCompilationInfo
+from pypy.jit.backend.x86.arch import WORD
 
 
 def values_array(TP, size):
 
 if sys.platform == 'win32':
     ensure_sse2_floats = lambda : None
+    # XXX check for SSE2 on win32 too
 else:
+    if WORD == 4:
+        extra = ['-DPYPY_X86_CHECK_SSE2']
+    else:
+        extra = []
     ensure_sse2_floats = rffi.llexternal_use_eci(ExternalCompilationInfo(
         compile_extra = ['-msse2', '-mfpmath=sse',
-                         '-DPYPY_CPU_HAS_STANDARD_PRECISION'],
+                         '-DPYPY_CPU_HAS_STANDARD_PRECISION'] + extra,
         ))

pypy/jit/backend/x86/test/test_ztranslation.py

             set_param(jitdriver, "trace_eagerness", 2)
             total = 0
             frame = Frame(i)
+            j = float(j)
             while frame.i > 3:
                 jitdriver.can_enter_jit(frame=frame, total=total, j=j)
                 jitdriver.jit_merge_point(frame=frame, total=total, j=j)

pypy/jit/metainterp/optimizeopt/optimizer.py

         assert isinstance(descr, compile.ResumeGuardDescr)
         modifier = resume.ResumeDataVirtualAdder(descr, self.resumedata_memo)
         try:
-            newboxes = modifier.finish(self.values, self.pendingfields)
+            newboxes = modifier.finish(self, self.pendingfields)
             if len(newboxes) > self.metainterp_sd.options.failargs_limit:
                 raise resume.TagOverflow
         except resume.TagOverflow:

pypy/jit/metainterp/optimizeopt/test/test_optimizeopt.py

         """
         self.optimize_loop(ops, expected)
         
+    def test_constant_failargs(self):
+        ops = """
+        [p1, i2, i3]
+        setfield_gc(p1, ConstPtr(myptr), descr=nextdescr)
+        p16 = getfield_gc(p1, descr=nextdescr)
+        guard_true(i2) [p16, i3]
+        jump(p1, i3, i2)
+        """
+        preamble = """
+        [p1, i2, i3]
+        setfield_gc(p1, ConstPtr(myptr), descr=nextdescr)
+        guard_true(i2) [i3]
+        jump(p1, i3)
+        """
+        expected = """
+        [p1, i3]
+        guard_true(i3) []
+        jump(p1, 1)
+        """
+        self.optimize_loop(ops, expected, preamble)
+
+    def test_issue1048(self):
+        ops = """
+        [p1, i2, i3]
+        p16 = getfield_gc(p1, descr=nextdescr)
+        guard_true(i2) [p16]
+        setfield_gc(p1, ConstPtr(myptr), descr=nextdescr)
+        jump(p1, i3, i2)
+        """
+        expected = """
+        [p1, i3]
+        guard_true(i3) []
+        jump(p1, 1)
+        """
+        self.optimize_loop(ops, expected)
+
+    def test_issue1048_ok(self):
+        ops = """
+        [p1, i2, i3]
+        p16 = getfield_gc(p1, descr=nextdescr)
+        call(p16, descr=nonwritedescr)
+        guard_true(i2) [p16]
+        setfield_gc(p1, ConstPtr(myptr), descr=nextdescr)
+        jump(p1, i3, i2)
+        """
+        expected = """
+        [p1, i3]
+        call(ConstPtr(myptr), descr=nonwritedescr)
+        guard_true(i3) []
+        jump(p1, 1)
+        """
+        self.optimize_loop(ops, expected)
+
 class TestLLtype(OptimizeOptTest, LLtypeMixin):
     pass
 

pypy/jit/metainterp/resume.py

 
     # env numbering
 
-    def number(self, values, snapshot):
+    def number(self, optimizer, snapshot):
         if snapshot is None:
             return lltype.nullptr(NUMBERING), {}, 0
         if snapshot in self.numberings:
              numb, liveboxes, v = self.numberings[snapshot]
              return numb, liveboxes.copy(), v
 
-        numb1, liveboxes, v = self.number(values, snapshot.prev)
+        numb1, liveboxes, v = self.number(optimizer, snapshot.prev)
         n = len(liveboxes)-v
         boxes = snapshot.boxes
         length = len(boxes)
         numb = lltype.malloc(NUMBERING, length)
         for i in range(length):
             box = boxes[i]
-            value = values.get(box, None)
-            if value is not None:
-                box = value.get_key_box()
+            value = optimizer.getvalue(box)
+            box = value.get_key_box()
 
             if isinstance(box, Const):
                 tagged = self.getconst(box)
         _, tagbits = untag(tagged)
         return tagbits == TAGVIRTUAL
 
-    def finish(self, values, pending_setfields=[]):
+    def finish(self, optimizer, pending_setfields=[]):
         # compute the numbering
         storage = self.storage
         # make sure that nobody attached resume data to this guard yet
         assert not storage.rd_numb
         snapshot = storage.rd_snapshot
         assert snapshot is not None # is that true?
-        numb, liveboxes_from_env, v = self.memo.number(values, snapshot)
+        numb, liveboxes_from_env, v = self.memo.number(optimizer, snapshot)
         self.liveboxes_from_env = liveboxes_from_env
         self.liveboxes = {}
         storage.rd_numb = numb
                 liveboxes[i] = box
             else:
                 assert tagbits == TAGVIRTUAL
-                value = values[box]
+                value = optimizer.getvalue(box)
                 value.get_args_for_fail(self)
 
         for _, box, fieldbox, _ in pending_setfields:
             self.register_box(box)
             self.register_box(fieldbox)
-            value = values[fieldbox]
+            value = optimizer.getvalue(fieldbox)
             value.get_args_for_fail(self)
 
-        self._number_virtuals(liveboxes, values, v)
+        self._number_virtuals(liveboxes, optimizer, v)
         self._add_pending_fields(pending_setfields)
 
         storage.rd_consts = self.memo.consts
         dump_storage(storage, liveboxes)
         return liveboxes[:]
 
-    def _number_virtuals(self, liveboxes, values, num_env_virtuals):
+    def _number_virtuals(self, liveboxes, optimizer, num_env_virtuals):
         # !! 'liveboxes' is a list that is extend()ed in-place !!
         memo = self.memo
         new_liveboxes = [None] * memo.num_cached_boxes()
             memo.nvholes += length - len(vfieldboxes)
             for virtualbox, fieldboxes in vfieldboxes.iteritems():
                 num, _ = untag(self.liveboxes[virtualbox])
-                value = values[virtualbox]
+                value = optimizer.getvalue(virtualbox)
                 fieldnums = [self._gettagged(box)
                              for box in fieldboxes]
                 vinfo = value.make_virtual_info(self, fieldnums)

pypy/jit/metainterp/test/test_ajit.py

         self.check_resops(arraylen_gc=3)
 
     def test_ulonglong_mod(self):
-        myjitdriver = JitDriver(greens = [], reds = ['n', 'sa', 'i'])
+        myjitdriver = JitDriver(greens = [], reds = ['n', 'a'])
+        class A:
+            pass
         def f(n):
             sa = i = rffi.cast(rffi.ULONGLONG, 1)
+            a = A()
             while i < rffi.cast(rffi.ULONGLONG, n):
-                myjitdriver.jit_merge_point(sa=sa, n=n, i=i)
+                a.sa = sa
+                a.i = i
+                myjitdriver.jit_merge_point(n=n, a=a)
+                sa = a.sa
+                i = a.i
                 sa += sa % i
                 i += 1
         res = self.meta_interp(f, [32])

pypy/jit/metainterp/test/test_quasiimmut.py

 from pypy.jit.metainterp.quasiimmut import get_current_qmut_instance
 from pypy.jit.metainterp.test.support import LLJitMixin
 from pypy.jit.codewriter.policy import StopAtXPolicy
-from pypy.rlib.jit import JitDriver, dont_look_inside
+from pypy.rlib.jit import JitDriver, dont_look_inside, unroll_safe
 
 
 def test_get_current_qmut_instance():
         assert res == 1
         self.check_jitcell_token_count(2)
 
+    def test_for_loop_array(self):
+        myjitdriver = JitDriver(greens=[], reds=["n", "i"])
+        class Foo(object):
+            _immutable_fields_ = ["x?[*]"]
+            def __init__(self, x):
+                self.x = x
+        f = Foo([1, 3, 5, 6])
+        @unroll_safe
+        def g(v):
+            for x in f.x:
+                if x & 1 == 0:
+                    v += 1
+            return v
+        def main(n):
+            i = 0
+            while i < n:
+                myjitdriver.jit_merge_point(n=n, i=i)
+                i = g(i)
+            return i
+        res = self.meta_interp(main, [10])
+        assert res == 10
+        self.check_resops({
+            "int_add": 2, "int_lt": 2, "jump": 1, "guard_true": 2,
+            "guard_not_invalidated": 2
+        })
+
 
 class TestLLtypeGreenFieldsTests(QuasiImmutTests, LLJitMixin):
     pass

pypy/jit/metainterp/test/test_resume.py

     rd_virtuals = None
     rd_pendingfields = None
 
+
+class FakeOptimizer(object):
+    def __init__(self, values):
+        self.values = values
+        
+    def getvalue(self, box):
+        try:
+            value = self.values[box]
+        except KeyError:
+            value = self.values[box] = OptValue(box)
+        return value
+        
+
 def test_tag():
     assert tag(3, 1) == rffi.r_short(3<<2|1)
     assert tag(-3, 2) == rffi.r_short(-3<<2|2)
     capture_resumedata(fs, None, [], storage)
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish({})
+    liveboxes = modifier.finish(FakeOptimizer({}))
     metainterp = MyMetaInterp()
 
     b1t, b2t, b3t = [BoxInt(), BoxPtr(), BoxInt()]
     capture_resumedata(fs, [b4], [], storage)
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish({})
+    liveboxes = modifier.finish(FakeOptimizer({}))
     metainterp = MyMetaInterp()
 
     b1t, b2t, b3t, b4t = [BoxInt(), BoxPtr(), BoxInt(), BoxPtr()]
     
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish({})
+    liveboxes = modifier.finish(FakeOptimizer({}))
 
     modifier = ResumeDataVirtualAdder(storage2, memo)
-    liveboxes2 = modifier.finish({})
+    liveboxes2 = modifier.finish(FakeOptimizer({}))
 
     metainterp = MyMetaInterp()
 
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
     values = {b2: virtual_value(b2, b5, c4)}
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish(values)
+    liveboxes = modifier.finish(FakeOptimizer(values))
     assert len(storage.rd_virtuals) == 1
     assert storage.rd_virtuals[0].fieldnums == [tag(-1, TAGBOX),
                                                 tag(0, TAGCONST)]
     values = {b2: virtual_value(b2, b4, v6), b6: v6}
     memo.clear_box_virtual_numbers()
     modifier = ResumeDataVirtualAdder(storage2, memo)
-    liveboxes2 = modifier.finish(values)
+    liveboxes2 = modifier.finish(FakeOptimizer(values))
     assert len(storage2.rd_virtuals) == 2    
     assert storage2.rd_virtuals[0].fieldnums == [tag(len(liveboxes2)-1, TAGBOX),
                                                  tag(-1, TAGVIRTUAL)]
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
     values = {b2: virtual_value(b2, b5, c4)}
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish(values)
+    liveboxes = modifier.finish(FakeOptimizer(values))
     assert len(storage.rd_virtuals) == 1
     assert storage.rd_virtuals[0].fieldnums == [tag(-1, TAGBOX),
                                                 tag(0, TAGCONST)]
     capture_resumedata(fs, None, [], storage2)
     values[b4] = virtual_value(b4, b6, c4)
     modifier = ResumeDataVirtualAdder(storage2, memo)
-    liveboxes = modifier.finish(values)
+    liveboxes = modifier.finish(FakeOptimizer(values))
     assert len(storage2.rd_virtuals) == 2
     assert storage2.rd_virtuals[1].fieldnums == storage.rd_virtuals[0].fieldnums
     assert storage2.rd_virtuals[1] is storage.rd_virtuals[0]
     v1.setfield(LLtypeMixin.nextdescr, v2)
     values = {b1: v1, b2: v2}
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish(values)
+    liveboxes = modifier.finish(FakeOptimizer(values))
     assert liveboxes == [b3]
     assert len(storage.rd_virtuals) == 2
     assert storage.rd_virtuals[0].fieldnums == [tag(-1, TAGBOX),
 
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
 
-    numb, liveboxes, v = memo.number({}, snap1)
+    numb, liveboxes, v = memo.number(FakeOptimizer({}), snap1)
     assert v == 0
 
     assert liveboxes == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX),
                                     tag(0, TAGBOX), tag(2, TAGINT)]
     assert not numb.prev.prev
 
-    numb2, liveboxes2, v = memo.number({}, snap2)
+    numb2, liveboxes2, v = memo.number(FakeOptimizer({}), snap2)
     assert v == 0
     
     assert liveboxes2 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX),
             return self.virt
 
     # renamed
-    numb3, liveboxes3, v = memo.number({b3: FakeValue(False, c4)}, snap3)
+    numb3, liveboxes3, v = memo.number(FakeOptimizer({b3: FakeValue(False, c4)}),
+                                       snap3)
     assert v == 0
     
     assert liveboxes3 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX)}
     env4 = [c3, b4, b1, c3]
     snap4 = Snapshot(snap, env4)    
 
-    numb4, liveboxes4, v = memo.number({b4: FakeValue(True, b4)}, snap4)
+    numb4, liveboxes4, v = memo.number(FakeOptimizer({b4: FakeValue(True, b4)}),
+                                       snap4)
     assert v == 1
     
     assert liveboxes4 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX),
     env5 = [b1, b4, b5]
     snap5 = Snapshot(snap4, env5)    
 
-    numb5, liveboxes5, v = memo.number({b4: FakeValue(True, b4),
-                                        b5: FakeValue(True, b5)}, snap5)
+    numb5, liveboxes5, v = memo.number(FakeOptimizer({b4: FakeValue(True, b4),
+                                                      b5: FakeValue(True, b5)}),
+                                       snap5)
     assert v == 2
     
     assert liveboxes5 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX),
     storage = make_storage(b1s, b2s, b3s)
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())    
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish({})
+    liveboxes = modifier.finish(FakeOptimizer({}))
     assert storage.rd_snapshot is None
     cpu = MyCPU([])
     reader = ResumeDataDirectReader(MyMetaInterp(cpu), storage)
     storage = make_storage(b1s, b2s, b3s)
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())
     modifier = ResumeDataVirtualAdder(storage, memo)
-    modifier.finish({})
+    modifier.finish(FakeOptimizer({}))
     assert len(memo.consts) == 2
     assert storage.rd_consts is memo.consts
 
     b1s, b2s, b3s = [ConstInt(sys.maxint), ConstInt(2**17), ConstInt(-65)]
     storage2 = make_storage(b1s, b2s, b3s)
     modifier2 = ResumeDataVirtualAdder(storage2, memo)
-    modifier2.finish({})
+    modifier2.finish(FakeOptimizer({}))
     assert len(memo.consts) == 3    
     assert storage2.rd_consts is memo.consts
 
 
     val = FakeValue()
     values = {b1s: val, b2s: val}  
-    liveboxes = modifier.finish(values)
+    liveboxes = modifier.finish(FakeOptimizer(values))
     assert storage.rd_snapshot is None
     b1t, b3t = [BoxInt(11), BoxInt(33)]
     newboxes = _resume_remap(liveboxes, [b1_2, b3s], b1t, b3t)
     storage = make_storage(b1s, b2s, b3s)
     memo = ResumeDataLoopMemo(FakeMetaInterpStaticData())        
     modifier = ResumeDataVirtualAdder(storage, memo)
-    liveboxes = modifier.finish({})
+    liveboxes = modifier.finish(FakeOptimizer({}))
     b2t, b3t = [BoxPtr(demo55o), BoxInt(33)]
     newboxes = _resume_remap(liveboxes, [b2s, b3s], b2t, b3t)
     metainterp = MyMetaInterp()
     values = {b2s: v2, b4s: v4}
 
     liveboxes = []
-    modifier._number_virtuals(liveboxes, values, 0)
+    modifier._number_virtuals(liveboxes, FakeOptimizer(values), 0)
     storage.rd_consts = memo.consts[:]
     storage.rd_numb = None
     # resume
     modifier.register_virtual_fields(b2s, [b4s, c1s])
     liveboxes = []
     values = {b2s: v2}
-    modifier._number_virtuals(liveboxes, values, 0)
+    modifier._number_virtuals(liveboxes, FakeOptimizer(values), 0)
     dump_storage(storage, liveboxes)
     storage.rd_consts = memo.consts[:]
     storage.rd_numb = None
     v2.setfield(LLtypeMixin.bdescr, OptValue(b4s))
     modifier.register_virtual_fields(b2s, [c1s, b4s])
     liveboxes = []
-    modifier._number_virtuals(liveboxes, {b2s: v2}, 0)
+    modifier._number_virtuals(liveboxes, FakeOptimizer({b2s: v2}), 0)
     dump_storage(storage, liveboxes)
     storage.rd_consts = memo.consts[:]
     storage.rd_numb = None
 
     values = {b4s: v4, b2s: v2}
     liveboxes = []
-    modifier._number_virtuals(liveboxes, values, 0)
+    modifier._number_virtuals(liveboxes, FakeOptimizer(values), 0)
     assert liveboxes == [b2s, b4s] or liveboxes == [b4s, b2s]
     modifier._add_pending_fields([(LLtypeMixin.nextdescr, b2s, b4s, -1)])
     storage.rd_consts = memo.consts[:]

pypy/jit/metainterp/warmspot.py

                     if sys.stdout == sys.__stdout__:
                         import pdb; pdb.post_mortem(tb)
                     raise e.__class__, e, tb
-                fatalerror('~~~ Crash in JIT! %s' % (e,), traceback=True)
+                fatalerror('~~~ Crash in JIT! %s' % (e,))
         crash_in_jit._dont_inline_ = True
 
         if self.translator.rtyper.type_system.name == 'lltypesystem':

pypy/jit/tl/tinyframe/tinyframe.py

     def repr(self):
         return "<function %s(%s)>" % (self.outer.repr(), self.inner.repr())
 
-driver = JitDriver(greens = ['code', 'i'], reds = ['self'],
+driver = JitDriver(greens = ['i', 'code'], reds = ['self'],
                    virtualizables = ['self'])
 
 class Frame(object):

pypy/module/_demo/test/test_sieve.py

+from pypy.conftest import gettestobjspace
+
+
+class AppTestSieve:
+    def setup_class(cls):
+        cls.space = gettestobjspace(usemodules=('_demo',))
+
+    def test_sieve(self):
+        import _demo
+        lst = _demo.sieve(100)
+        assert lst == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
+                       43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]

pypy/module/_io/__init__.py

         }
 
     def init(self, space):
+        MixedModule.init(self, space)
         w_UnsupportedOperation = space.call_function(
             space.w_type,
             space.wrap('UnsupportedOperation'),
             space.newdict())
         space.setattr(self, space.wrap('UnsupportedOperation'),
                       w_UnsupportedOperation)
+
+    def shutdown(self, space):
+        # at shutdown, flush all open streams.  Ignore I/O errors.
+        from pypy.module._io.interp_iobase import get_autoflusher
+        get_autoflusher(space).flush_all(space)
+

pypy/module/_io/interp_iobase.py

 from pypy.interpreter.gateway import interp2app
 from pypy.interpreter.error import OperationError, operationerrfmt
 from pypy.rlib.rstring import StringBuilder
+from pypy.rlib import rweakref
+
 
 DEFAULT_BUFFER_SIZE = 8192
 
         self.space = space
         self.w_dict = space.newdict()
         self.__IOBase_closed = False
+        self.streamholder = None # needed by AutoFlusher
+        get_autoflusher(space).add(self)
 
     def getdict(self, space):
         return self.w_dict
             space.call_method(self, "flush")
         finally:
             self.__IOBase_closed = True
+            get_autoflusher(space).remove(self)
 
     def flush_w(self, space):
         if self._CLOSED():
     read = interp2app(W_RawIOBase.read_w),
     readall = interp2app(W_RawIOBase.readall_w),
 )
+
+
+# ------------------------------------------------------------
+# functions to make sure that all streams are flushed on exit
+# ------------------------------------------------------------
+
+class StreamHolder(object):
+
+    def __init__(self, w_iobase):
+        self.w_iobase_ref = rweakref.ref(w_iobase)
+        w_iobase.autoflusher = self
+
+    def autoflush(self, space):
+        w_iobase = self.w_iobase_ref()
+        if w_iobase is not None:
+            try:
+                space.call_method(w_iobase, 'flush')
+            except OperationError, e:
+                # if it's an IOError, ignore it
+                if not e.match(space, space.w_IOError):
+                    raise
+        
+
+class AutoFlusher(object):
+    
+    def __init__(self, space):
+        self.streams = {}
+
+    def add(self, w_iobase):
+        assert w_iobase.streamholder is None
+        holder = StreamHolder(w_iobase)
+        w_iobase.streamholder = holder
+        self.streams[holder] = None
+
+    def remove(self, w_iobase):
+        holder = w_iobase.streamholder
+        if holder is not None:
+            del self.streams[holder]
+
+    def flush_all(self, space):
+        while self.streams:
+            for streamholder in self.streams.keys():
+                try:
+                    del self.streams[streamholder]
+                except KeyError:
+                    pass    # key was removed in the meantime
+                else:
+                    streamholder.autoflush(space)
+
+
+def get_autoflusher(space):
+    return space.fromcache(AutoFlusher)
+
+
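
Editor's note: the AutoFlusher above holds its registered streams only through weak references, so the flush-at-exit bookkeeping does not keep otherwise-dead streams alive. A minimal standalone sketch of the same pattern, using the stdlib weakref module instead of rweakref (the names here are illustrative only, not part of the change):

    import weakref

    class FlushRegistry(object):
        def __init__(self):
            self.holders = {}

        def add(self, stream):
            # hold the stream only weakly
            self.holders[weakref.ref(stream)] = None

        def flush_all(self):
            for ref in list(self.holders):
                del self.holders[ref]
                stream = ref()
                if stream is not None:
                    try:
                        stream.flush()
                    except IOError:
                        pass   # ignore I/O errors, as shutdown() does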

pypy/module/_io/test/test_fileio.py

         f.close()
         assert repr(f) == "<_io.FileIO [closed]>"
 
+def test_flush_at_exit():
+    from pypy import conftest
+    from pypy.tool.option import make_config, make_objspace
+    from pypy.tool.udir import udir
+
+    tmpfile = udir.join('test_flush_at_exit')
+    config = make_config(conftest.option)
+    space = make_objspace(config)
+    space.appexec([space.wrap(str(tmpfile))], """(tmpfile):
+        import io
+        f = io.open(tmpfile, 'w', encoding='ascii')
+        f.write('42')
+        # no flush() and no close()
+        import sys; sys._keepalivesomewhereobscure = f
+    """)
+    space.finish()
+    assert tmpfile.read() == '42'
+
+def test_flush_at_exit_IOError():
+    from pypy import conftest
+    from pypy.tool.option import make_config, make_objspace
+
+    config = make_config(conftest.option)
+    space = make_objspace(config)
+    space.appexec([], """():
+        import io
+        class MyStream(io.IOBase):
+            def flush(self):
+                raise IOError
+
+        s = MyStream()
+        import sys; sys._keepalivesomewhereobscure = s
+    """)
+    space.finish() # the IOError has been ignored

pypy/module/cpyext/api.py

         "Tuple": "space.w_tuple",
         "List": "space.w_list",
         "Set": "space.w_set",
+        "FrozenSet": "space.w_frozenset",
         "Int": "space.w_int",
         "Bool": "space.w_bool",
         "Float": "space.w_float",
         }.items():
         GLOBALS['Py%s_Type#' % (cpyname, )] = ('PyTypeObject*', pypyexpr)
 
-    for cpyname in 'Method List Int Long Dict Tuple Class'.split():
+    for cpyname in 'Method List Long Dict Tuple Class'.split():
         FORWARD_DECLS.append('typedef struct { PyObject_HEAD } '
                              'Py%sObject' % (cpyname, ))
 build_exported_objects()

pypy/module/cpyext/dictobject.py

         w_item = space.call_method(w_iter, "next")
         w_key, w_value = space.fixedview(w_item, 2)
         state = space.fromcache(RefcountState)
-        pkey[0]   = state.make_borrowed(w_dict, w_key)
-        pvalue[0] = state.make_borrowed(w_dict, w_value)
+        if pkey:
+            pkey[0]   = state.make_borrowed(w_dict, w_key)
+        if pvalue:
+            pvalue[0] = state.make_borrowed(w_dict, w_value)
         ppos[0] += 1
     except OperationError, e:
         if not e.match(space, space.w_StopIteration):

pypy/module/cpyext/eval.py

 from pypy.interpreter.error import OperationError
+from pypy.interpreter.astcompiler import consts
 from pypy.rpython.lltypesystem import rffi, lltype
 from pypy.module.cpyext.api import (
     cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, fread, feof, Py_ssize_tP,
     cpython_struct)
 from pypy.module.cpyext.pyobject import PyObject, borrow_from
 from pypy.module.cpyext.pyerrors import PyErr_SetFromErrno
+from pypy.module.cpyext.funcobject import PyCodeObject
 from pypy.module.__builtin__ import compiling
 
 PyCompilerFlags = cpython_struct(
-    "PyCompilerFlags", ())
+    "PyCompilerFlags", (("cf_flags", rffi.INT),))
 PyCompilerFlagsPtr = lltype.Ptr(PyCompilerFlags)
 
+PyCF_MASK = (consts.CO_FUTURE_DIVISION | 
+             consts.CO_FUTURE_ABSOLUTE_IMPORT |
+             consts.CO_FUTURE_WITH_STATEMENT |
+             consts.CO_FUTURE_PRINT_FUNCTION |
+             consts.CO_FUTURE_UNICODE_LITERALS)
+
 @cpython_api([PyObject, PyObject, PyObject], PyObject)
 def PyEval_CallObjectWithKeywords(space, w_obj, w_arg, w_kwds):
     return space.call(w_obj, w_arg, w_kwds)
         return None
     return borrow_from(None, caller.w_globals)
 
+@cpython_api([PyCodeObject, PyObject, PyObject], PyObject)
+def PyEval_EvalCode(space, w_code, w_globals, w_locals):
+    """This is a simplified interface to PyEval_EvalCodeEx(), with just
+    the code object, and the dictionaries of global and local variables.
+    The other arguments are set to NULL."""
+    if w_globals is None:
+        w_globals = space.w_None
+    if w_locals is None:
+        w_locals = space.w_None
+    return compiling.eval(space, w_code, w_globals, w_locals)
+
 @cpython_api([PyObject, PyObject], PyObject)
 def PyObject_CallObject(space, w_obj, w_arg):
     """
 Py_file_input = 257
 Py_eval_input = 258
 
-def compile_string(space, source, filename, start):
+def compile_string(space, source, filename, start, flags=0):
     w_source = space.wrap(source)
     start = rffi.cast(lltype.Signed, start)
     if start == Py_file_input:
     else:
         raise OperationError(space.w_ValueError, space.wrap(
             "invalid mode parameter for compilation"))
-    return compiling.compile(space, w_source, filename, mode)
+    return compiling.compile(space, w_source, filename, mode, flags)
 
 def run_string(space, source, filename, start, w_globals, w_locals):
     w_code = compile_string(space, source, filename, start)
     filename = "<string>"
     return run_string(space, source, filename, start, w_globals, w_locals)
 
+@cpython_api([rffi.CCHARP, rffi.INT_real, PyObject, PyObject,
+              PyCompilerFlagsPtr], PyObject)
+def PyRun_StringFlags(space, source, start, w_globals, w_locals, flagsptr):
+    """Execute Python source code from str in the context specified by the
+    dictionaries globals and locals with the compiler flags specified by
+    flags.  The parameter start specifies the start token that should be used to
+    parse the source code.
+
+    Returns the result of executing the code as a Python object, or NULL if an
+    exception was raised."""
+    source = rffi.charp2str(source)
+    if flagsptr:
+        flags = rffi.cast(lltype.Signed, flagsptr.c_cf_flags)
+    else:
+        flags = 0
+    w_code = compile_string(space, source, "<string>", start, flags)
+    return compiling.eval(space, w_code, w_globals, w_locals)
+
 @cpython_api([FILEP, CONST_STRING, rffi.INT_real, PyObject, PyObject], PyObject)
 def PyRun_File(space, fp, filename, start, w_globals, w_locals):
     """This is a simplified interface to PyRun_FileExFlags() below, leaving
 
 @cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT_real, PyCompilerFlagsPtr],
              PyObject)
-def Py_CompileStringFlags(space, source, filename, start, flags):
+def Py_CompileStringFlags(space, source, filename, start, flagsptr):
     """Parse and compile the Python source code in str, returning the
     resulting code object.  The start token is given by start; this
     can be used to constrain the code which can be compiled and should
     returns NULL if the code cannot be parsed or compiled."""
     source = rffi.charp2str(source)
     filename = rffi.charp2str(filename)
-    if flags:
-        raise OperationError(space.w_NotImplementedError, space.wrap(
-                "cpyext Py_CompileStringFlags does not accept flags"))
-    return compile_string(space, source, filename, start)
+    if flagsptr:
+        flags = rffi.cast(lltype.Signed, flagsptr.c_cf_flags)
+    else:
+        flags = 0
+    return compile_string(space, source, filename, start, flags)
+
+@cpython_api([PyCompilerFlagsPtr], rffi.INT_real, error=CANNOT_FAIL)
+def PyEval_MergeCompilerFlags(space, cf):
+    """This function changes the flags of the current evaluation
+    frame, and returns true on success, false on failure."""
+    flags = rffi.cast(lltype.Signed, cf.c_cf_flags)
+    result = flags != 0
+    current_frame = space.getexecutioncontext().gettopframe_nohidden()
+    if current_frame:
+        codeflags = current_frame.pycode.co_flags
+        compilerflags = codeflags & PyCF_MASK
+        if compilerflags:
+            result = 1
+            flags |= compilerflags
+        # No future keyword at the moment
+        # if codeflags & CO_GENERATOR_ALLOWED:
+        #     result = 1
+        #     flags |= CO_GENERATOR_ALLOWED
+    cf.c_cf_flags = rffi.cast(rffi.INT, flags)
+    return result
+
+        
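
Editor's note: PyEval_MergeCompilerFlags above ORs the CO_FUTURE_* bits of the currently executing code object into the caller-supplied flags. Those bits are visible as ordinary co_flags at the Python level, as this small Python 2 sketch shows (0x2000 is CO_FUTURE_DIVISION, the same bit defined in include/code.h above and checked by the test further below):

    from __future__ import division

    CO_FUTURE_DIVISION = 0x2000

    def f():
        return 1 / 2                # true division because of the future import

    assert f.func_code.co_flags & CO_FUTURE_DIVISION
    assert f() == 0.5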

pypy/module/cpyext/funcobject.py

 from pypy.rpython.lltypesystem import rffi, lltype
 from pypy.module.cpyext.api import (
-    PyObjectFields, generic_cpy_call, CONST_STRING,
+    PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL,
     cpython_api, bootstrap_function, cpython_struct, build_type_checkers)
 from pypy.module.cpyext.pyobject import (
     PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from)
 
 PyFunction_Check, PyFunction_CheckExact = build_type_checkers("Function", Function)
 PyMethod_Check, PyMethod_CheckExact = build_type_checkers("Method", Method)
+PyCode_Check, PyCode_CheckExact = build_type_checkers("Code", PyCode)
 
 def function_attach(space, py_obj, w_obj):
     py_func = rffi.cast(PyFunctionObject, py_obj)
                              freevars=[],
                              cellvars=[]))
 
+@cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL)
+def PyCode_GetNumFree(space, w_co):
+    """Return the number of free variables in co."""
+    co = space.interp_w(PyCode, w_co)
+    return len(co.co_freevars)
+

pypy/module/cpyext/include/Python.h

 #include "compile.h"
 #include "frameobject.h"
 #include "eval.h"
+#include "pymath.h"
 #include "pymem.h"
 #include "pycobject.h"
 #include "pycapsule.h"

pypy/module/cpyext/include/code.h

 
 /* Masks for co_flags above */
 /* These values are also in funcobject.py */
-#define CO_OPTIMIZED	0x0001
-#define CO_NEWLOCALS	0x0002
-#define CO_VARARGS	0x0004
-#define CO_VARKEYWORDS	0x0008
+#define CO_OPTIMIZED    0x0001
+#define CO_NEWLOCALS    0x0002
+#define CO_VARARGS      0x0004
+#define CO_VARKEYWORDS  0x0008
 #define CO_NESTED       0x0010
 #define CO_GENERATOR    0x0020
 
+#define CO_FUTURE_DIVISION         0x02000
+#define CO_FUTURE_ABSOLUTE_IMPORT  0x04000
+#define CO_FUTURE_WITH_STATEMENT   0x08000
+#define CO_FUTURE_PRINT_FUNCTION   0x10000
+#define CO_FUTURE_UNICODE_LITERALS 0x20000
+
 #ifdef __cplusplus
 }
 #endif

pypy/module/cpyext/include/intobject.h

 extern "C" {
 #endif
 
+typedef struct {
+    PyObject_HEAD
+    long ob_ival;
+} PyIntObject;
+
 #ifdef __cplusplus
 }
 #endif

pypy/module/cpyext/include/pymath.h

+#ifndef Py_PYMATH_H
+#define Py_PYMATH_H
+
+/**************************************************************************
+Symbols and macros to supply platform-independent interfaces to mathematical
+functions and constants
+**************************************************************************/
+
+/* HUGE_VAL is supposed to expand to a positive double infinity.  Python
+ * uses Py_HUGE_VAL instead because some platforms are broken in this
+ * respect.  We used to embed code in pyport.h to try to worm around that,
+ * but different platforms are broken in conflicting ways.  If you're on
+ * a platform where HUGE_VAL is defined incorrectly, fiddle your Python
+ * config to #define Py_HUGE_VAL to something that works on your platform.
+ */
+#ifndef Py_HUGE_VAL
+#define Py_HUGE_VAL HUGE_VAL
+#endif
+
+#endif /* Py_PYMATH_H */

pypy/module/cpyext/include/pythonrun.h

     int cf_flags;  /* bitmask of CO_xxx flags relevant to future */
 } PyCompilerFlags;
 
+#define PyCF_MASK (CO_FUTURE_DIVISION | CO_FUTURE_ABSOLUTE_IMPORT | \
+                   CO_FUTURE_WITH_STATEMENT | CO_FUTURE_PRINT_FUNCTION | \
+                   CO_FUTURE_UNICODE_LITERALS)
+#define PyCF_MASK_OBSOLETE (CO_NESTED)
+#define PyCF_SOURCE_IS_UTF8  0x0100
+#define PyCF_DONT_IMPLY_DEDENT 0x0200
+#define PyCF_ONLY_AST 0x0400
+
 #define Py_CompileString(str, filename, start) Py_CompileStringFlags(str, filename, start, NULL)
 
 #ifdef __cplusplus

pypy/module/cpyext/intobject.py

 from pypy.rpython.lltypesystem import rffi, lltype
 from pypy.interpreter.error import OperationError
 from pypy.module.cpyext.api import (
-    cpython_api, build_type_checkers, PyObject,
-    CONST_STRING, CANNOT_FAIL, Py_ssize_t)
+    cpython_api, cpython_struct, build_type_checkers, bootstrap_function,
+    PyObject, PyObjectFields, CONST_STRING, CANNOT_FAIL, Py_ssize_t)
+from pypy.module.cpyext.pyobject import (
+    make_typedescr, track_reference, RefcountState, from_ref)
 from pypy.rlib.rarithmetic import r_uint, intmask, LONG_TEST
+from pypy.objspace.std.intobject import W_IntObject
 import sys
 
+PyIntObjectStruct = lltype.ForwardReference()
+PyIntObject = lltype.Ptr(PyIntObjectStruct)
+PyIntObjectFields = PyObjectFields + \
+    (("ob_ival", rffi.LONG),)
+cpython_struct("PyIntObject", PyIntObjectFields, PyIntObjectStruct)
+
+@bootstrap_function
+def init_intobject(space):
+    "Type description of PyIntObject"
+    make_typedescr(space.w_int.instancetypedef,
+                   basestruct=PyIntObject.TO,
+                   realize=int_realize)
+
+def int_realize(space, obj):
+    intval = rffi.cast(lltype.Signed, rffi.cast(PyIntObject, obj).c_ob_ival)
+    w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type))
+    w_obj = space.allocate_instance(W_IntObject, w_type)
+    w_obj.__init__(intval)
+    track_reference(space, obj, w_obj)
+    state = space.fromcache(RefcountState)
+    state.set_lifeline(w_obj, obj)
+    return w_obj
+
 PyInt_Check, PyInt_CheckExact = build_type_checkers("Int")
 
 @cpython_api([], lltype.Signed, error=CANNOT_FAIL)

pypy/module/cpyext/object.py

     if not obj:
         PyErr_NoMemory(space)
     obj.c_ob_type = type
-    _Py_NewReference(space, obj)
+    obj.c_ob_refcnt = 1
     return obj
 
 @cpython_api([PyVarObject, PyTypeObjectPtr, Py_ssize_t], PyObject)

pypy/module/cpyext/pyobject.py

 
 class BaseCpyTypedescr(object):
     basestruct = PyObject.TO
+    W_BaseObject = W_ObjectObject
 
     def get_dealloc(self, space):
         from pypy.module.cpyext.typeobject import subtype_dealloc
     def attach(self, space, pyobj, w_obj):
         pass
 
-    def realize(self, space, ref):
-        # For most types, a reference cannot exist without
-        # a real interpreter object
-        raise InvalidPointerException(str(ref))
+    def realize(self, space, obj):
+        w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type))
+        w_obj = space.allocate_instance(self.W_BaseObject, w_type)
+        track_reference(space, obj, w_obj)
+        if w_type is not space.gettypefor(self.W_BaseObject):
+            state = space.fromcache(RefcountState)
+            state.set_lifeline(w_obj, obj)
+        return w_obj
 
 typedescr_cache = {}
 
     obj.c_ob_refcnt = 1
     w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type))
     assert isinstance(w_type, W_TypeObject)
-    if w_type.is_cpytype():
-        w_obj = space.allocate_instance(W_ObjectObject, w_type)
-        track_reference(space, obj, w_obj)
-        state = space.fromcache(RefcountState)
-        state.set_lifeline(w_obj, obj)
-    else:
-        assert False, "Please add more cases in _Py_NewReference()"
+    get_typedescr(w_type.instancetypedef).realize(space, obj)
 
 def _Py_Dealloc(space, obj):
     from pypy.module.cpyext.api import generic_cpy_call_dont_decref

pypy/module/cpyext/stubs.py

     used as the positional and keyword parameters to the object's constructor."""
     raise NotImplementedError
 
-@cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL)
-def PyCode_Check(space, co):
-    """Return true if co is a code object"""
-    raise NotImplementedError
-
-@cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL)
-def PyCode_GetNumFree(space, co):
-    """Return the number of free variables in co."""
-    raise NotImplementedError
-
 @cpython_api([PyObject], rffi.INT_real, error=-1)
 def PyCodec_Register(space, search_function):
     """Register a new codec search function.
     that haven't been explicitly destroyed at that point."""
     raise NotImplementedError
 
-@cpython_api([rffi.VOIDP], lltype.Void)
-def Py_AddPendingCall(space, func):
-    """Post a notification to the Python main thread.  If successful, func will
-    be called with the argument arg at the earliest convenience.  func will be
-    called having the global interpreter lock held and can thus use the full
-    Python API and can take any action such as setting object attributes to
-    signal IO completion.  It must return 0 on success, or -1 signalling an
-    exception.  The notification function won't be interrupted to perform another
-    asynchronous notification recursively, but it can still be interrupted to
-    switch threads if the global interpreter lock is released, for example, if it
-    calls back into Python code.
-
-    This function returns 0 on success in which case the notification has been
-    scheduled.  Otherwise, for example if the notification buffer is full, it
-    returns -1 without setting any exception.
-
-    This function can be called on any thread, be it a Python thread or some
-    other system thread.  If it is a Python thread, it doesn't matter if it holds
-    the global interpreter lock or not.
-    """
-    raise NotImplementedError
-
 @cpython_api([Py_tracefunc, PyObject], lltype.Void)
 def PyEval_SetProfile(space, func, obj):
     """Set the profiler function to func.  The obj parameter is passed to the
     """
     raise NotImplementedError
 
-@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
-def Py_UNICODE_ISTITLE(space, ch):
-    """Return 1 or 0 depending on whether ch is a titlecase character."""
-    raise NotImplementedError
-
-@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
-def Py_UNICODE_ISDIGIT(space, ch):
-    """Return 1 or 0 depending on whether ch is a digit character."""
-    raise NotImplementedError
-
-@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
-def Py_UNICODE_ISNUMERIC(space, ch):
-    """Return 1 or 0 depending on whether ch is a numeric character."""
-    raise NotImplementedError
-
-@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
-def Py_UNICODE_ISALPHA(space, ch):
-    """Return 1 or 0 depending on whether ch is an alphabetic character."""
-    raise NotImplementedError
-
 @cpython_api([rffi.CCHARP], PyObject)
 def PyUnicode_FromFormat(space, format):
     """Take a C printf()-style format string and a variable number of
     use the default error handling."""
     raise NotImplementedError
 
-@cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real], rffi.INT_real, error=-1)
-def PyUnicode_Tailmatch(space, str, substr, start, end, direction):
-    """Return 1 if substr matches str*[*start:end] at the given tail end
-    (direction == -1 means to do a prefix match, direction == 1 a suffix match),
-    0 otherwise. Return -1 if an error occurred.
-
-    This function used an int type for start and end. This
-    might require changes in your code for properly supporting 64-bit
-    systems."""
-    raise NotImplementedError
-
 @cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real], Py_ssize_t, error=-2)
 def PyUnicode_Find(space, str, substr, start, end, direction):
     """Return the first position of substr in str*[*start:end] using the given
     properly supporting 64-bit systems."""
     raise NotImplementedError
 
-@cpython_api([PyObject, PyObject, PyObject, Py_ssize_t], PyObject)
-def PyUnicode_Replace(space, str, substr, replstr, maxcount):
-    """Replace at most maxcount occurrences of substr in str with replstr and
-    return the resulting Unicode object. maxcount == -1 means replace all
-    occurrences.
-
-    This function used an int type for maxcount. This might
-    require changes in your code for properly supporting 64-bit systems."""
-    raise NotImplementedError
-
 @cpython_api([PyObject, PyObject, rffi.INT_real], PyObject)
 def PyUnicode_RichCompare(space, left, right, op):
     """Rich compare two unicode strings and return one of the following:
     source code is read from fp instead of an in-memory string."""
     raise NotImplementedError
 
-@cpython_api([rffi.CCHARP, rffi.INT_real, PyObject, PyObject, PyCompilerFlags], PyObject)
-def PyRun_StringFlags(space, str, start, globals, locals, flags):
-    """Execute Python source code from str in the context specified by the
-    dictionaries globals and locals with the compiler flags specified by
-    flags.  The parameter start specifies the start token that should be used to
-    parse the source code.
-
-    Returns the result of executing the code as a Python object, or NULL if an
-    exception was raised."""
-    raise NotImplementedError
-
 @cpython_api([FILE, rffi.CCHARP, rffi.INT_real, PyObject, PyObject, rffi.INT_real], PyObject)
 def PyRun_FileEx(space, fp, filename, start, globals, locals, closeit):
     """This is a simplified interface to PyRun_FileExFlags() below, leaving
     returns."""
     raise NotImplementedError
 
-@cpython_api([PyCodeObject, PyObject, PyObject], PyObject)
-def PyEval_EvalCode(space, co, globals, locals):
-    """This is a simplified interface to PyEval_EvalCodeEx(), with just
-    the code object, and the dictionaries of global and local variables.
-    The other arguments are set to NULL."""
-    raise NotImplementedError
-
 @cpython_api([PyCodeObject, PyObject, PyObject, PyObjectP, rffi.INT_real, PyObjectP, rffi.INT_real, PyObjectP, rffi.INT_real, PyObject], PyObject)
 def PyEval_EvalCodeEx(space, co, globals, locals, args, argcount, kws, kwcount, defs, defcount, closure):
     """Evaluate a precompiled code object, given a particular environment for its
     throw() methods of generator objects."""
     raise NotImplementedError
 
-@cpython_api([PyCompilerFlags], rffi.INT_real, error=CANNOT_FAIL)
-def PyEval_MergeCompilerFlags(space, cf):
-    """This function changes the flags of the current evaluation frame, and returns
-    true on success, false on failure."""
-    raise NotImplementedError
-
 @cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL)
 def PyWeakref_Check(space, ob):
     """Return true if ob is either a reference or proxy object.

pypy/module/cpyext/stubsactive.py

 def Py_MakePendingCalls(space):
     return 0
 
+pending_call = lltype.Ptr(lltype.FuncType([rffi.VOIDP], rffi.INT_real))
+@cpython_api([pending_call, rffi.VOIDP], rffi.INT_real, error=-1)
+def Py_AddPendingCall(space, func, arg):
+    """Post a notification to the Python main thread.  If successful,
+    func will be called with the argument arg at the earliest
+    convenience.  func will be called having the global interpreter
+    lock held and can thus use the full Python API and can take any
+    action such as setting object attributes to signal IO completion.
+    It must return 0 on success, or -1 signalling an exception.  The
+    notification function won't be interrupted to perform another
+    asynchronous notification recursively, but it can still be
+    interrupted to switch threads if the global interpreter lock is
+    released, for example, if it calls back into Python code.
+
+    This function returns 0 on success in which case the notification
+    has been scheduled.  Otherwise, for example if the notification
+    buffer is full, it returns -1 without setting any exception.
+
+    This function can be called on any thread, be it a Python thread
+    or some other system thread.  If it is a Python thread, it doesn't
+    matter if it holds the global interpreter lock or not.
+    """
+    return -1
+
+thread_func = lltype.Ptr(lltype.FuncType([rffi.VOIDP], lltype.Void))
+@cpython_api([thread_func, rffi.VOIDP], rffi.INT_real, error=-1)
+def PyThread_start_new_thread(space, func, arg):
+    return -1

pypy/module/cpyext/test/test_dictobject.py

         assert space.eq_w(space.len(w_copy), space.len(w_dict))
         assert space.eq_w(w_copy, w_dict)
 
+    def test_iterkeys(self, space, api):
+        w_dict = space.sys.getdict(space)
+        py_dict = make_ref(space, w_dict)
+
+        ppos = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw')
+        pkey = lltype.malloc(PyObjectP.TO, 1, flavor='raw')
+        pvalue = lltype.malloc(PyObjectP.TO, 1, flavor='raw')
+
+        keys_w = []
+        values_w = []
+        try:
+            ppos[0] = 0
+            while api.PyDict_Next(w_dict, ppos, pkey, None):
+                w_key = from_ref(space, pkey[0])
+                keys_w.append(w_key)
+            ppos[0] = 0
+            while api.PyDict_Next(w_dict, ppos, None, pvalue):
+                w_value = from_ref(space, pvalue[0])
+                values_w.append(w_value)
+        finally:
+            lltype.free(ppos, flavor='raw')
+            lltype.free(pkey, flavor='raw')
+            lltype.free(pvalue, flavor='raw')
+
+        api.Py_DecRef(py_dict) # release borrowed references
+
+        assert space.eq_w(space.newlist(keys_w),
+                          space.call_method(w_dict, "keys"))
+        assert space.eq_w(space.newlist(values_w),
+                          space.call_method(w_dict, "values"))
+
     def test_dictproxy(self, space, api):
         w_dict = space.sys.get('modules')
         w_proxy = api.PyDictProxy_New(w_dict)

pypy/module/cpyext/test/test_eval.py

 from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase
 from pypy.module.cpyext.test.test_api import BaseApiTest
 from pypy.module.cpyext.eval import (
-    Py_single_input, Py_file_input, Py_eval_input)
+    Py_single_input, Py_file_input, Py_eval_input, PyCompilerFlags)
 from pypy.module.cpyext.api import fopen, fclose, fileno, Py_ssize_tP
 from pypy.interpreter.gateway import interp2app
+from pypy.interpreter.astcompiler import consts
 from pypy.tool.udir import udir
 import sys, os
 
 
         assert space.int_w(w_res) == 10
 
+    def test_evalcode(self, space, api):
+        w_f = space.appexec([], """():
+            def f(*args):
+                assert isinstance(args, tuple)
+                return len(args) + 8
+            return f
+            """)
+
+        w_t = space.newtuple([space.wrap(1), space.wrap(2)])
+        w_globals = space.newdict()
+        w_locals = space.newdict()
+        space.setitem(w_locals, space.wrap("args"), w_t)
+        w_res = api.PyEval_EvalCode(w_f.code, w_globals, w_locals)
+
+        assert space.int_w(w_res) == 10
+
     def test_run_simple_string(self, space, api):
         def run(code):
             buf = rffi.str2charp(code)
         assert 42 * 43 == space.unwrap(
             api.PyObject_GetItem(w_globals, space.wrap("a")))
 
+    def test_run_string_flags(self, space, api):
+        flags = lltype.malloc(PyCompilerFlags, flavor='raw')
+        flags.c_cf_flags = rffi.cast(rffi.INT, consts.PyCF_SOURCE_IS_UTF8)
+        w_globals = space.newdict()
+        buf = rffi.str2charp("a = u'caf\xc3\xa9'")
+        try:
+            api.PyRun_StringFlags(buf, Py_single_input,
+                                  w_globals, w_globals, flags)
+        finally:
+            rffi.free_charp(buf)
+        w_a = space.getitem(w_globals, space.wrap("a"))
+        assert space.unwrap(w_a) == u'caf\xe9'
+        lltype.free(flags, flavor='raw')
+
     def test_run_file(self, space, api):
         filepath = udir / "cpyext_test_runfile.py"
         filepath.write("raise ZeroDivisionError")
         print dir(mod)
         print mod.__dict__
         assert mod.f(42) == 47
+
+    def test_merge_compiler_flags(self):
+        module = self.import_extension('foo', [
+            ("get_flags", "METH_NOARGS",
+             """
+                PyCompilerFlags flags;
+                flags.cf_flags = 0;
+                int result = PyEval_MergeCompilerFlags(&flags);
+                return Py_BuildValue("ii", result, flags.cf_flags);
+             """),
+            ])
+        assert module.get_flags() == (0, 0)
+
+        ns = {'module':module}
+        exec """from __future__ import division    \nif 1:
+                def nested_flags():
+                    return module.get_flags()""" in ns
+        assert ns['nested_flags']() == (1, 0x2000)  # CO_FUTURE_DIVISION

pypy/module/cpyext/test/test_funcobject.py

         rffi.free_charp(filename)
         rffi.free_charp(funcname)
 
+    def test_getnumfree(self, space, api):
+        w_function = space.appexec([], """():
+            a = 5
+            def method(x): return a, x
+            return method
+        """)
+        assert api.PyCode_GetNumFree(w_function.code) == 1
+
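PyCode_GetNumFree reports how many free (closed-over) variables a code object has; the test expects 1 because method closes over a. At app level the same count is the length of co_freevars, as in this sketch:

    def make():
        a = 5
        def method(x):
            return a, x             # 'a' is a free variable of 'method'
        return method

    m = make()
    assert m.func_code.co_freevars == ('a',)    # PyCode_GetNumFree -> 1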
     def test_classmethod(self, space, api):
         w_function = space.appexec([], """():
             def method(x): return x

pypy/module/cpyext/test/test_intobject.py

         values = module.values()
         types = [type(x) for x in values]
         assert types == [int, long, int, int]
-        
+
+    def test_int_subtype(self):
+        module = self.import_extension(
+            'foo', [
+            ("newEnum", "METH_VARARGS",
+             """
+                EnumObject *enumObj;
+                long intval;
+                PyObject *name;
+
+                if (!PyArg_ParseTuple(args, "Oi", &name, &intval))
+                    return NULL;
+
+                PyType_Ready(&Enum_Type);
+                enumObj = PyObject_New(EnumObject, &Enum_Type);
+                if (!enumObj) {
+                    return NULL;
+                }
+
+                enumObj->ob_ival = intval;
+                Py_INCREF(name);
+                enumObj->ob_name = name;
+
+                return (PyObject *)enumObj;
+             """),
+            ], 
+            prologue="""
+            typedef struct
+            {
+                PyObject_HEAD
+                long ob_ival;
+                PyObject* ob_name;
+            } EnumObject;
+
+            static void
+            enum_dealloc(EnumObject *op)
+            {
+                    Py_DECREF(op->ob_name);
+                    Py_TYPE(op)->tp_free((PyObject *)op);
+            }
+
+            static PyMemberDef enum_members[] = {
+                {"name", T_OBJECT, offsetof(EnumObject, ob_name), 0, NULL},
+                {NULL}  /* Sentinel */
+            };
+
+            PyTypeObject Enum_Type = {
+                PyObject_HEAD_INIT(0)
+                /*ob_size*/             0,
+                /*tp_name*/             "Enum",
+                /*tp_basicsize*/        sizeof(EnumObject),
+                /*tp_itemsize*/         0,
+                /*tp_dealloc*/          enum_dealloc,
+                /*tp_print*/            0,
+                /*tp_getattr*/          0,
+                /*tp_setattr*/          0,
+                /*tp_compare*/          0,
+                /*tp_repr*/             0,
+                /*tp_as_number*/        0,
+                /*tp_as_sequence*/      0,
+                /*tp_as_mapping*/       0,
+                /*tp_hash*/             0,
+                /*tp_call*/             0,
+                /*tp_str*/              0,
+                /*tp_getattro*/         0,
+                /*tp_setattro*/         0,
+                /*tp_as_buffer*/        0,
+                /*tp_flags*/            Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE,
+                /*tp_doc*/              0,
+                /*tp_traverse*/         0,
+                /*tp_clear*/            0,
+                /*tp_richcompare*/      0,
+                /*tp_weaklistoffset*/   0,
+                /*tp_iter*/             0,
+                /*tp_iternext*/         0,
+                /*tp_methods*/          0,
+                /*tp_members*/          enum_members,
+                /*tp_getset*/           0,
+                /*tp_base*/             &PyInt_Type,
+                /*tp_dict*/             0,
+                /*tp_descr_get*/        0,
+                /*tp_descr_set*/        0,
+                /*tp_dictoffset*/       0,
+                /*tp_init*/             0,
+                /*tp_alloc*/            0,
+                /*tp_new*/              0
+            };
+            """)
+
+        a = module.newEnum("ULTIMATE_ANSWER", 42)
+        assert type(a).__name__ == "Enum"
+        assert isinstance(a, int)
+        assert a == int(a) == 42
+        assert a.name == "ULTIMATE_ANSWER"
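The extension type above is, roughly, the C-API spelling of an int subclass that also carries a name attribute. A pure-Python sketch of the behaviour the test expects from it:

    class Enum(int):
        def __new__(cls, name, value):
            self = int.__new__(cls, value)
            self.name = name
            return self

    a = Enum("ULTIMATE_ANSWER", 42)
    assert isinstance(a, int) and a == int(a) == 42
    assert a.name == "ULTIMATE_ANSWER"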

pypy/module/cpyext/test/test_unicodeobject.py

             assert api.Py_UNICODE_ISSPACE(unichr(char))
         assert not api.Py_UNICODE_ISSPACE(u'a')
 
+        assert api.Py_UNICODE_ISALPHA(u'a')
+        assert not api.Py_UNICODE_ISALPHA(u'0')
+        assert api.Py_UNICODE_ISALNUM(u'a')
+        assert api.Py_UNICODE_ISALNUM(u'0')
+        assert not api.Py_UNICODE_ISALNUM(u'+')
+
         assert api.Py_UNICODE_ISDECIMAL(u'\u0660')
         assert not api.Py_UNICODE_ISDECIMAL(u'a')
+        assert api.Py_UNICODE_ISDIGIT(u'9')
+        assert not api.Py_UNICODE_ISDIGIT(u'@')
+        assert api.Py_UNICODE_ISNUMERIC(u'9')
+        assert not api.Py_UNICODE_ISNUMERIC(u'@')
 
         for char in [0x0a, 0x0d, 0x1c, 0x1d, 0x1e, 0x85, 0x2028, 0x2029]:
             assert api.Py_UNICODE_ISLINEBREAK(unichr(char))
         assert not api.Py_UNICODE_ISUPPER(u'a')
         assert not api.Py_UNICODE_ISLOWER(u'Ä')
         assert api.Py_UNICODE_ISUPPER(u'Ä')
+        assert not api.Py_UNICODE_ISTITLE(u'A')
+        assert api.Py_UNICODE_ISTITLE(
+            u'\N{LATIN CAPITAL LETTER L WITH SMALL LETTER J}')
 
     def test_TOLOWER(self, space, api):
         assert api.Py_UNICODE_TOLOWER(u'ä') == u'ä'
         w_char = api.PyUnicode_FromOrdinal(0xFFFF)
         assert space.unwrap(w_char) == u'\uFFFF'
 
+    def test_replace(self, space, api):
+        w_str = space.wrap(u"abababab")
+        w_substr = space.wrap(u"a")
+        w_replstr = space.wrap(u"z")
+        assert u"zbzbabab" == space.unwrap(
+            api.PyUnicode_Replace(w_str, w_substr, w_replstr, 2))
+        assert u"zbzbzbzb" == space.unwrap(
+            api.PyUnicode_Replace(w_str, w_substr, w_replstr, -1))
+
+    def test_tailmatch(self, space, api):
+        w_str = space.wrap(u"abcdef")
+        assert api.PyUnicode_Tailmatch(w_str, space.wrap("cde"), 2, 10, 1) == 1
+        assert api.PyUnicode_Tailmatch(w_str, space.wrap("cde"), 1, 5, -1) == 1
+        self.raises(space, api, TypeError,
+                    api.PyUnicode_Tailmatch, w_str, space.wrap(3), 2, 10, 1)
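PyUnicode_Replace is implemented further down in this changeset as a call to the app-level unicode replace method, so the expected values follow directly from plain string behaviour; maxcount == -1 means replace every occurrence:

    assert u"abababab".replace(u"a", u"z", 2) == u"zbzbabab"
    assert u"abababab".replace(u"a", u"z", -1) == u"zbzbzbzb"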

pypy/module/cpyext/unicodeobject.py

     make_typedescr, get_typedescr)
 from pypy.module.cpyext.stringobject import PyString_Check
 from pypy.module.sys.interp_encoding import setdefaultencoding
-from pypy.objspace.std import unicodeobject, unicodetype
+from pypy.objspace.std import unicodeobject, unicodetype, stringtype
 from pypy.rlib import runicode
 from pypy.tool.sourcetools import func_renamer
 import sys
     return unicodedb.isspace(ord(ch))
 
 @cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
+def Py_UNICODE_ISALPHA(space, ch):
+    """Return 1 or 0 depending on whether ch is an alphabetic character."""
+    return unicodedb.isalpha(ord(ch))
+
+@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
 def Py_UNICODE_ISALNUM(space, ch):
     """Return 1 or 0 depending on whether ch is an alphanumeric character."""
     return unicodedb.isalnum(ord(ch))
     return unicodedb.isdecimal(ord(ch))
 
 @cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
+def Py_UNICODE_ISDIGIT(space, ch):
+    """Return 1 or 0 depending on whether ch is a digit character."""
+    return unicodedb.isdigit(ord(ch))
+
+@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
+def Py_UNICODE_ISNUMERIC(space, ch):
+    """Return 1 or 0 depending on whether ch is a numeric character."""
+    return unicodedb.isnumeric(ord(ch))
+
+@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
 def Py_UNICODE_ISLOWER(space, ch):
     """Return 1 or 0 depending on whether ch is a lowercase character."""
     return unicodedb.islower(ord(ch))
     """Return 1 or 0 depending on whether ch is an uppercase character."""
     return unicodedb.isupper(ord(ch))
 
+@cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL)
+def Py_UNICODE_ISTITLE(space, ch):
+    """Return 1 or 0 depending on whether ch is a titlecase character."""
+    return unicodedb.istitle(ord(ch))
+
 @cpython_api([Py_UNICODE], Py_UNICODE, error=CANNOT_FAIL)
 def Py_UNICODE_TOLOWER(space, ch):
     """Return the character ch converted to lower case."""
     except KeyError:
         return -1.0
 
+@cpython_api([], Py_UNICODE, error=CANNOT_FAIL)
+def PyUnicode_GetMax(space):
+    """Get the maximum ordinal for a Unicode character."""
+    return unichr(runicode.MAXUNICODE)
+
 @cpython_api([PyObject], rffi.CCHARP, error=CANNOT_FAIL)
 def PyUnicode_AS_DATA(space, ref):
     """Return a pointer to the internal buffer of the object. o has to be a
 
 @cpython_api([PyObject, PyObject], PyObject)
 def PyUnicode_Join(space, w_sep, w_seq):
-    """Join a sequence of strings using the given separator and return the resulting
-    Unicode string."""
+    """Join a sequence of strings using the given separator and return
+    the resulting Unicode string."""
     return space.call_method(w_sep, 'join', w_seq)
+
+@cpython_api([PyObject, PyObject, PyObject, Py_ssize_t], PyObject)
+def PyUnicode_Replace(space, w_str, w_substr, w_replstr, maxcount):
+    """Replace at most maxcount occurrences of substr in str with replstr and
+    return the resulting Unicode object. maxcount == -1 means replace all
+    occurrences."""
+    return space.call_method(w_str, "replace", w_substr, w_replstr,
+                             space.wrap(maxcount))
+
+@cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real],
+             rffi.INT_real, error=-1)
+def PyUnicode_Tailmatch(space, w_str, w_substr, start, end, direction):
+    """Return 1 if substr matches str[start:end] at the given tail end
+    (direction == -1 means to do a prefix match, direction == 1 a
+    suffix match), 0 otherwise. Return -1 if an error occurred."""
+    str = space.unicode_w(w_str)
+    substr = space.unicode_w(w_substr)
+    if rffi.cast(lltype.Signed, direction) >= 0:
+        return stringtype.stringstartswith(str, substr, start, end)
+    else:
+        return stringtype.stringendswith(str, substr, start, end)
+
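As written, a non-negative direction is answered with a prefix check and a negative one with a suffix check; in plain Python terms the two new test cases amount to:

    s = u"abcdef"
    assert s.startswith(u"cde", 2, 10)   # direction >= 0
    assert s.endswith(u"cde", 1, 5)      # direction < 0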

pypy/module/micronumpy/__init__.py

         ("arccos", "arccos"),
         ("arcsin", "arcsin"),
         ("arctan", "arctan"),
+        ("arccosh", "arccosh"),
         ("arcsinh", "arcsinh"),
         ("arctanh", "arctanh"),
         ("copysign", "copysign"),
         ("cos", "cos"),
+        ("cosh", "cosh"),
         ("divide", "divide"),
         ("true_divide", "true_divide"),
         ("equal", "equal"),
         ("reciprocal", "reciprocal"),
         ("sign", "sign"),
         ("sin", "sin"),
+        ("sinh", "sinh"),
         ("subtract", "subtract"),
         ('sqrt', 'sqrt'),
         ("tan", "tan"),
+        ("tanh", "tanh"),
         ('bitwise_and', 'bitwise_and'),
         ('bitwise_or', 'bitwise_or'),
         ('bitwise_xor', 'bitwise_xor'),
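The new entries expose the hyperbolic ufuncs at application level. Assuming the interpreter-level module is importable as numpy on a translated PyPy (the app-level name depends on the build), a quick smoke test could be:

    import numpy
    assert numpy.cosh(0.0) == 1.0
    assert numpy.sinh(0.0) == 0.0
    assert numpy.tanh(0.0) == 0.0
    assert numpy.arccosh(1.0) == 0.0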

pypy/module/micronumpy/interp_boxes.py

 from pypy.interpreter.baseobjspace import Wrappable