
Commits

Lukas Diekmann committed 55e13d8 (merge)

merged with pypy

  • Parent commits: 53269ee, 655453f
  • Branch: default


Files changed (136)

File lib-python/2.7/ssl.py

 from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
 from _ssl import SSLError
 from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
-from _ssl import PROTOCOL_SSLv2, PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1
 from _ssl import RAND_status, RAND_egd, RAND_add
 from _ssl import \
      SSL_ERROR_ZERO_RETURN, \
      SSL_ERROR_WANT_CONNECT, \
      SSL_ERROR_EOF, \
      SSL_ERROR_INVALID_ERROR_CODE
+from _ssl import PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1
+_PROTOCOL_NAMES = {
+    PROTOCOL_TLSv1: "TLSv1",
+    PROTOCOL_SSLv23: "SSLv23",
+    PROTOCOL_SSLv3: "SSLv3",
+}
+try:
+    from _ssl import PROTOCOL_SSLv2
+except ImportError:
+    pass
+else:
+    _PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
 
 from socket import socket, _fileobject, _delegate_methods, error as socket_error
 from socket import getnameinfo as _getnameinfo
     return DER_cert_to_PEM_cert(dercert)
 
 def get_protocol_name(protocol_code):
-    if protocol_code == PROTOCOL_TLSv1:
-        return "TLSv1"
-    elif protocol_code == PROTOCOL_SSLv23:
-        return "SSLv23"
-    elif protocol_code == PROTOCOL_SSLv2:
-        return "SSLv2"
-    elif protocol_code == PROTOCOL_SSLv3:
-        return "SSLv3"
-    else:
-        return "<unknown>"
+    return _PROTOCOL_NAMES.get(protocol_code, '<unknown>')
 
 
 # a replacement for the old socket.ssl function
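The hunk above replaces the if/elif chain in get_protocol_name with a dictionary lookup and makes the SSLv2 constant optional, since some OpenSSL builds omit it. A standalone sketch of the same pattern, with made-up integer values standing in for the real _ssl constants:

    # Made-up values; the real constants come from _ssl.
    PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1 = 1, 2, 3

    _PROTOCOL_NAMES = {
        PROTOCOL_TLSv1: "TLSv1",
        PROTOCOL_SSLv23: "SSLv23",
        PROTOCOL_SSLv3: "SSLv3",
    }

    try:
        # missing on builds whose OpenSSL disables SSLv2, hence the guard
        from _ssl import PROTOCOL_SSLv2
    except ImportError:
        pass
    else:
        _PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"

    def get_protocol_name(protocol_code):
        # unknown codes map to a placeholder instead of raising
        return _PROTOCOL_NAMES.get(protocol_code, '<unknown>')

    assert get_protocol_name(PROTOCOL_TLSv1) == "TLSv1"
    assert get_protocol_name(999) == '<unknown>'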

File lib-python/2.7/test/test_ssl.py

 
 # Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
 def skip_if_broken_ubuntu_ssl(func):
-    # We need to access the lower-level wrapper in order to create an
-    # implicit SSL context without trying to connect or listen.
-    try:
-        import _ssl
-    except ImportError:
-        # The returned function won't get executed, just ignore the error
-        pass
-    @functools.wraps(func)
-    def f(*args, **kwargs):
+    if hasattr(ssl, 'PROTOCOL_SSLv2'):
+        # We need to access the lower-level wrapper in order to create an
+        # implicit SSL context without trying to connect or listen.
         try:
-            s = socket.socket(socket.AF_INET)
-            _ssl.sslwrap(s._sock, 0, None, None,
-                         ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None)
-        except ssl.SSLError as e:
-            if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
-                platform.linux_distribution() == ('debian', 'squeeze/sid', '')
-                and 'Invalid SSL protocol variant specified' in str(e)):
-                raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
-        return func(*args, **kwargs)
-    return f
+            import _ssl
+        except ImportError:
+            # The returned function won't get executed, just ignore the error
+            pass
+        @functools.wraps(func)
+        def f(*args, **kwargs):
+            try:
+                s = socket.socket(socket.AF_INET)
+                _ssl.sslwrap(s._sock, 0, None, None,
+                             ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None)
+            except ssl.SSLError as e:
+                if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
+                    platform.linux_distribution() == ('debian', 'squeeze/sid', '')
+                    and 'Invalid SSL protocol variant specified' in str(e)):
+                    raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
+            return func(*args, **kwargs)
+        return f
+    else:
+        return func
 
 
 class BasicSocketTests(unittest.TestCase):
 
     def test_constants(self):
-        ssl.PROTOCOL_SSLv2
+        #ssl.PROTOCOL_SSLv2
         ssl.PROTOCOL_SSLv23
         ssl.PROTOCOL_SSLv3
         ssl.PROTOCOL_TLSv1
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
-            try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
 
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
-            try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False)
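The rewritten skip_if_broken_ubuntu_ssl above installs its probing wrapper only when ssl.PROTOCOL_SSLv2 exists and otherwise returns the test untouched, since there is nothing to work around. A stripped-down sketch of that conditional-decorator pattern (the wrapper body is simplified; the real one probes _ssl.sslwrap and may raise unittest.SkipTest):

    import functools
    import ssl

    def skip_if_broken_ssl(func):
        if hasattr(ssl, 'PROTOCOL_SSLv2'):
            @functools.wraps(func)
            def f(*args, **kwargs):
                # the real decorator probes an SSLv2 connection here and
                # raises unittest.SkipTest on the patched Ubuntu OpenSSL
                return func(*args, **kwargs)
            return f
        else:
            # SSLv2 not compiled in: return the test unchanged
            return func

    @skip_if_broken_ssl
    def test_constants():
        return "ran"

    assert test_constants() == "ran"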
 

File lib-python/modified-2.7/ssl.py

 from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
 from _ssl import SSLError
 from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
-from _ssl import PROTOCOL_SSLv2, PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1
 from _ssl import RAND_status, RAND_egd, RAND_add
 from _ssl import \
      SSL_ERROR_ZERO_RETURN, \
      SSL_ERROR_WANT_CONNECT, \
      SSL_ERROR_EOF, \
      SSL_ERROR_INVALID_ERROR_CODE
+from _ssl import PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1
+_PROTOCOL_NAMES = {
+    PROTOCOL_TLSv1: "TLSv1",
+    PROTOCOL_SSLv23: "SSLv23",
+    PROTOCOL_SSLv3: "SSLv3",
+}
+try:
+    from _ssl import PROTOCOL_SSLv2
+except ImportError:
+    pass
+else:
+    _PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
 
 from socket import socket, _fileobject, error as socket_error
 from socket import getnameinfo as _getnameinfo
     return DER_cert_to_PEM_cert(dercert)
 
 def get_protocol_name(protocol_code):
-    if protocol_code == PROTOCOL_TLSv1:
-        return "TLSv1"
-    elif protocol_code == PROTOCOL_SSLv23:
-        return "SSLv23"
-    elif protocol_code == PROTOCOL_SSLv2:
-        return "SSLv2"
-    elif protocol_code == PROTOCOL_SSLv3:
-        return "SSLv3"
-    else:
-        return "<unknown>"
+    return _PROTOCOL_NAMES.get(protocol_code, '<unknown>')
 
 
 # a replacement for the old socket.ssl function

File lib-python/modified-2.7/test/test_ssl.py

 
 # Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
 def skip_if_broken_ubuntu_ssl(func):
-    # We need to access the lower-level wrapper in order to create an
-    # implicit SSL context without trying to connect or listen.
-    try:
-        import _ssl
-    except ImportError:
-        # The returned function won't get executed, just ignore the error
-        pass
-    @functools.wraps(func)
-    def f(*args, **kwargs):
+    if hasattr(ssl, 'PROTOCOL_SSLv2'):
+        # We need to access the lower-level wrapper in order to create an
+        # implicit SSL context without trying to connect or listen.
         try:
-            s = socket.socket(socket.AF_INET)
-            _ssl.sslwrap(s._sock, 0, None, None,
-                         ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None)
-        except ssl.SSLError as e:
-            if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
-                platform.linux_distribution() == ('debian', 'squeeze/sid', '')
-                and 'Invalid SSL protocol variant specified' in str(e)):
-                raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
-        return func(*args, **kwargs)
-    return f
+            import _ssl
+        except ImportError:
+            # The returned function won't get executed, just ignore the error
+            pass
+        @functools.wraps(func)
+        def f(*args, **kwargs):
+            try:
+                s = socket.socket(socket.AF_INET)
+                _ssl.sslwrap(s._sock, 0, None, None,
+                             ssl.CERT_NONE, ssl.PROTOCOL_SSLv2, None, None)
+            except ssl.SSLError as e:
+                if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
+                    platform.linux_distribution() == ('debian', 'squeeze/sid', '')
+                    and 'Invalid SSL protocol variant specified' in str(e)):
+                    raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
+            return func(*args, **kwargs)
+        return f
+    else:
+        return func
 
 
 class BasicSocketTests(unittest.TestCase):
 
     def test_constants(self):
-        ssl.PROTOCOL_SSLv2
+        #ssl.PROTOCOL_SSLv2
         ssl.PROTOCOL_SSLv23
         ssl.PROTOCOL_SSLv3
         ssl.PROTOCOL_TLSv1
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
-            try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False)
             try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
 
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
-            try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
+            if hasattr(ssl, 'PROTOCOL_SSLv2'):
+                try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
             try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False)
 

File lib_pypy/_functools.py

             raise TypeError("the first argument must be callable")
         self.func = func
         self.args = args
-        self.keywords = keywords
+        self.keywords = keywords or None
 
     def __call__(self, *fargs, **fkeywords):
-        newkeywords = self.keywords.copy()
-        newkeywords.update(fkeywords)
-        return self.func(*(self.args + fargs), **newkeywords)
-
+        if self.keywords is not None:
+            fkeywords.update(self.keywords)
+        return self.func(*(self.args + fargs), **fkeywords)

File lib_pypy/greenlet.py

     def switch(self, *args):
         "Switch execution to this greenlet, optionally passing the values "
         "given as argument(s).  Returns the value passed when switching back."
-        return self.__switch(_continulet.switch, args)
+        return self.__switch('switch', args)
 
     def throw(self, typ=GreenletExit, val=None, tb=None):
         "raise exception in greenlet, return value passed when switching back"
-        return self.__switch(_continulet.throw, typ, val, tb)
+        return self.__switch('throw', typ, val, tb)
 
-    def __switch(target, unbound_method, *args):
+    def __switch(target, methodname, *args):
         current = getcurrent()
         #
         while not target:
             if not target.__started:
-                if unbound_method != _continulet.throw:
+                if methodname == 'switch':
                     greenlet_func = _greenlet_start
                 else:
                     greenlet_func = _greenlet_throw
                 _continulet.__init__(target, greenlet_func, *args)
-                unbound_method = _continulet.switch
+                methodname = 'switch'
                 args = ()
                 target.__started = True
                 break
             target = target.parent
         #
         try:
-            if current.__main:
-                if target.__main:
-                    # switch from main to main
-                    if unbound_method == _continulet.throw:
-                        raise args[0], args[1], args[2]
-                    (args,) = args
-                else:
-                    # enter from main to target
-                    args = unbound_method(target, *args)
-            else:
-                if target.__main:
-                    # leave to go to target=main
-                    args = unbound_method(current, *args)
-                else:
-                    # switch from non-main to non-main
-                    args = unbound_method(current, *args, to=target)
+            unbound_method = getattr(_continulet, methodname)
+            args = unbound_method(current, *args, to=target)
         except GreenletExit, e:
             args = (e,)
         finally:
     try:
         res = greenlet.run(*args)
     finally:
-        if greenlet.parent is not _tls.main:
-            _continuation.permute(greenlet, greenlet.parent)
+        _continuation.permute(greenlet, greenlet.parent)
     return (res,)
 
 def _greenlet_throw(greenlet, exc, value, tb):
     try:
         raise exc, value, tb
     finally:
-        if greenlet.parent is not _tls.main:
-            _continuation.permute(greenlet, greenlet.parent)
+        _continuation.permute(greenlet, greenlet.parent)

File lib_pypy/stackless.py

 
 import traceback
 import _continuation
-from functools import partial
 
 class TaskletExit(Exception):
     pass
 
 CoroutineExit = TaskletExit
 
-class GWrap(_continuation.continulet):
-    """This is just a wrapper around continulet to allow
-       to stick additional attributes to a continulet.
-       To be more concrete, we need a backreference to
-       the coroutine object"""
-
 
 class coroutine(object):
     "we can't have continulet as a base, because continulets can't be rebound"
            arguments *argl, **argd
         """
         if self._frame is None or not self._frame.is_pending():
-
-            def _func(c, *args, **kwargs):
-                return func(*args, **kwargs)
-            
-            run = partial(_func, *argl, **argd)
-            self._frame = frame = GWrap(run)
+            def run(c):
+                _tls.current_coroutine = self
+                return func(*argl, **argd)
+            self._frame = frame = _continuation.continulet(run)
         else:
             raise ValueError("cannot bind a bound coroutine")
 
            None is returned
         """
         current = _getcurrent()
-        current._jump_to(self)
-
-    def _jump_to(self, coroutine):
-        _tls.current_coroutine = coroutine
-        self._frame.switch(to=coroutine._frame)
+        try:
+            current._frame.switch(to=self._frame)
+        finally:
+            _tls.current_coroutine = current
 
     def kill(self):
         """coro.kill() : kill coroutine coro"""
-        _tls.current_coroutine = self
-        self._frame.throw(CoroutineExit)
+        current = _getcurrent()
+        try:
+            current._frame.throw(CoroutineExit, to=self._frame)
+        finally:
+            _tls.current_coroutine = current
 
     def _is_alive(self):
         if self._frame is None:
 
     def getcurrent():
         """coroutine.getcurrent() -> the currently running coroutine"""
-        try:
-            return _getcurrent()
-        except AttributeError:
-            return _maincoro
+        return _getcurrent()
     getcurrent = staticmethod(getcurrent)
 
     def __reduce__(self):
     # create the main coroutine for this thread
     _tls.current_coroutine = None
     main_coroutine = coroutine()
-    main_coroutine.bind(lambda x:x)
+    typ = _continuation.continulet
+    main_coroutine._frame = typ.__new__(typ)
     _tls.main_coroutine = main_coroutine
     _tls.current_coroutine = main_coroutine
-    return main_coroutine
-
-
-_maincoro = _coroutine_create_main()
 
 
 from collections import deque
     _last_task = next
     assert not next.blocked
     if next is not current:
-        try:
+        #try:
             next.switch()
-        except CoroutineExit:
-            raise TaskletExit
+        #except CoroutineExit:  --- they are the same anyway
+        #    raise TaskletExit
     return current
 
 def set_schedule_callback(callback):
         def _func():
             try:
                 try:
+                    coroutine.switch(back)
                     func(*argl, **argd)
                 except TaskletExit:
                     pass
 
         self.func = None
         coroutine.bind(self, _func)
+        back = _getcurrent()
+        coroutine.switch(self)
         self.alive = True
         _scheduler_append(self)
         return self

File pypy/annotation/annrpython.py

         desc = olddesc.bind_self(classdef)
         args = self.bookkeeper.build_args("simple_call", args_s[:])
         desc.consider_call_site(self.bookkeeper, desc.getcallfamily(), [desc],
-            args, annmodel.s_ImpossibleValue)
+            args, annmodel.s_ImpossibleValue, None)
         result = []
         def schedule(graph, inputcells):
             result.append((graph, inputcells))

File pypy/annotation/bookkeeper.py

                 self.consider_call_site(call_op)
 
             for pbc, args_s in self.emulated_pbc_calls.itervalues():
-                self.consider_call_site_for_pbc(pbc, 'simple_call', 
-                                                args_s, s_ImpossibleValue)
+                self.consider_call_site_for_pbc(pbc, 'simple_call',
+                                                args_s, s_ImpossibleValue, None)
             self.emulated_pbc_calls = {}
         finally:
             self.leave()
             args_s = [lltype_to_annotation(adtmeth.ll_ptrtype)] + args_s
         if isinstance(s_callable, SomePBC):
             s_result = binding(call_op.result, s_ImpossibleValue)
-            self.consider_call_site_for_pbc(s_callable,
-                                            call_op.opname,
-                                            args_s, s_result)
+            self.consider_call_site_for_pbc(s_callable, call_op.opname, args_s,
+                                            s_result, call_op)
 
-    def consider_call_site_for_pbc(self, s_callable, opname, args_s, s_result):
+    def consider_call_site_for_pbc(self, s_callable, opname, args_s, s_result,
+                                   call_op):
         descs = list(s_callable.descriptions)
         if not descs:
             return
         family = descs[0].getcallfamily()
         args = self.build_args(opname, args_s)
         s_callable.getKind().consider_call_site(self, family, descs, args,
-                                                s_result)
+                                                s_result, call_op)
 
     def getuniqueclassdef(self, cls):
         """Get the ClassDef associated with the given user cls.
                 whence = None
             else:
                 whence = emulated # callback case
+            op = None
             s_previous_result = s_ImpossibleValue
 
         def schedule(graph, inputcells):
 
         results = []
         for desc in descs:
-            results.append(desc.pycall(schedule, args, s_previous_result))
+            results.append(desc.pycall(schedule, args, s_previous_result, op))
         s_result = unionof(*results)
         return s_result
 

File pypy/annotation/description.py

             raise TypeError, "signature mismatch: %s" % e.getmsg(self.name)
         return inputcells
 
-    def specialize(self, inputcells):
+    def specialize(self, inputcells, op=None):
+        if (op is None and
+            getattr(self.bookkeeper, "position_key", None) is not None):
+            _, block, i = self.bookkeeper.position_key
+            op = block.operations[i]
         if self.specializer is None:
             # get the specializer based on the tag of the 'pyobj'
             # (if any), according to the current policy
                 enforceargs = Sig(*enforceargs)
                 self.pyobj._annenforceargs_ = enforceargs
             enforceargs(self, inputcells) # can modify inputcells in-place
-        return self.specializer(self, inputcells)
+        if getattr(self.pyobj, '_annspecialcase_', '').endswith("call_location"):
+            return self.specializer(self, inputcells, op)
+        else:
+            return self.specializer(self, inputcells)
 
-    def pycall(self, schedule, args, s_previous_result):
+    def pycall(self, schedule, args, s_previous_result, op=None):
         inputcells = self.parse_arguments(args)
-        result = self.specialize(inputcells)
+        result = self.specialize(inputcells, op)
         if isinstance(result, FunctionGraph):
             graph = result         # common case
             # if that graph has a different signature, we need to re-parse
                                              None,       # selfclassdef
                                              name)
 
-    def consider_call_site(bookkeeper, family, descs, args, s_result):
+    def consider_call_site(bookkeeper, family, descs, args, s_result, op):
         shape = rawshape(args)
-        row = FunctionDesc.row_to_consider(descs, args)
+        row = FunctionDesc.row_to_consider(descs, args, op)
         family.calltable_add_row(shape, row)
     consider_call_site = staticmethod(consider_call_site)
 
-    def variant_for_call_site(bookkeeper, family, descs, args):
+    def variant_for_call_site(bookkeeper, family, descs, args, op):
         shape = rawshape(args)
         bookkeeper.enter(None)
         try:
-            row = FunctionDesc.row_to_consider(descs, args)
+            row = FunctionDesc.row_to_consider(descs, args, op)
         finally:
             bookkeeper.leave()
         index = family.calltable_lookup_row(shape, row)
     def rowkey(self):
         return self
 
-    def row_to_consider(descs, args):
+    def row_to_consider(descs, args, op):
         # see comments in CallFamily
         from pypy.annotation.model import s_ImpossibleValue
         row = {}
             def enlist(graph, ignore):
                 row[desc.rowkey()] = graph
                 return s_ImpossibleValue   # meaningless
-            desc.pycall(enlist, args, s_ImpossibleValue)
+            desc.pycall(enlist, args, s_ImpossibleValue, op)
         return row
     row_to_consider = staticmethod(row_to_consider)
 
                             "specialization" % (self.name,))
         return self.getclassdef(None)
 
-    def pycall(self, schedule, args, s_previous_result):
+    def pycall(self, schedule, args, s_previous_result, op=None):
         from pypy.annotation.model import SomeInstance, SomeImpossibleValue
         if self.specialize:
             if self.specialize == 'specialize:ctr_location':
             cdesc = cdesc.basedesc
         return s_result     # common case
 
-    def consider_call_site(bookkeeper, family, descs, args, s_result):
+    def consider_call_site(bookkeeper, family, descs, args, s_result, op):
         from pypy.annotation.model import SomeInstance, SomePBC, s_None
         if len(descs) == 1:
             # call to a single class, look at the result annotation
             initdescs[0].mergecallfamilies(*initdescs[1:])
             initfamily = initdescs[0].getcallfamily()
             MethodDesc.consider_call_site(bookkeeper, initfamily, initdescs,
-                                          args, s_None)
+                                          args, s_None, op)
     consider_call_site = staticmethod(consider_call_site)
 
     def getallbases(self):
     def getuniquegraph(self):
         return self.funcdesc.getuniquegraph()
 
-    def pycall(self, schedule, args, s_previous_result):
+    def pycall(self, schedule, args, s_previous_result, op=None):
         from pypy.annotation.model import SomeInstance
         if self.selfclassdef is None:
             raise Exception("calling %r" % (self,))
         s_instance = SomeInstance(self.selfclassdef, flags = self.flags)
         args = args.prepend(s_instance)
-        return self.funcdesc.pycall(schedule, args, s_previous_result)
+        return self.funcdesc.pycall(schedule, args, s_previous_result, op)
 
     def bind_under(self, classdef, name):
         self.bookkeeper.warning("rebinding an already bound %r" % (self,))
                                              self.name,
                                              flags)
 
-    def consider_call_site(bookkeeper, family, descs, args, s_result):
+    def consider_call_site(bookkeeper, family, descs, args, s_result, op):
         shape = rawshape(args, nextra=1)     # account for the extra 'self'
         funcdescs = [methoddesc.funcdesc for methoddesc in descs]
-        row = FunctionDesc.row_to_consider(descs, args)
+        row = FunctionDesc.row_to_consider(descs, args, op)
         family.calltable_add_row(shape, row)
     consider_call_site = staticmethod(consider_call_site)
 
         return '<MethodOfFrozenDesc %r of %r>' % (self.funcdesc,
                                                   self.frozendesc)
 
-    def pycall(self, schedule, args, s_previous_result):
+    def pycall(self, schedule, args, s_previous_result, op=None):
         from pypy.annotation.model import SomePBC
         s_self = SomePBC([self.frozendesc])
         args = args.prepend(s_self)
-        return self.funcdesc.pycall(schedule, args, s_previous_result)
+        return self.funcdesc.pycall(schedule, args, s_previous_result, op)
 
-    def consider_call_site(bookkeeper, family, descs, args, s_result):
+    def consider_call_site(bookkeeper, family, descs, args, s_result, op):
         shape = rawshape(args, nextra=1)    # account for the extra 'self'
         funcdescs = [mofdesc.funcdesc for mofdesc in descs]
-        row = FunctionDesc.row_to_consider(descs, args)
+        row = FunctionDesc.row_to_consider(descs, args, op)
         family.calltable_add_row(shape, row)
     consider_call_site = staticmethod(consider_call_site)
 

File pypy/annotation/policy.py

 # base annotation policy for specialization
 from pypy.annotation.specialize import default_specialize as default
 from pypy.annotation.specialize import specialize_argvalue, specialize_argtype, specialize_arglistitemtype
-from pypy.annotation.specialize import memo
+from pypy.annotation.specialize import memo, specialize_call_location
 # for some reason, model must be imported first,
 # or we create a cycle.
 from pypy.annotation import model as annmodel
     specialize__arg = staticmethod(specialize_argvalue) # specialize:arg(N)
     specialize__argtype = staticmethod(specialize_argtype) # specialize:argtype(N)
     specialize__arglistitemtype = staticmethod(specialize_arglistitemtype)
+    specialize__call_location = staticmethod(specialize_call_location)
 
     def specialize__ll(pol, *args):
         from pypy.rpython.annlowlevel import LowLevelAnnotatorPolicy

File pypy/annotation/specialize.py

     else:
         key = s.listdef.listitem.s_value.knowntype
     return maybe_star_args(funcdesc, key, args_s)
+
+def specialize_call_location(funcdesc, args_s, op):
+    assert op is not None
+    return maybe_star_args(funcdesc, op, args_s)

File pypy/annotation/test/test_annrpython.py

         allocdesc = a.bookkeeper.getdesc(alloc)
         s_C1 = a.bookkeeper.immutablevalue(C1)
         s_C2 = a.bookkeeper.immutablevalue(C2)
-        graph1 = allocdesc.specialize([s_C1])
-        graph2 = allocdesc.specialize([s_C2])
+        graph1 = allocdesc.specialize([s_C1], None)
+        graph2 = allocdesc.specialize([s_C2], None)
         assert a.binding(graph1.getreturnvar()).classdef == C1df
         assert a.binding(graph2.getreturnvar()).classdef == C2df
         assert graph1 in a.translator.graphs
         allocdesc = a.bookkeeper.getdesc(alloc)
         s_C1 = a.bookkeeper.immutablevalue(C1)
         s_C2 = a.bookkeeper.immutablevalue(C2)
-        graph1 = allocdesc.specialize([s_C1, s_C2])
-        graph2 = allocdesc.specialize([s_C2, s_C2])
+        graph1 = allocdesc.specialize([s_C1, s_C2], None)
+        graph2 = allocdesc.specialize([s_C2, s_C2], None)
         assert a.binding(graph1.getreturnvar()).classdef == C1df
         assert a.binding(graph2.getreturnvar()).classdef == C2df
         assert graph1 in a.translator.graphs
         assert len(executedesc._cache[(0, 'star', 2)].startblock.inputargs) == 4
         assert len(executedesc._cache[(1, 'star', 3)].startblock.inputargs) == 5
 
+    def test_specialize_call_location(self):
+        def g(a):
+            return a
+        g._annspecialcase_ = "specialize:call_location"
+        def f(x):
+            return g(x)
+        f._annspecialcase_ = "specialize:argtype(0)"
+        def h(y):
+            w = f(y)
+            return int(f(str(y))) + w
+        a = self.RPythonAnnotator()
+        assert a.build_types(h, [int]) == annmodel.SomeInteger()
+
     def test_assert_list_doesnt_lose_info(self):
         class T(object):
             pass

File pypy/doc/stackless.rst

 In practice, in PyPy, you cannot change the ``f_back`` of an arbitrary
 frame, but only of frames stored in ``continulets``.
 
-Continulets are internally implemented using stacklets.  Stacklets are a
+Continulets are internally implemented using stacklets_.  Stacklets are a
 bit more primitive (they are really one-shot continuations), but that
 idea only works in C, not in Python.  The basic idea of continulets is
 to have at any point in time a complete valid stack; this is important
 
 * Support for other CPUs than x86 and x86-64
 
-* The app-level ``f_back`` field of frames crossing continulet boundaries
-  is None for now, unlike what I explain in the theoretical overview
-  above.  It mostly means that in a ``pdb.set_trace()`` you cannot go
-  ``up`` past countinulet boundaries.  This could be fixed.
-
 .. __: `recursion depth limit`_
 
 (*) Pickling, as well as changing threads, could be implemented by using
 to use other interfaces like genlets and greenlets.)
 
 
+Stacklets
++++++++++
+
+Continulets are internally implemented using stacklets, which is the
+generic RPython-level building block for "one-shot continuations".  For
+more information about them please see the documentation in the C source
+at `pypy/translator/c/src/stacklet/stacklet.h`_.
+
+The module ``pypy.rlib.rstacklet`` is a thin wrapper around the above
+functions.  The key point is that new() and switch() always return a
+fresh stacklet handle (or an empty one), and switch() additionally
+consumes one.  It makes no sense to have code in which the returned
+handle is ignored, or used more than once.  Note that ``stacklet.c`` is
+written assuming that the user knows that, and so no additional checking
+occurs; this can easily lead to obscure crashes if you don't use a
+wrapper like PyPy's '_continuation' module.
+
+
 Theory of composability
 +++++++++++++++++++++++
 
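The stacklet documentation added above, like the rewritten greenlet.py and stackless.py earlier in this commit, ultimately rests on the app-level _continuation.continulet objects. A minimal sketch of the switching discipline they rely on, assuming a PyPy interpreter that ships the built-in _continuation module:

    # PyPy-only: '_continuation' is a PyPy built-in module.
    from _continuation import continulet

    def worker(c):
        # 'c' is the continulet itself; c.switch() jumps back to whoever
        # switched into us and resumes here when we are switched to again.
        x = c.switch("first result")
        return "done with %r" % (x,)

    c = continulet(worker)
    r1 = c.switch()          # starts worker(), receives "first result"
    r2 = c.switch("hello")   # resumes worker(), receives its return value
    assert (r1, r2) == ("first result", "done with 'hello'")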

File pypy/interpreter/argument.py

 
     ###  Manipulation  ###
 
+    @jit.look_inside_iff(lambda self: not self._dont_jit)
     def unpack(self): # slowish
         "Return a ([w1,w2...], {'kw':w3...}) pair."
         kwds_w = {}
 
     ###  Parsing for function calls  ###
 
+    # XXX: this should be @jit.look_inside_iff, but we need keyword arguments,
+    # and it doesn't support them for now.
     def _match_signature(self, w_firstarg, scope_w, signature, defaults_w=None,
                          blindargs=0):
         """Parse args and kwargs according to the signature of a code object,

File pypy/interpreter/baseobjspace.py

 from pypy.interpreter.miscutils import ThreadLocals
 from pypy.tool.cache import Cache
 from pypy.tool.uid import HUGEVAL_BYTES
-from pypy.rlib.objectmodel import we_are_translated
+from pypy.rlib.objectmodel import we_are_translated, newlist
 from pypy.rlib.debug import make_sure_not_resized
 from pypy.rlib.timer import DummyTimer, Timer
 from pypy.rlib.rarithmetic import r_uint
 from pypy.rlib import jit
 from pypy.tool.sourcetools import func_with_new_name
-import os, sys, py
+import os, sys
 
 __all__ = ['ObjSpace', 'OperationError', 'Wrappable', 'W_Root']
 
         w_iterator = self.iter(w_iterable)
         # If we know the expected length we can preallocate.
         if expected_length == -1:
-            items = []
+            try:
+                lgt_estimate = self.len_w(w_iterable)
+            except OperationError, o:
+                if (not o.match(self, self.w_AttributeError) and
+                    not o.match(self, self.w_TypeError)):
+                    raise
+                items = []
+            else:
+                try:
+                    items = newlist(lgt_estimate)
+                except MemoryError:
+                    items = [] # it might have lied
         else:
             items = [None] * expected_length
         idx = 0
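The unpackiterable change above preallocates the result list when the iterable exposes a usable length, and falls back to a growing list when the length is unavailable or wrong. A standalone sketch of that length-hint pattern in plain Python (newlist and the wrapped-object handling are PyPy internals and are replaced by ordinary lists here):

    def unpack_all(iterable):
        # Preallocate when a length estimate is available; the estimate
        # may be missing or wrong, so everything degrades to append().
        try:
            estimate = len(iterable)
        except TypeError:
            items = []
        else:
            try:
                items = [None] * estimate
            except MemoryError:
                items = []          # the estimate might have lied
        idx = 0
        for x in iterable:
            if idx < len(items):
                items[idx] = x
            else:
                items.append(x)
            idx += 1
        del items[idx:]             # the iterator may produce fewer items
        return items

    assert unpack_all([1, 2, 3]) == [1, 2, 3]
    assert unpack_all(iter([1, 2, 3])) == [1, 2, 3]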

File pypy/interpreter/executioncontext.py

 import sys
-from pypy.interpreter.miscutils import Stack
 from pypy.interpreter.error import OperationError
 from pypy.rlib.rarithmetic import LONG_BIT
 from pypy.rlib.unroll import unrolling_iterable
         return frame
 
     @staticmethod
+    @jit.unroll_safe  # should usually loop 0 times, very rarely more than once
     def getnextframe_nohidden(frame):
         frame = frame.f_backref()
         while frame and frame.hide():
 
     # ________________________________________________________________
 
-
-    class Subcontext(object):
-        # coroutine: subcontext support
-
-        def __init__(self):
-            self.topframe = None
-            self.w_tracefunc = None
-            self.profilefunc = None
-            self.w_profilefuncarg = None
-            self.is_tracing = 0
-
-        def enter(self, ec):
-            ec.topframeref = jit.non_virtual_ref(self.topframe)
-            ec.w_tracefunc = self.w_tracefunc
-            ec.profilefunc = self.profilefunc
-            ec.w_profilefuncarg = self.w_profilefuncarg
-            ec.is_tracing = self.is_tracing
-            ec.space.frame_trace_action.fire()
-
-        def leave(self, ec):
-            self.topframe = ec.gettopframe()
-            self.w_tracefunc = ec.w_tracefunc
-            self.profilefunc = ec.profilefunc
-            self.w_profilefuncarg = ec.w_profilefuncarg
-            self.is_tracing = ec.is_tracing
-
-        def clear_framestack(self):
-            self.topframe = None
-
-        # the following interface is for pickling and unpickling
-        def getstate(self, space):
-            if self.topframe is None:
-                return space.w_None
-            return self.topframe
-
-        def setstate(self, space, w_state):
-            from pypy.interpreter.pyframe import PyFrame
-            if space.is_w(w_state, space.w_None):
-                self.topframe = None
-            else:
-                self.topframe = space.interp_w(PyFrame, w_state)
-
-        def getframestack(self):
-            lst = []
-            f = self.topframe
-            while f is not None:
-                lst.append(f)
-                f = f.f_backref()
-            lst.reverse()
-            return lst
-        # coroutine: I think this is all, folks!
-
     def c_call_trace(self, frame, w_func, args=None):
         "Profile the call of a builtin function"
         self._c_call_return_trace(frame, w_func, args, 'c_call')

File pypy/interpreter/function.py

             # we have been seen by other means so rtyping should not choke
             # on us
             identifier = self.code.identifier
-            assert Function._all.get(identifier, self) is self, ("duplicate "
-                                                                 "function ids")
+            previous = Function._all.get(identifier, self)
+            assert previous is self, (
+                "duplicate function ids with identifier=%r: %r and %r" % (
+                identifier, previous, self))
             self.add_to_table()
         return False
 

File pypy/interpreter/miscutils.py

 Miscellaneous utilities.
 """
 
-import types
-
-from pypy.rlib.rarithmetic import r_uint
-
-class RootStack:
-    pass
-
-class Stack(RootStack):
-    """Utility class implementing a stack."""
-
-    _annspecialcase_ = "specialize:ctr_location" # polymorphic
-
-    def __init__(self):
-        self.items = []
-
-    def clone(self):
-        s = self.__class__()
-        for item in self.items:
-            try:
-                item = item.clone()
-            except AttributeError:
-                pass
-            s.push(item)
-        return s
-
-    def push(self, item):
-        self.items.append(item)
-
-    def pop(self):
-        return self.items.pop()
-
-    def drop(self, n):
-        if n > 0:
-            del self.items[-n:]
-
-    def top(self, position=0):
-        """'position' is 0 for the top of the stack, 1 for the item below,
-        and so on.  It must not be negative."""
-        if position < 0:
-            raise ValueError, 'negative stack position'
-        if position >= len(self.items):
-            raise IndexError, 'not enough entries in stack'
-        return self.items[~position]
-
-    def set_top(self, value, position=0):
-        """'position' is 0 for the top of the stack, 1 for the item below,
-        and so on.  It must not be negative."""
-        if position < 0:
-            raise ValueError, 'negative stack position'
-        if position >= len(self.items):
-            raise IndexError, 'not enough entries in stack'
-        self.items[~position] = value
-
-    def depth(self):
-        return len(self.items)
-
-    def empty(self):
-        return len(self.items) == 0
-
-
-class FixedStack(RootStack):
-    _annspecialcase_ = "specialize:ctr_location" # polymorphic
-
-    # unfortunately, we have to re-do everything
-    def __init__(self):
-        pass
-
-    def setup(self, stacksize):
-        self.ptr = r_uint(0) # we point after the last element
-        self.items = [None] * stacksize
-
-    def clone(self):
-        # this is only needed if we support flow space
-        s = self.__class__()
-        s.setup(len(self.items))
-        for item in self.items[:self.ptr]:
-            try:
-                item = item.clone()
-            except AttributeError:
-                pass
-            s.push(item)
-        return s
-
-    def push(self, item):
-        ptr = self.ptr
-        self.items[ptr] = item
-        self.ptr = ptr + 1
-
-    def pop(self):
-        ptr = self.ptr - 1
-        ret = self.items[ptr]   # you get OverflowError if the stack is empty
-        self.items[ptr] = None
-        self.ptr = ptr
-        return ret
-
-    def drop(self, n):
-        while n > 0:
-            n -= 1
-            self.ptr -= 1
-            self.items[self.ptr] = None
-
-    def top(self, position=0):
-        # for a fixed stack, we assume correct indices
-        return self.items[self.ptr + ~position]
-
-    def set_top(self, value, position=0):
-        # for a fixed stack, we assume correct indices
-        self.items[self.ptr + ~position] = value
-
-    def depth(self):
-        return self.ptr
-
-    def empty(self):
-        return not self.ptr
-
-
-class InitializedClass(type):
-    """NOT_RPYTHON.  A meta-class that allows a class to initialize itself (or
-    its subclasses) by calling __initclass__() as a class method."""
-    def __init__(self, name, bases, dict):
-        super(InitializedClass, self).__init__(name, bases, dict)
-        for basecls in self.__mro__:
-            raw = basecls.__dict__.get('__initclass__')
-            if isinstance(raw, types.FunctionType):
-                raw(self)   # call it as a class method
-
-
-class RwDictProxy(object):
-    """NOT_RPYTHON.  A dict-like class standing for 'cls.__dict__', to work
-    around the fact that the latter is a read-only proxy for new-style
-    classes."""
-    
-    def __init__(self, cls):
-        self.cls = cls
-
-    def __getitem__(self, attr):
-        return self.cls.__dict__[attr]
-
-    def __setitem__(self, attr, value):
-        setattr(self.cls, attr, value)
-
-    def __contains__(self, value):
-        return value in self.cls.__dict__
-
-    def items(self):
-        return self.cls.__dict__.items()
-
-
 class ThreadLocals:
     """Pseudo thread-local storage, for 'space.threadlocals'.
     This is not really thread-local at all; the intention is that the PyPy

File pypy/interpreter/pycode.py

 from pypy.interpreter.argument import Signature
 from pypy.interpreter.error import OperationError
 from pypy.interpreter.gateway import NoneNotWrapped, unwrap_spec
-from pypy.interpreter.astcompiler.consts import (CO_OPTIMIZED,
+from pypy.interpreter.astcompiler.consts import (
     CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED,
     CO_GENERATOR, CO_CONTAINSGLOBALS)
 from pypy.rlib.rarithmetic import intmask

File pypy/interpreter/pyframe.py

         make_sure_not_resized(self.locals_stack_w)
         check_nonneg(self.nlocals)
         #
-        if space.config.objspace.honor__builtins__:
+        if space.config.objspace.honor__builtins__ and w_globals is not None:
             self.builtin = space.builtin.pick_builtin(w_globals)
         # regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
         # class bodies only have CO_NEWLOCALS.
         return self.get_builtin().getdict(space)
 
     def fget_f_back(self, space):
-        return self.space.wrap(self.f_backref())
+        f_back = ExecutionContext.getnextframe_nohidden(self)
+        return self.space.wrap(f_back)
 
     def fget_f_lasti(self, space):
         return self.space.wrap(self.last_instr)

File pypy/interpreter/test/test_objspace.py

         assert err.value.match(space, space.w_ValueError)
         err = raises(OperationError, space.unpackiterable, w_l, 5)
         assert err.value.match(space, space.w_ValueError)
+        w_a = space.appexec((), """():
+        class A(object):
+            def __iter__(self):
+                return self
+            def next(self):
+                raise StopIteration
+            def __len__(self):
+                1/0
+        return A()
+        """)
+        try:
+            space.unpackiterable(w_a)
+        except OperationError, o:
+            if not o.match(space, space.w_ZeroDivisionError):
+                raise Exception("DID NOT RAISE")
+        else:
+            raise Exception("DID NOT RAISE")
 
     def test_fixedview(self):
         space = self.space

File pypy/interpreter/test/test_pyframe.py

 from pypy.tool import udir
+from pypy.conftest import option
 
 
 class AppTestPyFrame:
     def setup_class(cls):
         cls.w_udir = cls.space.wrap(str(udir.udir))
         cls.w_tempfile1 = cls.space.wrap(str(udir.udir.join('tempfile1')))
+        if not option.runappdirect:
+            w_call_further = cls.space.appexec([], """():
+                def call_further(f):
+                    return f()
+                return call_further
+            """)
+            assert not w_call_further.code.hidden_applevel
+            w_call_further.code.hidden_applevel = True       # hack
+            cls.w_call_further = w_call_further
 
     # test for the presence of the attributes, not functionality
 
         frame = f()
         assert frame.f_back.f_code.co_name == 'f'
 
+    def test_f_back_hidden(self):
+        if not hasattr(self, 'call_further'):
+            skip("not for runappdirect testing")
+        import sys
+        def f():
+            return (sys._getframe(0),
+                    sys._getframe(1),
+                    sys._getframe(0).f_back)
+        def main():
+            return self.call_further(f)
+        f0, f1, f1bis = main()
+        assert f0.f_code.co_name == 'f'
+        assert f1.f_code.co_name == 'main'
+        assert f1bis is f1
+        assert f0.f_back is f1
+
     def test_f_exc_xxx(self):
         import sys
 

File pypy/jit/backend/llsupport/llmodel.py

         u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
         u.chars[index] = unichr(newvalue)
 
+    def bh_copystrcontent(self, src, dst, srcstart, dststart, length):
+        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), src)
+        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), dst)
+        rstr.copy_string_contents(src, dst, srcstart, dststart, length)
+
+    def bh_copyunicodecontent(self, src, dst, srcstart, dststart, length):
+        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), src)
+        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), dst)
+        rstr.copy_unicode_contents(src, dst, srcstart, dststart, length)
+
     def bh_call_i(self, func, calldescr, args_i, args_r, args_f):
         assert isinstance(calldescr, BaseIntCallDescr)
         if not we_are_translated():

File pypy/jit/backend/model.py

         Optionally, return a ``ops_offset`` dictionary.  See the docstring of
 ``compiled_loop`` for more information about it.
         """
-        raise NotImplementedError    
+        raise NotImplementedError
 
     def dump_loop_token(self, looptoken):
         """Print a disassembled version of looptoken to stdout"""
         raise NotImplementedError
     def bh_unicodesetitem(self, string, index, newvalue):
         raise NotImplementedError
+    def bh_copystrcontent(self, src, dst, srcstart, dststart, length):
+        raise NotImplementedError
+    def bh_copyunicodecontent(self, src, dst, srcstart, dststart, length):
+        raise NotImplementedError
 
     def force(self, force_token):
         raise NotImplementedError

File pypy/jit/codewriter/jtransform.py

             return SpaceOperation('%s_assert_green' % kind, args, None)
         elif oopspec_name == 'jit.current_trace_length':
             return SpaceOperation('current_trace_length', [], op.result)
+        elif oopspec_name == 'jit.isconstant':
+            kind = getkind(args[0].concretetype)
+            return SpaceOperation('%s_isconstant' % kind, args, op.result)
+        elif oopspec_name == 'jit.isvirtual':
+            kind = getkind(args[0].concretetype)
+            return SpaceOperation('%s_isvirtual' % kind, args, op.result)
         else:
             raise AssertionError("missing support for %r" % oopspec_name)
 
         else:
             assert 0, "args[0].concretetype must be STR or UNICODE"
         #
+        if oopspec_name == 'stroruni.copy_contents':
+            if SoU.TO == rstr.STR:
+                new_op = 'copystrcontent'
+            elif SoU.TO == rstr.UNICODE:
+                new_op = 'copyunicodecontent'
+            else:
+                assert 0
+            return SpaceOperation(new_op, args, op.result)
         if oopspec_name == "stroruni.equal":
             for otherindex, othername, argtypes, resulttype in [
                 (EffectInfo.OS_STREQ_SLICE_CHECKNULL,

File pypy/jit/metainterp/blackhole.py

     def bhimpl_current_trace_length():
         return -1
 
+    @arguments("i", returns="i")
+    def bhimpl_int_isconstant(x):
+        return False
+
+    @arguments("r", returns="i")
+    def bhimpl_ref_isconstant(x):
+        return False
+
+    @arguments("r", returns="i")
+    def bhimpl_ref_isvirtual(x):
+        return False
+
     # ----------
     # the main hints and recursive calls
 
     @arguments("cpu", "r", "i", "i")
     def bhimpl_strsetitem(cpu, string, index, newchr):
         cpu.bh_strsetitem(string, index, newchr)
+    @arguments("cpu", "r", "r", "i", "i", "i")
+    def bhimpl_copystrcontent(cpu, src, dst, srcstart, dststart, length):
+        cpu.bh_copystrcontent(src, dst, srcstart, dststart, length)
 
     @arguments("cpu", "i", returns="r")
     def bhimpl_newunicode(cpu, length):
     @arguments("cpu", "r", "i", "i")
     def bhimpl_unicodesetitem(cpu, unicode, index, newchr):
         cpu.bh_unicodesetitem(unicode, index, newchr)
+    @arguments("cpu", "r", "r", "i", "i", "i")
+    def bhimpl_copyunicodecontent(cpu, src, dst, srcstart, dststart, length):
+        cpu.bh_copyunicodecontent(src, dst, srcstart, dststart, length)
 
     @arguments(returns=(longlong.is_64_bit and "i" or "f"))
     def bhimpl_ll_read_timestamp():
 def resume_in_blackhole(metainterp_sd, jitdriver_sd, resumedescr,
                         all_virtuals=None):
     from pypy.jit.metainterp.resume import blackhole_from_resumedata
-    debug_start('jit-blackhole')
+    #debug_start('jit-blackhole')
     metainterp_sd.profiler.start_blackhole()
     blackholeinterp = blackhole_from_resumedata(
         metainterp_sd.blackholeinterpbuilder,
         _run_forever(blackholeinterp, current_exc)
     finally:
         metainterp_sd.profiler.end_blackhole()
-        debug_stop('jit-blackhole')
+        #debug_stop('jit-blackhole')
 
 def convert_and_run_from_pyjitpl(metainterp, raising_exception=False):
     # Get a chain of blackhole interpreters and fill them by copying
     # 'metainterp.framestack'.
-    debug_start('jit-blackhole')
+    #debug_start('jit-blackhole')
     metainterp_sd = metainterp.staticdata
     metainterp_sd.profiler.start_blackhole()
     nextbh = None
         _run_forever(firstbh, current_exc)
     finally:
         metainterp_sd.profiler.end_blackhole()
-        debug_stop('jit-blackhole')
+        #debug_stop('jit-blackhole')

File pypy/jit/metainterp/heapcache.py

+from pypy.jit.metainterp.history import ConstInt
+from pypy.jit.metainterp.resoperation import rop
+
+
+class HeapCache(object):
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        # contains boxes where the class is already known
+        self.known_class_boxes = {}
+        # store the boxes that contain newly allocated objects; this maps
+        # each such box to a bool telling whether the object is still
+        # unescaped (True means the box never escaped, False means it did
+        # escape); a box's presence in the mapping shows that it was
+        # allocated inside the trace
+        self.new_boxes = {}
+        # Tracks which boxes should be marked as escaped when the key box
+        # escapes.
+        self.dependencies = {}
+        # contains frame boxes that are not virtualizables
+        self.nonstandard_virtualizables = {}
+        # heap cache
+        # maps descrs to {from_box, to_box} dicts
+        self.heap_cache = {}
+        # heap array cache
+        # maps descrs to {index: {from_box: to_box}} dicts
+        self.heap_array_cache = {}
+        # cache the length of arrays
+        self.length_cache = {}
+
+    def invalidate_caches(self, opnum, descr, argboxes):
+        self.mark_escaped(opnum, argboxes)
+        self.clear_caches(opnum, descr, argboxes)
+
+    def mark_escaped(self, opnum, argboxes):
+        idx = 0
+        if opnum == rop.SETFIELD_GC:
+            assert len(argboxes) == 2
+            box, valuebox = argboxes
+            if self.is_unescaped(box) and self.is_unescaped(valuebox):
+                self.dependencies.setdefault(box, []).append(valuebox)
+            else:
+                self._escape(valuebox)
+        # GETFIELD_GC doesn't escape its argument
+        elif opnum != rop.GETFIELD_GC:
+            for box in argboxes:
+                # setarrayitem_gc doesn't escape its first argument
+                if not (idx == 0 and opnum in [rop.SETARRAYITEM_GC]):
+                    self._escape(box)
+                idx += 1
+
+    def _escape(self, box):
+        if box in self.new_boxes:
+            self.new_boxes[box] = False
+        if box in self.dependencies:
+            for dep in self.dependencies[box]:
+                self._escape(dep)
+            del self.dependencies[box]
+
+    def clear_caches(self, opnum, descr, argboxes):
+        if opnum == rop.SETFIELD_GC:
+            return
+        if opnum == rop.SETARRAYITEM_GC:
+            return
+        if opnum == rop.SETFIELD_RAW:
+            return
+        if opnum == rop.SETARRAYITEM_RAW:
+            return
+        if rop._OVF_FIRST <= opnum <= rop._OVF_LAST:
+            return
+        if rop._NOSIDEEFFECT_FIRST <= opnum <= rop._NOSIDEEFFECT_LAST:
+            return
+        if opnum == rop.CALL or opnum == rop.CALL_LOOPINVARIANT:
+            effectinfo = descr.get_extra_info()
+            ef = effectinfo.extraeffect
+            if ef == effectinfo.EF_LOOPINVARIANT or \
+               ef == effectinfo.EF_ELIDABLE_CANNOT_RAISE or \
+               ef == effectinfo.EF_ELIDABLE_CAN_RAISE:
+                return
+            # A special case for ll_arraycopy, because it is so common, and its
+            # effects are so well defined.
+            elif effectinfo.oopspecindex == effectinfo.OS_ARRAYCOPY:
+                # The destination box
+                if argboxes[2] in self.new_boxes:
+                    # XXX: no descr here so we invalidate any of them, not just
+                    # of the correct type
+                    # XXX: in theory the indices of the copy could be looked at
+                    # as well
+                    for descr, cache in self.heap_array_cache.iteritems():
+                        for idx, cache in cache.iteritems():
+                            for frombox in cache.keys():
+                                if frombox not in self.new_boxes:
+                                    del cache[frombox]
+                    return
+
+        self.heap_cache.clear()
+        self.heap_array_cache.clear()
+
+    def is_class_known(self, box):
+        return box in self.known_class_boxes
+
+    def class_now_known(self, box):
+        self.known_class_boxes[box] = None
+
+    def is_nonstandard_virtualizable(self, box):
+        return box in self.nonstandard_virtualizables
+
+    def nonstandard_virtualizables_now_known(self, box):
+        self.nonstandard_virtualizables[box] = None
+
+    def is_unescaped(self, box):
+        return self.new_boxes.get(box, False)
+
+    def new(self, box):
+        self.new_boxes[box] = True
+
+    def new_array(self, box, lengthbox):
+        self.new(box)
+        self.arraylen_now_known(box, lengthbox)
+
+    def getfield(self, box, descr):
+        d = self.heap_cache.get(descr, None)
+        if d:
+            tobox = d.get(box, None)
+            if tobox:
+                return tobox
+        return None
+
+    def getfield_now_known(self, box, descr, fieldbox):
+        self.heap_cache.setdefault(descr, {})[box] = fieldbox
+
+    def setfield(self, box, descr, fieldbox):
+        d = self.heap_cache.get(descr, None)
+        new_d = self._do_write_with_aliasing(d, box, fieldbox)
+        self.heap_cache[descr] = new_d
+
+    def _do_write_with_aliasing(self, d, box, fieldbox):
+        # slightly subtle logic here
+        # a write to an arbitrary box, all other boxes can alias this one
+        if not d or box not in self.new_boxes:
+            # therefore we throw away the cache
+            return {box: fieldbox}
+        # the object we are writing to is freshly allocated
+        # only remove some boxes from the cache
+        new_d = {}
+        for frombox, tobox in d.iteritems():
+            # the other box is *also* freshly allocated
+            # therefore frombox and box *must* contain different objects
+            # thus we can keep it in the cache
+            if frombox in self.new_boxes:
+                new_d[frombox] = tobox
+        new_d[box] = fieldbox
+        return new_d
+
+    def getarrayitem(self, box, descr, indexbox):
+        if not isinstance(indexbox, ConstInt):
+            return
+        index = indexbox.getint()
+        cache = self.heap_array_cache.get(descr, None)
+        if cache:
+            indexcache = cache.get(index, None)
+            if indexcache is not None:
+                return indexcache.get(box, None)
+
+    def getarrayitem_now_known(self, box, descr, indexbox, valuebox):
+        if not isinstance(indexbox, ConstInt):
+            return
+        index = indexbox.getint()
+        cache = self.heap_array_cache.setdefault(descr, {})
+        indexcache = cache.get(index, None)
+        if indexcache is not None:
+            indexcache[box] = valuebox
+        else:
+            cache[index] = {box: valuebox}
+
+    def setarrayitem(self, box, descr, indexbox, valuebox):
+        if not isinstance(indexbox, ConstInt):
+            cache = self.heap_array_cache.get(descr, None)
+            if cache is not None:
+                cache.clear()
+            return
+        index = indexbox.getint()
+        cache = self.heap_array_cache.setdefault(descr, {})
+        indexcache = cache.get(index, None)
+        cache[index] = self._do_write_with_aliasing(indexcache, box, valuebox)
+
+    def arraylen(self, box):
+        return self.length_cache.get(box, None)
+
+    def arraylen_now_known(self, box, lengthbox):
+        self.length_cache[box] = lengthbox
+
+    def _replace_box(self, d, oldbox, newbox):
+        new_d = {}
+        for frombox, tobox in d.iteritems():
+            if frombox is oldbox:
+                frombox = newbox
+            if tobox is oldbox:
+                tobox = newbox
+            new_d[frombox] = tobox
+        return new_d
+
+    def replace_box(self, oldbox, newbox):
+        for descr, d in self.heap_cache.iteritems():
+            self.heap_cache[descr] = self._replace_box(d, oldbox, newbox)
+        for descr, d in self.heap_array_cache.iteritems():
+            for index, cache in d.iteritems():
+                d[index] = self._replace_box(cache, oldbox, newbox)
+        self.length_cache = self._replace_box(self.length_cache, oldbox, newbox)
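The HeapCache above remembers, per field descr, which box currently holds the value of another box's field, and throws entries away whenever a write could alias them. A small illustrative sketch of that discipline, assuming the PyPy source tree is importable; plain strings stand in for boxes and descrs, which works because the cache only needs hashable keys:

    from pypy.jit.metainterp.heapcache import HeapCache

    cache = HeapCache()
    cache.new('p0')                          # p0 was allocated in the trace
    cache.setfield('p0', 'descr_x', 'i1')    # remember that p0.x is i1
    assert cache.getfield('p0', 'descr_x') == 'i1'

    # A write through an escaped (non-new) box may alias anything, so the
    # other cached entries for the same descr are dropped.
    cache.setfield('p2', 'descr_x', 'i3')
    assert cache.getfield('p0', 'descr_x') is None
    assert cache.getfield('p2', 'descr_x') == 'i3'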

File pypy/jit/metainterp/optimizeopt/optimizer.py

             guards.append(op)
         elif self.level == LEVEL_KNOWNCLASS:
             op = ResOperation(rop.GUARD_NONNULL, [box], None)
-            guards.append(op)            
+            guards.append(op)
             op = ResOperation(rop.GUARD_CLASS, [box, self.known_class], None)
             guards.append(op)
         else:
                     self.lenbound.bound.intersect(other.lenbound.bound)
                 else:
                     self.lenbound = other.lenbound.clone()
-                    
+
 
     def force_box(self):
         return self.box
         assert isinstance(constbox, Const)
         self.box = constbox
         self.level = LEVEL_CONSTANT
-        
+
         if isinstance(constbox, ConstInt):
             val = constbox.getint()
             self.intbound = IntBound(val, val)
         new.set_optimizations(optimizations)
         new.quasi_immutable_deps = self.quasi_immutable_deps
         return new
-        
+
     def produce_potential_short_preamble_ops(self, sb):
         raise NotImplementedError('This is implemented in unroll.UnrollableOptimizer')
 
         if op.returns_bool_result():
             self.bool_boxes[self.getvalue(op.result)] = None
         self._emit_operation(op)
-        
+
     @specialize.argtype(0)
-    def _emit_operation(self, op):        
+    def _emit_operation(self, op):
         for i in range(op.numargs()):
             arg = op.getarg(i)
             try:
                 arg = value.get_key_box()
             args[i] = arg
         args[n] = ConstInt(op.getopnum())
-        args[n+1] = op.getdescr()
+        args[n + 1] = op.getdescr()
         return args
 
     @specialize.argtype(0)
 
     def remember_emitting_pure(self, op):
         pass
-    
+
     def constant_fold(self, op):
         argboxes = [self.get_constant_box(op.getarg(i))
                     for i in range(op.numargs())]
             arrayvalue = self.getvalue(op.getarg(0))
             arrayvalue.make_len_gt(MODE_UNICODE, op.getdescr(), indexvalue.box.getint())
         self.optimize_default(op)
-        
 
-    
+
+
 
 dispatch_opt = make_dispatcher_method(Optimizer, 'optimize_',
         default=Optimizer.optimize_default)

File pypy/jit/metainterp/optimizeopt/rewrite.py

 
     def new(self):
         return OptRewrite()
-        
+
     def produce_potential_short_preamble_ops(self, sb):
         for op in self.loop_invariant_producer.values():
             sb.add_potential(op)
             else:
                 self.make_constant(op.result, result)
                 return
+
+        args = self.optimizer.make_args_key(op)
+        oldop = self.optimizer.pure_operations.get(args, None)
+        if oldop is not None and oldop.getdescr() is op.getdescr():
+            assert oldop.getopnum() == op.getopnum()
+            self.make_equal_to(op.result, self.getvalue(oldop.result))
+            return
+        else:
+            self.optimizer.pure_operations[args] = op
+            self.optimizer.remember_emitting_pure(op)
+
         # replace CALL_PURE with just CALL
         args = op.getarglist()
         self.emit_operation(ResOperation(rop.CALL, args, op.result,
         # expects a compile-time constant
         assert isinstance(arg, Const)
         key = make_hashable_int(arg.getint())
-        
+
         resvalue = self.loop_invariant_results.get(key, None)
         if resvalue is not None:
             self.make_equal_to(op.result, resvalue)

File pypy/jit/metainterp/optimizeopt/test/test_optimizebasic.py

         """
         self.optimize_loop(ops, expected)
 
+    def test_empty_copystrunicontent(self):
+        ops = """
+        [p0, p1, i0, i2, i3]
+        i4 = int_eq(i3, 0)
+        guard_true(i4) []
+        copystrcontent(p0, p1, i0, i2, i3)
+        jump(p0, p1, i0, i2, i3)
+        """
+        expected = """
+        [p0, p1, i0, i2, i3]
+        i4 = int_eq(i3, 0)
+        guard_true(i4) []
+        jump(p0, p1, i0, i2, 0)
+        """
+        self.optimize_strunicode_loop(ops, expected)
+
+    def test_empty_copystrunicontent_virtual(self):
+        ops = """
+        [p0]
+        p1 = newstr(23)
+        copystrcontent(p0, p1, 0, 0, 0)
+        jump(p0)
+        """
+        expected = """
+        [p0]
+        jump(p0)
+        """
+        self.optimize_strunicode_loop(ops, expected)
+
     def test_forced_virtuals_aliasing(self):
         ops = """
         [i0, i1]
         self.optimize_loop(ops, expected)
 
 
+
 class TestLLtype(BaseTestOptimizeBasic, LLtypeMixin):
     pass
 

File pypy/jit/metainterp/optimizeopt/test/test_optimizeopt.py