Commits

Ronan Lamy committed 581e3b2 Merge

hg merge default

Comments (0)

Files changed (94)

lib-python/2.7/collections.py

 __all__ += _abcoll.__all__
 
 from _collections import deque, defaultdict
+from operator import itemgetter as _itemgetter, eq as _eq
 from keyword import iskeyword as _iskeyword
 import sys as _sys
 import heapq as _heapq
-from operator import itemgetter as _itemgetter
 from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
+from itertools import imap as _imap
 
 try:
     from thread import get_ident as _get_ident
             self.__map = {}
         self.__update(*args, **kwds)
 
-    def __setitem__(self, key, value, PREV=0, NEXT=1, dict_setitem=dict.__setitem__):
+    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
         'od.__setitem__(i, y) <==> od[i]=y'
         # Setting a new item creates a new link at the end of the linked list,
         # and the inherited dictionary is updated with the new key/value pair.
         if key not in self:
             root = self.__root
-            last = root[PREV]
-            last[NEXT] = root[PREV] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
+            last = root[0]
+            last[1] = root[0] = self.__map[key] = [last, root, key]
+        return dict_setitem(self, key, value)
 
-    def __delitem__(self, key, PREV=0, NEXT=1, dict_delitem=dict.__delitem__):
+    def __delitem__(self, key, dict_delitem=dict.__delitem__):
         'od.__delitem__(y) <==> del od[y]'
         # Deleting an existing item uses self.__map to find the link which gets
         # removed by updating the links in the predecessor and successor nodes.
         dict_delitem(self, key)
         link_prev, link_next, key = self.__map.pop(key)
-        link_prev[NEXT] = link_next
-        link_next[PREV] = link_prev
+        link_prev[1] = link_next                        # update link_prev[NEXT]
+        link_next[0] = link_prev                        # update link_next[PREV]
 
     def __iter__(self):
         'od.__iter__() <==> iter(od)'
         # Traverse the linked list in order.
-        NEXT, KEY = 1, 2
         root = self.__root
-        curr = root[NEXT]
+        curr = root[1]                                  # start at the first node
         while curr is not root:
-            yield curr[KEY]
-            curr = curr[NEXT]
+            yield curr[2]                               # yield the curr[KEY]
+            curr = curr[1]                              # move to next node
 
     def __reversed__(self):
         'od.__reversed__() <==> reversed(od)'
         # Traverse the linked list in reverse order.
-        PREV, KEY = 0, 2
         root = self.__root
-        curr = root[PREV]
+        curr = root[0]                                  # start at the last node
         while curr is not root:
-            yield curr[KEY]
-            curr = curr[PREV]
+            yield curr[2]                               # yield the curr[KEY]
+            curr = curr[0]                              # move to previous node
 
     def clear(self):
         'od.clear() -> None.  Remove all items from od.'
-        for node in self.__map.itervalues():
-            del node[:]
         root = self.__root
         root[:] = [root, root, None]
         self.__map.clear()
 
         '''
         if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
+            return dict.__eq__(self, other) and all(_imap(_eq, self, other))
         return dict.__eq__(self, other)
 
     def __ne__(self, other):

lib-python/2.7/test/test_modulefinder.py

 # library.
 
 TEST_DIR = tempfile.mkdtemp()
-TEST_PATH = [TEST_DIR, os.path.dirname(__future__.__file__)]
+TEST_PATH = [TEST_DIR, os.path.dirname(tempfile.__file__)]
 
 # Each test description is a list of 5 items:
 #

lib-python/conftest.py

     RegrTest('test_codeop.py', core=True),
     RegrTest('test_coding.py', core=True),
     RegrTest('test_coercion.py', core=True),
-    RegrTest('test_collections.py'),
+    RegrTest('test_collections.py', usemodules='binascii struct'),
     RegrTest('test_colorsys.py'),
     RegrTest('test_commands.py'),
     RegrTest('test_compare.py', core=True),
     RegrTest('test_csv.py', usemodules='_csv'),
     RegrTest('test_ctypes.py', usemodules="_rawffi thread"),
     RegrTest('test_curses.py'),
-    RegrTest('test_datetime.py'),
+    RegrTest('test_datetime.py', usemodules='binascii struct'),
     RegrTest('test_dbm.py'),
     RegrTest('test_decimal.py'),
     RegrTest('test_decorators.py', core=True),

lib_pypy/_collections.py

         return c
 
     def remove(self, value):
-        # Need to be defensive for mutating comparisons
-        for i in range(len(self)):
-            if self[i] == value:
-                del self[i]
-                return
-        raise ValueError("deque.remove(x): x not in deque")
+        # Need to defend against mutating or failing comparisons
+        i = 0
+        try:
+            for i in range(len(self)):
+                if self[0] == value:
+                    self.popleft()
+                    return
+                self.append(self.popleft())
+            i += 1
+            raise ValueError("deque.remove(x): x not in deque")
+        finally:
+            self.rotate(i)
 
     def rotate(self, n=1):
         length = len(self)

lib_pypy/cPickle.py

 #
-# One-liner implementation of cPickle
+# Reimplementation of cPickle, mostly as a copy of pickle.py
 #
 
 from pickle import Pickler, dump, dumps, PickleError, PicklingError, UnpicklingError, _EmptyClass
 
 # Unpickling machinery
 
+class _Stack(list):
+    def pop(self, index=-1):
+        try:
+            return list.pop(self, index)
+        except IndexError:
+            raise UnpicklingError("unpickling stack underflow")
+
 class Unpickler(object):
 
     def __init__(self, file):
         Return the reconstituted object hierarchy specified in the file.
         """
         self.mark = object() # any new unique object
-        self.stack = []
+        self.stack = _Stack()
         self.append = self.stack.append
         try:
             key = ord(self.read(1))

lib_pypy/ctypes_support.py

 
 if sys.platform == 'win32':
     standard_c_lib._errno.restype = ctypes.POINTER(ctypes.c_int)
+    standard_c_lib._errno.argtypes = None
     def _where_is_errno():
         return standard_c_lib._errno()
     
 elif sys.platform in ('linux2', 'freebsd6'):
     standard_c_lib.__errno_location.restype = ctypes.POINTER(ctypes.c_int)
+    standard_c_lib.__errno_location.argtypes = None
     def _where_is_errno():
         return standard_c_lib.__errno_location()
 
 elif sys.platform in ('darwin', 'freebsd7', 'freebsd8', 'freebsd9'):
     standard_c_lib.__error.restype = ctypes.POINTER(ctypes.c_int)
+    standard_c_lib.__error.argtypes = None
     def _where_is_errno():
         return standard_c_lib.__error()
 

lib_pypy/datetime.py

 
 import time as _time
 import math as _math
+import decimal as _decimal
 
 MINYEAR = 1
 MAXYEAR = 9999
         return offset
     raise ValueError("%s()=%d, must be in -1439..1439" % (name, offset))
 
+def _check_int_field(value):
+    if not isinstance(value, (int, long, _decimal.Decimal)):
+        raise TypeError('integer argument expected')
+    return int(value)
+
 def _check_date_fields(year, month, day):
-    for value in [year, day]:
-        if not isinstance(value, (int, long)):
-            raise TypeError('int expected')
+    year = _check_int_field(year)
+    month = _check_int_field(month)
+    day = _check_int_field(day)
     if not MINYEAR <= year <= MAXYEAR:
         raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year)
     if not 1 <= month <= 12:
     dim = _days_in_month(year, month)
     if not 1 <= day <= dim:
         raise ValueError('day must be in 1..%d' % dim, day)
+    return year, month, day
 
 def _check_time_fields(hour, minute, second, microsecond):
-    for value in [hour, minute, second, microsecond]:
-        if not isinstance(value, (int, long)):
-            raise TypeError('int expected')
+    hour = _check_int_field(hour)
+    minute = _check_int_field(minute)
+    second = _check_int_field(second)
+    microsecond = _check_int_field(microsecond)
     if not 0 <= hour <= 23:
         raise ValueError('hour must be in 0..23', hour)
     if not 0 <= minute <= 59:
         raise ValueError('second must be in 0..59', second)
     if not 0 <= microsecond <= 999999:
         raise ValueError('microsecond must be in 0..999999', microsecond)
+    return hour, minute, second, microsecond
 
 def _check_tzinfo_arg(tz):
     if tz is not None and not isinstance(tz, tzinfo):
             self = object.__new__(cls)
             self.__setstate(year)
             return self
-        _check_date_fields(year, month, day)
+        year, month, day = _check_date_fields(year, month, day)
         self = object.__new__(cls)
         self._year = year
         self._month = month
             month = self._month
         if day is None:
             day = self._day
-        _check_date_fields(year, month, day)
+        year, month, day = _check_date_fields(year, month, day)
         return date(year, month, day)
 
     # Comparisons of date objects with other.
         second, microsecond (default to zero)
         tzinfo (default to None)
         """
-        self = object.__new__(cls)
         if isinstance(hour, str):
             # Pickle support
+            self = object.__new__(cls)
             self.__setstate(hour, minute or None)
             return self
+        hour, minute, second, microsecond = _check_time_fields(hour, minute, second, microsecond)
         _check_tzinfo_arg(tzinfo)
-        _check_time_fields(hour, minute, second, microsecond)
+        self = object.__new__(cls)
         self._hour = hour
         self._minute = minute
         self._second = second
             microsecond = self.microsecond
         if tzinfo is True:
             tzinfo = self.tzinfo
-        _check_time_fields(hour, minute, second, microsecond)
+        hour, minute, second, microsecond = _check_time_fields(hour, minute, second, microsecond)
         _check_tzinfo_arg(tzinfo)
         return time(hour, minute, second, microsecond, tzinfo)
 
             self = date.__new__(cls, year[:4])
             self.__setstate(year, month)
             return self
+        year, month, day = _check_date_fields(year, month, day)
+        hour, minute, second, microsecond = _check_time_fields(hour, minute, second, microsecond)
         _check_tzinfo_arg(tzinfo)
-        _check_time_fields(hour, minute, second, microsecond)
-        self = date.__new__(cls, year, month, day)
-        # XXX This duplicates __year, __month, __day for convenience :-(
+        self = object.__new__(cls)
         self._year = year
         self._month = month
         self._day = day
             microsecond = self.microsecond
         if tzinfo is True:
             tzinfo = self.tzinfo
-        _check_date_fields(year, month, day)
-        _check_time_fields(hour, minute, second, microsecond)
+        year, month, day = _check_date_fields(year, month, day)
+        hour, minute, second, microsecond = _check_time_fields(hour, minute, second, microsecond)
         _check_tzinfo_arg(tzinfo)
         return datetime(year, month, day, hour, minute, second,
                           microsecond, tzinfo)

lib_pypy/greenlet.py

 import _continuation, sys
 
+__version__ = "0.4.0"
 
 # ____________________________________________________________
 # Exceptions
     def __switch(target, methodname, *args):
         current = getcurrent()
         #
-        while not target:
+        while not (target.__main or _continulet.is_pending(target)):
+            # inlined __nonzero__ ^^^ in case it's overridden
             if not target.__started:
                 if methodname == 'switch':
                     greenlet_func = _greenlet_start

lib_pypy/pypy_test/test_cPickle.py

+from __future__ import absolute_import
+import py
+
+from lib_pypy import cPickle
+
+def test_stack_underflow():
+    py.test.raises(cPickle.UnpicklingError, cPickle.loads, "a string")

lib_pypy/pypy_test/test_collections.py

         d = collections.deque([MutatingCmp()])
         py.test.raises(IndexError, d.remove, 1)
 
+    def test_remove_failing(self):
+        class FailingCmp(object):
+            def __eq__(self, other):
+                assert False
+
+        f = FailingCmp()
+        d = collections.deque([1, 2, 3, f, 4, 5])
+        d.remove(3)
+        py.test.raises(AssertionError, d.remove, 4)
+        assert d == collections.deque([1, 2, f, 4, 5])
+
     def test_maxlen(self):
         d = collections.deque([], 3)
         d.append(1); d.append(2); d.append(3); d.append(4)

lib_pypy/pyrepl/unix_console.py

 
         if hasattr(self, 'old_sigwinch'):
             signal.signal(signal.SIGWINCH, self.old_sigwinch)
+            del self.old_sigwinch
 
     def __sigwinch(self, signum, frame):
         self.height, self.width = self.getheightwidth()
         if self.config.option.runappdirect:
             # only collect regular tests if we are in an 'app_test' directory,
             # or in test_lib_pypy
-            names = self.listnames()
-            return "app_test" in names or "test_lib_pypy" in names
-        else:
-            return True
+            for name in self.listnames():
+                if "app_test" in name or "test_lib_pypy" in name:
+                    return True
+            return False
+        return True
 
     def funcnamefilter(self, name):
         if name.startswith('test_'):
 
 ::
 
-  pypy ~/path_to_pypy_checkout/pypy/translator/goal/translate.py -O1 --platform=arm target.py
+  pypy ~/path_to_pypy_checkout/rpython/translator/goal/translate.py -O1 --platform=arm target.py
 
 If everything worked correctly this should yield an ARM binary. Running this binary in the ARM chroot or on an ARM device should produce the output ``"Hello World"``.
 

pypy/doc/cppyy.rst

     $ hg clone https://bitbucket.org/pypy/pypy
     $ cd pypy
     $ hg up reflex-support         # optional
-    $ cd pypy/translator/goal
+    $ cd pypy/goal
     
     # This example shows python, but using pypy-c is faster and uses less memory
-    $ python translate.py -O jit --gcrootfinder=shadowstack targetpypystandalone.py --withmod-cppyy
+    $ python ../../rpython/bin/rpython.py -O jit --gcrootfinder=shadowstack targetpypystandalone.py --withmod-cppyy
 
 This will build a ``pypy-c`` that includes the cppyy module, and through that,
 Reflex support.

pypy/doc/ctypes-implementation.rst

 The pypy-c translated to run the ctypes tests can be used to run the pyglet examples as well. They can be run like e.g.::
 
     $ cd pyglet/
-    $ PYTHONPATH=. ../ctypes-stable/pypy/translator/goal/pypy-c examples/opengl.py
+    $ PYTHONPATH=. ../ctypes-stable/pypy/goal/pypy-c examples/opengl.py
 
 
 they usually should be terminated with ctrl-c. Refer to the their doc strings for details about how they should behave.

pypy/doc/getting-started-dev.rst

     python bin/translatorshell.py
 
 Test snippets of translatable code are provided in the file
-``pypy/translator/test/snippet.py``, which is imported under the name
+``rpython/translator/test/snippet.py``, which is imported under the name
 ``snippet``.  For example::
 
     >>> t = Translation(snippet.is_perfect_number, [int])
 The graph can be turned into C code::
 
    >>> t.rtype()
-   >>> f = t.compile_c()
+   >>> lib = t.compile_c()
 
 The first command replaces the operations with other low level versions that
-only use low level types that are available in C (e.g. int). To try out the
-compiled version::
+only use low level types that are available in C (e.g. int). The compiled
+version is now in a ``.so`` library. You can run it say using ctypes:
 
+   >>> from ctypes import CDLL
+   >>> f = CDLL(lib)
    >>> f(5)
-   False
+   0
    >>> f(6)
-   True
+   1
 
 Translating the flow graph to CLI or JVM code
 +++++++++++++++++++++++++++++++++++++++++++++
 There is a small-to-medium demo showing the translator and the annotator::
 
     cd demo
-    ../pypy/translator/goal/translate.py --view --annotate bpnn.py
+    ../rpython/translator/goal/translate.py --view --annotate bpnn.py
 
 This causes ``bpnn.py`` to display itself as a call graph and class
 hierarchy.  Clicking on functions shows the flow graph of the particular
 To turn this example to C code (compiled to the executable ``bpnn-c``),
 type simply::
 
-    ../pypy/translator/goal/translate.py bpnn.py
+    ../rpython/translator/goal/translate.py bpnn.py
 
 
 Translating Full Programs
 +++++++++++++++++++++++++
 
 To translate full RPython programs, there is the script ``translate.py`` in
-``translator/goal``. Examples for this are a slightly changed version of
+``rpython/translator/goal``. Examples for this are a slightly changed version of
 Pystone::
 
-    cd pypy/translator/goal
+    cd rpython/translator/goal
     python translate.py targetrpystonedalone
 
 This will produce the executable "targetrpystonedalone-c".

pypy/doc/index.rst

 ================================   =========================================== 
 Directory                          explanation/links
 ================================   =========================================== 
-`pypy/annotation/`_                `type inferencing code`_ for `RPython`_ programs 
 
-`pypy/bin/`_                       command-line scripts, mainly `py.py`_ and `translatorshell.py`_
+`pypy/bin/`_                       command-line scripts, mainly `pyinteractive.py`_
 
 `pypy/config/`_                    handles the numerous options for building and running PyPy
 
 
 `pypy/objspace/`_                  `object space`_ implementations
 
-`pypy/objspace/flow/`_             the FlowObjSpace_ implementing `abstract interpretation`_
-
 `pypy/objspace/std/`_              the StdObjSpace_ implementing CPython's objects and types
 
-`pypy/rlib/`_                      a `"standard library"`_ for RPython_ programs
-
-`pypy/rpython/`_                   the `RPython Typer`_ 
-
-`pypy/rpython/lltypesystem/`_      the `low-level type system`_ for C-like backends
-
-`pypy/rpython/ootypesystem/`_      the `object-oriented type system`_ for OO backends
-
-`pypy/rpython/memory/`_            the `garbage collector`_ construction framework
-
 `pypy/tool/`_                      various utilities and hacks used from various places 
 
 `pypy/tool/algo/`_                 general-purpose algorithmic and mathematic
 
 `pypy/tool/pytest/`_               support code for our `testing methods`_
 
-`pypy/translator/`_                translation_ backends and support code
 
-`pypy/translator/backendopt/`_     general optimizations that run before a backend generates code
+`rpython/annotator/`_              `type inferencing code`_ for `RPython`_ programs 
 
-`pypy/translator/c/`_              the `GenC backend`_, producing C code from an
+`rpython/config/`_                 handles the numerous options for RPython
+
+
+`rpython/flowspace/`_              the FlowObjSpace_ implementing `abstract interpretation`_
+
+
+`rpython/rlib/`_                   a `"standard library"`_ for RPython_ programs
+
+`rpython/rtyper/`_                 the `RPython Typer`_ 
+
+`rpython/rtyper/lltypesystem/`_    the `low-level type system`_ for C-like backends
+
+`rpython/rtyper/ootypesystem/`_    the `object-oriented type system`_ for OO backends
+
+`rpython/rtyper/memory/`_          the `garbage collector`_ construction framework
+
+`rpython/translator/`_             translation_ backends and support code
+
+`rpython/translator/backendopt/`_  general optimizations that run before a backend generates code
+
+`rpython/translator/c/`_           the `GenC backend`_, producing C code from an
                                    RPython program (generally via the rtyper_)
 
-`pypy/translator/cli/`_            the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_)
+`rpython/translator/cli/`_         the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_)
 
-`pypy/translator/goal/`_           our `main PyPy-translation scripts`_ live here
+`pypy/goal/`_                      our `main PyPy-translation scripts`_ live here
 
-`pypy/translator/jvm/`_            the Java backend
+`rpython/translator/jvm/`_         the Java backend
 
-`pypy/translator/tool/`_           helper tools for translation, including the Pygame
+`rpython/translator/tool/`_        helper tools for translation, including the Pygame
                                    `graph viewer`_
 
 ``*/test/``                        many directories have a test subdirectory containing test 

pypy/doc/sandbox.rst

 -----
 
 
-In pypy/translator/goal::
+In pypy/goal::
 
-   ./translate.py -O2 --sandbox targetpypystandalone.py
+   ../../rpython/bin/rpython -O2 --sandbox targetpypystandalone.py
 
 If you don't have a regular PyPy installed, you should, because it's
 faster to translate, but you can also run ``python translate.py`` instead.
 
 
-To run it, use the tools in the pypy/translator/sandbox directory::
+To run it, use the tools in the pypy/sandbox directory::
 
    ./pypy_interact.py /some/path/pypy-c-sandbox [args...]
 

pypy/doc/whatsnew-head.rst

 .. branch: fix-e4fa0b2
 .. branch: win32-fixes
 .. branch: fix-version-tool
+.. branch: popen2-removal
 
 .. branch: release-2.0-beta1
 
 .. branch: inline-virtualref-2
 Better optimized certain types of frame accesses in the JIT, particularly
 around exceptions that escape the function they were raised in.
+
+.. branch: missing-ndarray-attributes
+Some missing attributes from ndarrays

pypy/interpreter/app_main.py

     # (relevant in case of "reload(sys)")
     sys.argv[:] = argv
 
-    if PYTHON26 and not options["ignore_environment"]:
-        if os.getenv('PYTHONNOUSERSITE'):
-            options["no_user_site"] = 1
-        if os.getenv('PYTHONDONTWRITEBYTECODE'):
-            options["dont_write_bytecode"] = 1
+    if not options["ignore_environment"]:
+        if os.getenv('PYTHONUNBUFFERED'):
+            options["unbuffered"] = 1
+        if PYTHON26:
+            if os.getenv('PYTHONNOUSERSITE'):
+                options["no_user_site"] = 1
+            if os.getenv('PYTHONDONTWRITEBYTECODE'):
+                options["dont_write_bytecode"] = 1
 
     if (options["interactive"] or
         (not options["ignore_environment"] and os.getenv('PYTHONINSPECT'))):
         root = dn(dn(dn(thisfile)))
         return [join(root, 'lib-python', '2.7'),
                 join(root, 'lib_pypy')]
-    
+
     def pypy_resolvedirof(s):
         # we ignore the issue of symlinks; for tests, the executable is always
         # interpreter/app_main.py anyway
     del os # make sure that os is not available globally, because this is what
            # happens in "real life" outside the tests
 
+    if 'time' not in sys.builtin_module_names:
+        # make some tests happy by loading this before we clobber sys.path
+        import time; del time
+
     # no one should change to which lists sys.argv and sys.path are bound
     old_argv = sys.argv
     old_path = sys.path

pypy/interpreter/test2/test_app_main.py

         assert data == '\x00(STDOUT)\n\x00'    # from stdout
         child_out_err.close()
 
+    def test_non_interactive_stdout_unbuffered(self, monkeypatch):
+        monkeypatch.setenv('PYTHONUNBUFFERED', '1')
+        path = getscript(r"""
+            import sys, time
+            sys.stdout.write('\x00(STDOUT)\n\x00')
+            time.sleep(1)
+            sys.stderr.write('\x00[STDERR]\n\x00')
+            time.sleep(1)
+            # stdout flushed automatically here
+            """)
+        cmdline = '%s -E "%s" %s' % (sys.executable, app_main, path)
+        print 'POPEN:', cmdline
+        child_in, child_out_err = os.popen4(cmdline)
+        data = child_out_err.read(11)
+        assert data == '\x00(STDOUT)\n\x00'    # from stdout
+        data = child_out_err.read(11)
+        assert data == '\x00[STDERR]\n\x00'    # from stderr
+        child_out_err.close()
+        child_in.close()
+
     def test_proper_sys_path(self, tmpdir):
         data = self.run('-c "import _ctypes"', python_flags='-S')
         if data.startswith('Traceback'):

pypy/module/__pypy__/__init__.py

 class TimeModule(MixedModule):
     appleveldefs = {}
     interpleveldefs = {}
-    if sys.platform.startswith("linux"):
+    if sys.platform.startswith("linux") or 'bsd' in sys.platform:
         from pypy.module.__pypy__ import interp_time
         interpleveldefs["clock_gettime"] = "interp_time.clock_gettime"
         interpleveldefs["clock_getres"] = "interp_time.clock_getres"

pypy/module/_cffi_backend/cbuffer.py

 from pypy.interpreter.baseobjspace import Wrappable
 from pypy.interpreter.buffer import RWBuffer
 from pypy.interpreter.gateway import unwrap_spec, interp2app
-from pypy.interpreter.typedef import TypeDef
+from pypy.interpreter.typedef import TypeDef, make_weakref_descr
 from rpython.rtyper.lltypesystem import rffi
 from pypy.module._cffi_backend import cdataobj, ctypeptr, ctypearray
 
     # a different subclass of Wrappable for the MiniBuffer, because we
     # want a slightly different (simplified) API at the level of Python.
 
-    def __init__(self, buffer):
+    def __init__(self, buffer, keepalive=None):
         self.buffer = buffer
+        self.keepalive = keepalive
 
     def descr_len(self, space):
         return self.buffer.descr_len(space)
     __getitem__ = interp2app(MiniBuffer.descr_getitem),
     __setitem__ = interp2app(MiniBuffer.descr_setitem),
     __buffer__ = interp2app(MiniBuffer.descr__buffer__),
+    __weakref__ = make_weakref_descr(MiniBuffer),
     )
 MiniBuffer.typedef.acceptable_as_base_class = False
 
         raise operationerrfmt(space.w_TypeError,
                               "don't know the size pointed to by '%s'",
                               ctype.name)
-    return space.wrap(MiniBuffer(LLBuffer(cdata._cdata, size)))
+    return space.wrap(MiniBuffer(LLBuffer(cdata._cdata, size), cdata))

pypy/module/_cffi_backend/ctypeptr.py

 
     def _prepare_pointer_call_argument(self, w_init, cdata):
         space = self.space
-        if (space.isinstance_w(w_init, space.w_list) or
+        if space.is_w(w_init, space.w_None):
+            rffi.cast(rffi.CCHARPP, cdata)[0] = lltype.nullptr(rffi.CCHARP.TO)
+            return 3
+        elif (space.isinstance_w(w_init, space.w_list) or
             space.isinstance_w(w_init, space.w_tuple)):
             length = space.int_w(space.len(w_init))
         elif space.isinstance_w(w_init, space.w_basestring):

pypy/module/_cffi_backend/test/_backend_test_c.py

             return eval('u'+repr(other).replace(r'\\u', r'\u')
                                        .replace(r'\\U', r'\U'))
     u = U()
+    str2bytes = str
 else:
     type_or_class = "class"
     long = int
     bytechr = lambda n: bytes([n])
     bitem2bchr = bytechr
     u = ""
+    str2bytes = lambda s: bytes(s, "ascii")
 
 def size_of_int():
     BInt = new_primitive_type("int")
     f = cast(BFunc23, _testfunc(23))
     res = f(b"foo")
     assert res == 1000 * ord(b'f')
+    res = f(None)
+    assert res == -42
 
 def test_call_function_23_bis():
     # declaring the function as int(unsigned char*)
     import _weakref
     BInt = new_primitive_type("int")
     BPtr = new_pointer_type(BInt)
-    _weakref.ref(BInt)
-    _weakref.ref(newp(BPtr, 42))
-    _weakref.ref(cast(BPtr, 42))
-    _weakref.ref(cast(BInt, 42))
+    rlist = [_weakref.ref(BInt),
+             _weakref.ref(newp(BPtr, 42)),
+             _weakref.ref(cast(BPtr, 42)),
+             _weakref.ref(cast(BInt, 42)),
+             _weakref.ref(buffer(newp(BPtr, 42))),
+             ]
+    for i in range(5):
+        import gc; gc.collect()
+        if [r() for r in rlist] == [None for r in rlist]:
+            break
 
 def test_no_inheritance():
     BInt = new_primitive_type("int")
     BCharP = new_pointer_type(new_primitive_type("char"))
     BCharArray = new_array_type(BCharP, None)
     py.test.raises(TypeError, newp, BCharArray, u+'foobar')
+
+def test_buffer_keepalive():
+    BCharP = new_pointer_type(new_primitive_type("char"))
+    BCharArray = new_array_type(BCharP, None)
+    buflist = []
+    for i in range(20):
+        c = newp(BCharArray, str2bytes("hi there %d" % i))
+        buflist.append(buffer(c))
+    import gc; gc.collect()
+    for i in range(20):
+        buf = buflist[i]
+        assert buf[:] == str2bytes("hi there %d\x00" % i)

pypy/module/_cffi_backend/test/_test_lib.c

 
 static int _testfunc23(char *p)
 {
-    return 1000 * p[0];
+    if (p)
+        return 1000 * p[0];
+    return -42;
 }
 
 DLLEXPORT void *gettestfunc(int num)

pypy/module/_multibytecodec/c_codecs.py

         "pypy_cjk_enc_init", "pypy_cjk_enc_free", "pypy_cjk_enc_chunk",
         "pypy_cjk_enc_reset", "pypy_cjk_enc_outbuf", "pypy_cjk_enc_outlen",
         "pypy_cjk_enc_inbuf_remaining", "pypy_cjk_enc_inbuf_consumed",
-        "pypy_cjk_enc_replace_on_error",
+        "pypy_cjk_enc_replace_on_error", "pypy_cjk_enc_getcodec",
     ] + ["pypy_cjkcodec_%s" % codec for codec in codecs],
 )
 

pypy/module/_socket/interp_func.py

File contents unchanged.

pypy/module/_socket/interp_socket.py

File contents unchanged.

pypy/module/bz2/test/test_bz2_compdecomp.py

 if os.name == "nt":
     from py.test import skip
     skip("bz2 module is not available on Windows")
-        
+
 def setup_module(mod):
     DATA = 'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
 
 
     def test_creation(self):
         from bz2 import BZ2Compressor
-        
+
         raises(TypeError, BZ2Compressor, "foo")
         raises(ValueError, BZ2Compressor, 10)
-        
+
         BZ2Compressor(1)
         BZ2Compressor(9)
-        
+
     def test_compress(self):
         from bz2 import BZ2Compressor
-        
+
         bz2c = BZ2Compressor()
         raises(TypeError, bz2c.compress)
         data = bz2c.compress(self.TEXT)
         data = "%s%s" % (data, bz2c.flush())
         assert self.decompress(data) == self.TEXT
-        
+
     def test_compress_huge_data(self):
         if not self.HUGE_OK:
             skip("skipping test requiring lots of memory")
-        from bz2 import BZ2Compressor            
-        
+        from bz2 import BZ2Compressor
+
         HUGE_DATA = self.TEXT * 10000
         bz2c = BZ2Compressor()
         raises(TypeError, bz2c.compress)
         assert self.decompress(data) == HUGE_DATA
 
     def test_compress_chunks_10(self):
-        from bz2 import BZ2Compressor            
-        
+        from bz2 import BZ2Compressor
+
         bz2c = BZ2Compressor()
         n = 0
         data = ""
         cls.w_TEXT = cls.space.wrap(TEXT)
         cls.w_DATA = cls.space.wrap(DATA)
         cls.w_BUGGY_DATA = cls.space.wrap(BUGGY_DATA)
-        
+
     def test_creation(self):
         from bz2 import BZ2Decompressor
-        
+
         raises(TypeError, BZ2Decompressor, "foo")
-        
+
         BZ2Decompressor()
-        
+
     def test_attribute(self):
         from bz2 import BZ2Decompressor
-        
+
         bz2d = BZ2Decompressor()
         assert bz2d.unused_data == ""
 
     def test_decompress(self):
         from bz2 import BZ2Decompressor
-        
+
         bz2d = BZ2Decompressor()
         raises(TypeError, bz2d.decompress)
         decompressed_data = bz2d.decompress(self.DATA)
 
     def test_decompress_chunks_10(self):
         from bz2 import BZ2Decompressor
-        
+
         bz2d = BZ2Decompressor()
         decompressed_data = ""
         n = 0
                 break
             decompressed_data = "%s%s" % (decompressed_data, bz2d.decompress(temp))
             n += 1
-        
+
         assert decompressed_data == self.TEXT
-    
+
     def test_decompress_unused_data(self):
         # test with unused data. (data after EOF)
         from bz2 import BZ2Decompressor
-        
+
         bz2d = BZ2Decompressor()
         unused_data = "this is unused data"
         decompressed_data = bz2d.decompress(self.DATA + unused_data)
 
     def test_EOF_error(self):
         from bz2 import BZ2Decompressor
-        
+
         bz2d = BZ2Decompressor()
         bz2d.decompress(self.DATA)
         raises(EOFError, bz2d.decompress, "foo")
 
     def test_compress_function(self):
         from bz2 import compress
-    
+
         raises(TypeError, compress, 123)
         raises(ValueError, compress, "foo", 10)
         raises(TypeError, compress, "foo", "foo")
-    
+
         data = compress(self.TEXT)
         assert self.decompress(data) == self.TEXT
 
         if not self.HUGE_OK:
             skip("skipping test requiring lots of memory")
         from bz2 import compress
-    
+
         HUGE_DATA = self.TEXT * 10000
 
         data = compress(HUGE_DATA)
 
     def test_decompress_function(self):
         import bz2
-    
+
         raises(TypeError, bz2.decompress)
         assert bz2.decompress("") == ""
         decompressed_data = bz2.decompress(self.DATA)

pypy/module/bz2/test/test_bz2_file.py

 import py
 
 from pypy.interpreter.gateway import unwrap_spec, interp2app
+from pypy.module.bz2.test.support import CheckAllocation
 
 
 if os.name == "nt":
     mod.RANDOM_DATA = ''.join([s[int(random.random() * len(s))] for i in range(30000)])
 
 
-class AppTestBZ2File: #(CheckAllocation):
-    # XXX for unknown reasons, we cannot do allocation checks, as sth is
-    # keeping those objects alive (BZ2File objects)
-
+class AppTestBZ2File(CheckAllocation):
     spaceconfig = {
         "usemodules": ["bz2", "binascii", "rctime"]
     }
         assert bz2f.closed == False
         bz2f.close()
         assert bz2f.closed == True
-    
+
     def test_creation(self):
         from bz2 import BZ2File
-        
+
         raises(ValueError, BZ2File, self.temppath, mode='w', compresslevel=10)
         raises(ValueError, BZ2File, self.temppath, mode='XYZ')
         # XXX the following is fine, currently:
         #raises(ValueError, BZ2File, self.temppath, mode='ww')
-        
+
         BZ2File(self.temppath, mode='wU', buffering=0, compresslevel=8)
         BZ2File(self.temppath, mode='wb')
         # a large buf size
 
     def test_close(self):
         from bz2 import BZ2File
-        
+
         # writeonly
         bz2f = BZ2File(self.temppath, mode='w')
         bz2f.close()
-        # since we use fclose() internally you can't close it twice
-        # bz2f.close()
-        
+        bz2f.close()
+
         # readonly
         bz2f = BZ2File(self.temppath, mode='r')
         bz2f.close()
-        
+        bz2f.close()
+
     def test_tell(self):
         from bz2 import BZ2File
-        
+
         bz2f = BZ2File(self.temppath, mode='w')
         bz2f.close()
         raises(ValueError, bz2f.tell)
-        
+
         bz2f = BZ2File(self.temppath, mode='w')
         pos = bz2f.tell()
         bz2f.close()
         assert pos == 0
-    
+
     def test_seek(self):
         from bz2 import BZ2File
-        
+
         # hack to create a foo file
         open(self.temppath, "w").close()
-        
+
         # cannot seek if close
         bz2f = BZ2File(self.temppath, mode='r')
         bz2f.close()
         raises(ValueError, bz2f.seek, 0)
-        
+
         # cannot seek if 'w'
         bz2f = BZ2File(self.temppath, mode='w')
         raises(IOError, bz2f.seek, 0)
         bz2f.close()
-        
+
         bz2f = BZ2File(self.temppath, mode='r')
         raises(TypeError, bz2f.seek)
         raises(TypeError, bz2f.seek, "foo")
         raises(TypeError, bz2f.seek, 0, "foo")
-        
+
         bz2f.seek(0)
         assert bz2f.tell() == 0
         del bz2f   # delete from this frame, which is captured in the traceback
     def test_open_close_del(self):
         from bz2 import BZ2File
         self.create_temp_file()
-        
+
         for i in range(10):
             f = BZ2File(self.temppath)
             f.close()
             del f
-    
+
     def test_open_non_existent(self):
         from bz2 import BZ2File
         raises(IOError, BZ2File, "/non/existent/path")
-    
+
     def test_open_mode_U(self):
         # bug #1194181: bz2.BZ2File opened for write with mode "U"
         from bz2 import BZ2File
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath, "U")
         bz2f.close()
         f = open(self.temppath)
         f.read()
         assert f.tell() == len(self.DATA)
         f.close()
-    
+
     def test_seek_forward(self):
         from bz2 import BZ2File
         self.create_temp_file()
         assert bz2f.tell() == len(self.TEXT)
         assert bz2f.read() == ""
         bz2f.close()
-    
+
     def test_seek_post_end_twice(self):
         from bz2 import BZ2File
         self.create_temp_file()
         from bz2 import BZ2File
         from cStringIO import StringIO
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
-        # XXX
-        #raises(TypeError, bz2f.readline, None)
+        raises(TypeError, bz2f.readline, None)
         sio = StringIO(self.TEXT)
         for line in sio.readlines():
             line_read = bz2f.readline()
     def test_read(self):
         from bz2 import BZ2File
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
-        # XXX
-        # raises(TypeError, bz2f.read, None)
+        raises(TypeError, bz2f.read, None)
         text_read = bz2f.read()
         assert text_read == self.TEXT
         bz2f.close()
     def test_read_chunk9(self):
         from bz2 import BZ2File
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
         text_read = ""
         while True:
     def test_read_100_bytes(self):
         from bz2 import BZ2File
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
         assert bz2f.read(100) == self.TEXT[:100]
         bz2f.close()
     def test_universal_newlines_lf(self):
         from bz2 import BZ2File
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath, "rU")
         assert bz2f.read() == self.TEXT
         assert bz2f.newlines == "\n"
     def test_universal_newlines_crlf(self):
         from bz2 import BZ2File
         self.create_temp_file(crlf=True)
-        
+
         bz2f = BZ2File(self.temppath, "rU")
         data = bz2f.read()
         assert data == self.TEXT
         from bz2 import BZ2File
         from cStringIO import StringIO
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
-        # XXX
-        #raises(TypeError, bz2f.readlines, None)
+        raises(TypeError, bz2f.readlines, None)
         sio = StringIO(self.TEXT)
         assert bz2f.readlines() == sio.readlines()
         bz2f.close()
         from bz2 import BZ2File
         from cStringIO import StringIO
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
         sio = StringIO(self.TEXT)
         assert list(iter(bz2f)) == sio.readlines()
         bz2f.close()
-        
+
     def test_xreadlines(self):
         from bz2 import BZ2File
         from cStringIO import StringIO
         self.create_temp_file()
-        
+
         bz2f = BZ2File(self.temppath)
         sio = StringIO(self.TEXT)
         assert list(bz2f.xreadlines()) == sio.readlines()
     def test_readlines_bug_1191043(self):
         # readlines()/xreadlines() for files containing no newline
         from bz2 import BZ2File
-        
+
         DATA = 'BZh91AY&SY\xd9b\x89]\x00\x00\x00\x03\x80\x04\x00\x02\x00\x0c\x00 \x00!\x9ah3M\x13<]\xc9\x14\xe1BCe\x8a%t'
         f = open(self.temppath, "wb")
         f.write(DATA)
         f.close()
-        
+
         bz2f = BZ2File(self.temppath)
         lines = bz2f.readlines()
         bz2f.close()
         xlines = list(bz2f.xreadlines())
         bz2f.close()
         assert xlines == ['Test']
-    
+
     def test_write(self):
         from bz2 import BZ2File
 
         raises(TypeError, bz2f.write)
         bz2f.write(self.TEXT)
         bz2f.close()
-        
+
         f = open(self.temppath, "rb")
         assert self.decompress(f.read()) == self.TEXT
         f.close()
             data = self.TEXT[n * 10:(n + 1) * 10]
             if not data:
                 break
-            
+
             bz2f.write(data)
             n += 1
         bz2f.close()
-        
+
         f = open(self.temppath, "rb")
         assert self.decompress(f.read()) == self.TEXT
         f.close()
         f = open(self.temppath, "rb")
         assert self.decompress(f.read()) == self.TEXT
         f.close()
-        
+
     def test_write_methods_on_readonly_file(self):
         from bz2 import BZ2File
 
             assert data == "abc"
         assert f.closed
 
-        
-        
+
+
 # has_cmdline_bunzip2 = sys.platform not in ("win32", "os2emx", "riscos")
 # 
 # if has_cmdline_bunzip2:

pypy/module/imp/__init__.py

     __import__ function.
     """
     interpleveldefs = {
+        'SEARCH_ERROR':    'space.wrap(importing.SEARCH_ERROR)',
         'PY_SOURCE':       'space.wrap(importing.PY_SOURCE)',
         'PY_COMPILED':     'space.wrap(importing.PY_COMPILED)',
         'C_EXTENSION':     'space.wrap(importing.C_EXTENSION)',
         'PKG_DIRECTORY':   'space.wrap(importing.PKG_DIRECTORY)',
         'C_BUILTIN':       'space.wrap(importing.C_BUILTIN)',
         'PY_FROZEN':       'space.wrap(importing.PY_FROZEN)',
+        'IMP_HOOK':        'space.wrap(importing.IMP_HOOK)',
         'get_suffixes':    'interp_imp.get_suffixes',
 
         'get_magic':       'interp_imp.get_magic',

pypy/module/micronumpy/__init__.py

         'True_': 'types.Bool.True',
         'False_': 'types.Bool.False',
 
+        'bool': 'space.w_bool',
+        'int': 'space.w_int',
+
         'typeinfo': 'interp_dtype.get_dtype_cache(space).w_typeinfo',
 
         'generic': 'interp_boxes.W_GenericBox',

pypy/module/micronumpy/arrayimpl/base.py

     def is_scalar(self):
         return False
 
+    def base(self):
+        raise NotImplementedError
+
+    def create_iter(self, shape=None):
+        raise NotImplementedError
+
 class BaseArrayIterator(object):
     def next(self):
         raise NotImplementedError # purely abstract base class

pypy/module/micronumpy/arrayimpl/concrete.py

 
 from pypy.module.micronumpy.arrayimpl import base
-from pypy.module.micronumpy import support, loop
+from pypy.module.micronumpy import support, loop, iter
 from pypy.module.micronumpy.base import convert_to_array, W_NDimArray,\
      ArrayArgumentException
 from pypy.module.micronumpy.strides import calc_new_strides, shape_agreement,\
      calculate_broadcast_strides, calculate_dot_strides
 from pypy.module.micronumpy.iter import Chunk, Chunks, NewAxisChunk, RecordChunk
 from pypy.interpreter.error import OperationError, operationerrfmt
+from pypy.interpreter.buffer import RWBuffer
+from rpython.rlib import jit
 from rpython.rtyper.lltypesystem import rffi, lltype
-from rpython.rlib import jit
-from rpython.rlib.rawstorage import free_raw_storage, RAW_STORAGE
+from rpython.rlib.rawstorage import free_raw_storage, raw_storage_getitem,\
+     raw_storage_setitem, RAW_STORAGE
+from pypy.module.micronumpy.arrayimpl.sort import argsort_array
 from rpython.rlib.debug import make_sure_not_resized
 
-class ConcreteArrayIterator(base.BaseArrayIterator):
-    _immutable_fields_ = ['dtype', 'skip', 'size']
-    def __init__(self, array):
-        self.array = array
-        self.offset = 0
-        self.dtype = array.dtype
-        self.skip = self.dtype.itemtype.get_element_size()
-        self.size = array.size
-
-    def setitem(self, elem):
-        self.dtype.setitem(self.array, self.offset, elem)
-
-    def getitem(self):
-        return self.dtype.getitem(self.array, self.offset)
-
-    def getitem_bool(self):
-        return self.dtype.getitem_bool(self.array, self.offset)
-
-    def next(self):
-        self.offset += self.skip
-
-    def next_skip_x(self, x):
-        self.offset += self.skip * x
-
-    def done(self):
-        return self.offset >= self.size
-
-    def reset(self):
-        self.offset %= self.size
-
-class OneDimViewIterator(ConcreteArrayIterator):
-    ''' The view iterator dtype can be different from the
-    array.dtype, this is what makes it a View
-    '''
-    def __init__(self, array, dtype, start, strides, shape):
-        self.array = array
-        self.dtype = dtype
-        self.offset = start
-        self.skip = strides[0]
-        self.index = 0
-        self.size = shape[0]
-
-    def next(self):
-        self.offset += self.skip
-        self.index += 1
-
-    def next_skip_x(self, x):
-        self.offset += self.skip * x
-        self.index += x
-
-    def done(self):
-        return self.index >= self.size
-
-    def reset(self):
-        self.offset %= self.size
-
-class MultiDimViewIterator(ConcreteArrayIterator):
-    ''' The view iterator dtype can be different from the
-    array.dtype, this is what makes it a View
-    '''
-    def __init__(self, array, dtype, start, strides, backstrides, shape):
-        self.indexes = [0] * len(shape)
-        self.array = array
-        self.dtype = dtype
-        self.shape = shape
-        self.offset = start
-        self.shapelen = len(shape)
-        self._done = False
-        self.strides = strides
-        self.backstrides = backstrides
-        self.size = array.size
-
-    @jit.unroll_safe
-    def next(self):
-        offset = self.offset
-        for i in range(self.shapelen - 1, -1, -1):
-            if self.indexes[i] < self.shape[i] - 1:
-                self.indexes[i] += 1
-                offset += self.strides[i]
-                break
-            else:
-                self.indexes[i] = 0
-                offset -= self.backstrides[i]
-        else:
-            self._done = True
-        self.offset = offset
-
-    @jit.unroll_safe
-    def next_skip_x(self, step):
-        for i in range(len(self.shape) - 1, -1, -1):
-            if self.indexes[i] < self.shape[i] - step:
-                self.indexes[i] += step
-                self.offset += self.strides[i] * step
-                break
-            else:
-                remaining_step = (self.indexes[i] + step) // self.shape[i]
-                this_i_step = step - remaining_step * self.shape[i]
-                self.offset += self.strides[i] * this_i_step
-                self.indexes[i] = self.indexes[i] +  this_i_step
-                step = remaining_step
-        else:
-            self._done = True
-
-    def done(self):
-        return self._done
-
-    def reset(self):
-        self.offset %= self.size
-
-class AxisIterator(base.BaseArrayIterator):
-    def __init__(self, array, shape, dim):
-        self.shape = shape
-        strides = array.get_strides()
-        backstrides = array.get_backstrides()
-        if len(shape) == len(strides):
-            # keepdims = True
-            self.strides = strides[:dim] + [0] + strides[dim + 1:]
-            self.backstrides = backstrides[:dim] + [0] + backstrides[dim + 1:]
-        else:
-            self.strides = strides[:dim] + [0] + strides[dim:]
-            self.backstrides = backstrides[:dim] + [0] + backstrides[dim:]
-        self.first_line = True
-        self.indices = [0] * len(shape)
-        self._done = False
-        self.offset = array.start
-        self.dim = dim
-        self.array = array
-        self.dtype = array.dtype
-        
-    def setitem(self, elem):
-        self.dtype.setitem(self.array, self.offset, elem)
-
-    def getitem(self):
-        return self.dtype.getitem(self.array, self.offset)
-
-    @jit.unroll_safe
-    def next(self):
-        for i in range(len(self.shape) - 1, -1, -1):
-            if self.indices[i] < self.shape[i] - 1:
-                if i == self.dim:
-                    self.first_line = False
-                self.indices[i] += 1
-                self.offset += self.strides[i]
-                break
-            else:
-                if i == self.dim:
-                    self.first_line = True
-                self.indices[i] = 0
-                self.offset -= self.backstrides[i]
-        else:
-            self._done = True
-
-    def done(self):
-        return self._done
-
-def int_w(space, w_obj):
-    try:
-        return space.int_w(space.index(w_obj))
-    except OperationError:
-        return space.int_w(space.int(w_obj))
-
 class BaseConcreteArray(base.BaseArrayImplementation):
     start = 0
     parent = None
     def get_size(self):
         return self.size // self.dtype.itemtype.get_element_size()
 
-    def reshape(self, space, new_shape):
+    def reshape(self, space, orig_array, new_shape):
         # Since we got to here, prod(new_shape) == self.size
         new_strides = None
         if self.size > 0:
             for nd in range(ndims):
                 new_backstrides[nd] = (new_shape[nd] - 1) * new_strides[nd]
             return SliceArray(self.start, new_strides, new_backstrides,
-                              new_shape, self)
+                              new_shape, self, orig_array)
         else:
             return None
     
-    def get_real(self):
+    def get_real(self, orig_array):
         strides = self.get_strides()
         backstrides = self.get_backstrides()
         if self.dtype.is_complex_type():
             dtype =  self.dtype.float_type
             return SliceArray(self.start, strides, backstrides,
-                          self.get_shape(), self, dtype=dtype)
+                          self.get_shape(), self, orig_array, dtype=dtype)
         return SliceArray(self.start, strides, backstrides, 
-                          self.get_shape(), self)
+                          self.get_shape(), self, orig_array)
 
-    def get_imag(self):
+    def get_imag(self, orig_array):
         strides = self.get_strides()
         backstrides = self.get_backstrides()
         if self.dtype.is_complex_type():
             dtype =  self.dtype.float_type
             return SliceArray(self.start + dtype.get_size(), strides, 
-                    backstrides, self.get_shape(), self, dtype=dtype)
+                    backstrides, self.get_shape(), self, orig_array, dtype=dtype)
         if self.dtype.is_flexible_type():
             # numpy returns self for self.imag
             return SliceArray(self.start, strides, backstrides,
-                    self.get_shape(), self)
+                    self.get_shape(), self, orig_array)
         impl = NonWritableArray(self.get_shape(), self.dtype, self.order, strides,
                              backstrides)
         impl.fill(self.dtype.box(0))
         for i, w_index in enumerate(view_w):
             if space.isinstance_w(w_index, space.w_slice):
                 raise IndexError
-            idx = int_w(space, w_index)
+            idx = support.int_w(space, w_index)
             if idx < 0:
                 idx = self.get_shape()[i] + idx
             if idx < 0 or idx >= self.get_shape()[i]:
             return self._lookup_by_index(space, view_w)
         if shape_len > 1:
             raise IndexError
-        idx = int_w(space, w_idx)
+        idx = support.int_w(space, w_idx)
         return self._lookup_by_index(space, [space.wrap(idx)])
 
     @jit.unroll_safe
                 i += 1
         return Chunks(result)
 
-    def descr_getitem(self, space, w_index):
+    def descr_getitem(self, space, orig_arr, w_index):
         try:
             item = self._single_item_index(space, w_index)
             return self.getitem(item)
         except IndexError:
             # not a single result
             chunks = self._prepare_slice_args(space, w_index)
-            return chunks.apply(self)
+            return chunks.apply(orig_arr)
 
-    def descr_setitem(self, space, w_index, w_value):
+    def descr_setitem(self, space, orig_arr, w_index, w_value):
         try:
             item = self._single_item_index(space, w_index)
             self.setitem(item, self.dtype.coerce(space, w_value))
         except IndexError:
             w_value = convert_to_array(space, w_value)
             chunks = self._prepare_slice_args(space, w_index)
-            view = chunks.apply(self)
+            view = chunks.apply(orig_arr)
             view.implementation.setslice(space, w_value)
 
-    def transpose(self):
+    def transpose(self, orig_array):
         if len(self.get_shape()) < 2:
             return self
         strides = []
             backstrides.append(self.get_backstrides()[i])
             shape.append(self.get_shape()[i])
         return SliceArray(self.start, strides,
-                          backstrides, shape, self)
+                          backstrides, shape, self, orig_array)
 
     def copy(self):
         strides, backstrides = support.calc_strides(self.get_shape(), self.dtype,
                              backstrides)
         return loop.setslice(self.get_shape(), impl, self)
 
-    def create_axis_iter(self, shape, dim):
-        return AxisIterator(self, shape, dim)
+    def create_axis_iter(self, shape, dim, cum):
+        return iter.AxisIterator(self, shape, dim, cum)
 
     def create_dot_iter(self, shape, skip):
         r = calculate_dot_strides(self.get_strides(), self.get_backstrides(),
                                   shape, skip)
-        return MultiDimViewIterator(self, self.dtype, self.start, r[0], r[1], shape)
+        return iter.MultiDimViewIterator(self, self.dtype, self.start, r[0], r[1], shape)
 
-    def swapaxes(self, axis1, axis2):
+    def swapaxes(self, orig_arr, axis1, axis2):
         shape = self.get_shape()[:]
         strides = self.get_strides()[:]
         backstrides = self.get_backstrides()[:]
         strides[axis1], strides[axis2] = strides[axis2], strides[axis1]
         backstrides[axis1], backstrides[axis2] = backstrides[axis2], backstrides[axis1] 
         return W_NDimArray.new_slice(self.start, strides, 
-                                     backstrides, shape, self)
+                                     backstrides, shape, self, orig_arr)
 
     def get_storage_as_int(self, space):
         return rffi.cast(lltype.Signed, self.storage)
 
+    def get_storage(self):
+        return self.storage
+
+    def get_buffer(self, space):
+        return ArrayBuffer(self)
+
+    def astype(self, space, dtype):
+        new_arr = W_NDimArray.from_shape(self.get_shape(), dtype)
+        loop.copy_from_to(self, new_arr.implementation, dtype)
+        return new_arr
+
 class ConcreteArrayNotOwning(BaseConcreteArray):
     def __init__(self, shape, dtype, order, strides, backstrides, storage):
+
         make_sure_not_resized(shape)
         make_sure_not_resized(strides)
         make_sure_not_resized(backstrides)
 
     def create_iter(self, shape=None):
         if shape is None or shape == self.get_shape():
-            return ConcreteArrayIterator(self)
+            return iter.ConcreteArrayIterator(self)
         r = calculate_broadcast_strides(self.get_strides(),
                                         self.get_backstrides(),
                                         self.get_shape(), shape)
-        return MultiDimViewIterator(self, self.dtype, 0, r[0], r[1], shape)
+        return iter.MultiDimViewIterator(self, self.dtype, 0, r[0], r[1], shape)
 
     def fill(self, box):
         self.dtype.fill(self.storage, box, 0, self.size)
 
-    def set_shape(self, space, new_shape):
+    def set_shape(self, space, orig_array, new_shape):
         strides, backstrides = support.calc_strides(new_shape, self.dtype,
                                                     self.order)
-        return SliceArray(0, strides, backstrides, new_shape, self)
+        return SliceArray(0, strides, backstrides, new_shape, self,
+                          orig_array)
+
+    def argsort(self, space, w_axis):
+        return argsort_array(self, space, w_axis)
+
+    def base(self):
+        return None
 
 class ConcreteArray(ConcreteArrayNotOwning):
     def __init__(self, shape, dtype, order, strides, backstrides):
         free_raw_storage(self.storage, track_allocation=False)
 
 
+        
+
 class NonWritableArray(ConcreteArray):
-    def descr_setitem(self, space, w_index, w_value):
+    def descr_setitem(self, space, orig_array, w_index, w_value):
         raise OperationError(space.w_RuntimeError, space.wrap(
             "array is not writable"))
         
 
 class SliceArray(BaseConcreteArray):
-    def __init__(self, start, strides, backstrides, shape, parent, dtype=None):
+    def __init__(self, start, strides, backstrides, shape, parent, orig_arr,
+                 dtype=None):
         self.strides = strides
         self.backstrides = backstrides
         self.shape = shape
         self.dtype = dtype
         self.size = support.product(shape) * self.dtype.itemtype.get_element_size()
         self.start = start
+        self.orig_arr = orig_arr
+
+    def base(self):
+        return self.orig_arr
 
     def fill(self, box):
         loop.fill(self, box.convert_to(self.dtype))
             r = calculate_broadcast_strides(self.get_strides(),
                                             self.get_backstrides(),
                                             self.get_shape(), shape)
-            return MultiDimViewIterator(self.parent, self.dtype,
-                                        self.start, r[0], r[1], shape)
+            return iter.MultiDimViewIterator(self.parent, self.dtype,
+                                             self.start, r[0], r[1], shape)
         if len(self.get_shape()) == 1:
-            return OneDimViewIterator(self.parent, self.dtype, self.start, 
+            return iter.OneDimViewIterator(self.parent, self.dtype, self.start, 
                     self.get_strides(), self.get_shape())
-        return MultiDimViewIterator(self.parent, self.dtype, self.start,
+        return iter.MultiDimViewIterator(self.parent, self.dtype, self.start,
                                     self.get_strides(),
                                     self.get_backstrides(), self.get_shape())
 
-    def set_shape(self, space, new_shape):
+    def set_shape(self, space, orig_array, new_shape):
         if len(self.get_shape()) < 2 or self.size == 0:
             # TODO: this code could be refactored into calc_strides
             # but then calc_strides would have to accept a stepping factor
                 backstrides.reverse()
                 new_shape.reverse()
             return SliceArray(self.start, strides, backstrides, new_shape,
-                              self)
+                              self, orig_array)
         new_strides = calc_new_strides(new_shape, self.get_shape(),
                                        self.get_strides(),
                                        self.order)
         for nd in range(len(new_shape)):
             new_backstrides[nd] = (new_shape[nd] - 1) * new_strides[nd]
         return SliceArray(self.start, new_strides, new_backstrides, new_shape,
-                          self)
+                          self, orig_array)
+
+class ArrayBuffer(RWBuffer):
+    def __init__(self, impl):
+        self.impl = impl
+
+    def getitem(self, item):
+        return raw_storage_getitem(lltype.Char, self.impl.storage, item)
+
+    def setitem(self, item, v):
+        return raw_storage_setitem(self.impl.storage, item,
+                                   rffi.cast(lltype.Char, v))
+
+    def getlength(self):
+        return self.impl.size

pypy/module/micronumpy/arrayimpl/scalar.py

 class ScalarIterator(base.BaseArrayIterator):
     def __init__(self, v):
         self.v = v
+        self.called_once = False
 
     def next(self):
-        pass
+        self.called_once = True
 
     def getitem(self):
-        return self.v
+        return self.v.get_scalar_value()
 
     def setitem(self, v):
-        raise Exception("Don't call setitem on scalar iterators")
+        self.v.set_scalar_value(v)
 
     def done(self):
-        raise Exception("should not call done on scalar")
+        return self.called_once
 
     def reset(self):
         pass
         return []
 
     def create_iter(self, shape=None):
-        return ScalarIterator(self.value)
+        return ScalarIterator(self)
 
     def get_scalar_value(self):
         return self.value
     def get_size(self):
         return 1
 
-    def transpose(self):
+    def transpose(self, _):
         return self
 
-    def descr_getitem(self, space, w_idx):
+    def descr_getitem(self, space, _, w_idx):
         raise OperationError(space.w_IndexError,
                              space.wrap("scalars cannot be indexed"))