Commits

Alex Gaynor  committed 3ef1815 Merge

Merged default.

  • Participants
  • Parent commits 0634b04, 76b86c6
  • Branches mixed-submodules

Comments (0)

Files changed (8)

     DEALINGS IN THE SOFTWARE.
 
 
-PyPy Copyright holders 2003-2010
+PyPy Copyright holders 2003-2011
 ----------------------------------- 
 
 Except when otherwise stated (look for LICENSE files or information at

File pypy/interpreter/mixedmodule.py

         space.builtin_modules"""
         Module.install(self)
         if hasattr(self, "submodules"):
-            name = self.space.unwrap(self.w_name)
+            space = self.space
+            name = space.unwrap(self.w_name)
             for sub_name, module_cls in self.submodules.iteritems():
-                module_name = self.space.wrap("%s.%s" % (name, sub_name))
-                m = module_cls(self.space, module_name)
+                module_name = space.wrap("%s.%s" % (name, sub_name))
+                m = module_cls(space, module_name)
                 m.install()
+                space.setitem(self.w_dict, space.wrap(sub_name), space.wrap(m))
 
     def init(self, space):
         """This is called each time the module is imported or reloaded

File pypy/interpreter/test/test_mixedmodule.py

 
         assert self.space.builtin_modules["test_module"] is m
         assert isinstance(self.space.builtin_modules["test_module.sub"], SubModule)
+
+class AppTestMixedModule(object):
+    def setup_class(cls):
+        space = cls.space
+
+        class SubModule(MixedModule):
+            interpleveldefs = {}
+            appleveldefs = {}
+
+        class Module(MixedModule):
+            interpleveldefs = {}
+            appleveldefs = {}
+            submodules = {
+                "sub": SubModule
+            }
+
+        m = Module(space, space.wrap("test_module"))
+        m.install()
+
+    def test_attribute(self):
+        import test_module
+
+        assert hasattr(test_module, "sub")
+
+    def test_submodule_import(self):
+        from test_module import sub

File pypy/jit/metainterp/greenfield.py

                 seen.add(objname)
         assert len(seen) == 1, (
             "Current limitation: you can only give one instance with green "
-            "fields.  Found %r" % seen.keys())
+            "fields.  Found %r" % list(seen))
         self.red_index = jd.jitdriver.reds.index(objname)
         #
         # a list of (GTYPE, fieldname)

File pypy/module/_weakref/interp__weakref.py

 reprdescr = interp2app(descr__repr__, unwrap_spec=[ObjSpace, W_WeakrefBase])
 
 W_Weakref.typedef = TypeDef("weakref",
-    __doc__ = """A weak reference to an object 'obj'.  A 'callback' can given,
-which is called with the weak reference as an argument when 'obj'
-is about to be finalized.""",
+    __doc__ = """A weak reference to an object 'obj'.  A 'callback' can be given,
+which is called with the weak reference as an argument when 'obj' is
+about to be finalized.""",
     __new__ = interp2app(descr__new__weakref,
                          unwrap_spec=[ObjSpace, W_Root, W_Root, W_Root,
                                       Arguments]),

File pypy/module/operator/interp_operator.py

 
 def concat(space, w_obj1, w_obj2):
     'concat(a, b) -- Same as a + b, for a and b sequences.'
-    if space.findattr(w_obj1, space.wrap('__getitem__')) is None or \
-            space.findattr(w_obj2, space.wrap('__getitem__')) is None:
+    if (space.lookup(w_obj1, '__getitem__') is None or
+        space.lookup(w_obj2, '__getitem__') is None):
         raise OperationError(space.w_TypeError, space.w_None)
 
     return space.add(w_obj1, w_obj2)
 
 def iconcat(space, w_obj1, w_obj2):
     'iconcat(a, b) -- Same as a += b, for a and b sequences.'
-    if space.findattr(w_obj1, space.wrap('__getitem__')) is None or \
-            space.findattr(w_obj2, space.wrap('__getitem__')) is None:
+    if (space.lookup(w_obj1, '__getitem__') is None or
+        space.lookup(w_obj2, '__getitem__') is None):
         raise OperationError(space.w_TypeError, space.w_None)
 
     return space.inplace_add(w_obj1, w_obj2)
 
 def irepeat(space, w_obj1, w_obj2):
     'irepeat(a, b) -- Same as a *= b, for a and b sequences.'
-    if space.findattr(w_obj1, space.wrap('__getitem__')) is None:
+    if space.lookup(w_obj1, '__getitem__') is None:
         # first arg has to be a sequence
         raise OperationError(space.w_TypeError,
                            space.wrap("non-sequence object can't be repeated"))

File pypy/rpython/memory/gc/minimark.py

         "card_page_indices": 128,
 
         # Objects whose total size is at least 'large_object' bytes are
-        # allocated out of the nursery immediately.  If the object
-        # has GC pointers in its varsized part, we use instead the
-        # higher limit 'large_object_gcptrs'.  The idea is that
-        # separately allocated objects are allocated immediately "old"
-        # and it's not good to have too many pointers from old to young
-        # objects.
-        "large_object": 1600*WORD,
-        "large_object_gcptrs": 8250*WORD,
+        # allocated out of the nursery immediately, as old objects.  The
+        # minimal allocated size of the nursery is 1.9x the following
+        # number (by default, at least 500KB on 32-bit and 1000KB on 64-bit).
+        "large_object": 65792*WORD,
         }
 
     def __init__(self, config,
                  growth_rate_max=2.5,   # for tests
                  card_page_indices=0,
                  large_object=8*WORD,
-                 large_object_gcptrs=10*WORD,
                  ArenaCollectionClass=None,
                  **kwds):
         MovingGCBase.__init__(self, config, **kwds)
             while (1 << self.card_page_shift) < self.card_page_indices:
                 self.card_page_shift += 1
         #
-        # 'large_object' and 'large_object_gcptrs' limit how big objects
-        # can be in the nursery, so they give a lower bound on the allowed
-        # size of the nursery.
+        # 'large_object' limits how big objects can be in the nursery, so
+        # it gives a lower bound on the allowed size of the nursery.
         self.nonlarge_max = large_object - 1
-        self.nonlarge_gcptrs_max = large_object_gcptrs - 1
-        assert self.nonlarge_max <= self.nonlarge_gcptrs_max
         #
         self.nursery      = NULL
         self.nursery_free = NULL
         else:
             #
             defaultsize = self.nursery_size
-            minsize = 2 * (self.nonlarge_gcptrs_max + 1)
+            minsize = int(1.9 * self.nonlarge_max)
+            if we_are_translated():
+                minsize = (minsize + 4095) & ~4095
             self.nursery_size = minsize
             self.allocate_nursery()
             #
             # forces a minor collect for every malloc.  Useful to debug
             # external factors, like trackgcroot or the handling of the write
             # barrier.  Implemented by still using 'minsize' for the nursery
-            # size (needed to handle e.g. mallocs of 8249 words) but hacking
-            # at the current nursery position in collect_and_reserve().
+            # size (needed to handle mallocs just below 'large_objects') but
+            # hacking at the current nursery position in collect_and_reserve().
             if newsize <= 0:
                 newsize = env.estimate_best_nursery_size()
                 if newsize <= 0:
 
 
     def _nursery_memory_size(self):
-        extra = self.nonlarge_gcptrs_max + 1
+        extra = self.nonlarge_max + 1
         return self.nursery_size + extra
 
     def _alloc_nursery(self):
         # below 'nonlarge_max'.  All the following logic is usually
         # constant-folded because self.nonlarge_max, size and itemsize
         # are all constants (the arguments are constant due to
-        # inlining) and self.has_gcptr_in_varsize() is constant-folded.
-        if self.has_gcptr_in_varsize(typeid):
-            nonlarge_max = self.nonlarge_gcptrs_max
+        # inlining).
+        if not raw_malloc_usage(itemsize):
+            too_many_items = raw_malloc_usage(nonvarsize) > self.nonlarge_max
         else:
-            nonlarge_max = self.nonlarge_max
-
-        if not raw_malloc_usage(itemsize):
-            too_many_items = raw_malloc_usage(nonvarsize) > nonlarge_max
-        else:
-            maxlength = nonlarge_max - raw_malloc_usage(nonvarsize)
+            maxlength = self.nonlarge_max - raw_malloc_usage(nonvarsize)
             maxlength = maxlength // raw_malloc_usage(itemsize)
             too_many_items = length > maxlength
 
             # Check if we need to introduce the card marker bits area.
             if (self.card_page_indices <= 0  # <- this check is constant-folded
                 or not self.has_gcptr_in_varsize(typeid) or
-                raw_malloc_usage(totalsize) <= self.nonlarge_gcptrs_max):
+                raw_malloc_usage(totalsize) <= self.nonlarge_max):
                 #
                 # In these cases, we don't want a card marker bits area.
                 # This case also includes all fixed-size objects.

File pypy/rpython/memory/test/test_transformed_gc.py

                          'arena_size': 64*WORD,
                          'small_request_threshold': 5*WORD,
                          'large_object': 8*WORD,
-                         'large_object_gcptrs': 10*WORD,
                          'card_page_indices': 4,
                          'translated_to_c': False,
                          }