
Anonymous committed 13693bb

import stmgc

Files changed (5)

rpython/translator/stm/import_stmgc.py

     for p in sorted(plist):
         if not (p.basename.endswith('.c') or p.basename.endswith('.h')):
             continue
+        if p.basename.startswith('.'):
+            continue
         if p.basename.startswith('demo'):
             continue
         path = stmgc_dest.join(p.relto(stmgc_dir))

rpython/translator/stm/src_stm/revision

-5dbd50990e2c
+cfd37feb0f23+

rpython/translator/stm/src_stm/stm/gcpage.c

     /* thread-safe: use the lock of pages.c to prevent any remapping
        from occurring under our feet */
     mutex_pages_lock();
-    increment_total_allocated(size + LARGE_MALLOC_OVERHEAD);
 
     /* Allocate the object with largemalloc.c from the lower addresses. */
     char *addr = _stm_large_malloc(size);
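The increment removed here is not lost: as the largemalloc.c hunks below show, this commit moves the bookkeeping into _stm_large_malloc() and _stm_large_free() themselves, so callers no longer need to know about LARGE_MALLOC_OVERHEAD. A minimal sketch of the resulting caller-side pattern, assuming the matching mutex_pages_unlock() that closes this critical section:

    mutex_pages_lock();
    char *addr = _stm_large_malloc(size);   /* accounting happens inside */
    /* ... initialize the object ... */
    mutex_pages_unlock();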

rpython/translator/stm/src_stm/stm/largemalloc.c

 #define BOTH_CHUNKS_USED     0
 #define CHUNK_HEADER_SIZE    offsetof(struct malloc_chunk, d)
 #define END_MARKER           0xDEADBEEF
+#define MIN_ALLOC_SIZE       (sizeof(struct malloc_chunk) - CHUNK_HEADER_SIZE)
 
 #define chunk_at_offset(p, ofs)  ((mchunk_t *)(((char *)(p)) + (ofs)))
 #define data2chunk(p)            chunk_at_offset(p, -CHUNK_HEADER_SIZE)
    The additional chunks of a given size are linked "vertically" in
    the secondary 'u' doubly-linked list.
 
-   
+
                             +-----+
                             | 296 |
                             +-----+
 
     /* it can be very small, but we need to ensure a minimal size
        (currently 32 bytes) */
-    if (request_size < sizeof(struct malloc_chunk) - CHUNK_HEADER_SIZE)
-        request_size = sizeof(struct malloc_chunk) - CHUNK_HEADER_SIZE;
+    if (request_size < MIN_ALLOC_SIZE)
+        request_size = MIN_ALLOC_SIZE;
 
     size_t index = largebin_index(request_size);
     sort_bin(index);
     }
     mscan->size = request_size;
     mscan->prev_size = BOTH_CHUNKS_USED;
+    increment_total_allocated(request_size + LARGE_MALLOC_OVERHEAD);
 
     return (char *)&mscan->d;
 }
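The new MIN_ALLOC_SIZE macro gives a name to the rounding rule described by the comment above it. A self-contained sketch of how the macro works, using a simplified stand-in for struct malloc_chunk (the real layout in largemalloc.c carries more free-list pointers in 'd', which is what makes the true minimum 32 bytes):

    #include <stdio.h>
    #include <stddef.h>

    /* simplified stand-in: two header words, then the data area 'd' */
    struct malloc_chunk {
        size_t prev_size;
        size_t size;
        char d[16];                        /* the real 'd' is the dlist area */
    };

    #define CHUNK_HEADER_SIZE  offsetof(struct malloc_chunk, d)
    #define MIN_ALLOC_SIZE     (sizeof(struct malloc_chunk) - CHUNK_HEADER_SIZE)

    int main(void) {
        size_t request_size = 5;               /* a very small request */
        if (request_size < MIN_ALLOC_SIZE)
            request_size = MIN_ALLOC_SIZE;     /* rounded up, as in the diff */
        printf("MIN_ALLOC_SIZE = %zu, rounded request = %zu\n",
               (size_t)MIN_ALLOC_SIZE, request_size);
        return 0;
    }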
     assert((chunk->size & (sizeof(char *) - 1)) == 0);
     assert(chunk->prev_size != THIS_CHUNK_FREE);
 
+    /* 'size' is at least MIN_ALLOC_SIZE */
+    increment_total_allocated(-(chunk->size + LARGE_MALLOC_OVERHEAD));
+
 #ifndef NDEBUG
     assert(chunk->size >= sizeof(dlist_t));
     assert(chunk->size <= (((char *)last_chunk) - (char *)data));
         chunk = next_chunk(chunk);   /* go to the first non-free chunk */
 
     while (chunk != last_chunk) {
-
         /* here, the chunk we're pointing to is not free */
         assert(chunk->prev_size != THIS_CHUNK_FREE);
 
         /* use the callback to know if 'chunk' contains an object that
            survives or dies */
         if (!_largemalloc_sweep_keep(chunk)) {
-            size_t size = chunk->size;
-            increment_total_allocated(-(size + LARGE_MALLOC_OVERHEAD));
             _stm_large_free((char *)&chunk->d);     /* dies */
         }
         chunk = mnext;
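Taken together, the largemalloc.c hunks establish a single invariant: every successful allocation performs the increment_total_allocated() call and every free (including the one in the sweep loop above) performs the matching decrement, so callers such as gcpage.c above and nursery.c below never touch the counter directly. A self-contained model of that pairing; the model_* names and the LARGE_MALLOC_OVERHEAD value are stand-ins for illustration, not the real allocator:

    #include <stdio.h>
    #include <stdlib.h>

    #define LARGE_MALLOC_OVERHEAD  16          /* assumed value */

    static long total_allocated = 0;           /* models the counter in pages.c */

    static void increment_total_allocated(long add) { total_allocated += add; }

    /* post-commit behaviour: accounting lives inside the allocator;
       a plain size_t prefix stands in for the real chunk header */
    static char *model_large_malloc(size_t size) {
        size_t *p = malloc(sizeof(size_t) + size);
        if (p == NULL)
            return NULL;
        *p = size;
        increment_total_allocated(size + LARGE_MALLOC_OVERHEAD);
        return (char *)(p + 1);
    }

    static void model_large_free(char *data) {
        size_t *p = (size_t *)data - 1;
        increment_total_allocated(-(long)(*p + LARGE_MALLOC_OVERHEAD));
        free(p);
    }

    int main(void) {
        char *a = model_large_malloc(1000);
        char *b = model_large_malloc(40);
        model_large_free(b);
        model_large_free(a);
        printf("total_allocated = %ld\n", total_allocated);   /* prints 0 */
        return 0;
    }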

rpython/translator/stm/src_stm/stm/nursery.c

             }
             char *realobj = REAL_ADDRESS(pseg->pub.segment_base, item->addr);
             ssize_t size = stmcb_size_rounded_up((struct object_s *)realobj);
-            increment_total_allocated(-(size + LARGE_MALLOC_OVERHEAD));
             _stm_large_free(stm_object_pages + item->addr);
         } TREE_LOOP_END;