This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.



ggc_free, take 2


It now passes, both with and without gcac, so committed.


r~


        * ggc.h (ggc_free): Declare.
        * ggc-common.c (ggc_realloc): Use it.
        * ggc-page.c: Remove lots of inline markers.
        (globals): Add free_object_list.
        (ggc_alloc): Tidy.
        (ggc_free, validate_free_objects): New.
        (poison_pages): Provide default.
        (ggc_collect): Call validate_free_objects; emit markers to
        the debug file.
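
In brief, the new ggc_free has two behaviors depending on how the
compiler was configured.  In a normal build it recycles the object
immediately: clear its in-use bit, fix up the page lists, reset the
allocation hint.  Under --enable-checking=gcac (which defines
ENABLE_GC_ALWAYS_COLLECT) it instead queues the pointer on
free_object_list so that the next collection can assert the object
really was unreachable.  A condensed sketch of that split, with the
bookkeeping elided:

  void
  ggc_free (void *p)
  {
    /* Poison P and tell Valgrind it is off limits.  */
  #ifdef ENABLE_GC_ALWAYS_COLLECT
    /* Defer: push P onto G.free_object_list; the next ggc_collect
       runs validate_free_objects and aborts if P is still marked.  */
  #else
    /* Recycle now: clear P's in-use bit and update the page lists.  */
  #endif
  }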

Index: ggc-common.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/ggc-common.c,v
retrieving revision 1.80
diff -c -p -d -r1.80 ggc-common.c
*** ggc-common.c	26 Jan 2004 22:59:40 -0000	1.80
--- ggc-common.c	28 Jan 2004 18:40:50 -0000
*************** ggc_realloc (void *x, size_t size)
*** 147,152 ****
--- 147,153 ----
      return ggc_alloc (size);
  
    old_size = ggc_get_size (x);
+ 
    if (size <= old_size)
      {
        /* Mark the unwanted memory as unaccessible.  We also need to make
*************** ggc_realloc (void *x, size_t size)
*** 176,182 ****
    memcpy (r, x, old_size);
  
    /* The old object is not supposed to be used anymore.  */
!   VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS (x, old_size));
  
    return r;
  }
--- 177,183 ----
    memcpy (r, x, old_size);
  
    /* The old object is not supposed to be used anymore.  */
!   ggc_free (x);
  
    return r;
  }
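
The net effect on ggc_realloc's grow path is that the old object is
now actually returned to the allocator instead of merely being hidden
from Valgrind.  Roughly, with the Valgrind annotations and checking
elided (a simplification, not the verbatim function):

  void *
  ggc_realloc (void *x, size_t size)
  {
    void *r;
    size_t old_size;

    if (x == NULL)
      return ggc_alloc (size);

    old_size = ggc_get_size (x);
    if (size <= old_size)
      return x;			/* shrink in place */

    r = ggc_alloc (size);
    memcpy (r, x, old_size);
    ggc_free (x);		/* the old block is dead storage now */
    return r;
  }
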
Index: ggc-page.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/ggc-page.c,v
retrieving revision 1.86
diff -c -p -d -r1.86 ggc-page.c
*** ggc-page.c	26 Jan 2004 22:59:40 -0000	1.86
--- ggc-page.c	28 Jan 2004 18:40:50 -0000
*************** static struct globals
*** 401,406 ****
--- 401,417 ----
       zero otherwise.  We allocate them all together, to enable a
       better runtime data access pattern.  */
    unsigned long **save_in_use;
+ 
+ #ifdef ENABLE_GC_ALWAYS_COLLECT
+   /* List of free objects to be verified as actually free on the
+      next collection.  */
+   struct free_object
+   {
+     void *object;
+     struct free_object *next;
+   } *free_object_list;
+ #endif
+ 
  #ifdef GATHER_STATISTICS
    struct
    {
*************** static void compute_inverse (unsigned);
*** 466,475 ****
  static inline void adjust_depth (void);
  static void move_ptes_to_front (int, int);
  
- #ifdef ENABLE_GC_CHECKING
- static void poison_pages (void);
- #endif
- 
  void debug_print_page_list (int);
  static void push_depth (unsigned int);
  static void push_by_depth (page_entry *, unsigned long *);
--- 477,482 ----
*************** adjust_depth (void)
*** 894,900 ****
  
  /* For a page that is no longer needed, put it on the free page list.  */
  
! static inline void
  free_page (page_entry *entry)
  {
    if (GGC_DEBUG_LEVEL >= 2)
--- 901,907 ----
  
  /* For a page that is no longer needed, put it on the free page list.  */
  
! static void
  free_page (page_entry *entry)
  {
    if (GGC_DEBUG_LEVEL >= 2)
*************** ggc_alloc_zone (size_t size, struct allo
*** 1049,1064 ****
  void *
  ggc_alloc (size_t size)
  {
!   unsigned order, word, bit, object_offset;
    struct page_entry *entry;
    void *result;
  
    if (size <= 256)
!     order = size_lookup[size];
    else
      {
        order = 9;
!       while (size > OBJECT_SIZE (order))
  	order++;
      }
  
--- 1056,1074 ----
  void *
  ggc_alloc (size_t size)
  {
!   size_t order, word, bit, object_offset, object_size;
    struct page_entry *entry;
    void *result;
  
    if (size <= 256)
!     {
!       order = size_lookup[size];
!       object_size = OBJECT_SIZE (order);
!     }
    else
      {
        order = 9;
!       while (size > (object_size = OBJECT_SIZE (order)))
  	order++;
      }
  
*************** ggc_alloc (size_t size)
*** 1121,1127 ****
        /* Next time, try the next bit.  */
        entry->next_bit_hint = hint + 1;
  
!       object_offset = hint * OBJECT_SIZE (order);
      }
  
    /* Set the in-use bit.  */
--- 1131,1137 ----
        /* Next time, try the next bit.  */
        entry->next_bit_hint = hint + 1;
  
!       object_offset = hint * object_size;
      }
  
    /* Set the in-use bit.  */
*************** ggc_alloc (size_t size)
*** 1149,1164 ****
       exact same semantics in presence of memory bugs, regardless of
       ENABLE_VALGRIND_CHECKING.  We override this request below.  Drop the
       handle to avoid handle leak.  */
!   VALGRIND_DISCARD (VALGRIND_MAKE_WRITABLE (result, OBJECT_SIZE (order)));
  
    /* `Poison' the entire allocated object, including any padding at
       the end.  */
!   memset (result, 0xaf, OBJECT_SIZE (order));
  
    /* Make the bytes after the end of the object unaccessible.  Discard the
       handle to avoid handle leak.  */
    VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS ((char *) result + size,
! 					    OBJECT_SIZE (order) - size));
  #endif
  
    /* Tell Valgrind that the memory is there, but its content isn't
--- 1159,1174 ----
       exact same semantics in presence of memory bugs, regardless of
       ENABLE_VALGRIND_CHECKING.  We override this request below.  Drop the
       handle to avoid handle leak.  */
!   VALGRIND_DISCARD (VALGRIND_MAKE_WRITABLE (result, object_size));
  
    /* `Poison' the entire allocated object, including any padding at
       the end.  */
!   memset (result, 0xaf, object_size);
  
    /* Make the bytes after the end of the object unaccessible.  Discard the
       handle to avoid handle leak.  */
    VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS ((char *) result + size,
! 					    object_size - size));
  #endif
  
    /* Tell Valgrind that the memory is there, but its content isn't
*************** ggc_alloc (size_t size)
*** 1168,1204 ****
  
    /* Keep track of how many bytes are being allocated.  This
       information is used in deciding when to collect.  */
!   G.allocated += OBJECT_SIZE (order);
  
  #ifdef GATHER_STATISTICS
    {
!     G.stats.total_overhead += OBJECT_SIZE (order) - size;
!     G.stats.total_allocated += OBJECT_SIZE(order);
!     G.stats.total_overhead_per_order[order] += OBJECT_SIZE (order) - size;
!     G.stats.total_allocated_per_order[order] += OBJECT_SIZE (order);
! 
!     if (size <= 32){
!       G.stats.total_overhead_under32 += OBJECT_SIZE (order) - size;
!       G.stats.total_allocated_under32 += OBJECT_SIZE(order);
!     }
  
!     if (size <= 64){
!       G.stats.total_overhead_under64 += OBJECT_SIZE (order) - size;
!       G.stats.total_allocated_under64 += OBJECT_SIZE(order);
!     }
!   
!     if (size <= 128){
!       G.stats.total_overhead_under128 += OBJECT_SIZE (order) - size;
!       G.stats.total_allocated_under128 += OBJECT_SIZE(order);
!     }
  
    }
  #endif
!   
    if (GGC_DEBUG_LEVEL >= 3)
      fprintf (G.debug_file,
  	     "Allocating object, requested size=%lu, actual=%lu at %p on %p\n",
! 	     (unsigned long) size, (unsigned long) OBJECT_SIZE (order), result,
  	     (void *) entry);
  
    return result;
--- 1178,1216 ----
  
    /* Keep track of how many bytes are being allocated.  This
       information is used in deciding when to collect.  */
!   G.allocated += object_size;
  
  #ifdef GATHER_STATISTICS
    {
!     size_t overhead = object_size - size;
  
!     G.stats.total_overhead += overhead;
!     G.stats.total_allocated += object_size;
!     G.stats.total_overhead_per_order[order] += overhead;
!     G.stats.total_allocated_per_order[order] += object_size;
  
+     if (size <= 32)
+       {
+ 	G.stats.total_overhead_under32 += overhead;
+ 	G.stats.total_allocated_under32 += object_size;
+       }
+     if (size <= 64)
+       {
+ 	G.stats.total_overhead_under64 += overhead;
+ 	G.stats.total_allocated_under64 += object_size;
+       }
+     if (size <= 128)
+       {
+ 	G.stats.total_overhead_under128 += overhead;
+ 	G.stats.total_allocated_under128 += object_size;
+       }
    }
  #endif
! 
    if (GGC_DEBUG_LEVEL >= 3)
      fprintf (G.debug_file,
  	     "Allocating object, requested size=%lu, actual=%lu at %p on %p\n",
! 	     (unsigned long) size, (unsigned long) object_size, result,
  	     (void *) entry);
  
    return result;
*************** ggc_get_size (const void *p)
*** 1279,1284 ****
--- 1291,1368 ----
    page_entry *pe = lookup_page_table_entry (p);
    return OBJECT_SIZE (pe->order);
  }
+ 
+ /* Release the memory for object P.  */
+ 
+ void
+ ggc_free (void *p)
+ {
+   page_entry *pe = lookup_page_table_entry (p);
+   size_t order = pe->order;
+   size_t size = OBJECT_SIZE (order);
+ 
+   if (GGC_DEBUG_LEVEL >= 3)
+     fprintf (G.debug_file,
+ 	     "Freeing object, actual size=%lu, at %p on %p\n",
+ 	     (unsigned long) size, p, (void *) pe);
+ 
+ #ifdef ENABLE_GC_CHECKING
+   /* Poison the data, to indicate the data is garbage.  */
+   VALGRIND_DISCARD (VALGRIND_MAKE_WRITABLE (p, size));
+   memset (p, 0xa5, size);
+ #endif
+   /* Let valgrind know the object is free.  */
+   VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS (p, size));
+ 
+ #ifdef ENABLE_GC_ALWAYS_COLLECT
+   /* In the completely-anal-checking mode, we do *not* immediately free
+      the data, but instead verify that the data is *actually* not 
+      reachable the next time we collect.  */
+   {
+     struct free_object *fo = xmalloc (sizeof (struct free_object));
+     fo->object = p;
+     fo->next = G.free_object_list;
+     G.free_object_list = fo;
+   }
+ #else
+   {
+     unsigned int bit_offset, word, bit;
+ 
+     G.allocated -= size;
+ 
+     /* Mark the object not-in-use.  */
+     bit_offset = OFFSET_TO_BIT (((const char *) p) - pe->page, order);
+     word = bit_offset / HOST_BITS_PER_LONG;
+     bit = bit_offset % HOST_BITS_PER_LONG;
+     pe->in_use_p[word] &= ~(1UL << bit);
+ 
+     if (pe->num_free_objects++ == 0)
+       {
+ 	/* If the page is completely full, then it's supposed to
+ 	   be after all pages that aren't.  Since we've freed one
+ 	   object from a page that was full, we need to move the
+ 	   page to the head of the list.  */
+ 
+ 	page_entry *p, *q;
+ 	for (q = NULL, p = G.pages[order]; ; q = p, p = p->next)
+ 	  if (p == pe)
+ 	    break;
+ 	if (q && q->num_free_objects == 0)
+ 	  {
+ 	    p = pe->next;
+ 	    q->next = p;
+ 	    if (!p)
+ 	      G.page_tails[order] = q;
+ 	    pe->next = G.pages[order];
+ 	    G.pages[order] = pe;
+ 	  }
+ 
+ 	/* Reset the hint bit to point to the only free object.  */
+ 	pe->next_bit_hint = bit_offset;
+       }
+   }
+ #endif
+ }
  
  /* Subroutine of init_ggc which computes the pair of numbers used to
     perform division by OBJECT_SIZE (order) and fills in inverse_table[].
*************** ggc_pop_context (void)
*** 1567,1573 ****
  
  /* Unmark all objects.  */
  
! static inline void
  clear_marks (void)
  {
    unsigned order;
--- 1651,1657 ----
  
  /* Unmark all objects.  */
  
! static void
  clear_marks (void)
  {
    unsigned order;
*************** clear_marks (void)
*** 1612,1618 ****
  /* Free all empty pages.  Partially empty pages need no attention
     because the `mark' bit doubles as an `unused' bit.  */
  
! static inline void
  sweep_pages (void)
  {
    unsigned order;
--- 1696,1702 ----
  /* Free all empty pages.  Partially empty pages need no attention
     because the `mark' bit doubles as an `unused' bit.  */
  
! static void
  sweep_pages (void)
  {
    unsigned order;
*************** sweep_pages (void)
*** 1721,1727 ****
  #ifdef ENABLE_GC_CHECKING
  /* Clobber all free objects.  */
  
! static inline void
  poison_pages (void)
  {
    unsigned order;
--- 1805,1811 ----
  #ifdef ENABLE_GC_CHECKING
  /* Clobber all free objects.  */
  
! static void
  poison_pages (void)
  {
    unsigned order;
*************** poison_pages (void)
*** 1767,1772 ****
--- 1851,1899 ----
  	}
      }
  }
+ #else
+ #define poison_pages()
+ #endif
+ 
+ #ifdef ENABLE_GC_ALWAYS_COLLECT
+ /* Validate that the reportedly free objects actually are.  */
+ 
+ static void
+ validate_free_objects (void)
+ {
+   struct free_object *f, *next, *still_free = NULL;
+ 
+   for (f = G.free_object_list; f ; f = next)
+     {
+       page_entry *pe = lookup_page_table_entry (f->object);
+       size_t bit, word;
+ 
+       bit = OFFSET_TO_BIT ((char *)f->object - pe->page, pe->order);
+       word = bit / HOST_BITS_PER_LONG;
+       bit = bit % HOST_BITS_PER_LONG;
+       next = f->next;
+ 
+       /* Make certain it isn't visible from any root.  Notice that we
+ 	 do this check before sweep_pages merges save_in_use_p.  */
+       if (pe->in_use_p[word] & (1UL << bit))
+ 	abort ();
+ 
+       /* If the object comes from an outer context, then retain the
+ 	 free_object entry, so that we can verify that the address
+ 	 isn't live on the stack in some outer context.  */
+       if (pe->context_depth != G.context_depth)
+ 	{
+ 	  f->next = still_free;
+ 	  still_free = f;
+ 	}
+       else
+ 	free (f);
+     }
+ 
+   G.free_object_list = still_free;
+ }
+ #else
+ #define validate_free_objects()
  #endif
  
  /* Top level mark-and-sweep routine.  */
*************** ggc_collect (void)
*** 1788,1793 ****
--- 1915,1922 ----
    timevar_push (TV_GC);
    if (!quiet_flag)
      fprintf (stderr, " {GC %luk -> ", (unsigned long) G.allocated / 1024);
+   if (GGC_DEBUG_LEVEL >= 2)
+     fprintf (G.debug_file, "BEGIN COLLECTING\n");
  
    /* Zero the total allocated bytes.  This will be recalculated in the
       sweep phase.  */
*************** ggc_collect (void)
*** 1802,1812 ****
  
    clear_marks ();
    ggc_mark_roots ();
- 
- #ifdef ENABLE_GC_CHECKING
    poison_pages ();
! #endif
! 
    sweep_pages ();
  
    G.allocated_last_gc = G.allocated;
--- 1931,1938 ----
  
    clear_marks ();
    ggc_mark_roots ();
    poison_pages ();
!   validate_free_objects ();
    sweep_pages ();
  
    G.allocated_last_gc = G.allocated;
*************** ggc_collect (void)
*** 1815,1820 ****
--- 1941,1948 ----
  
    if (!quiet_flag)
      fprintf (stderr, "%luk}", (unsigned long) G.allocated / 1024);
+   if (GGC_DEBUG_LEVEL >= 2)
+     fprintf (G.debug_file, "END COLLECTING\n");
  }
  
  /* Print allocation statistics.  */
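
The gcac-mode scheme (record each freed pointer, then assert at
collection time that the marker never reached it) can be modeled
outside the collector.  A minimal, self-contained sketch: the names
mirror the patch, but none of this is GCC code, and is_marked merely
stands in for consulting the in_use_p bitmap:

  #include <assert.h>
  #include <stdlib.h>

  struct free_object
  {
    void *object;
    struct free_object *next;
  };

  static struct free_object *free_object_list;

  /* Stand-in for the collector's mark bits; always "unmarked" here.  */
  static int
  is_marked (void *p)
  {
    (void) p;
    return 0;
  }

  /* "Free" an object by recording it for later validation.  */
  static void
  record_free (void *p)
  {
    struct free_object *f = malloc (sizeof *f);
    f->object = p;
    f->next = free_object_list;
    free_object_list = f;
  }

  /* At collection time, every recorded object must be unmarked.  */
  static void
  validate_free_objects (void)
  {
    struct free_object *f, *next;

    for (f = free_object_list; f; f = next)
      {
        next = f->next;
        assert (!is_marked (f->object));
        free (f);
      }
    free_object_list = NULL;
  }

  int
  main (void)
  {
    int x;
    record_free (&x);
    validate_free_objects ();
    return 0;
  }

The real validate_free_objects is a little cleverer: entries whose
page belongs to an outer context stay on the list, so the address is
re-checked after that context is popped.
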
Index: ggc.h
===================================================================
RCS file: /cvs/gcc/gcc/gcc/ggc.h,v
retrieving revision 1.63
diff -c -p -d -r1.63 ggc.h
*** ggc.h	26 Jan 2004 22:59:40 -0000	1.63
--- ggc.h	28 Jan 2004 18:40:50 -0000
*************** extern void *ggc_alloc_cleared_zone (siz
*** 223,228 ****
--- 223,230 ----
  extern void *ggc_realloc (void *, size_t);
  /* Like ggc_alloc_cleared, but performs a multiplication.  */
  extern void *ggc_calloc (size_t, size_t);
+ /* Free a block.  To be used when known for certain it's not reachable.  */
+ extern void ggc_free (void *);
  
  #define ggc_alloc_rtx(CODE)                    \
    ((rtx) ggc_alloc_typed (gt_ggc_e_7rtx_def, RTX_SIZE (CODE)))
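
A final note on the contract spelled out in the new ggc.h comment:
ggc_free is only legal when the caller can prove no live reference to
the object remains.  A hypothetical call site (n and the surrounding
code are made up):

  void *scratch = ggc_alloc (n);	/* n bytes of temporary storage */
  /* ... use scratch locally; never store the pointer anywhere the
     GC can see ... */
  ggc_free (scratch);			/* safe: provably unreachable */

Get this wrong and a normal build silently recycles live data, while
a gcac build aborts in validate_free_objects, which is exactly what
the checking mode is for.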

