/* "Bag-of-pages" garbage collector for the GNU compiler.
- Copyright (C) 1999-2015 Free Software Foundation, Inc.
+ Copyright (C) 1999-2018 Free Software Foundation, Inc.
This file is part of GCC.
#include "alias.h"
#include "tree.h"
#include "rtl.h"
+#include "memmodel.h"
#include "tm_p.h"
#include "diagnostic-core.h"
#include "flags.h"
3: Object allocations as well.
4: Object marks as well. */
#define GGC_DEBUG_LEVEL (0)
-\f
-#ifndef HOST_BITS_PER_PTR
-#define HOST_BITS_PER_PTR HOST_BITS_PER_LONG
-#endif
-
\f
/* A two-level tree is used to look up the page-entry for a given
pointer. Two chunks of the pointer's bits are extracted to index
#define ROUND_UP_VALUE(x, f) ((f) - 1 - ((f) - 1 + (x)) % (f))
-/* Compute the smallest multiple of F that is >= X. */
-
-#define ROUND_UP(x, f) (CEIL (x, f) * (f))
-
/* Round X to next multiple of the page size */
-#define PAGE_ALIGN(x) (((x) + G.pagesize - 1) & ~(G.pagesize - 1))
+#define PAGE_ALIGN(x) ROUND_UP ((x), G.pagesize)
/* The Ith entry is the number of objects on a page or order I. */
/* A page_entry records the status of an allocation page. This
structure is dynamically sized to fit the bitmap in_use_p. */
-typedef struct page_entry
+struct page_entry
{
/* The next page-entry with objects of the same size, or NULL if
this is the last page-entry. */
Nth bit is one if the Nth object on this page is allocated. This
array is dynamically sized. */
unsigned long in_use_p[1];
-} page_entry;
+};
#ifdef USING_MALLOC_PAGE_GROUPS
/* A page_group describes a large allocation from malloc, from which
we parcel out aligned pages. */
-typedef struct page_group
+struct page_group
{
/* A linked list of all extant page groups. */
struct page_group *next;
/* A bitmask of pages in use. */
unsigned int in_use;
-} page_group;
+};
#endif
#if HOST_BITS_PER_PTR <= 32
void (*m_function)(void *);
size_t m_object_size;
size_t m_n_objects;
- };
+};
#ifdef ENABLE_GC_ALWAYS_COLLECT
/* List of free objects to be verified as actually free on the
better runtime data access pattern. */
unsigned long **save_in_use;
- /* Finalizers for single objects. */
- vec<finalizer> finalizers;
+ /* Finalizers for single objects.  The first index is the context depth
+ (G.context_depth) at which the finalizer was registered. */
+ vec<vec<finalizer> > finalizers;
/* Finalizers for vectors of objects. */
- vec<vec_finalizer> vec_finalizers;
+ vec<vec<vec_finalizer> > vec_finalizers;
#ifdef ENABLE_GC_ALWAYS_COLLECT
/* List of free objects to be verified as actually free on the
/* Initial guess as to how many page table entries we might need. */
#define INITIAL_PTE_COUNT 128
\f
-static int ggc_allocated_p (const void *);
static page_entry *lookup_page_table_entry (const void *);
static void set_page_table_entry (void *, page_entry *);
#ifdef USING_MMAP
#define save_in_use_p(__p) \
(save_in_use_p_i (__p->index_by_depth))
-/* Returns nonzero if P was allocated in GC'able memory. */
+/* Traverse the page table and find the entry for a page.
+ If the object was not allocated in GC-controlled memory, return NULL. */
-static inline int
-ggc_allocated_p (const void *p)
+static inline page_entry *
+safe_lookup_page_table_entry (const void *p)
{
page_entry ***base;
size_t L1, L2;
while (1)
{
if (table == NULL)
- return 0;
+ return NULL;
if (table->high_bits == high_bits)
break;
table = table->next;
/* Extract the level 1 and 2 indices. */
L1 = LOOKUP_L1 (p);
L2 = LOOKUP_L2 (p);
+ if (! base[L1])
+ return NULL;
- return base[L1] && base[L1][L2];
+ return base[L1][L2];
}
/* Traverse the page table and find the entry for a page.
return size;
}
+/* Register a finalizer for the object RESULT.  F is the finalizing
+   function, S is the size of one object and N the number of objects
+   (N > 1 means RESULT is a vector of N objects of size S).  A null F
+   registers nothing.  The record is pushed onto the vec for the
+   current context depth (G.context_depth). */
+
+static void
+add_finalizer (void *result, void (*f)(void *), size_t s, size_t n)
+{
+  if (f == NULL)
+    /* No finalizer. */;
+  else if (n == 1)
+    {
+      /* Single object: record only the address and the function. */
+      finalizer fin (result, f);
+      G.finalizers[G.context_depth].safe_push (fin);
+    }
+  else
+    {
+      /* Vector of objects: additionally record the element size and
+	 count. */
+      vec_finalizer fin (reinterpret_cast<uintptr_t> (result), f, s, n);
+      G.vec_finalizers[G.context_depth].safe_push (fin);
+    }
+}
+
/* Allocate a chunk of memory of SIZE bytes. Its contents are undefined. */
void *
/* For timevar statistics. */
timevar_ggc_mem_total += object_size;
- if (f && n == 1)
- G.finalizers.safe_push (finalizer (result, f));
- else if (f)
- G.vec_finalizers.safe_push
- (vec_finalizer (reinterpret_cast<uintptr_t> (result), f, s, n));
+ if (f)
+ add_finalizer (result, f, s, n);
if (GATHER_STATISTICS)
{
unsigned long mask;
unsigned long offset;
- if (!p || !ggc_allocated_p (p))
+ if (!p)
return;
- /* Look up the page on which the object is alloced. . */
- entry = lookup_page_table_entry (p);
- gcc_assert (entry);
+ /* Look up the page on which the object is alloced. If it was not
+ GC allocated, gracefully bail out. */
+ entry = safe_lookup_page_table_entry (p);
+ if (!entry)
+ return;
/* Calculate the index of the object on the page; this is its bit
position in the in_use_p bitmap. Note that because a char* might
G.by_depth_max = INITIAL_PTE_COUNT;
G.by_depth = XNEWVEC (page_entry *, G.by_depth_max);
G.save_in_use = XNEWVEC (unsigned long *, G.by_depth_max);
+
+ /* Allocate space for the depth 0 finalizers. */
+ G.finalizers.safe_push (vNULL);
+ G.vec_finalizers.safe_push (vNULL);
+ gcc_assert (G.finalizers.length() == 1);
}
/* Merge the SAVE_IN_USE_P and IN_USE_P arrays in P so that IN_USE_P
static void
ggc_handle_finalizers ()
{
+  /* Finalizers are stored per context depth.  Walk every depth at or
+     beyond the current one and, for each object that was not marked
+     during this collection, call its finalizer and drop the record.
+     unordered_remove swaps in the last element, so on removal I is
+     deliberately not advanced. */
- if (G.context_depth != 0)
- return;
-
- unsigned length = G.finalizers.length ();
- for (unsigned int i = 0; i < length;)
+ unsigned dlen = G.finalizers.length();
+ for (unsigned d = G.context_depth; d < dlen; ++d)
{
- finalizer &f = G.finalizers[i];
- if (!ggc_marked_p (f.addr ()))
+ vec<finalizer> &v = G.finalizers[d];
+ unsigned length = v.length ();
+ for (unsigned int i = 0; i < length;)
{
- f.call ();
- G.finalizers.unordered_remove (i);
- length--;
+ finalizer &f = v[i];
+ if (!ggc_marked_p (f.addr ()))
+ {
+ f.call ();
+ v.unordered_remove (i);
+ length--;
+ }
+ else
+ i++;
}
- else
- i++;
}
-
- length = G.vec_finalizers.length ();
- for (unsigned int i = 0; i < length;)
+  /* Likewise for vector finalizers.  The two vecs are grown in
+     lockstep (one element per context depth), which the assert
+     checks. */
+ gcc_assert (dlen == G.vec_finalizers.length());
+ for (unsigned d = G.context_depth; d < dlen; ++d)
{
- vec_finalizer &f = G.vec_finalizers[i];
- if (!ggc_marked_p (f.addr ()))
+ vec<vec_finalizer> &vv = G.vec_finalizers[d];
+ unsigned length = vv.length ();
+ for (unsigned int i = 0; i < length;)
{
- f.call ();
- G.vec_finalizers.unordered_remove (i);
- length--;
+ vec_finalizer &f = vv[i];
+ if (!ggc_marked_p (f.addr ()))
+ {
+ f.call ();
+ vv.unordered_remove (i);
+ length--;
+ }
+ else
+ i++;
}
- else
- i++;
}
}
void
ggc_grow (void)
{
+  /* With checking enabled, collect immediately so GC bugs surface
+     early; otherwise credit everything currently allocated as having
+     survived the last collection (raising allocated_last_gc).
+     NOTE(review): the compile-time #ifdef ENABLE_CHECKING is replaced
+     by a runtime flag_checking test, so both branches must now
+     compile unconditionally. */
-#ifndef ENABLE_CHECKING
- G.allocated_last_gc = MAX (G.allocated_last_gc,
- G.allocated);
-#else
- ggc_collect ();
-#endif
+ if (!flag_checking)
+ G.allocated_last_gc = MAX (G.allocated_last_gc,
+ G.allocated);
+ else
+ ggc_collect ();
if (!quiet_flag)
fprintf (stderr, " {GC start %luk} ", (unsigned long) G.allocated / 1024);
}
static void
move_ptes_to_front (int count_old_page_tables, int count_new_page_tables)
{
- unsigned i;
-
/* First, we swap the new entries to the front of the varrays. */
page_entry **new_by_depth;
unsigned long **new_save_in_use;
G.save_in_use = new_save_in_use;
/* Now update all the index_by_depth fields. */
- for (i = G.by_depth_in_use; i > 0; --i)
+ for (unsigned i = G.by_depth_in_use; i--;)
{
- page_entry *p = G.by_depth[i-1];
- p->index_by_depth = i-1;
+ page_entry *p = G.by_depth[i];
+ p->index_by_depth = i;
}
/* And last, we update the depth pointers in G.depth. The first
pages to be 1 too. PCH pages will have depth 0. */
gcc_assert (!G.context_depth);
G.context_depth = 1;
+ /* Allocate space for the depth 1 finalizers. */
+ G.finalizers.safe_push (vNULL);
+ G.vec_finalizers.safe_push (vNULL);
+ gcc_assert (G.finalizers.length() == 2);
for (i = 0; i < NUM_ORDERS; i++)
{
page_entry *p;