GCC Middle and Back End API Reference
ggc-page.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "alias.h"
#include "tree.h"
#include "rtl.h"
#include "memmodel.h"
#include "tm_p.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "ggc-internal.h"
#include "timevar.h"
#include "cgraph.h"
#include "cfgloop.h"
#include "plugin.h"
Include dependency graph for ggc-page.cc:

Data Structures

struct  max_alignment
 
struct  page_entry
 
struct  page_group
 
class  finalizer
 
class  vec_finalizer
 
struct  ggc_globals
 
struct  ggc_pch_ondisk
 
struct  ggc_pch_data
 

Macros

#define USING_MALLOC_PAGE_GROUPS
 
#define GGC_DEBUG_LEVEL   (0)
 
#define PAGE_L1_BITS   (8)
 
#define PAGE_L2_BITS   (32 - PAGE_L1_BITS - G.lg_pagesize)
 
#define PAGE_L1_SIZE   ((uintptr_t) 1 << PAGE_L1_BITS)
 
#define PAGE_L2_SIZE   ((uintptr_t) 1 << PAGE_L2_BITS)
 
#define LOOKUP_L1(p)    (((uintptr_t) (p) >> (32 - PAGE_L1_BITS)) & ((1 << PAGE_L1_BITS) - 1))
 
#define LOOKUP_L2(p)    (((uintptr_t) (p) >> G.lg_pagesize) & ((1 << PAGE_L2_BITS) - 1))
 
#define OBJECTS_PER_PAGE(ORDER)   objects_per_page_table[ORDER]
 
#define OBJECTS_IN_PAGE(P)   ((P)->bytes / OBJECT_SIZE ((P)->order))
 
#define OBJECT_SIZE(ORDER)   object_size_table[ORDER]
 
#define DIV_MULT(ORDER)   inverse_table[ORDER].mult
 
#define DIV_SHIFT(ORDER)   inverse_table[ORDER].shift
 
#define OFFSET_TO_BIT(OFFSET, ORDER)    (((OFFSET) * DIV_MULT (ORDER)) >> DIV_SHIFT (ORDER))
 
#define MAX_ALIGNMENT   (offsetof (struct max_alignment, u))
 
#define NUM_EXTRA_ORDERS   ARRAY_SIZE (extra_order_size_table)
 
#define RTL_SIZE(NSLOTS)    (RTX_HDR_SIZE + (NSLOTS) * sizeof (rtunion))
 
#define TREE_EXP_SIZE(OPS)    (sizeof (struct tree_exp) + ((OPS) - 1) * sizeof (tree))
 
#define NUM_ORDERS   (HOST_BITS_PER_PTR + NUM_EXTRA_ORDERS)
 
#define ROUND_UP_VALUE(x, f)   ((f) - 1 - ((f) - 1 + (x)) % (f))
 
#define PAGE_ALIGN(x)   ROUND_UP ((x), G.pagesize)
 
#define BITMAP_SIZE(Num_objects)    (CEIL ((Num_objects), HOST_BITS_PER_LONG) * sizeof (long))
 
#define GGC_QUIRE_SIZE   16
 
#define INITIAL_PTE_COUNT   128
 
#define prefetch(X)   ((void) X)
 
#define save_in_use_p_i(__i)    (G.save_in_use[__i])
 
#define save_in_use_p(__p)    (save_in_use_p_i (__p->index_by_depth))
 
#define NUM_SIZE_LOOKUP   512
 
#define poison_pages()
 
#define validate_free_objects()
 

Typedefs

typedef page_entry ** page_table[PAGE_L1_SIZE]
 

Functions

static page_entry * lookup_page_table_entry (const void *)
 
static void set_page_table_entry (void *, page_entry *)
 
static size_t page_group_index (char *, char *)
 
static void set_page_group_in_use (page_group *, char *)
 
static void clear_page_group_in_use (page_group *, char *)
 
static struct page_entry * alloc_page (unsigned)
 
static void free_page (struct page_entry *)
 
static void clear_marks (void)
 
static void sweep_pages (void)
 
static void ggc_recalculate_in_use_p (page_entry *)
 
static void compute_inverse (unsigned)
 
static void adjust_depth (void)
 
static void move_ptes_to_front (int, int)
 
void debug_print_page_list (int)
 
static void push_depth (unsigned int)
 
static void push_by_depth (page_entry *, unsigned long *)
 
static page_entry * safe_lookup_page_table_entry (const void *p)
 
static void release_pages (void)
 
static void ggc_round_alloc_size_1 (size_t requested_size, size_t *size_order, size_t *alloced_size)
 
size_t ggc_round_alloc_size (size_t requested_size)
 
static void add_finalizer (void *result, void(*f)(void *), size_t s, size_t n)
 
void * ggc_internal_alloc (size_t size, void(*f)(void *), size_t s, size_t n MEM_STAT_DECL)
 
void gt_ggc_m_S (const void *p)
 
void gt_ggc_mx (const char *&x)
 
void gt_ggc_mx (char *&x)
 
void gt_ggc_mx (unsigned char *&x)
 
void gt_ggc_mx (unsigned char &x)
 
bool ggc_set_mark (const void *p)
 
bool ggc_marked_p (const void *p)
 
size_t ggc_get_size (const void *p)
 
void ggc_free (void *p)
 
void init_ggc (void)
 
static void ggc_handle_finalizers ()
 
void ggc_collect (enum ggc_collect mode)
 
void ggc_trim ()
 
void ggc_grow (void)
 
void ggc_print_statistics (void)
 
struct ggc_pch_data * init_ggc_pch (void)
 
void ggc_pch_count_object (struct ggc_pch_data *d, void *x, size_t size)
 
size_t ggc_pch_total_size (struct ggc_pch_data *d)
 
void ggc_pch_this_base (struct ggc_pch_data *d, void *base)
 
char * ggc_pch_alloc_object (struct ggc_pch_data *d, void *x, size_t size)
 
void ggc_pch_prepare_write (struct ggc_pch_data *d, FILE *f)
 
void ggc_pch_write_object (struct ggc_pch_data *d, FILE *f, void *x, void *newx, size_t size)
 
void ggc_pch_finish (struct ggc_pch_data *d, FILE *f)
 
void ggc_pch_read (FILE *f, void *addr)
 

Variables

static const size_t extra_order_size_table []
 
static unsigned objects_per_page_table [NUM_ORDERS]
 
static size_t object_size_table [NUM_ORDERS]
 
struct { 
 
   size_t   mult 
 
   unsigned int   shift 
 
} inverse_table [NUM_ORDERS]
 
static struct ggc_globals G
 
static bool in_gc = false
 
static unsigned char size_lookup [NUM_SIZE_LOOKUP]
 

Macro Definition Documentation

◆ BITMAP_SIZE

#define BITMAP_SIZE ( Num_objects)     (CEIL ((Num_objects), HOST_BITS_PER_LONG) * sizeof (long))
The size in bytes required to maintain a bitmap for the objects
on a page-entry.   

Referenced by alloc_page(), clear_marks(), ggc_pch_read(), ggc_print_statistics(), and ggc_recalculate_in_use_p().
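
As a quick illustration of the arithmetic (a standalone sketch, not code taken from ggc-page.cc; the local host_bits_per_long constant stands in for HOST_BITS_PER_LONG):

#include <cassert>
#include <cstddef>

int main ()
{
  const std::size_t host_bits_per_long = 64;   // illustrative value
  auto bitmap_size = [&] (std::size_t num_objects) {
    // CEIL (num_objects, host_bits_per_long) longs, one bit per object.
    return ((num_objects + host_bits_per_long - 1) / host_bits_per_long)
           * sizeof (long);
  };
  assert (bitmap_size (1) == sizeof (long));      // even one object needs a word
  assert (bitmap_size (64) == sizeof (long));     // exactly one word's worth
  assert (bitmap_size (65) == 2 * sizeof (long)); // spills into a second word
  return 0;
}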

◆ DIV_MULT

#define DIV_MULT ( ORDER)    inverse_table[ORDER].mult
For speed, we avoid doing a general integer divide to locate the
offset in the allocation bitmap, by precalculating numbers M, S
such that (O * M) >> S == O / Z (modulo 2^32), for any offset O
within the page which is evenly divisible by the object size Z.   

Referenced by compute_inverse().
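
To make the identity concrete, here is a standalone check for one non-power-of-two object size.  The pair (M, S) below was worked out by hand for Z = 24; it is not a value copied from GCC's inverse_table:

#include <cassert>
#include <cstdint>

int main ()
{
  const std::uint32_t Z = 24;            // object size
  const std::uint32_t M = 0xAAAAAAABu;   // inverse of 3 (odd part of 24) mod 2^32
  const std::uint32_t S = 3;             // 24 == 3 << 3

  // The identity is only promised for offsets evenly divisible by Z.
  for (std::uint32_t o = 0; o <= 24 * 100; o += Z)
    assert ((static_cast<std::uint32_t> (o * M) >> S) == o / Z);
  return 0;
}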

◆ DIV_SHIFT

#define DIV_SHIFT ( ORDER)    inverse_table[ORDER].shift

Referenced by compute_inverse().

◆ GGC_DEBUG_LEVEL

#define GGC_DEBUG_LEVEL   (0)
Strategy:

This garbage-collecting allocator allocates objects on one of a set
of pages.  Each page can allocate objects of a single size only;
available sizes are powers of two starting at four bytes.  The size
of an allocation request is rounded up to the next power of two
(`order'), and satisfied from the appropriate page.

Each page is recorded in a page-entry, which also maintains an
in-use bitmap of object positions on the page.  This allows the
allocation state of a particular object to be flipped without
touching the page itself.

Each page-entry also has a context depth, which is used to track
pushing and popping of allocation contexts.  Only objects allocated
in the current (highest-numbered) context may be collected.

Page entries are arranged in an array of singly-linked lists.  The
array is indexed by the allocation size, in bits, of the pages on
it; i.e. all pages on a list allocate objects of the same size.
Pages are ordered on the list such that all non-full pages precede
all full pages, with non-full pages arranged in order of decreasing
context depth.

Empty pages (of all orders) are kept on a single page cache list,
and are considered first when new pages are required; they are
deallocated at the start of the next collection if they haven't
been recycled by then.   
Define GGC_DEBUG_LEVEL to print debugging information.
 0: No debugging output.
 1: GC statistics only.
 2: Page-entry allocations/deallocations as well.
 3: Object allocations as well.
 4: Object marks as well.   

Referenced by alloc_page(), free_page(), ggc_collect(), ggc_free(), ggc_internal_alloc(), ggc_set_mark(), and gt_ggc_m_S().
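
As a rough illustration of the in-use bitmap idea described above (a toy model, not GCC's actual page_entry layout), the sketch below flips an object's allocation state by touching only the page-entry's bitmap, never the page's own storage:

#include <cassert>
#include <vector>

struct toy_page_entry
{
  std::vector<unsigned long> in_use;   // one bit per object slot
  unsigned object_size;                // all objects on a page share one size

  toy_page_entry (unsigned nobjects, unsigned size)
    : in_use ((nobjects + 8 * sizeof (unsigned long) - 1)
              / (8 * sizeof (unsigned long))),
      object_size (size) {}

  void set_in_use (unsigned bit, bool val)
  {
    const unsigned word_bits = 8 * sizeof (unsigned long);
    if (val)
      in_use[bit / word_bits] |= 1UL << (bit % word_bits);
    else
      in_use[bit / word_bits] &= ~(1UL << (bit % word_bits));
  }

  bool marked_p (unsigned bit) const
  {
    const unsigned word_bits = 8 * sizeof (unsigned long);
    return (in_use[bit / word_bits] >> (bit % word_bits)) & 1;
  }
};

int main ()
{
  toy_page_entry page (128, 16);   // 128 slots of 16 bytes each
  page.set_in_use (5, true);
  assert (page.marked_p (5) && !page.marked_p (6));
  page.set_in_use (5, false);      // "free" the object by flipping its bit only
  assert (!page.marked_p (5));
  return 0;
}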

◆ GGC_QUIRE_SIZE

#define GGC_QUIRE_SIZE   16
Allocate pages in chunks of this size, to throttle calls to memory
allocation routines.  The first page is used, the rest go onto the
free list.  This cannot be larger than HOST_BITS_PER_INT for the
in_use bitmask for page_group.  Hosts that need a different value
can override this by defining GGC_QUIRE_SIZE explicitly.   

Referenced by alloc_page(), and release_pages().

◆ INITIAL_PTE_COUNT

#define INITIAL_PTE_COUNT   128
Initial guess as to how many page table entries we might need.   

Referenced by init_ggc().

◆ LOOKUP_L1

#define LOOKUP_L1 ( p)     (((uintptr_t) (p) >> (32 - PAGE_L1_BITS)) & ((1 << PAGE_L1_BITS) - 1))

◆ LOOKUP_L2

#define LOOKUP_L2 ( p)     (((uintptr_t) (p) >> G.lg_pagesize) & ((1 << PAGE_L2_BITS) - 1))

◆ MAX_ALIGNMENT

#define MAX_ALIGNMENT   (offsetof (struct max_alignment, u))
The biggest alignment required.   

Referenced by init_ggc().
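
The offsetof idiom behind MAX_ALIGNMENT can be shown in isolation: a char followed by a union of strictly aligned types, where the padding inserted before the union equals the strictest alignment among its members.  The union members below are stand-ins and need not match the real struct max_alignment:

#include <cstddef>
#include <cstdint>
#include <cstdio>

struct probe_alignment
{
  char c;
  union
  {
    std::int64_t i;
    double d;
    long double ld;
    void *p;
  } u;
};

int main ()
{
  // offsetof (probe_alignment, u) is the biggest alignment any member needs.
  std::printf ("probed max alignment: %zu\n", offsetof (probe_alignment, u));
  std::printf ("std::max_align_t:     %zu\n", alignof (std::max_align_t));
  return 0;
}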

◆ NUM_EXTRA_ORDERS

#define NUM_EXTRA_ORDERS   ARRAY_SIZE (extra_order_size_table)
The number of extra orders, not corresponding to power-of-two sized
objects.   

◆ NUM_ORDERS

◆ NUM_SIZE_LOOKUP

#define NUM_SIZE_LOOKUP   512
This table provides a fast way to determine ceil(log_2(size)) for
allocation requests.  The minimum allocation size is eight bytes.   

Referenced by ggc_pch_alloc_object(), ggc_pch_count_object(), ggc_pch_write_object(), ggc_round_alloc_size_1(), and init_ggc().
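
A sketch of how such a size-to-order table can be built and consulted, covering only the power-of-two orders (the real size_lookup also folds in the extra, non-power-of-two orders):

#include <cassert>
#include <cstddef>

static const std::size_t num_size_lookup = 512;
static unsigned char size_lookup_sketch[num_size_lookup];

static void
build_lookup ()
{
  unsigned order = 3;                       // minimum object size: 8 bytes = 2^3
  for (std::size_t size = 1; size < num_size_lookup; ++size)
    {
      while ((std::size_t (1) << order) < size)
        ++order;
      size_lookup_sketch[size] = static_cast<unsigned char> (order);
    }
}

int main ()
{
  build_lookup ();
  assert (size_lookup_sketch[1] == 3);     // tiny requests still get 8-byte objects
  assert (size_lookup_sketch[8] == 3);
  assert (size_lookup_sketch[9] == 4);     // 9 bytes rounds up to a 16-byte object
  assert (size_lookup_sketch[200] == 8);   // 200 bytes rounds up to 256
  return 0;
}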

◆ OBJECT_SIZE

◆ OBJECTS_IN_PAGE

#define OBJECTS_IN_PAGE ( P)    ((P)->bytes / OBJECT_SIZE ((P)->order))
The number of objects in P.   

Referenced by clear_marks(), ggc_print_statistics(), ggc_recalculate_in_use_p(), and sweep_pages().

◆ OBJECTS_PER_PAGE

#define OBJECTS_PER_PAGE ( ORDER)    objects_per_page_table[ORDER]
The number of objects per allocation page, for objects on a page of
the indicated ORDER.   

Referenced by alloc_page().

◆ OFFSET_TO_BIT

#define OFFSET_TO_BIT ( OFFSET, ORDER)    (((OFFSET) * DIV_MULT (ORDER)) >> DIV_SHIFT (ORDER))

◆ PAGE_ALIGN

#define PAGE_ALIGN ( x)    ROUND_UP ((x), G.pagesize)
Round X to the next multiple of the page size.  

Referenced by alloc_page(), ggc_pch_read(), ggc_pch_this_base(), and ggc_pch_total_size().

◆ PAGE_L1_BITS

#define PAGE_L1_BITS   (8)
A two-level tree is used to look up the page-entry for a given
pointer.  Two chunks of the pointer's bits are extracted to index
the first and second levels of the tree, as follows:

     msb +--------------+--------------+---------------------+ lsb
         | PAGE_L1_BITS | PAGE_L2_BITS | HOST_PAGE_SIZE_BITS |
         +--------------+--------------+---------------------+
         |<-------------------- 32 bits -------------------->|

The bottommost HOST_PAGE_SIZE_BITS are ignored, since page-entry
pages are aligned on system page boundaries.  The next most
significant PAGE_L2_BITS and PAGE_L1_BITS are the second and first
index values in the lookup table, respectively.

For 32-bit architectures and the settings below, there are no
leftover bits.  For architectures with wider pointers, the lookup
tree points to a list of pages, which must be scanned to find the
correct one.   
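
A standalone sketch of this bit-slicing, assuming a 32-bit address space, 4 KiB pages (lg_pagesize = 12) and PAGE_L1_BITS = 8; the lambdas mirror what LOOKUP_L1 and LOOKUP_L2 compute but are not the macros themselves:

#include <cassert>
#include <cstdint>

int main ()
{
  const unsigned page_l1_bits = 8;
  const unsigned lg_pagesize = 12;
  const unsigned page_l2_bits = 32 - page_l1_bits - lg_pagesize;   // 12

  auto lookup_l1 = [&] (std::uint32_t p) {
    return (p >> (32 - page_l1_bits)) & ((1u << page_l1_bits) - 1);
  };
  auto lookup_l2 = [&] (std::uint32_t p) {
    return (p >> lg_pagesize) & ((1u << page_l2_bits) - 1);
  };

  std::uint32_t p = 0xAB123456;
  assert (lookup_l1 (p) == 0xAB);                      // top 8 bits: first-level index
  assert (lookup_l2 (p) == 0x123);                     // next 12 bits: second-level index
  assert ((p & ((1u << lg_pagesize) - 1)) == 0x456);   // in-page offset, ignored by lookup
  return 0;
}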

◆ PAGE_L1_SIZE

#define PAGE_L1_SIZE   ((uintptr_t) 1 << PAGE_L1_BITS)

◆ PAGE_L2_BITS

#define PAGE_L2_BITS   (32 - PAGE_L1_BITS - G.lg_pagesize)

◆ PAGE_L2_SIZE

#define PAGE_L2_SIZE   ((uintptr_t) 1 << PAGE_L2_BITS)

Referenced by set_page_table_entry().

◆ poison_pages

#define poison_pages ( )

Referenced by ggc_collect(), and ggc_pch_read().

◆ prefetch

#define prefetch ( X)    ((void) X)

◆ ROUND_UP_VALUE

#define ROUND_UP_VALUE ( x, f)   ((f) - 1 - ((f) - 1 + (x)) % (f))
Compute the smallest nonnegative number which when added to X gives
a multiple of F.   

Referenced by ggc_pch_write_object().
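
A few hand-checked cases of the formula, as a standalone snippet (round_up_value simply restates the macro as a function):

#include <cassert>

static unsigned
round_up_value (unsigned x, unsigned f)
{
  return (f) - 1 - ((f) - 1 + (x)) % (f);
}

int main ()
{
  assert (round_up_value (0, 8) == 0);     // already a multiple of 8
  assert (round_up_value (1, 8) == 7);     // 1 + 7 == 8
  assert (round_up_value (8, 8) == 0);
  assert (round_up_value (13, 8) == 3);    // 13 + 3 == 16
  return 0;
}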

◆ RTL_SIZE

#define RTL_SIZE ( NSLOTS)     (RTX_HDR_SIZE + (NSLOTS) * sizeof (rtunion))

◆ save_in_use_p

#define save_in_use_p ( __p)     (save_in_use_p_i (__p->index_by_depth))

◆ save_in_use_p_i

#define save_in_use_p_i ( __i)     (G.save_in_use[__i])

◆ TREE_EXP_SIZE

#define TREE_EXP_SIZE ( OPS)     (sizeof (struct tree_exp) + ((OPS) - 1) * sizeof (tree))

◆ USING_MALLOC_PAGE_GROUPS

#define USING_MALLOC_PAGE_GROUPS
"Bag-of-pages" garbage collector for the GNU compiler.
   Copyright (C) 1999-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.   
Prefer MAP_ANON(YMOUS) to /dev/zero, since we don't need to keep a
file open.  Prefer either to valloc.   

◆ validate_free_objects

#define validate_free_objects ( )

Referenced by ggc_collect(), and ggc_pch_read().

Typedef Documentation

◆ page_table

On 32-bit hosts, we use a two-level page table, as pictured above.   

Function Documentation

◆ add_finalizer()

static void add_finalizer ( void * result,
void(*)(void *) f,
size_t s,
size_t n )
static
Push a finalizer onto the appropriate vec.   

References ggc_globals::context_depth, ggc_globals::finalizers, G, ggc_alloc(), NULL, and ggc_globals::vec_finalizers.

Referenced by ggc_internal_alloc().

◆ adjust_depth()

static void adjust_depth ( void )
inline static
Adjust the size of G.depth so that no index greater than the one
used by the top of G.by_depth is used.   

References ggc_globals::by_depth, ggc_globals::by_depth_in_use, ggc_globals::depth_in_use, G, and ggc_alloc().

Referenced by free_page().

◆ alloc_page()

◆ clear_marks()

◆ clear_page_group_in_use()

static void clear_page_group_in_use ( page_group * group,
char * page )
inline static

◆ compute_inverse()

static void compute_inverse ( unsigned order)
static
Subroutine of init_ggc which computes the pair of numbers used to
perform division by OBJECT_SIZE (order) and fills in inverse_table[].

This algorithm is taken from Granlund and Montgomery's paper
"Division by Invariant Integers using Multiplication"
(Proc. SIGPLAN PLDI, 1994), section 9 (Exact division by
constants).   

References DIV_MULT, DIV_SHIFT, and OBJECT_SIZE.

Referenced by init_ggc().
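
A sketch of one way such a (mult, shift) pair can be derived, using the Newton-style iteration for a modular inverse described in the cited paper.  This is an illustration in the spirit of compute_inverse, not a copy of it, and exact_division_pair is a hypothetical helper name:

#include <cassert>
#include <cstdint>

static void
exact_division_pair (std::uint32_t size, std::uint32_t *mult, std::uint32_t *shift)
{
  // Split SIZE into (odd part) * 2^e.
  std::uint32_t e = 0;
  while ((size & 1) == 0)
    {
      ++e;
      size >>= 1;
    }

  // Newton iteration: inv converges to the inverse of the odd part mod 2^32.
  std::uint32_t inv = size;
  while (static_cast<std::uint32_t> (inv * size) != 1)
    inv = static_cast<std::uint32_t> (inv * (2 - inv * size));

  *mult = inv;
  *shift = e;
}

int main ()
{
  std::uint32_t mult, shift;
  exact_division_pair (24, &mult, &shift);
  // The pair performs exact division for every offset divisible by 24.
  for (std::uint32_t o = 0; o <= 24 * 1000; o += 24)
    assert ((static_cast<std::uint32_t> (o * mult) >> shift) == o / 24);
  return 0;
}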

◆ debug_print_page_list()

DEBUG_FUNCTION void debug_print_page_list ( int order)
Prints the page-entry for object size ORDER, for debugging.   

References page_entry::context_depth, G, ggc_alloc(), NULL, page_entry::order, ggc_globals::page_tails, and ggc_globals::pages.

◆ free_page()

◆ ggc_collect()

◆ ggc_free()

◆ ggc_get_size()

size_t ggc_get_size ( const void * p)
Return the size of the gc-able object P.   

References lookup_page_table_entry(), OBJECT_SIZE, and page_entry::order.

Referenced by ggc_realloc(), and gt_pch_note_object().

◆ ggc_grow()

void ggc_grow ( void )
Assume that all GGC memory is reachable and grow the limits for the next
collection.  With checking, trigger GGC so -Q compilation outputs how much
memory really is reachable.   

References ggc_globals::allocated, ggc_globals::allocated_last_gc, G, ggc_alloc(), MAX, PRsa, and SIZE_AMOUNT.

◆ ggc_handle_finalizers()

static void ggc_handle_finalizers ( )
static
Check if any blocks with a registered finalizer have become unmarked.  If so,
run the finalizer and unregister it, because the block is about to be freed.
Note that no guarantee is made about what order finalizers will run in, so
touching other objects in GC memory is extremely unwise.   

References finalizer::addr(), vec_finalizer::addr(), finalizer::call(), vec_finalizer::call(), ggc_globals::context_depth, ggc_globals::finalizers, G, gcc_assert, ggc_alloc(), ggc_marked_p(), i, and ggc_globals::vec_finalizers.

Referenced by ggc_collect().

◆ ggc_internal_alloc()

◆ ggc_marked_p()

◆ ggc_pch_alloc_object()

char * ggc_pch_alloc_object ( struct ggc_pch_data * d,
void * x,
size_t size )
Assuming that the objects really do end up at the address
passed to ggc_pch_this_base, return the address of this object.   

References ggc_pch_data::d, NUM_SIZE_LOOKUP, OBJECT_SIZE, and size_lookup.

Referenced by ggc_call_alloc().

◆ ggc_pch_count_object()

void ggc_pch_count_object ( struct ggc_pch_data * d,
void * x,
size_t size )
The second and third parameters give the address and size of an
object.  Update the ggc_pch_data structure with as much of that
information as is necessary.   

References ggc_pch_data::d, NUM_SIZE_LOOKUP, OBJECT_SIZE, size_lookup, and ggc_pch_ondisk::totals.

Referenced by ggc_call_count().

◆ ggc_pch_finish()

void ggc_pch_finish ( struct ggc_pch_data * d,
FILE * f )
All objects have been written, write out any final information
required.   

References ggc_pch_data::d, fatal_error(), free(), ggc_alloc(), and input_location.

Referenced by gt_pch_save().

◆ ggc_pch_prepare_write()

void ggc_pch_prepare_write ( struct ggc_pch_data * d,
FILE * f )
Write out any initial information required.   

Referenced by gt_pch_save().

◆ ggc_pch_read()

◆ ggc_pch_this_base()

void ggc_pch_this_base ( struct ggc_pch_data * d,
void * base )
The objects, when read, will most likely be at the address
in the second parameter.   

References a, ggc_pch_data::base, ggc_pch_data::d, ggc_alloc(), i, NUM_ORDERS, OBJECT_SIZE, PAGE_ALIGN, and ggc_pch_ondisk::totals.

Referenced by gt_pch_save().

◆ ggc_pch_total_size()

size_t ggc_pch_total_size ( struct ggc_pch_data * d)
Return the total size of the data to be written to hold all
the objects previously passed to ggc_pch_count_object.   

References a, ggc_pch_data::d, i, NUM_ORDERS, OBJECT_SIZE, PAGE_ALIGN, and ggc_pch_ondisk::totals.

Referenced by gt_pch_save().

◆ ggc_pch_write_object()

void ggc_pch_write_object ( struct ggc_pch_data * d,
FILE * f,
void * x,
void * newx,
size_t size )

◆ ggc_print_statistics()

◆ ggc_recalculate_in_use_p()

static void ggc_recalculate_in_use_p ( page_entry * p)
static
Merge the SAVE_IN_USE_P and IN_USE_P arrays in P so that IN_USE_P
reflects reality.  Recalculate NUM_FREE_OBJECTS as well.   

References BITMAP_SIZE, CEIL, gcc_assert, ggc_alloc(), i, page_entry::in_use_p, page_entry::num_free_objects, OBJECTS_IN_PAGE, and save_in_use_p.

Referenced by sweep_pages().

◆ ggc_round_alloc_size()

size_t ggc_round_alloc_size ( size_t requested_size)
For a given size of memory requested for allocation, return the
actual size that is going to be allocated.   

References ggc_alloc(), ggc_round_alloc_size_1(), and NULL.

Referenced by va_gc::reserve().

◆ ggc_round_alloc_size_1()

static void ggc_round_alloc_size_1 ( size_t requested_size,
size_t * size_order,
size_t * alloced_size )
static
For a given size of memory requested for allocation, return the
actual size that is going to be allocated, as well as the size
order.   

References ggc_alloc(), NUM_SIZE_LOOKUP, OBJECT_SIZE, page_entry::order, and size_lookup.

Referenced by ggc_internal_alloc(), and ggc_round_alloc_size().

◆ ggc_set_mark()

bool ggc_set_mark ( const void * p)
If P is not marked, mark it and return false.  Otherwise return true.
P must have been allocated by the GC allocator; it mustn't point to
static objects, stack variables, or memory allocated with malloc.   

References ggc_globals::debug_file, G, gcc_assert, ggc_alloc(), GGC_DEBUG_LEVEL, HOST_BITS_PER_LONG, page_entry::in_use_p, lookup_page_table_entry(), page_entry::num_free_objects, OFFSET_TO_BIT, page_entry::order, and page_entry::page.

◆ ggc_trim()

◆ gt_ggc_m_S()

◆ gt_ggc_mx() [1/4]

void gt_ggc_mx ( char *& x)

References gt_ggc_m_S().

◆ gt_ggc_mx() [2/4]

void gt_ggc_mx ( const char *& x)
User-callable entry points for marking string X.   

References gt_ggc_m_S().

Referenced by ggc_remove< T >::ggc_mx(), and gt_ggc_mx().

◆ gt_ggc_mx() [3/4]

void gt_ggc_mx ( unsigned char & x)

◆ gt_ggc_mx() [4/4]

void gt_ggc_mx ( unsigned char *& x)

References gt_ggc_m_S().

◆ init_ggc()

◆ init_ggc_pch()

struct ggc_pch_data * init_ggc_pch ( void )
Return a new ggc_pch_data structure.   

References ggc_alloc().

Referenced by gt_pch_save().

◆ lookup_page_table_entry()

static page_entry * lookup_page_table_entry ( const void * p)
inline static
Traverse the page table and find the entry for a page.
Die (probably) if the object wasn't allocated via GC.   

References G, ggc_alloc(), ggc_globals::lookup, LOOKUP_L1, LOOKUP_L2, and table.

Referenced by ggc_free(), ggc_get_size(), ggc_marked_p(), and ggc_set_mark().

◆ move_ptes_to_front()

static void move_ptes_to_front ( int count_old_page_tables,
int count_new_page_tables )
static
Move the PCH PTE entries just added to the end of by_depth, to the
front.   

References ggc_globals::by_depth, ggc_globals::by_depth_in_use, ggc_globals::by_depth_max, free(), G, ggc_alloc(), i, page_entry::index_by_depth, push_depth(), and ggc_globals::save_in_use.

Referenced by ggc_pch_read().

◆ page_group_index()

static size_t page_group_index ( char * allocation,
char * page )
inline static
Compute the index for this page into the page group.   

References G, ggc_globals::lg_pagesize, and page_entry::page.

Referenced by clear_page_group_in_use(), and set_page_group_in_use().

◆ push_by_depth()

static void push_by_depth ( page_entry * p,
unsigned long * s )
inline static
Push an entry onto G.by_depth and G.save_in_use.   

References ggc_globals::by_depth, ggc_globals::by_depth_in_use, ggc_globals::by_depth_max, G, ggc_alloc(), and ggc_globals::save_in_use.

Referenced by ggc_internal_alloc(), and ggc_pch_read().

◆ push_depth()

static void push_depth ( unsigned int i)
inline static

◆ release_pages()

◆ safe_lookup_page_table_entry()

static page_entry * safe_lookup_page_table_entry ( const void * p)
inline static
Traverse the page table and find the entry for a page.
If the object wasn't allocated in GC return NULL.   

References G, ggc_alloc(), ggc_globals::lookup, LOOKUP_L1, LOOKUP_L2, NULL, and table.

Referenced by gt_ggc_m_S().

◆ set_page_group_in_use()

static void set_page_group_in_use ( page_group * group,
char * page )
inline static
Set and clear the in_use bit for this page in the page group.   

References page_group::allocation, page_entry::group, page_group::in_use, page_entry::page, and page_group_index().

Referenced by alloc_page().

◆ set_page_table_entry()

static void set_page_table_entry ( void * p,
page_entry * entry )
static
Set the page table entry for a page.   

References G, ggc_alloc(), ggc_globals::lookup, LOOKUP_L1, LOOKUP_L2, NULL, PAGE_L2_SIZE, and table.

Referenced by alloc_page(), free_page(), and ggc_pch_read().

◆ sweep_pages()

Variable Documentation

◆ extra_order_size_table

const size_t extra_order_size_table[]
static
Initial value:
= {
sizeof (struct tree_decl_non_common),
}
The Ith entry is the maximum size of an object to be stored in the
Ith extra order.  Adding a new entry to this array is the *only*
thing you need to do to add a new special allocation size.   

Referenced by init_ggc().
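
A hypothetical illustration of that rule, with stand-in struct types so the snippet is self-contained (my_frequent_node does not exist in GCC; in ggc-page.cc the entries are sizeof's of real tree/rtl types):

#include <cstddef>

struct stand_in_for_tree_decl_non_common { char payload[96]; };   // placeholder
struct my_frequent_node { char payload[72]; };                    // hypothetical new type

static const std::size_t extra_order_size_table_sketch[] = {
  sizeof (stand_in_for_tree_decl_non_common),
  sizeof (my_frequent_node),   // the only change needed for a new special size
};

int main ()
{
  // The table length plays the role of NUM_EXTRA_ORDERS.
  return static_cast<int> (sizeof (extra_order_size_table_sketch)
                           / sizeof (extra_order_size_table_sketch[0]));
}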

◆ G

◆ in_gc

bool in_gc = false
static
True if a gc is currently taking place.   

Referenced by ggc_collect(), and ggc_free().

◆ [struct]

struct { ... } inverse_table[NUM_ORDERS]
The Ith entry is a pair of numbers (mult, shift) such that
((k * mult) >> shift) mod 2^32 == (k / OBJECT_SIZE(I)) mod 2^32,
for all k evenly divisible by OBJECT_SIZE(I).   

◆ mult

◆ object_size_table

size_t object_size_table[NUM_ORDERS]
static
The Ith entry is the size of an object on a page of order I.   

Referenced by gt_ggc_m_S(), and init_ggc().

◆ objects_per_page_table

unsigned objects_per_page_table[NUM_ORDERS]
static
The Ith entry is the number of objects on a page of order I.   

Referenced by init_ggc().

◆ shift

◆ size_lookup