GCC Middle and Back End API Reference
function.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple-expr.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "opts.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "optabs-tree.h"
#include "output.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "attribs.h"
#include "gimple.h"
#include "options.h"
#include "function-abi.h"
#include "value-range.h"
#include "gimple-range.h"
#include "insn-attr.h"
#include "gt-function.h"

Data Structures

struct  insn_cache_hasher
class  temp_slot
struct  temp_slot_address_entry
struct  temp_address_hasher
struct  initial_value_pair
struct  initial_value_struct
struct  assign_parm_data_all
struct  assign_parm_data_one


#define FLOOR_ROUND(VALUE, ALIGN)   ((VALUE) & ~((ALIGN) - 1))
#define CEIL_ROUND(VALUE, ALIGN)   (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))


static class temp_slot *find_temp_slot_from_address (rtx)
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *)
static void pad_below (struct args_size *, machine_mode, tree)
static void reorder_blocks_1 (rtx_insn *, tree, vec< tree > *)
static int all_blocks (tree, tree *)
static tree *get_block_vector (tree, int *)
tree debug_find_var_in_block_tree (tree, tree)
static void record_insns (rtx_insn *, rtx, hash_table< insn_cache_hasher > **)
static bool contains (const rtx_insn *, hash_table< insn_cache_hasher > *)
static void prepare_function_start (void)
static void do_clobber_return_reg (rtx, void *)
static void do_use_return_reg (rtx, void *)
void push_function_context (void)
void pop_function_context (void)
void free_after_parsing (struct function *f)
void free_after_compilation (struct function *f)
poly_int64 get_frame_size (void)
bool frame_offset_overflow (poly_int64 offset, tree func)
unsigned int spill_slot_alignment (machine_mode mode)
static unsigned int get_stack_local_alignment (tree type, machine_mode mode)
static bool try_fit_stack_local (poly_int64 start, poly_int64 length, poly_int64 size, unsigned int alignment, poly_int64 *poffset)
static void add_frame_space (poly_int64 start, poly_int64 end)
rtx assign_stack_local_1 (machine_mode mode, poly_int64 size, int align, int kind)
rtx assign_stack_local (machine_mode mode, poly_int64 size, int align)
static void cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
static void insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
static class temp_slot ** temp_slots_at_level (int level)
static int max_slot_level (void)
static void move_slot_to_level (class temp_slot *temp, int level)
static void make_slot_available (class temp_slot *temp)
static hashval_t temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
static void insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
int remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
static void remove_unused_temp_slot_addresses (void)
rtx assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
rtx assign_stack_temp (machine_mode mode, poly_int64 size)
rtx assign_temp (tree type_or_decl, int memory_required, int dont_promote)
static void combine_temp_slots (void)
void update_temp_slot_address (rtx old_rtx, rtx new_rtx)
void preserve_temp_slots (rtx x)
void free_temp_slots (void)
void push_temp_slots (void)
void pop_temp_slots (void)
void init_temp_slots (void)
rtx get_hard_reg_initial_reg (rtx reg)
rtx get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
rtx has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
void emit_initial_value_sets (void)
bool initial_value_entry (int i, rtx *hreg, rtx *preg)
static rtx instantiate_new_reg (rtx x, poly_int64 *poffset)
static bool instantiate_virtual_regs_in_rtx (rtx *loc)
static bool safe_insn_predicate (int code, int operand, rtx x)
static void instantiate_virtual_regs_in_insn (rtx_insn *insn)
void instantiate_decl_rtl (rtx x)
static tree instantiate_expr (tree *tp, int *walk_subtrees, void *data)
static void instantiate_decls_1 (tree let)
static void instantiate_decls (tree fndecl)
poly_int64 get_stack_dynamic_offset ()
static void instantiate_virtual_regs (void)
rtl_opt_pass *make_pass_instantiate_virtual_regs (gcc::context *ctxt)
bool aggregate_value_p (const_tree exp, const_tree fntype)
bool use_register_for_decl (const_tree decl)
static void assign_parms_initialize_all (struct assign_parm_data_all *all)
static void split_complex_args (vec< tree > *args)
static vec< tree > assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
static void assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, struct assign_parm_data_one *data)
static void assign_parms_setup_varargs (struct assign_parm_data_all *all, struct assign_parm_data_one *data, bool no_rtl)
static void assign_parm_find_entry_rtl (struct assign_parm_data_all *all, struct assign_parm_data_one *data)
static bool assign_parm_is_stack_parm (struct assign_parm_data_all *all, struct assign_parm_data_one *data)
static void assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
static void assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
static void assign_parm_remove_parallels (struct assign_parm_data_one *data)
static void assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
static bool assign_parm_setup_block_p (struct assign_parm_data_one *data)
static void assign_parm_setup_block (struct assign_parm_data_all *all, tree parm, struct assign_parm_data_one *data)
static void assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, struct assign_parm_data_one *data)
static void assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, struct assign_parm_data_one *data)
static void assign_parms_unsplit_complex (struct assign_parm_data_all *all, vec< tree > fnargs)
static void assign_parms (tree fndecl)
gimple_seq gimplify_parameters (gimple_seq *cleanup)
void locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs, int reg_parm_stack_space, int partial, tree fndecl, struct args_size *initial_offset_ptr, struct locate_and_pad_arg_data *locate)
static bool regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
static void setjmp_vars_warning (bitmap setjmp_crosses, tree block)
static void setjmp_args_warning (bitmap setjmp_crosses)
void generate_setjmp_warnings (void)
static tree block_fragments_nreverse (tree t)
static tree blocks_nreverse_all (tree t)
void reorder_blocks (void)
void clear_block_marks (tree block)
tree blocks_nreverse (tree t)
tree block_chainon (tree op1, tree op2)
void number_blocks (tree fn)
static void invoke_set_current_function_hook (tree fndecl)
void set_cfun (struct function *new_cfun, bool force)
void push_cfun (struct function *new_cfun)
void pop_cfun (void)
int get_next_funcdef_no (void)
int get_last_funcdef_no (void)
static void allocate_stack_usage_info (void)
void allocate_struct_function (tree fndecl, bool abstract_p)
void push_struct_function (tree fndecl, bool abstract_p)
void push_dummy_function (bool with_decl)
void init_dummy_function_start (void)
void init_function_start (tree subr)
void stack_protect_epilogue (void)
void expand_function_start (tree subr)
void pop_dummy_function (void)
void expand_dummy_function_end (void)
void diddle_return_value_1 (void(*doit)(rtx, void *), void *arg, rtx outgoing)
void diddle_return_value (void(*doit)(rtx, void *), void *arg)
void clobber_return_register (void)
static void use_return_register (void)
void expand_function_end (void)
rtx get_arg_pointer_save_area (void)
void dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
void maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
bool prologue_contains (const rtx_insn *insn)
bool epilogue_contains (const rtx_insn *insn)
bool prologue_epilogue_contains (const rtx_insn *insn)
void record_prologue_seq (rtx_insn *seq)
void record_epilogue_seq (rtx_insn *seq)
void set_return_jump_label (rtx_insn *returnjump)
static rtx_insn *make_split_prologue_seq (void)
static rtx_insn *make_prologue_seq (void)
static void gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
static rtx_insn *make_epilogue_seq (void)
void thread_prologue_and_epilogue_insns (void)
void reposition_prologue_and_epilogue_notes (void)
const char * fndecl_name (tree fndecl)
const char * function_name (const function *fn)
const char * current_function_name (void)
static void rest_of_handle_check_leaf_regs (void)
static void used_types_insert_helper (tree type, struct function *func)
void used_types_insert (tree t)
static hashval_t hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
void types_used_by_var_decl_insert (tree type, tree var_decl)
rtl_opt_pass *make_pass_leaf_regs (gcc::context *ctxt)
static void rest_of_handle_thread_prologue_and_epilogue (function *fun)
void record_final_call (tree callee, location_t location)
void record_dynamic_alloc (tree decl_or_exp)
rtl_opt_pass *make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
rtl_opt_pass *make_pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
rtl_opt_pass *make_pass_zero_call_used_regs (gcc::context *ctxt)
static int matching_constraint_num (const char *constraint)
static void match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
void add_local_decl (struct function *fun, tree d)
rtl_opt_pass *make_pass_match_asm_constraints (gcc::context *ctxt)


int virtuals_instantiated
static int funcdef_no
struct machine_function *(* init_machine_status )(void)
struct function *cfun = 0
static hash_table< insn_cache_hasher > * prologue_insn_hash
static hash_table< insn_cache_hasher > * epilogue_insn_hash
hash_table< used_type_hasher > * types_used_by_vars_hash = NULL
vec< tree, va_gc > * types_used_by_cur_var_decl
static vec< function * > function_context_stack
static hash_table< temp_address_hasher > * temp_slot_address_table
static size_t n_temp_slots_in_use
static poly_int64 in_arg_offset
static poly_int64 var_offset
static poly_int64 dynamic_offset
static poly_int64 out_arg_offset
static poly_int64 cfa_offset
static int next_block_index = 2
static bool in_dummy_function
static vec< function * > cfun_stack
bool currently_expanding_function_start

Macro Definition Documentation


#define CEIL_ROUND(VALUE, ALIGN)   (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
Similar, but round to the next highest integer that meets the
required alignment.

Referenced by assign_parm_find_entry_rtl(), and assign_parm_setup_block().


#define FLOOR_ROUND(VALUE, ALIGN)   ((VALUE) & ~((ALIGN) - 1))
Round a value to the lowest integer less than it that is a multiple of
the required alignment.  Avoid using division in case the value is
negative.  Assume the alignment is a power of two.   


Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.
This file handles the generation of rtl code from tree structure
at the level of the function as a whole.
It creates the rtl expressions for parameters and auto variables
and has full responsibility for allocating stack slots.

`expand_function_start' is called at the beginning of a function,
before the function body is parsed, and `expand_function_end' is
called after parsing the body.

Call `assign_stack_local' to allocate a stack slot for a local variable.
This is usually done during the RTL generation for the function body,
but it can also be done in the reload pass when a pseudo-register does
not get a hard register.   
So we can assign to cfun in this file.   

Referenced by assign_stack_local_1().




((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
Definition defaults.h:779
#define crtl
Definition emit-rtl.h:347
Definition function.cc:1393
poly_int< NUM_POLY_INT_COEFFS, HOST_WIDE_INT > poly_int64
Definition poly-int-types.h:24
If not defined, pick an appropriate default for the offset of dynamically
allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
The bottom of the stack points to the actual arguments.  If
REG_PARM_STACK_SPACE is defined, this includes the space for the register
parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
stack space for register parameters is not pushed by the caller, but
rather part of the fixed stack areas and hence not included in
`crtl->outgoing_args_size'.  Nevertheless, we must allow
for it when allocating stack dynamic objects.   

Referenced by get_stack_dynamic_offset().


In most machines, the stack pointer register is equivalent to the bottom
of the stack.   

Function Documentation

◆ add_frame_space()

static void add_frame_space ( poly_int64 start,
poly_int64 end )
Create a new frame_space structure describing free space in the stack
frame beginning at START and ending at END, and chain it into the
function's frame_space_list.   

References crtl, end(), ggc_alloc(), frame_space::length, frame_space::next, and frame_space::start.

Referenced by assign_stack_local_1().

◆ add_local_decl()

◆ aggregate_value_p()

◆ all_blocks()

static int all_blocks ( tree block,
tree * vector )
Count the subblocks of the list starting with BLOCK.  If VECTOR is
non-NULL, list them all into VECTOR, in a depth-first preorder
traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
blocks.


◆ allocate_stack_usage_info()

static void allocate_stack_usage_info ( void )
Allocate and initialize the stack usage info data structure for the
current function.   

References cfun, gcc_assert, and ggc_cleared_alloc().

Referenced by allocate_struct_function(), and prepare_function_start().

◆ allocate_struct_function()

void allocate_struct_function ( tree fndecl,
bool abstract_p )
Allocate a function structure for FNDECL and set its contents
to the defaults.  Set cfun to the newly-allocated object.
Some of the helper functions invoked during initialization assume
that cfun has already been set.  Therefore, assign the new object
directly into cfun and invoke the back end hook explicitly at the
very end, rather than initializing a temporary and calling set_cfun
on it.

ABSTRACT_P is true if this is a function that will never be seen by
the middle-end.  Such functions are front-end concepts (like C++
function templates) that do not correspond directly to functions
placed in object files.   

References aggregate_value_p(), allocate_stack_usage_info(), cfun, current_function_funcdef_no, DECL_ARGUMENTS, DECL_CHAIN, DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT, DECL_RESULT, DECL_STRUCT_FUNCTION, lang_hooks::emits_begin_stmt, get_next_funcdef_no(), ggc_cleared_alloc(), init_eh_for_function(), init_machine_status, invoke_set_current_function_hook(), MAY_HAVE_DEBUG_MARKER_STMTS, NULL_TREE, relayout_decl(), stdarg_p(), targetm, TREE_TYPE, VA_LIST_MAX_FPR_SIZE, and VA_LIST_MAX_GPR_SIZE.

Referenced by cgraph_build_static_cdtor_1(), function_reader::create_function(), create_loop_fn(), cgraph_node::create_wrapper(), expand_thunk(), finalize_size_functions(), init_lowered_empty_function(), push_function_context(), and push_struct_function().

◆ assign_parm_adjust_entry_rtl()

static void assign_parm_adjust_entry_rtl ( struct assign_parm_data_one * data)
A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
always valid and contiguous.   

References copy_rtx(), emit_group_store(), gcc_assert, GET_CODE, GET_MODE, i, int_size_in_bytes(), INTVAL, move_block_from_reg(), NULL, NULL_RTX, REG_P, REGNO, validize_mem(), XEXP, XVECEXP, and XVECLEN.

Referenced by assign_parms().

◆ assign_parm_adjust_stack_rtl()

static void assign_parm_adjust_stack_rtl ( struct assign_parm_data_one * data)
A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
always valid and properly aligned.   


Referenced by assign_parms().

◆ assign_parm_find_data_types()

static void assign_parm_find_data_types ( struct assign_parm_data_all * all,
tree parm,
struct assign_parm_data_one * data )
A subroutine of assign_parms.  Examine PARM and pull out type and mode
data for the parameter.  Incorporate ABI specifics such as pass-by-
reference and type promotion.   

References apply_pass_by_reference_rules(), assign_parm_data_all::args_so_far, assign_parm_data_all::args_so_far_v, cfun, current_function_decl, DECL_ARG_TYPE, DECL_CHAIN, error_mark_node, first_field(), NULL, promote_function_mode(), RECORD_OR_UNION_TYPE_P, targetm, TREE_CODE, TREE_TYPE, TYPE_MODE, TYPE_TRANSPARENT_AGGR, TYPE_UNSIGNED, void_type_node, and VOID_TYPE_P.

Referenced by assign_parms(), and gimplify_parameters().

◆ assign_parm_find_entry_rtl()

static void assign_parm_find_entry_rtl ( struct assign_parm_data_all * all,
struct assign_parm_data_one * data )

◆ assign_parm_find_stack_rtl()

static void assign_parm_find_stack_rtl ( tree parm,
struct assign_parm_data_one * data )

◆ assign_parm_is_stack_parm()

static bool assign_parm_is_stack_parm ( struct assign_parm_data_all * all,
struct assign_parm_data_one * data )
A subroutine of assign_parms.  If there is actually space on the stack
for this parm, count it in stack_args_size and return true.   

References ADD_PARM_SIZE, args_size::constant, GET_CODE, NULL, NULL_RTX, assign_parm_data_all::reg_parm_stack_space, assign_parm_data_all::stack_args_size, XEXP, and XVECEXP.

Referenced by assign_parms().

◆ assign_parm_remove_parallels()

static void assign_parm_remove_parallels ( struct assign_parm_data_one * data)
A subroutine of assign_parms.  Reconstitute any values which were
passed in multiple registers and would fit in a single register.   

References emit_group_store(), gen_reg_rtx(), GET_CODE, GET_MODE, and GET_MODE_SIZE().

Referenced by assign_parm_setup_reg(), and assign_parm_setup_stack().

◆ assign_parm_setup_block()

◆ assign_parm_setup_block_p()

static bool assign_parm_setup_block_p ( struct assign_parm_data_one * data)
A subroutine of assign_parms.  Return true if the current parameter
should be stored as a BLKmode in the current frame.   


Referenced by assign_parms().

◆ assign_parm_setup_reg()

static void assign_parm_setup_reg ( struct assign_parm_data_all * all,
tree parm,
struct assign_parm_data_one * data )

◆ assign_parm_setup_stack()

◆ assign_parms()

static void assign_parms ( tree fndecl)
Assign RTL expressions to the function's parameters.  This may involve
copying them into registers and using those registers as the DECL_RTL.   

References AGGREGATE_TYPE_P, ARGS_GROW_DOWNWARD, ARGS_SIZE_RTX, assign_parm_data_all::args_so_far, assign_parm_data_all::args_so_far_v, assign_parm_adjust_entry_rtl(), assign_parm_adjust_stack_rtl(), assign_parm_find_data_types(), assign_parm_find_entry_rtl(), assign_parm_find_stack_rtl(), assign_parm_is_stack_parm(), assign_parm_setup_block(), assign_parm_setup_block_p(), assign_parm_setup_reg(), assign_parm_setup_stack(), assign_parms_augmented_arg_list(), assign_parms_initialize_all(), assign_parms_setup_varargs(), assign_parms_unsplit_complex(), build1(), cfun, const0_rtx, args_size::constant, convert_memory_address, crtl, current_function_decl, DECL_BY_REFERENCE, DECL_CHAIN, DECL_HAS_VALUE_EXPR_P, DECL_INCOMING_RTL, DECL_MODE, DECL_REGISTER, DECL_RESULT, DECL_RTL, DECL_RTL_SET_P, emit_insn(), expand_expr(), EXPAND_NORMAL, assign_parm_data_all::extra_pretend_bytes, assign_parm_data_all::first_conversion_insn, FOR_EACH_VEC_ELT, assign_parm_data_all::function_result_decl, gcc_assert, gen_int_mode(), gen_rtx_MEM(), GET_MODE_ALIGNMENT, i, int_size_in_bytes(), MEM_P, MINIMUM_ALIGNMENT, NULL_RTX, assign_parm_data_all::pretend_args_size, REG_FUNCTION_VALUE_P, REG_P, assign_parm_data_all::reg_parm_stack_space, REGNO, set_decl_incoming_rtl(), SET_DECL_RTL, SET_DECL_VALUE_EXPR, set_mem_attributes(), set_parm_rtl(), size_diffop, size_int, assign_parm_data_all::stack_args_size, SUPPORTS_STACK_ALIGNMENT, targetm, TREE_ADDRESSABLE, TREE_TYPE, TYPE_ALIGN, TYPE_EMPTY_P, TYPE_MODE, TYPE_NO_NAMED_ARGS_STDARG_P, use_register_for_decl(), and args_size::var.

Referenced by expand_function_start().

◆ assign_parms_augmented_arg_list()

static vec< tree > assign_parms_augmented_arg_list ( struct assign_parm_data_all * all)
A subroutine of assign_parms.  Adjust the parameter list to incorporate
the hidden struct return argument, and (abi willing) complex args.
Return the new parameter list.   

References aggregate_value_p(), build_decl(), build_pointer_type(), cfun, current_function_decl, DECL_ARG_TYPE, DECL_ARGUMENTS, DECL_ARTIFICIAL, DECL_CHAIN, DECL_NAMELESS, DECL_RESULT, DECL_SOURCE_LOCATION, assign_parm_data_all::function_result_decl, get_identifier(), assign_parm_data_all::orig_fnargs, split_complex_args(), targetm, TREE_CONSTANT, TREE_TYPE, type(), and vNULL.

Referenced by assign_parms(), and gimplify_parameters().

◆ assign_parms_initialize_all()

static void assign_parms_initialize_all ( struct assign_parm_data_all * all)

◆ assign_parms_setup_varargs()

static void assign_parms_setup_varargs ( struct assign_parm_data_all * all,
struct assign_parm_data_one * data,
bool no_rtl )
A subroutine of assign_parms.  Invoke setup_incoming_varargs.   

References assign_parm_data_all::args_so_far, function_arg_info::named, assign_parm_data_all::pretend_args_size, and targetm.

Referenced by assign_parms().

◆ assign_parms_unsplit_complex()

◆ assign_stack_local()

◆ assign_stack_local_1()

rtx assign_stack_local_1 ( machine_mode mode,
poly_int64 size,
int align,
int kind )
Allocate a stack slot of SIZE bytes and return a MEM rtx for it
with machine mode MODE.

ALIGN controls the amount of alignment for the address of the slot:
0 means according to MODE,
-1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
-2 means use BITS_PER_UNIT,
positive specifies alignment boundary in bits.

KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
alignment and ASLK_RECORD_PAD bit set if we should remember
extra space we allocated for alignment purposes.  When we are
called from assign_stack_temp_for_type, it is not set so we don't
track the same stack slot in two independent lists.

We do not round to stack_boundary here.   

References add_frame_space(), ASLK_RECORD_PAD, ASLK_REDUCE_ALIGN, crtl, current_function_decl, FRAME_GROWS_DOWNWARD, frame_offset, frame_offset_overflow(), frame_pointer_rtx, gcc_assert, gcc_checking_assert, gen_rtx_MEM(), GET_MODE_ALIGNMENT, GET_MODE_SIZE(), get_stack_local_alignment(), known_eq, known_gt, known_lt, frame_space::length, MAX_SUPPORTED_STACK_ALIGNMENT, MEM_NOTRAP_P, frame_space::next, NULL, plus_constant(), set_mem_align(), STACK_ALIGNMENT_NEEDED, stack_slot_list, frame_space::start, SUPPORTS_STACK_ALIGNMENT, targetm, trunc_int_for_mode(), try_fit_stack_local(), vec_safe_push(), virtual_stack_vars_rtx, and virtuals_instantiated.

Referenced by assign_stack_local(), assign_stack_temp_for_type(), and setup_save_areas().

◆ assign_stack_temp()

rtx assign_stack_temp ( machine_mode mode,
poly_int64 size )
Allocate a temporary stack slot and record it for possible later
reuse.  First two arguments are same as in preceding function.   

References assign_stack_temp_for_type(), NULL_TREE, and temp_slot::size.

Referenced by emit_group_load_1(), emit_group_store(), emit_library_call_value_1(), expand_assignment(), expand_builtin_issignaling(), expand_expr_real_1(), extract_bit_field_1(), and store_bit_field_1().

◆ assign_stack_temp_for_type()

◆ assign_temp()

rtx assign_temp ( tree type_or_decl,
int memory_required,
int dont_promote )
Assign a temporary.
If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
and so that should be used in error messages.  In either case, we
allocate of the given type.
MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
it is 0 if a register is OK.
DONT_PROMOTE is 1 if we should not promote values in register
to wider modes.   

References assign_stack_temp_for_type(), COMPLETE_TYPE_P, DECL_P, error(), gcc_assert, gen_reg_rtx(), known_eq, max_int_size_in_bytes(), NULL, poly_int_tree_p(), promote_mode(), temp_slot::size, TREE_ADDRESSABLE, TREE_CODE, TREE_TYPE, type(), TYPE_MODE, TYPE_SIZE_UNIT, and TYPE_UNSIGNED.

Referenced by emit_library_call_value_1(), emit_push_insn(), expand_asm_stmt(), expand_builtin_cexpi(), expand_call(), expand_cond_expr_using_cmove(), expand_constructor(), expand_expr_real_1(), expand_expr_real_2(), expand_return(), expand_vector_ubsan_overflow(), get_temp_reg(), initialize_argument_information(), and store_one_arg().

◆ block_chainon()

tree block_chainon ( tree op1,
tree op2 )
Concatenate two chains of blocks (chained through BLOCK_CHAIN)
by modifying the last node in chain 1 to point to chain 2.   

References BLOCK_CHAIN, and gcc_assert.

◆ block_fragments_nreverse()

static tree block_fragments_nreverse ( tree t)
Reverse the order of elements in the fragment chain T of blocks,
and return the new head of the chain (old last element).
In addition to that clear BLOCK_SAME_RANGE flags when needed
and adjust BLOCK_SUPERCONTEXT from the super fragment to
its super fragment origin.   


Referenced by blocks_nreverse_all().

◆ blocks_nreverse()

tree blocks_nreverse ( tree t)
Reverse the order of elements in the chain T of blocks,
and return the new head of the chain (old last element).   

References BLOCK_CHAIN.

Referenced by lower_function_body(), lower_gimple_bind(), and remap_blocks().

◆ blocks_nreverse_all()

static tree blocks_nreverse_all ( tree t)
Reverse the order of elements in the chain T of blocks,
and return the new head of the chain (old last element).
Also do the same on subblocks and reverse the order of elements
in BLOCK_FRAGMENT_CHAIN as well.
References BLOCK_CHAIN, BLOCK_FRAGMENT_CHAIN, BLOCK_FRAGMENT_ORIGIN, block_fragments_nreverse(), BLOCK_SAME_RANGE, BLOCK_SUBBLOCKS, blocks_nreverse_all(), and NULL_TREE.

Referenced by blocks_nreverse_all(), and reorder_blocks().

◆ clear_block_marks()

void clear_block_marks ( tree block)
Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.   

References BLOCK_CHAIN, BLOCK_SUBBLOCKS, clear_block_marks(), and TREE_ASM_WRITTEN.

Referenced by clear_block_marks(), lower_function_body(), and reorder_blocks().

◆ clobber_return_register()

◆ combine_temp_slots()

static void combine_temp_slots ( void )
Combine temporary stack slots which are adjacent on the stack.

This allows for better use of already allocated stack space.  This is only
done for BLKmode slots because we can be sure that we won't have alignment
problems in this case.   

References avail_temp_slots, temp_slot::base_offset, cut_slot_from_list(), temp_slot::full_size, GET_MODE, known_eq, temp_slot::next, temp_slot::size, and temp_slot::slot.

Referenced by free_temp_slots().

◆ contains()

static bool contains ( const rtx_insn * insn,
hash_table< insn_cache_hasher > * hash )
Determine if any INSNs in HASH are, or are part of, INSN.  Because
we can be running after reorg, SEQUENCE rtl is possible.   

References as_a(), rtx_sequence::element(), hash_table< Descriptor, Lazy, Allocator >::find(), GET_CODE, i, rtx_sequence::len(), NONJUMP_INSN_P, NULL, and PATTERN().

Referenced by epilogue_contains(), prologue_contains(), prologue_epilogue_contains(), and reposition_prologue_and_epilogue_notes().

◆ current_function_name()

◆ cut_slot_from_list()

static void cut_slot_from_list ( class temp_slot * temp,
class temp_slot ** list )
Removes temporary slot TEMP from LIST.   

References NULL.

Referenced by assign_stack_temp_for_type(), combine_temp_slots(), make_slot_available(), and move_slot_to_level().

◆ debug_find_var_in_block_tree()

DEBUG_FUNCTION tree debug_find_var_in_block_tree ( tree var,
tree block )
If VAR is present in a subblock of BLOCK, return the subblock.   

References BLOCK_SUBBLOCKS, BLOCK_VARS, debug_find_var_in_block_tree(), NULL_TREE, and TREE_CHAIN.

Referenced by debug_find_var_in_block_tree().

◆ diddle_return_value()

void diddle_return_value ( void(* doit )(rtx, void *),
void * arg )
Call DOIT for each hard register used as a return value from
the current function.   

References crtl, and diddle_return_value_1().

Referenced by clobber_return_register(), df_get_exit_block_use_set(), and use_return_register().

◆ diddle_return_value_1()

void diddle_return_value_1 ( void(* doit )(rtx, void *),
void * arg,
rtx outgoing )
Helper for diddle_return_value.   


Referenced by diddle_return_value().

◆ do_clobber_return_reg()

static void do_clobber_return_reg ( rtx reg,
void * arg )

References emit_clobber().

Referenced by clobber_return_register().

◆ do_use_return_reg()

static void do_use_return_reg ( rtx reg,
void * arg )

References emit_use().

Referenced by use_return_register().

◆ dump_stack_clash_frame_info()

void dump_stack_clash_frame_info ( enum stack_clash_probes probes,
bool residuals )
If debugging dumps are requested, dump information about how the
target handled -fstack-check=clash for the prologue.

PROBES describes what if any probes were emitted.

RESIDUALS indicates if the prologue had any residual allocation
(i.e. total allocation was not a multiple of PROBE_INTERVAL).   

References cfun, dump_file, frame_pointer_needed, NO_PROBE_NO_FRAME, NO_PROBE_SMALL_FRAME, PROBE_INLINE, PROBE_LOOP, and TREE_THIS_VOLATILE.

◆ emit_initial_value_sets()

◆ epilogue_contains()

bool epilogue_contains ( const rtx_insn * insn)

References contains(), and epilogue_insn_hash.

◆ expand_dummy_function_end()

void expand_dummy_function_end ( void )

◆ expand_function_end()

◆ expand_function_start()

◆ find_temp_slot_from_address()

◆ fndecl_name()

const char * fndecl_name ( tree fndecl)
Returns the name of function declared by FNDECL.   

References lang_hooks::decl_printable_name, and NULL.

Referenced by cgraph_node::add_new_function(), function_name(), get_static_name(), and ipa_reference_read_optimization_summary().

◆ frame_offset_overflow()

bool frame_offset_overflow ( poly_int64 offset,
tree func )
Issue an error message and return TRUE if frame OFFSET overflows in
the signed target pointer arithmetics for function FUNC.  Otherwise
return FALSE.   

References coeffs_in_range_p(), DECL_SOURCE_LOCATION, error_at(), FRAME_GROWS_DOWNWARD, GET_MODE_BITSIZE(), HOST_WIDE_INT_1U, poly_int< N, C >::is_constant(), and offset.

Referenced by alloc_stack_frame_space(), and assign_stack_local_1().

◆ free_after_compilation()

void free_after_compilation ( struct function * f)
Clear out all parts of the state in F that can safely be discarded
after the function has been compiled, to let garbage collection
reclaim the memory.   

References function::cfg, function::cond_uids, crtl, function::curr_properties, function::eh, epilogue_insn_hash, free(), function::machine, NULL, prologue_insn_hash, and regno_reg_rtx.

Referenced by expand_dummy_function_end(), expand_thunk(), and rest_of_clean_state().

◆ free_after_parsing()

void free_after_parsing ( struct function * f)
Clear out all parts of the state in F that can safely be discarded
after the function has been parsed, but not compiled, to let
garbage collection reclaim the memory.   

References function::language.

Referenced by expand_dummy_function_end(), and rest_of_clean_state().

◆ free_temp_slots()

void free_temp_slots ( void )

◆ function_name()

const char * function_name ( const function * fn)

◆ gen_call_used_regs_seq()

◆ generate_setjmp_warnings()

void generate_setjmp_warnings ( void )

◆ get_arg_pointer_save_area()

◆ get_block_vector()

static tree * get_block_vector ( tree block,
int * n_blocks_p )
Return a vector containing all the blocks rooted at BLOCK.  The
number of elements in the vector is stored in N_BLOCKS_P.  The
vector is dynamically allocated; it is the caller's responsibility
to call `free' on the pointer returned.   

References all_blocks, and NULL.

Referenced by number_blocks().

◆ get_frame_size()

poly_int64 get_frame_size ( void )
Return size needed for stack frame based on slots so far allocated.
This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
the caller may have to do that.   

References FRAME_GROWS_DOWNWARD, and frame_offset.

Referenced by do_reload(), final_start_function_1(), get_initial_register_offset(), lra(), reload(), and rtx_addr_can_trap_p_1().

◆ get_hard_reg_initial_reg()

rtx get_hard_reg_initial_reg ( rtx reg)
If a pseudo represents an initial hard reg (or expression), return
it, else return NULL_RTX.   

References crtl, initial_value_struct::entries, initial_value_pair::hard_reg, i, NULL_RTX, initial_value_struct::num_entries, initial_value_pair::pseudo, and rtx_equal_p().

◆ get_hard_reg_initial_val()

rtx get_hard_reg_initial_val ( machine_mode mode,
unsigned int regno )
Make sure that there's a pseudo register of mode MODE that stores the
initial value of hard register REGNO.  Return an rtx for such a pseudo.   

References crtl, initial_value_struct::entries, gen_reg_rtx(), gen_rtx_REG(), ggc_alloc(), GGC_RESIZEVEC, ggc_vec_alloc(), initial_value_pair::hard_reg, has_hard_reg_initial_val(), initial_value_struct::max_entries, initial_value_struct::num_entries, and initial_value_pair::pseudo.

◆ get_last_funcdef_no()

int get_last_funcdef_no ( void )
Return value of funcdef.   

References funcdef_no.

◆ get_next_funcdef_no()

int get_next_funcdef_no ( void )
Return value of funcdef and increase it.   

References funcdef_no.

Referenced by allocate_struct_function().

◆ get_stack_dynamic_offset()

poly_int64 get_stack_dynamic_offset ( )
Return the value of STACK_DYNAMIC_OFFSET for the current function.
This is done through a function wrapper so that the macro sees a
predictable set of included files.   

References current_function_decl, and STACK_DYNAMIC_OFFSET.

Referenced by allocate_dynamic_stack_space(), and instantiate_virtual_regs().

◆ get_stack_local_alignment()

static unsigned int get_stack_local_alignment ( tree type,
machine_mode mode )
Return stack slot alignment in bits for TYPE and MODE.   

References GET_MODE_ALIGNMENT, STACK_SLOT_ALIGNMENT, lang_hooks_for_types::type_for_mode, and lang_hooks::types.

Referenced by assign_stack_local_1(), and assign_stack_temp_for_type().

◆ gimplify_parameters()

◆ has_hard_reg_initial_val()

rtx has_hard_reg_initial_val ( machine_mode mode,
unsigned int regno )
See if get_hard_reg_initial_val has been used to create a pseudo
for the initial value of hard register REGNO in mode MODE.  Return
the associated pseudo if so, otherwise return NULL.   

References crtl, initial_value_struct::entries, GET_MODE, initial_value_pair::hard_reg, i, NULL_RTX, initial_value_struct::num_entries, initial_value_pair::pseudo, and REGNO.

Referenced by get_hard_reg_initial_val().

◆ hash_types_used_by_vars_entry()

static hashval_t hash_types_used_by_vars_entry ( const struct types_used_by_vars_entry * entry)
Helper to Hash a struct types_used_by_vars_entry.   

References gcc_assert, types_used_by_vars_entry::type, and types_used_by_vars_entry::var_decl.

Referenced by used_type_hasher::hash().

◆ init_dummy_function_start()

void init_dummy_function_start ( void )
Initialize the rtl expansion mechanism so that we can do simple things
like generate sequences.  This is used to provide a context during global
initialization of some passes.  You must call expand_dummy_function_end
to exit this context.   

References prepare_function_start(), and push_dummy_function().

Referenced by backend_init_target().

◆ init_function_start()

void init_function_start ( tree subr)
Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
and initialize static variables for generating RTL for the statements
of the function.   

References AGGREGATE_TYPE_P, decide_function_section(), DECL_RESULT, DECL_SOURCE_LOCATION, initialize_rtl(), prepare_function_start(), TREE_TYPE, and warning_at().

Referenced by cgraph_node::expand(), and expand_thunk().

◆ init_temp_slots()

◆ initial_value_entry()

bool initial_value_entry ( int i,
rtx * hreg,
rtx * preg )
Return the hardreg-pseudoreg initial values pair entry I and
TRUE if I is a valid entry, or FALSE if I is not a valid entry.   

References crtl, initial_value_struct::entries, initial_value_pair::hard_reg, i, initial_value_struct::num_entries, and initial_value_pair::pseudo.

Referenced by allocate_initial_values().

◆ insert_slot_to_list()

static void insert_slot_to_list ( class temp_slot * temp,
class temp_slot ** list )
Inserts temporary slot TEMP to LIST.   

References temp_slot::next, NULL, and temp_slot::prev.

Referenced by assign_stack_temp_for_type(), make_slot_available(), and move_slot_to_level().

◆ insert_temp_slot_address()

static void insert_temp_slot_address ( rtx address,
class temp_slot * temp_slot )

◆ instantiate_decl_rtl()

void instantiate_decl_rtl ( rtx x)
Subroutine of instantiate_decls.  Given RTL representing a decl,
do any instantiation required.   

References CONSTANT_P, GET_CODE, instantiate_decl_rtl(), instantiate_virtual_regs_in_rtx(), MEM_P, REG_P, VIRTUAL_REGISTER_P, and XEXP.

Referenced by instantiate_decl_rtl(), instantiate_decls(), instantiate_decls_1(), and instantiate_expr().

◆ instantiate_decls()

static void instantiate_decls ( tree fndecl)

◆ instantiate_decls_1()

static void instantiate_decls_1 ( tree let)
Subroutine of instantiate_decls: Process all decls in the given
BLOCK node and all its subblocks.   

References BLOCK_CHAIN, BLOCK_SUBBLOCKS, BLOCK_VARS, DECL_CHAIN, DECL_HAS_VALUE_EXPR_P, DECL_RTL, DECL_RTL_SET_P, DECL_VALUE_EXPR, instantiate_decl_rtl(), instantiate_decls_1(), instantiate_expr(), NULL, VAR_P, and walk_tree.

Referenced by instantiate_decls(), and instantiate_decls_1().

◆ instantiate_expr()

static tree instantiate_expr ( tree * tp,
int * walk_subtrees,
void * data )
Helper for instantiate_decls called via walk_tree: Process all decls
in the given DECL_VALUE_EXPR.   

References DECL_HAS_VALUE_EXPR_P, DECL_INCOMING_RTL, DECL_NAMELESS, DECL_P, DECL_RTL, DECL_RTL_SET_P, DECL_VALUE_EXPR, EXPR_P, instantiate_decl_rtl(), instantiate_expr(), NULL, TREE_CODE, VAR_P, and walk_tree.

Referenced by instantiate_decls(), instantiate_decls_1(), and instantiate_expr().

◆ instantiate_new_reg()

static rtx instantiate_new_reg ( rtx x,
poly_int64 * poffset )
Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
is a virtual register, return the equivalent hard register and set the
offset indirectly through the pointer.  Otherwise, return 0.   

References arg_pointer_rtx, cfa_offset, crtl, dynamic_offset, frame_pointer_rtx, GEN_INT, in_arg_offset, NULL_RTX, offset, out_arg_offset, stack_pointer_rtx, stack_realign_drap, var_offset, virtual_cfa_rtx, virtual_incoming_args_rtx, virtual_outgoing_args_rtx, virtual_preferred_stack_boundary_rtx, virtual_stack_dynamic_rtx, and virtual_stack_vars_rtx.

Referenced by instantiate_virtual_regs_in_insn(), and instantiate_virtual_regs_in_rtx().

◆ instantiate_virtual_regs()

◆ instantiate_virtual_regs_in_insn()

◆ instantiate_virtual_regs_in_rtx()

static bool instantiate_virtual_regs_in_rtx ( rtx * loc)
A subroutine of instantiate_virtual_regs.  Instantiate any virtual
registers present inside of *LOC.  The expression is simplified,
as much as possible, but is not to be considered "valid" in any sense
implied by the target.  Return true if any change is made.   

References changed, FOR_EACH_SUBRTX_PTR, GET_CODE, GET_MODE, instantiate_new_reg(), offset, plus_constant(), and XEXP.

Referenced by instantiate_decl_rtl(), instantiate_virtual_regs(), and instantiate_virtual_regs_in_insn().

◆ invoke_set_current_function_hook()

static void invoke_set_current_function_hook ( tree fndecl)
Invoke the target hook when setting cfun.  Update the optimization options
if the function uses different options than the default.   

References DECL_FUNCTION_SPECIFIC_OPTIMIZATION, global_options, global_options_set, in_dummy_function, init_tree_optimization_optabs(), optimization_current_node, optimization_default_node, parse_alignment_opts(), targetm, this_fn_optabs, TREE_OPTIMIZATION, and TREE_OPTIMIZATION_OPTABS.

Referenced by allocate_struct_function(), and set_cfun().

◆ locate_and_pad_parm()

void locate_and_pad_parm ( machine_mode passed_mode,
tree type,
int in_regs,
int reg_parm_stack_space,
int partial,
tree fndecl,
struct args_size * initial_offset_ptr,
struct locate_and_pad_arg_data * locate )
Compute the size and offset from the start of the stacked arguments for a
parm passed in mode PASSED_MODE and with type TYPE.

INITIAL_OFFSET_PTR points to the current offset into the stacked arguments.

The starting offset and size for this parm are returned in
LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
nonzero, the offset is that of stack slot, which is returned in
LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
padding required from the initial offset ptr to the stack slot.

IN_REGS is nonzero if the argument will be passed in registers.  It will
never be set if REG_PARM_STACK_SPACE is not defined.

REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
for arguments which are passed in registers.

FNDECL is the function in which the argument was defined.

There are two types of rounding that are done.  The first, controlled by
TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
argument list to be aligned to the specific boundary (in bits).  This
rounding affects the initial and starting offsets, but not the argument
list size.  The second, controlled by TARGET_FUNCTION_ARG_PADDING,
optionally rounds the size of the parm to PARM_BOUNDARY.  The
initial offset is not affected by this rounding, while the size always
is and the starting offset may be.   
LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
callers pass in the total size of args so far as
INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.   

References ADD_PARM_SIZE, locate_and_pad_arg_data::alignment_pad, arg_size_in_bytes(), ARGS_GROW_DOWNWARD, ARGS_SIZE_TREE, locate_and_pad_arg_data::boundary, args_size::constant, crtl, gcc_assert, GET_MODE_SIZE(), MAX_SUPPORTED_STACK_ALIGNMENT, locate_and_pad_arg_data::offset, pad_below(), PAD_DOWNWARD, PAD_NONE, pad_to_arg_alignment(), round_up, locate_and_pad_arg_data::size, size_binop, size_int, locate_and_pad_arg_data::slot_offset, ssize_int, SUB_PARM_SIZE, SUPPORTS_STACK_ALIGNMENT, targetm, tree_fits_uhwi_p(), TREE_INT_CST_LOW, tree_to_uhwi(), args_size::var, and locate_and_pad_arg_data::where_pad.

Referenced by assign_parm_find_entry_rtl(), emit_library_call_value_1(), and initialize_argument_information().

◆ make_epilogue_seq()

static rtx_insn * make_epilogue_seq ( void )

◆ make_pass_instantiate_virtual_regs()

rtl_opt_pass * make_pass_instantiate_virtual_regs ( gcc::context * ctxt)

◆ make_pass_late_thread_prologue_and_epilogue()

rtl_opt_pass * make_pass_late_thread_prologue_and_epilogue ( gcc::context * ctxt)

◆ make_pass_leaf_regs()

rtl_opt_pass * make_pass_leaf_regs ( gcc::context * ctxt)

◆ make_pass_match_asm_constraints()

rtl_opt_pass * make_pass_match_asm_constraints ( gcc::context * ctxt)

◆ make_pass_thread_prologue_and_epilogue()

rtl_opt_pass * make_pass_thread_prologue_and_epilogue ( gcc::context * ctxt)

◆ make_pass_zero_call_used_regs()

rtl_opt_pass * make_pass_zero_call_used_regs ( gcc::context * ctxt)

◆ make_prologue_seq()

static rtx_insn * make_prologue_seq ( void )

◆ make_slot_available()

static void make_slot_available ( class temp_slot * temp)

◆ make_split_prologue_seq()

static rtx_insn * make_split_prologue_seq ( void )
Return a sequence to be used as the split prologue for the current
function, or NULL.   

References cfun, DECL_ATTRIBUTES, emit_insn(), end_sequence(), get_insns(), lookup_attribute(), NULL, prologue_insn_hash, prologue_location, record_insns(), set_insn_locations(), start_sequence(), and targetm.

Referenced by thread_prologue_and_epilogue_insns().

◆ match_asm_constraints_1()

static void match_asm_constraints_1 ( rtx_insn * insn,
rtx * p_sets,
int noutputs )
This mini-pass fixes fall-out from SSA in asm statements that have
in-out constraints.  Say you start with

  orig = inout;
  asm ("": "+mr" (inout));
  use (orig);

which is transformed very early to use explicit output and match operands:

  orig = inout;
  asm ("": "=mr" (inout) : "0" (inout));
  use (orig);

Or, after SSA and copyprop,

  asm ("": "=mr" (inout_2) : "0" (inout_1));
  use (inout_1);

Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
they represent two separate values, so they will get different pseudo
registers during expansion.  Then, since the two operands need to match
per the constraints, but use different pseudo registers, reload can
only register a reload for these operands.  But reloads can only be
satisfied by hardregs, not by memory, so we need a register for this
reload, just because we are presented with non-matching operands.
So, even though we allow memory for this operand, no memory can be
used for it, just because the two operands don't match.  This can
cause reload failures on register-starved targets.

So it's a symptom of reload not being able to use memory for reloads
or, alternatively it's also a symptom of both operands not coming into
reload as matching (in which case the pseudo could go to memory just
fine, as the alternative allows it, and no reload would be necessary).
We fix the latter problem here, by transforming

  asm ("": "=mr" (inout_2) : "0" (inout_1));

back to

  inout_2 = inout_1;
  asm ("": "=mr" (inout_2) : "0" (inout_2));   

References ASM_OPERANDS_INPUT_CONSTRAINT, ASM_OPERANDS_INPUT_LENGTH, ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_OUTPUT_CONSTRAINT, changed, CONSTANT_P, copy_rtx(), df_insn_rescan(), emit_insn_before(), emit_move_insn(), end_sequence(), gcc_assert, general_operand(), get_insns(), GET_MODE, i, insns, matching_constraint_num(), MEM_P, NULL, reg_overlap_mentioned_p(), REG_P, replace_rtx(), RTVEC_ELT, rtx_equal_p(), SET_DEST, SET_SRC, start_sequence(), and SUBREG_P.

◆ matching_constraint_num()

static int matching_constraint_num ( const char * constraint)
If CONSTRAINT is a matching constraint, then return its number.
Otherwise, return -1.   

References IN_RANGE, and NULL.

Referenced by match_asm_constraints_1().

◆ max_slot_level()

static int max_slot_level ( void )
Returns the maximal temporary slot level.   

References used_temp_slots.

Referenced by find_temp_slot_from_address().

◆ maybe_copy_prologue_epilogue_insn()

void maybe_copy_prologue_epilogue_insn ( rtx insn,
rtx copy )
INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
basic block, splitting or peepholes.  If INSN is a prologue or epilogue
insn, then record COPY as well.   

References epilogue_insn_hash, hash_table< Descriptor, Lazy, Allocator >::find(), hash_table< Descriptor, Lazy, Allocator >::find_slot(), gcc_assert, NULL, and prologue_insn_hash.

Referenced by copy_frame_info_to_split_insn(), and duplicate_insn_chain().

◆ move_slot_to_level()

static void move_slot_to_level ( class temp_slot * temp,
int level )
Moves temporary slot TEMP to LEVEL.   

References cut_slot_from_list(), insert_slot_to_list(), temp_slot::level, and temp_slots_at_level().

Referenced by preserve_temp_slots().

◆ number_blocks()

void number_blocks ( tree fn)
Set BLOCK_NUMBER for all the blocks in FN.   

References BLOCK_NUMBER, DECL_INITIAL, free(), get_block_vector(), i, and next_block_index.

Referenced by final_start_function_1(), optimize_inline_calls(), and tree_function_versioning().

◆ pad_below()

static void pad_below ( struct args_size * offset_ptr,
machine_mode passed_mode,
tree sizetree )

◆ pad_to_arg_alignment()

static void pad_to_arg_alignment ( struct args_size * offset_ptr,
int boundary,
struct args_size * alignment_pad )
Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
BOUNDARY is measured in bits, but must be a multiple of a storage unit.   

References ARGS_GROW_DOWNWARD, ARGS_SIZE_TREE, args_size::constant, NULL_TREE, offset, round_down, round_up, size_binop, ssize_int, STACK_POINTER_OFFSET, and args_size::var.

Referenced by locate_and_pad_parm().

◆ pop_cfun()

◆ pop_dummy_function()

void pop_dummy_function ( void )

◆ pop_function_context()

void pop_function_context ( void )
Restore the last saved context, at the end of a nested function.
This function is called from language-specific code.   

References current_function_decl, function::decl, function_context_stack, generating_concat_p, set_cfun(), and virtuals_instantiated.

◆ pop_temp_slots()

void pop_temp_slots ( void )

◆ prepare_function_start()

static void prepare_function_start ( void )

◆ preserve_temp_slots()

void preserve_temp_slots ( rtx x)
If X could be a reference to a temporary slot, mark that slot as
belonging to one level higher than the current level.  If X
matched one of our slots, just mark that one.  Otherwise, we can't
easily predict which it is, so upgrade all of them.

This is called when an ({...}) construct occurs and a statement
returns a value in memory.   

References CONSTANT_P, find_temp_slot_from_address(), temp_slot::level, MEM_P, move_slot_to_level(), temp_slot::next, REG_P, REG_POINTER, temp_slot_level, temp_slots_at_level(), and XEXP.

Referenced by expand_assignment(), precompute_register_parameters(), and store_one_arg().

◆ prologue_contains()

bool prologue_contains ( const rtx_insn * insn)

References contains(), and prologue_insn_hash.

◆ prologue_epilogue_contains()

bool prologue_epilogue_contains ( const rtx_insn * insn)

◆ push_cfun()

◆ push_dummy_function()

◆ push_function_context()

void push_function_context ( void )
Save the current context for compilation of a nested function.
This is called from language-specific code.   

References allocate_struct_function(), cfun, function_context_stack, NULL, and set_cfun().

◆ push_struct_function()

void push_struct_function ( tree fndecl,
bool abstract_p )
This is like allocate_struct_function, but pushes a new cfun for FNDECL
instead of just setting it.   

References allocate_struct_function(), cfun, cfun_stack, current_function_decl, gcc_assert, and in_dummy_function.

Referenced by create_assumption_fn(), create_omp_child_function(), gimplify_function_tree(), initialize_cfun(), input_function(), and push_dummy_function().

◆ push_temp_slots()

void push_temp_slots ( void )

◆ record_dynamic_alloc()

void record_dynamic_alloc ( tree decl_or_exp)

◆ record_epilogue_seq()

◆ record_final_call()

void record_final_call ( tree callee,
location_t location )
Record a final call to CALLEE at LOCATION.   

References cfun, and vec_safe_push().

Referenced by emit_library_call_value_1(), and expand_call().

◆ record_insns()

static void record_insns ( rtx_insn * insns,
rtx end,
hash_table< insn_cache_hasher > ** hashp )
We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'.   
Add a list of INSNS to the hash HASHP, possibly allocating HASHP
for the first time.   

References hash_table< Descriptor, Lazy, Allocator >::create_ggc(), end(), hash_table< Descriptor, Lazy, Allocator >::find_slot(), gcc_assert, insns, NEXT_INSN(), and NULL.

Referenced by make_epilogue_seq(), make_prologue_seq(), make_split_prologue_seq(), record_epilogue_seq(), record_prologue_seq(), and thread_prologue_and_epilogue_insns().

◆ record_prologue_seq()

◆ regno_clobbered_at_setjmp()

static bool regno_clobbered_at_setjmp ( bitmap setjmp_crosses,
int regno )
True if register REGNO was alive at a place where `setjmp' was
called and was set more than once or is an argument.  Such regs may
be clobbered by `longjmp'.   

References cfun, df_get_live_out(), ENTRY_BLOCK_PTR_FOR_FN, max_reg_num(), REG_N_SETS(), REGNO_REG_SET_P, and setjmp_crosses.

Referenced by setjmp_args_warning(), and setjmp_vars_warning().

◆ remove_unused_temp_slot_addresses()

static void remove_unused_temp_slot_addresses ( void )
Remove all mappings of addresses to unused temp slots.   

References n_temp_slots_in_use, NULL, remove_unused_temp_slot_addresses_1(), and temp_slot_address_table.

Referenced by free_temp_slots().

◆ remove_unused_temp_slot_addresses_1()

int remove_unused_temp_slot_addresses_1 ( temp_slot_address_entry ** slot,
void *  )
Remove an address -> temp slot mapping entry if the temp slot is
not in use anymore.  Callback for remove_unused_temp_slot_addresses.   

References temp_slot::in_use, temp_slot_address_entry::temp_slot, and temp_slot_address_table.

Referenced by remove_unused_temp_slot_addresses().

◆ reorder_blocks()

void reorder_blocks ( void )
Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
and create duplicate blocks.   
??? Need an option to either create block fragments or to create
abstract origin duplicates of a source block.  It really depends
on what optimization has been performed.   

References BLOCK_CHAIN, BLOCK_SUBBLOCKS, blocks_nreverse_all(), clear_block_marks(), current_function_decl, DECL_INITIAL, get_insns(), NULL_TREE, and reorder_blocks_1().

Referenced by reemit_insn_block_notes().

◆ reorder_blocks_1()

static void reorder_blocks_1 ( rtx_insn * insns,
tree current_block,
vec< tree > * p_block_stack )

◆ reposition_prologue_and_epilogue_notes()

void reposition_prologue_and_epilogue_notes ( void )
Reposition the prologue-end and epilogue-begin notes after
instruction scheduling.   

References BB_END, cfun, contains(), epilogue_insn_hash, EXIT_BLOCK_PTR_FOR_FN, FOR_BB_INSNS, FOR_EACH_EDGE, get_insns(), LABEL_P, last, NEXT_INSN(), NOTE_KIND, NOTE_P, NULL, PREV_INSN(), prologue_insn_hash, reorder_insns(), and targetm.

◆ rest_of_handle_check_leaf_regs()

static void rest_of_handle_check_leaf_regs ( void )

◆ rest_of_handle_thread_prologue_and_epilogue()

◆ safe_insn_predicate()

static bool safe_insn_predicate ( int code,
int operand,
rtx x )
A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
matches the predicate for insn CODE operand OPERAND.   

References insn_operand_matches().

Referenced by instantiate_virtual_regs_in_insn().

◆ set_cfun()

◆ set_return_jump_label()

void set_return_jump_label ( rtx_insn * returnjump)
Set JUMP_LABEL for a return insn.   


Referenced by find_end_label(), force_nonfallthru_and_redirect(), and make_epilogue_seq().

◆ setjmp_args_warning()

static void setjmp_args_warning ( bitmap setjmp_crosses)
Do the appropriate part of setjmp_vars_warning
but for arguments instead of local variables.   

References current_function_decl, DECL_ARGUMENTS, DECL_CHAIN, DECL_RTL, REG_P, REGNO, regno_clobbered_at_setjmp(), setjmp_crosses, and warning().

Referenced by generate_setjmp_warnings().

◆ setjmp_vars_warning()

static void setjmp_vars_warning ( bitmap setjmp_crosses,
tree block )
Walk the tree of blocks describing the binding levels within a
function and warn about variables that might be killed by setjmp or
vfork.  This is done after calling flow analysis before register
allocation since that will clobber the pseudo-regs to hard
regs.
References BLOCK_CHAIN, BLOCK_SUBBLOCKS, BLOCK_VARS, DECL_CHAIN, DECL_RTL, DECL_RTL_SET_P, REG_P, REGNO, regno_clobbered_at_setjmp(), setjmp_crosses, setjmp_vars_warning(), VAR_P, and warning().

Referenced by generate_setjmp_warnings(), and setjmp_vars_warning().

◆ spill_slot_alignment()

unsigned int spill_slot_alignment ( machine_mode mode)
Return the minimum spill slot alignment for a register of mode MODE.   


Referenced by add_pseudo_to_slot().

◆ split_complex_args()

static void split_complex_args ( vec< tree > * args)
If ARGS contains entries with complex types, split the entry into two
entries of the component type.  Return a new list of substitutions are
needed, else the old list.   


Referenced by assign_parms_augmented_arg_list().

◆ stack_protect_epilogue()

void stack_protect_epilogue ( void )
Expand code to verify the stack_protect_guard.  This is invoked at
the end of a function to be protected.   

References const0_rtx, crtl, DECL_P, DECL_RTL, emit_cmp_and_jump_insns(), emit_insn(), emit_label(), expand_call(), expand_normal(), free_temp_slots(), gcc_assert, gen_label_rtx(), get_last_insn(), JUMP_P, NULL, NULL_RTX, predict_insn_def(), ptr_mode, TAKEN, targetm, and y.

Referenced by expand_call(), and expand_function_end().

◆ temp_slot_address_compute_hash()

static hashval_t temp_slot_address_compute_hash ( struct temp_slot_address_entry * t)
Compute the hash value for an address -> temp slot mapping.
The value is cached on the mapping entry.   

References temp_slot_address_entry::address, do_not_record, GET_MODE, hash_rtx(), and NULL.

Referenced by find_temp_slot_from_address(), and insert_temp_slot_address().

◆ temp_slots_at_level()

static class temp_slot ** temp_slots_at_level ( int level)

◆ thread_prologue_and_epilogue_insns()

void thread_prologue_and_epilogue_insns ( void )
Generate the prologue and epilogue RTL if the machine supports it.  Thread
this into place with notes indicating where the prologue ends and where
the epilogue begins.  Update the basic block information when possible.

Notes on epilogue placement:
There are several kinds of edges to the exit block:
* a single fallthru edge from LAST_BB
* possibly, edges from blocks containing sibcalls
* possibly, fake edges from infinite loops

The epilogue is always emitted on the fallthru edge from the last basic
block in the function, LAST_BB, into the exit block.

If LAST_BB is empty except for a label, it is the target of every
other basic block in the function that ends in a return.  If a
target has a return or simple_return pattern (possibly with
conditional variants), these basic blocks can be changed so that a
return insn is emitted into them, and their target is adjusted to
the real exit block.

Notes on shrink wrapping: We implement a fairly conservative
version of shrink-wrapping rather than the textbook one.  We only
generate a single prologue and a single epilogue.  This is
sufficient to catch a number of interesting cases involving early
exits.
First, we identify the blocks that require the prologue to occur before
them.  These are the ones that modify a call-saved register, or reference
any of the stack or frame pointer registers.  To simplify things, we then
mark everything reachable from these blocks as also requiring a prologue.
This takes care of loops automatically, and avoids the need to examine
whether MEMs reference the frame, since it is sufficient to check for
occurrences of the stack or frame pointer.

We then compute the set of blocks for which the need for a prologue
is anticipatable (borrowing terminology from the shrink-wrapping
description in Muchnick's book).  These are the blocks which either
require a prologue themselves, or those that have only successors
where the prologue is anticipatable.  The prologue needs to be
inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
is not.  For the moment, we ensure that only one such edge exists.

The epilogue is placed as described above, but we make a
distinction between inserting return and simple_return patterns
when modifying other blocks that end in a return.  Blocks that end
in a sibcall omit the sibcall_epilogue if the block is not in
ANTIC.
References as_a(), basic_block_def::aux, BB_END, bitmap_clear(), bitmap_set_bit, BLOCK_FOR_INSN(), CALL_P, cfg_layout_finalize(), cfg_layout_initialize(), cfun, commit_edge_insertions(), crtl, default_rtl_profile(), df_analyze(), df_update_entry_block_defs(), df_update_entry_exit_and_calls(), df_update_exit_block_uses(), eh_returnjump_p(), ei_next(), ei_safe_edge(), ei_start, emit_insn(), emit_insn_before(), emit_note(), emit_note_after(), end_sequence(), ENTRY_BLOCK_PTR_FOR_FN, epilogue_completed, epilogue_insn_hash, epilogue_location, EXIT_BLOCK_PTR_FOR_FN, find_fallthru_edge(), find_many_sub_basic_blocks(), find_sub_basic_blocks(), FOR_EACH_BB_FN, FOR_EACH_EDGE, gcc_assert, get_insns(), basic_block_def::index, insert_insn_on_edge(), last, last_basic_block_for_fn, make_epilogue_seq(), make_prologue_seq(), make_split_prologue_seq(), next_active_insn(), basic_block_def::next_bb, NEXT_INSN(), NONDEBUG_INSN_P, NOTE_KIND, NOTE_P, NULL, NUM_FIXED_BLOCKS, PATTERN(), PREV_INSN(), record_insns(), reorder_insns(), returnjump_p(), rtl_profile_for_bb(), set_insn_locations(), SIBLING_CALL_P, single_succ_edge(), single_succ_p(), start_sequence(), targetm, try_shrink_wrapping(), try_shrink_wrapping_separate(), and try_split().

Referenced by rest_of_handle_thread_prologue_and_epilogue().

◆ try_fit_stack_local()

static bool try_fit_stack_local ( poly_int64 start,
poly_int64 length,
poly_int64 size,
unsigned int alignment,
poly_int64 * poffset )
Determine whether it is possible to fit a stack slot of size SIZE and
alignment ALIGNMENT into an area in the stack frame that starts at
frame offset START and has a length of LENGTH.  If so, store the frame
offset to be used for the stack slot in *POFFSET and return true;
return false otherwise.  This function will extend the frame size when
given a start/length pair that lies at the end of the frame.   

References FRAME_GROWS_DOWNWARD, frame_offset, frame_phase, known_eq, maybe_gt, PREFERRED_STACK_BOUNDARY, and targetm.

Referenced by assign_stack_local_1().

◆ types_used_by_var_decl_insert()

void types_used_by_var_decl_insert ( tree type,
tree var_decl )

◆ update_temp_slot_address()

void update_temp_slot_address ( rtx old_rtx,
rtx new_rtx )
Indicate that NEW_RTX is an alternate way of referring to the temp
slot that previously was known by OLD_RTX.   

References find_temp_slot_from_address(), GET_CODE, insert_temp_slot_address(), REG_P, rtx_equal_p(), update_temp_slot_address(), and XEXP.

Referenced by memory_address_addr_space(), offset_address(), replace_equiv_address(), and update_temp_slot_address().

◆ use_register_for_decl()

◆ use_return_register()

static void use_return_register ( void )

◆ used_types_insert()

void used_types_insert ( tree t)

◆ used_types_insert_helper()

static void used_types_insert_helper ( tree type,
struct function * func )
Insert a TYPE into the used types hash table of CFUN.   

References hash_set< KeyId, Lazy, Traits >::add(), hash_set< KeyId, Lazy, Traits >::create_ggc(), NULL, and function::used_types_hash.

Referenced by used_types_insert().

Variable Documentation

◆ cfa_offset

poly_int64 cfa_offset

◆ cfun

struct function* cfun = 0
The currently compiled function.   

Referenced by push_function_context().

◆ cfun_stack

vec<function *> cfun_stack
Initialized with NOGC, making this poisonous to the garbage collector.   

Referenced by pop_cfun(), push_cfun(), and push_struct_function().

◆ currently_expanding_function_start

bool currently_expanding_function_start
Start the RTL for a new function, and set variables used for
emitting RTL.
PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
the function's parameters, which must be run at any return statement.   

Referenced by expand_function_start().

◆ dynamic_offset

poly_int64 dynamic_offset

◆ epilogue_insn_hash

◆ funcdef_no

int funcdef_no
Assign unique numbers to labels generated for profiling, debugging, etc.   

Referenced by draw_cfg_node(), draw_cfg_node_succ_edges(), draw_cfg_nodes_for_loop(), get_last_funcdef_no(), and get_next_funcdef_no().

◆ function_context_stack

vec<function *> function_context_stack
Stack of nested functions.   
Keep track of the cfun stack.   

Referenced by pop_function_context(), and push_function_context().

◆ in_arg_offset

poly_int64 in_arg_offset
These routines are responsible for converting virtual register references
to the actual hard register references once RTL generation is complete.

The following four variables are used for communication between the
routines.  They contain the offsets of the virtual registers from their
respective hard registers.   

Referenced by instantiate_new_reg(), and instantiate_virtual_regs().

◆ in_dummy_function

bool in_dummy_function
Keep track of whether we're in a dummy function context.  If we are,
we don't want to invoke the set_current_function hook, because we'll
get into trouble if the hook calls target_reinit () recursively or
when the initial initialization is not yet complete.   

Referenced by expand_dummy_function_end(), invoke_set_current_function_hook(), pop_cfun(), pop_dummy_function(), prepare_function_start(), push_dummy_function(), and push_struct_function().

◆ init_machine_status

struct machine_function *(* init_machine_status) (void)
These variables hold pointers to functions to create and destroy
target specific, per-function data structures.   

Referenced by allocate_struct_function().

◆ n_temp_slots_in_use

◆ next_block_index

int next_block_index = 2

Referenced by number_blocks().

◆ out_arg_offset

poly_int64 out_arg_offset

◆ prologue_insn_hash

◆ temp_slot_address_table

hash_table<temp_address_hasher>* temp_slot_address_table
A table of addresses that represent a stack slot.  The table is a mapping
from address RTXen to a temp slot.   

Referenced by find_temp_slot_from_address(), init_temp_slots(), insert_temp_slot_address(), remove_unused_temp_slot_addresses(), and remove_unused_temp_slot_addresses_1().

◆ types_used_by_cur_var_decl

vec<tree, va_gc>* types_used_by_cur_var_decl
During parsing of a global variable, this vector contains the types
referenced by the global variable.   

Referenced by used_types_insert().

◆ types_used_by_vars_hash

hash_table<used_type_hasher>* types_used_by_vars_hash = NULL
Hash table recording the relationship between a global variable
and the types it references in its initializer. The key of the
entry is a referenced type, and the value is the DECL of the global
variable. types_used_by_vars_do_hash and types_used_by_vars_eq below are
the hash and equality functions to use for this hash table.   

Referenced by types_used_by_var_decl_insert().

◆ var_offset

poly_int64 var_offset

◆ virtuals_instantiated

int virtuals_instantiated
Nonzero once virtual register instantiation has been done.
assign_stack_local uses frame_pointer_rtx when this is nonzero.
calls.cc:emit_library_call_value_1 uses it to set up
post-instantiation libcalls.   

Referenced by allocate_dynamic_stack_space(), assign_stack_local_1(), assign_stack_temp_for_type(), emit_library_call_value_1(), instantiate_virtual_regs(), pop_function_context(), and prepare_function_start().