GCC Middle and Back End API Reference
explow.h File Reference

Macros

#define memory_address(MODE, RTX)
 

Enumerations

enum  save_level { SAVE_BLOCK , SAVE_FUNCTION , SAVE_NONLOCAL }
 

Functions

rtx validize_mem (rtx)
 
rtx use_anchored_address (rtx)
 
rtx copy_to_reg (rtx)
 
rtx copy_addr_to_reg (rtx)
 
rtx copy_to_mode_reg (machine_mode, rtx)
 
rtx copy_to_suggested_reg (rtx, rtx, machine_mode)
 
rtx force_reg (machine_mode, rtx)
 
rtx force_subreg (machine_mode, rtx, machine_mode, poly_uint64)
 
rtx force_lowpart_subreg (machine_mode, rtx, machine_mode)
 
rtx force_highpart_subreg (machine_mode, rtx, machine_mode)
 
rtx force_not_mem (rtx)
 
machine_mode promote_function_mode (const_tree, machine_mode, int *, const_tree, int)
 
machine_mode promote_mode (const_tree, machine_mode, int *)
 
machine_mode promote_decl_mode (const_tree, int *)
 
machine_mode promote_ssa_mode (const_tree, int *)
 
void adjust_stack (rtx)
 
void anti_adjust_stack (rtx)
 
void anti_adjust_stack_and_probe (rtx, bool)
 
void anti_adjust_stack_and_probe_stack_clash (rtx)
 
void compute_stack_clash_protection_loop_data (rtx *, rtx *, rtx *, HOST_WIDE_INT *, rtx)
 
void emit_stack_clash_protection_probe_loop_start (rtx *, rtx *, rtx, bool)
 
void emit_stack_clash_protection_probe_loop_end (rtx, rtx, rtx, bool)
 
void emit_stack_save (enum save_level, rtx *)
 
void emit_stack_restore (enum save_level, rtx)
 
void update_nonlocal_goto_save_area (void)
 
void record_new_stack_level (void)
 
rtx allocate_dynamic_stack_space (rtx, unsigned, unsigned, HOST_WIDE_INT, bool)
 
void get_dynamic_stack_size (rtx *, unsigned, unsigned, HOST_WIDE_INT *)
 
rtx get_dynamic_stack_base (poly_int64, unsigned, rtx)
 
rtx align_dynamic_address (rtx, unsigned)
 
void emit_stack_probe (rtx)
 
void probe_stack_range (HOST_WIDE_INT, rtx)
 
rtx hard_libcall_value (machine_mode, rtx)
 
rtx hard_function_value (const_tree, const_tree, const_tree, int)
 
rtx memory_address_addr_space (machine_mode, rtx, addr_space_t)
 
rtx eliminate_constant_term (rtx, rtx *)
 

Macro Definition Documentation

◆ memory_address

#define memory_address ( MODE, RTX )
Value:
memory_address_addr_space ((MODE), (RTX), ADDR_SPACE_GENERIC)

Like memory_address_addr_space, except assume the memory address points to
the generic named address space.   

Referenced by do_tablejump(), emit_call_1(), emit_library_call_value_1(), emit_push_insn(), expand_builtin_apply(), expand_builtin_return_addr(), expand_builtin_strub_leave(), expand_builtin_update_setjmp_buf(), expand_call(), get_memory_rtx(), prepare_call_address(), probe_stack_range(), push_block(), and store_one_arg().
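
A minimal usage sketch (illustrative, not code from GCC itself): inside an expander, legitimize an arbitrary address expression before wrapping it in a MEM. The helper name make_valid_simode_mem is hypothetical, and the usual GCC internal headers (rtl.h, explow.h) are assumed to be in scope.

/* Hypothetical expander fragment.  memory_address emits whatever insns
   are needed to make ADDR valid for an SImode access in the generic
   address space and returns the (possibly register-copied) address.  */
static rtx
make_valid_simode_mem (rtx addr)
{
  rtx valid_addr = memory_address (SImode, addr);
  return gen_rtx_MEM (SImode, valid_addr);
}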

Enumeration Type Documentation

◆ save_level

enum save_level
This enum is used by emit_stack_save and emit_stack_restore.   
Enumerator
SAVE_BLOCK 
SAVE_FUNCTION 
SAVE_NONLOCAL 

Function Documentation

◆ adjust_stack()

void adjust_stack ( rtx adjust)
extern
Remove some bytes from the stack.  An rtx says how many.   
Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
This pops when ADJUST is positive.  ADJUST need not be constant.   

References adjust_stack_1(), const0_rtx, poly_int_rtx_p(), and stack_pointer_delta.

Referenced by anti_adjust_stack_and_probe(), do_pending_stack_adjust(), and emit_call_1().

◆ align_dynamic_address()

rtx align_dynamic_address ( rtx target,
unsigned required_align )
extern
Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.   

References expand_binop(), expand_divmod(), expand_mult(), gen_int_mode(), NULL_RTX, and OPTAB_LIB_WIDEN.

Referenced by allocate_dynamic_stack_space(), assign_parm_setup_block(), and get_dynamic_stack_base().

◆ allocate_dynamic_stack_space()

rtx allocate_dynamic_stack_space ( rtx size,
unsigned size_align,
unsigned required_align,
HOST_WIDE_INT max_size,
bool cannot_accumulate )
extern
Allocate some space on the stack dynamically and return its address.   
Return an rtx representing the address of an area of memory dynamically
pushed on the stack.

Any required stack pointer alignment is preserved.

SIZE is an rtx representing the size of the area.

SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
parameter may be zero.  If so, a proper value will be extracted
from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

REQUIRED_ALIGN is the alignment (in bits) required for the region
of memory.

MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
no such upper bound is known.

If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
stack space allocated by the generated code cannot be added with itself
in the course of the execution of the function.  It is always safe to
pass FALSE here and the following criterion is sufficient in order to
pass TRUE: every path in the CFG that starts at the allocation point and
loops to it executes the associated deallocation code.   

References align_dynamic_address(), anti_adjust_stack(), anti_adjust_stack_and_probe(), anti_adjust_stack_and_probe_stack_clash(), cfun, const0_rtx, CONST_INT_P, create_convert_operand_to(), create_fixed_operand(), crtl, current_function_dynamic_stack_size, current_function_has_unbounded_dynamic_stack_size, do_pending_stack_adjust(), emit_barrier(), emit_cmp_and_jump_insns(), emit_insn(), emit_jump(), emit_label(), emit_library_call_value(), emit_move_insn(), error(), expand_binop(), expand_insn(), find_reg_equal_equiv_note(), force_operand(), gcc_assert, gen_int_mode(), gen_label_rtx(), gen_reg_rtx(), GENERIC_STACK_CHECK, get_dynamic_stack_size(), get_last_insn(), get_stack_check_protect(), get_stack_dynamic_offset(), init_one_libfunc(), INTVAL, LCT_NORMAL, MALLOC_ABI_ALIGNMENT, mark_reg_pointer(), NULL, NULL_RTX, OPTAB_LIB_WIDEN, OPTAB_WIDEN, plus_constant(), PREFERRED_STACK_BOUNDARY, probe_stack_range(), record_new_stack_level(), REG_P, rtx_equal_p(), SET_DEST, SET_SRC, single_set(), STACK_GROWS_DOWNWARD, stack_limit_rtx, stack_pointer_delta, stack_pointer_rtx, STATIC_BUILTIN_STACK_CHECK, suppress_reg_args_size, expand_operand::target, targetm, virtual_stack_dynamic_rtx, virtuals_instantiated, and XEXP.

Referenced by expand_builtin_alloca(), expand_builtin_apply(), expand_call(), and initialize_argument_information().
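
As a hedged illustration of the parameter conventions described above (not code taken from GCC), an alloca-style expansion might look like the following; BIGGEST_ALIGNMENT as the required alignment and -1 as the unknown upper bound are assumptions made for the sketch.

/* Hypothetical sketch: SIZE is a runtime rtx giving the number of bytes.
   A size_align of BITS_PER_UNIT means "no extra alignment knowledge";
   a max_size of -1 means no upper bound is known; cannot_accumulate of
   false is always safe, per the description above.  */
rtx
expand_alloca_like (rtx size)
{
  return allocate_dynamic_stack_space (size,
                                       /*size_align=*/BITS_PER_UNIT,
                                       /*required_align=*/BIGGEST_ALIGNMENT,
                                       /*max_size=*/-1,
                                       /*cannot_accumulate=*/false);
}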

◆ anti_adjust_stack()

void anti_adjust_stack ( rtx adjust)
extern
Add some bytes to the stack.  An rtx says how many.   
Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
This pushes when ADJUST is positive.  ADJUST need not be constant.   

References adjust_stack_1(), const0_rtx, poly_int_rtx_p(), and stack_pointer_delta.

Referenced by allocate_dynamic_stack_space(), anti_adjust_stack_and_probe(), anti_adjust_stack_and_probe_stack_clash(), emit_call_1(), emit_library_call_value_1(), emit_push_insn(), expand_call(), push_block(), and store_one_arg().
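
A small sketch of how adjust_stack and anti_adjust_stack pair up (illustrative only; the 64-byte amount is arbitrary).

/* Hypothetical fragment: temporarily extend the stack by 64 bytes during
   expansion, then give the space back.  Both routines accept any rtx, so
   the amount need not be a compile-time constant.  */
rtx amount = gen_int_mode (64, Pmode);

anti_adjust_stack (amount);   /* grow the stack ("push" 64 bytes)  */
/* ... emit code that uses the newly exposed area ...  */
adjust_stack (amount);        /* shrink it back ("pop" 64 bytes)  */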

◆ anti_adjust_stack_and_probe()

void anti_adjust_stack_and_probe ( rtx size,
bool adjust_back )
extern
Add some bytes to the stack while probing it.  An rtx says how many.  
Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
while probing it.  This pushes when SIZE is positive.  SIZE need not
be constant.  If ADJUST_BACK is true, adjust back the stack pointer
by plus SIZE at the end.   

References adjust_stack(), anti_adjust_stack(), const0_rtx, CONST_INT_P, convert_to_mode(), emit_cmp_and_jump_insns(), emit_jump(), emit_label(), emit_stack_probe(), force_operand(), GEN_INT, gen_int_mode(), gen_label_rtx(), GET_CODE, GET_MODE, i, INTVAL, NULL_RTX, plus_constant(), PROBE_INTERVAL, simplify_gen_binary(), STACK_GROW_OP, and stack_pointer_rtx.

Referenced by allocate_dynamic_stack_space(), and expand_function_end().

◆ anti_adjust_stack_and_probe_stack_clash()

void anti_adjust_stack_and_probe_stack_clash ( rtx size)
extern
Add some bytes to the stack while probing it.  An rtx says how
many.  Add additional probes to prevent stack clashing attacks.   
Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
while probing it.  This pushes when SIZE is positive.  SIZE need not
be constant.

This is subtly different from anti_adjust_stack_and_probe, in order to
help prevent stack-clash attacks:

  1. It must assume no knowledge of the probing state, any allocation
     must probe.

     Consider the case of a 1 byte alloca in a loop.  If the sum of the
     allocations is large, then this could be used to jump the guard if
     probes were not emitted.

  2. It never skips probes, whereas anti_adjust_stack_and_probe will
     skip the probe on the first PROBE_INTERVAL on the assumption it
     was already done in the prologue and in previous allocations.

  3. It only allocates and probes SIZE bytes, it does not need to
     allocate/probe beyond that because this probing style does not
     guarantee signal handling capability if the guard is hit.   

References anti_adjust_stack(), compute_stack_clash_protection_loop_data(), CONST0_RTX, CONST_INT_P, convert_to_mode(), emit_cmp_and_jump_insns(), emit_insn(), emit_label(), emit_stack_clash_protection_probe_loop_end(), emit_stack_clash_protection_probe_loop_start(), emit_stack_probe(), gcc_assert, gen_blockage(), GEN_INT, gen_label_rtx(), gen_rtx_CONST_INT(), GET_MODE, GET_MODE_SIZE(), i, INTVAL, NULL_RTX, plus_constant(), rotate_loop(), stack_pointer_rtx, targetm, and word_mode.

Referenced by allocate_dynamic_stack_space().

◆ compute_stack_clash_protection_loop_data()

void compute_stack_clash_protection_loop_data ( rtx * rounded_size,
rtx * last_addr,
rtx * residual,
HOST_WIDE_INT * probe_interval,
rtx size )
extern
Support for building allocation/probing loops for stack-clash
protection of dynamically allocated stack space.   
Compute parameters for stack clash probing a dynamic stack
allocation of SIZE bytes.

We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.

Additionally we conditionally dump the type of probing that will
be needed given the values computed.   

References CONST0_RTX, CONST_INT_P, dump_file, force_operand(), GEN_INT, INTVAL, NULL_RTX, simplify_gen_binary(), STACK_GROW_OP, and stack_pointer_rtx.

Referenced by anti_adjust_stack_and_probe_stack_clash().

◆ copy_addr_to_reg()

rtx copy_addr_to_reg ( rtx x)
extern
Like copy_to_reg but always make the reg Pmode.   
Like copy_to_reg but always give the new register mode Pmode
in case X is a constant.   

References copy_to_mode_reg().

Referenced by emit_block_op_via_libcall(), expand_builtin_apply(), expand_builtin_apply_args_1(), expand_builtin_cexpi(), expand_builtin_eh_return(), expand_builtin_frame_address(), expand_call(), set_storage_via_libcall(), sjlj_emit_function_enter(), and try_store_by_multiple_pieces().
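
A one-line sketch of the typical use (mem is a hypothetical MEM rtx).

/* Hypothetical fragment: copy a MEM's address into a fresh Pmode pseudo
   so it survives later code emission.  copy_addr_to_reg is used rather
   than copy_to_reg because a constant address has no mode of its own.  */
rtx addr_reg = copy_addr_to_reg (XEXP (mem, 0));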

◆ copy_to_mode_reg()

◆ copy_to_reg()

◆ copy_to_suggested_reg()

rtx copy_to_suggested_reg ( rtx x,
rtx target,
machine_mode mode )
extern
Copy given rtx to given temp reg and return that.   
Copy X to TARGET (if it's nonzero and a reg)
or to a new temp reg and return that reg.
MODE is the mode to use for X in case it is a constant.   

References emit_move_insn(), gen_reg_rtx(), and REG_P.

◆ eliminate_constant_term()

rtx eliminate_constant_term ( rtx x,
rtx * constptr )
extern
If X is a sum, return a new sum like X but lacking any constant terms.
Add all the removed constant terms into *CONSTPTR.
X itself is not altered.  The result != X if and only if
it is not isomorphic to X.   

References const0_rtx, CONST_INT_P, eliminate_constant_term(), GET_CODE, GET_MODE, simplify_binary_operation(), and XEXP.

Referenced by eliminate_constant_term(), and memory_address_addr_space().
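
An illustrative sketch, assuming a hypothetical address rtx addr of the form (plus (reg) (const_int 12)).

/* *CONSTPTR is an in/out accumulator; callers typically start it at
   const0_rtx, as memory_address_addr_space does.  */
rtx constant_term = const0_rtx;
rtx base = eliminate_constant_term (addr, &constant_term);
/* For the address above: BASE is (reg) and CONSTANT_TERM is (const_int 12).
   If ADDR contains no constant term, BASE == ADDR and CONSTANT_TERM is
   unchanged.  */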

◆ emit_stack_clash_protection_probe_loop_end()

void emit_stack_clash_protection_probe_loop_end ( rtx loop_lab,
rtx end_loop,
rtx last_addr,
bool rotated )
extern
Emit the end of a stack clash probing loop.

This consists of just the jump back to LOOP_LAB and
emitting END_LOOP after the loop.   

References emit_cmp_and_jump_insns(), emit_jump(), emit_label(), NULL_RTX, and stack_pointer_rtx.

Referenced by anti_adjust_stack_and_probe_stack_clash().

◆ emit_stack_clash_protection_probe_loop_start()

void emit_stack_clash_protection_probe_loop_start ( rtx * loop_lab,
rtx * end_lab,
rtx last_addr,
bool rotated )
extern
Emit the start of an allocate/probe loop for stack
clash protection.

LOOP_LAB and END_LAB are returned for use when we emit the
end of the loop.

LAST_ADDR is the value for SP which stops the loop.   

References emit_cmp_and_jump_insns(), emit_label(), gen_label_rtx(), NULL_RTX, and stack_pointer_rtx.

Referenced by anti_adjust_stack_and_probe_stack_clash().

◆ emit_stack_probe()

◆ emit_stack_restore()

void emit_stack_restore ( enum save_level,
rtx  )
extern
Restore the stack pointer from a save area of the specified level.   

◆ emit_stack_save()

void emit_stack_save ( enum save_level,
rtx *  )
extern
Save the stack pointer at the specified level.   
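
A sketch of how the save/restore pair and the save_level enum are typically used together (illustrative; the block-scope dynamic-allocation scenario is an assumption, not taken from this header).

/* Hypothetical fragment: bracket a block-local dynamic allocation.  */
rtx save_area = NULL_RTX;
emit_stack_save (SAVE_BLOCK, &save_area);

/* ... allocate_dynamic_stack_space and use the memory ...  */

emit_stack_restore (SAVE_BLOCK, save_area);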

◆ force_highpart_subreg()

rtx force_highpart_subreg ( machine_mode outermode,
rtx op,
machine_mode innermode )
extern
Try to return an rvalue expression for the OUTERMODE highpart of OP,
which has mode INNERMODE.  Allow OP to be forced into a new register
if necessary.

Return null on failure.   

References force_subreg(), and subreg_highpart_offset().

Referenced by emit_store_flag_1(), and expand_builtin_issignaling().

◆ force_lowpart_subreg()

rtx force_lowpart_subreg ( machine_mode outermode,
rtx op,
machine_mode innermode )
extern
Try to return an rvalue expression for the OUTERMODE lowpart of OP,
which has mode INNERMODE.  Allow OP to be forced into a new register
if necessary.

Return null on failure.   

References force_subreg(), and subreg_lowpart_offset().

Referenced by convert_mode_scalar(), expand_absneg_bit(), expand_builtin_issignaling(), expand_copysign_bit(), expand_doubleword_mod(), and expand_expr_real_2().
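
A sketch showing the high/low pair together, assuming a hypothetical DImode value on a target where SImode is half its size.

/* Hypothetical fragment: split VALUE (DImode) into SImode halves, letting
   the helpers copy it into a register first if a direct subreg is not
   representable.  Both return NULL_RTX on failure.  */
rtx lo = force_lowpart_subreg (SImode, value, DImode);
rtx hi = force_highpart_subreg (SImode, value, DImode);
if (lo == NULL_RTX || hi == NULL_RTX)
  return false;   /* caller must have a fallback path  */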

◆ force_not_mem()

rtx force_not_mem ( rtx x)
extern
Return given rtx, copied into a new temp reg if it was in memory.   
If X is a memory ref, copy its contents to a new temp reg and return
that reg.  Otherwise, return X.   

References emit_move_insn(), gen_reg_rtx(), GET_MODE, MEM_P, MEM_POINTER, and REG_POINTER.

Referenced by expand_assignment(), expand_return(), and prepare_call_address().

◆ force_reg()

rtx force_reg ( machine_mode mode,
rtx x )
extern
Copy a value to a register if it isn't already a register.
Args are mode (in case value is a constant) and the value.   
Load X into a register if it is not already one.
Use mode MODE for the register.
X should be valid for mode MODE, but it may be a constant which
is valid for all integer modes; that's why caller must specify MODE.

The caller must not alter the value in the register we return,
since we mark it as a "constant" register.   

References CONST_INT_P, CONSTANT_P, ctz_hwi(), DECL_ALIGN, DECL_P, emit_move_insn(), force_operand(), gen_reg_rtx(), general_operand(), GET_CODE, get_last_insn(), INTVAL, mark_reg_pointer(), MEM_P, MEM_POINTER, MIN, NULL_RTX, REG_P, rtx_equal_p(), SET_DEST, SET_SRC, set_unique_reg_note(), single_set(), SYMBOL_REF_DECL, and XEXP.

Referenced by asan_clear_shadow(), assign_parm_setup_block(), assign_parm_setup_stack(), avoid_expensive_constant(), break_out_memory_refs(), builtin_memset_gen_str(), calculate_table_based_CRC(), compress_float_constant(), convert_extracted_bit_field(), convert_float_to_wider_int(), convert_mode_scalar(), convert_wider_int_to_float(), emit_cmp_and_jump_insns(), emit_conditional_move(), emit_group_load_1(), emit_group_load_into_temps(), emit_library_call_value_1(), expand_asm_stmt(), expand_binop(), expand_builtin_frob_return_addr(), expand_builtin_longjmp(), expand_builtin_memset_args(), expand_builtin_setjmp_setup(), expand_builtin_signbit(), expand_builtin_stack_address(), expand_builtin_strub_leave(), expand_builtin_strub_update(), expand_call(), expand_divmod(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_mult(), expand_expr_real_1(), expand_expr_real_2(), expand_ifn_atomic_compare_exchange_into_call(), expand_movstr(), expand_mul_overflow(), expand_mult(), expand_mult_const(), expand_sdiv_pow2(), expand_shift_1(), expand_smod_pow2(), expand_vec_perm_1(), expand_vec_perm_const(), extract_bit_field_1(), extract_fixed_bit_field_1(), extract_low_bits(), force_operand(), force_reload_address(), gen_lowpart_general(), hwasan_frame_base(), instantiate_virtual_regs_in_insn(), memory_address_addr_space(), offset_address(), operand_subword_force(), precompute_register_parameters(), prepare_call_address(), prepare_cmp_insn(), resolve_shift_zext(), sjlj_emit_dispatch_table(), store_bit_field_using_insv(), store_constructor(), store_fixed_bit_field_1(), store_split_bit_field(), try_store_by_multiple_pieces(), use_anchored_address(), and widen_operand().
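
A minimal sketch of the common case described above, where MODE must be supplied because the operand is a (mode-less) constant.

/* Hypothetical fragment: insn predicates often require a register
   operand, so a constant is loaded into a fresh pseudo first.  */
rtx op = gen_int_mode (42, SImode);
rtx reg = force_reg (SImode, op);   /* emits the move; REG now holds 42  */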

◆ force_subreg()

rtx force_subreg ( machine_mode outermode,
rtx op,
machine_mode innermode,
poly_uint64 byte )
extern

◆ get_dynamic_stack_base()

rtx get_dynamic_stack_base ( poly_int64 offset,
unsigned required_align,
rtx base )
extern
Returns the address of the dynamic stack space without allocating it.   
Return an rtx representing the address of an area of memory already
statically pushed onto the stack in the virtual stack vars area.  (It is
assumed that the area is allocated in the function prologue.)

Any required stack pointer alignment is preserved.

OFFSET is the offset of the area into the virtual stack vars area.

REQUIRED_ALIGN is the alignment (in bits) required for the region
of memory.

BASE is the rtx of the base of this virtual stack vars area.
The only time this is not `virtual_stack_vars_rtx` is when tagging pointers
on the stack.   

References align_dynamic_address(), crtl, emit_move_insn(), expand_binop(), gen_int_mode(), gen_reg_rtx(), mark_reg_pointer(), NULL_RTX, offset, OPTAB_LIB_WIDEN, PREFERRED_STACK_BOUNDARY, and expand_operand::target.

Referenced by expand_stack_vars().

◆ get_dynamic_stack_size()

void get_dynamic_stack_size ( rtx * psize,
unsigned size_align,
unsigned required_align,
HOST_WIDE_INT * pstack_usage_size )
extern
Calculate the necessary size of a constant dynamic stack allocation from the
size of the variable area.   
Return an rtx through *PSIZE, representing the size of an area of memory to
be dynamically pushed on the stack.

*PSIZE is an rtx representing the size of the area.

SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
parameter may be zero.  If so, a proper value will be extracted
from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

REQUIRED_ALIGN is the alignment (in bits) required for the region
of memory.

If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
the additional size returned.   

References CONST_INT_P, convert_to_mode(), crtl, force_operand(), GET_MODE, HOST_BITS_PER_INT, INTVAL, MAX_SUPPORTED_STACK_ALIGNMENT, NULL_RTX, plus_constant(), PREFERRED_STACK_BOUNDARY, REGNO_POINTER_ALIGN, round_push(), UINT_MAX, and VIRTUAL_STACK_DYNAMIC_REGNUM.

Referenced by allocate_dynamic_stack_space(), assign_parm_setup_block(), and expand_stack_vars().

◆ hard_function_value()

rtx hard_function_value ( const_tree valtype,
const_tree func,
const_tree fntype,
int outgoing )
extern
Return an rtx that refers to the value returned by a function
in its original home.  This becomes invalid if any more code is emitted.   
Return an rtx representing the register or memory location
in which a scalar value of data type VALTYPE
was returned by a function call to function FUNC.
FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
function is known, otherwise 0.
OUTGOING is 1 if on a machine with register windows this function
should return the register in which the function will put its result
and 0 otherwise.   

References arg_int_size_in_bytes(), FOR_EACH_MODE_IN_CLASS, GET_MODE, GET_MODE_SIZE(), PUT_MODE(), REG_P, opt_mode< T >::require(), and targetm.

Referenced by aggregate_value_p(), emit_library_call_value_1(), expand_call(), expand_function_start(), and vectorizable_store().
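
An illustrative call from the caller's side (fndecl is a hypothetical FUNCTION_DECL; OUTGOING is 0 because we want where the caller finds the value, not where the callee puts it).

/* Hypothetical fragment: locate the return-value register (or memory
   location) for a call to FNDECL.  */
tree fntype = TREE_TYPE (fndecl);                 /* the FUNCTION_TYPE  */
rtx value_rtx = hard_function_value (TREE_TYPE (fntype), fndecl, fntype,
                                     /*outgoing=*/0);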

◆ hard_libcall_value()

rtx hard_libcall_value ( machine_mode mode,
rtx fun )
extern
Return an rtx that refers to the value returned by a library call
in its original home.  This becomes invalid if any more code is emitted.   
Return an rtx representing the register or memory location
in which a scalar value of mode MODE was returned by a library call.   

References expand_operand::mode, and targetm.

Referenced by emit_library_call_value_1(), and expand_unop().
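
A one-line sketch (fun is a hypothetical SYMBOL_REF for the library routine that was just called).

/* Hypothetical fragment: fetch the rtx in which an SImode libcall result
   is returned, so a following move can copy it into a pseudo.  */
rtx result = hard_libcall_value (SImode, fun);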

◆ memory_address_addr_space()

rtx memory_address_addr_space ( machine_mode mode,
rtx x,
addr_space_t as )
extern
Convert arg to a valid memory address for specified machine mode that points
to a specific named address space, by emitting insns to perform arithmetic
if necessary.   
Return something equivalent to X but valid as a memory address for something
of mode MODE in the named address space AS.  When X is not itself valid,
this works by copying X or subexpressions of it into registers.   

References break_out_memory_refs(), const0_rtx, CONST_INT_P, CONSTANT_ADDRESS_P, CONSTANT_P, convert_memory_address_addr_space(), copy_to_reg(), cse_not_expected, eliminate_constant_term(), force_operand(), force_reg(), gcc_assert, GET_CODE, GET_MODE, mark_reg_pointer(), memory_address_addr_space_p(), NULL_RTX, REG_P, targetm, update_temp_slot_address(), XEXP, and y.

Referenced by change_address_1(), and expand_expr_real_1().

◆ probe_stack_range()

void probe_stack_range ( HOST_WIDE_INT first,
rtx size )
extern
Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
or subtract them from the stack pointer.   

References const0_rtx, CONST_INT_P, convert_to_mode(), create_input_operand(), emit_cmp_and_jump_insns(), emit_insn(), emit_jump(), emit_label(), emit_library_call(), emit_move_insn(), emit_stack_probe(), expand_binop(), force_operand(), gcc_assert, gen_blockage(), gen_int_mode(), gen_label_rtx(), GET_MODE, i, INTVAL, LCT_THROW, maybe_expand_insn(), memory_address, NULL_RTX, offset, OPTAB_WIDEN, plus_constant(), PROBE_INTERVAL, simplify_gen_binary(), stack_check_libfunc, STACK_GROW_OFF, STACK_GROW_OP, STACK_GROW_OPTAB, stack_pointer_rtx, and targetm.

Referenced by allocate_dynamic_stack_space(), and expand_function_end().
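
A sketch of a prologue-style use under -fstack-check (illustrative; frame_size is a hypothetical HOST_WIDE_INT).

/* Hypothetical fragment: probe the FRAME_SIZE bytes that lie beyond the
   stack-check protection area, starting at that constant offset from the
   current stack pointer.  */
probe_stack_range (get_stack_check_protect (),
                   gen_int_mode (frame_size, Pmode));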

◆ promote_decl_mode()

machine_mode promote_decl_mode ( const_tree decl,
int * punsignedp )
Return mode and signedness to use when object is promoted.   
Use one of promote_mode or promote_function_mode to find the promoted
mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
of DECL after promotion.   

References current_function_decl, DECL_BY_REFERENCE, DECL_MODE, promote_function_mode(), promote_mode(), TREE_CODE, TREE_TYPE, and TYPE_UNSIGNED.

Referenced by expand_expr_real_1(), expand_function_start(), expand_one_register_var(), promote_ssa_mode(), and store_constructor().

◆ promote_function_mode()

machine_mode promote_function_mode ( const_tree type,
machine_mode mode,
int * punsignedp,
const_tree funtype,
int for_return )
extern
Return mode and signedness to use when an argument or result in the
given mode is promoted.   
Return the mode to use to pass or return a scalar of TYPE and MODE.
PUNSIGNEDP points to the signedness of the type and may be adjusted
to show what signedness to use on extension operations.

FOR_RETURN is nonzero if the caller is promoting the return value
of FNDECL, else it is for promoting args.   

References INTEGRAL_MODE_P, NULL_TREE, targetm, and TREE_CODE.

Referenced by assign_parm_find_data_types(), assign_parm_setup_reg(), emit_library_call_value_1(), expand_call(), expand_expr_real_1(), expand_function_end(), expand_value_return(), initialize_argument_information(), prepare_libcall_arg(), promote_decl_mode(), and setup_incoming_promotions().
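
An illustrative query from argument set-up code (arg_type and fntype are hypothetical trees).

/* Hypothetical fragment: find the mode in which an argument of ARG_TYPE
   is actually passed, and whether extensions to that mode should be
   zero- or sign-extensions.  */
int unsignedp = TYPE_UNSIGNED (arg_type);
machine_mode pass_mode
  = promote_function_mode (arg_type, TYPE_MODE (arg_type), &unsignedp,
                           fntype, /*for_return=*/0);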

◆ promote_mode()

machine_mode promote_mode ( const_tree type,
machine_mode mode,
int * punsignedp )
extern
Return mode and signedness to use when an object in the given mode
is promoted.   
Return the mode to use to store a scalar of TYPE and MODE.
PUNSIGNEDP points to the signedness of the type and may be adjusted
to show what signedness to use on extension operations.   

References as_a(), NULL_TREE, targetm, TREE_CODE, TREE_TYPE, and TYPE_ADDR_SPACE.

Referenced by assign_temp(), default_promote_function_mode(), default_promote_function_mode_always_promote(), expand_cond_expr_using_cmove(), precompute_arguments(), promote_decl_mode(), and promote_ssa_mode().
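
The storage-side counterpart, as a sketch (type is a hypothetical scalar tree type).

/* Hypothetical fragment: ask how a scalar of TYPE should be held in a
   register, e.g. whether a narrow integer is widened to word_mode.  */
int unsignedp = TYPE_UNSIGNED (type);
machine_mode reg_mode = promote_mode (type, TYPE_MODE (type), &unsignedp);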

◆ promote_ssa_mode()

machine_mode promote_ssa_mode ( const_tree name,
int * punsignedp )
Return mode and signedness to use when object is promoted.   
Return the promoted mode for name.  If it is a named SSA_NAME, it
is the same as promote_decl_mode.  Otherwise, it is the promoted
mode of a temp decl of same type as the SSA_NAME, if we had created
one.   

References gcc_assert, promote_decl_mode(), promote_mode(), SSA_NAME_VAR, TREE_CODE, TREE_TYPE, TYPE_MODE, and TYPE_UNSIGNED.

Referenced by assign_parm_setup_block(), expand_expr_real_1(), expand_function_start(), expand_one_ssa_partition(), get_temp_reg(), gimple_can_coalesce_p(), insert_value_copy_on_edge(), and set_rtl().

◆ record_new_stack_level()

void record_new_stack_level ( void )
extern
Record a new stack level.   
Record a new stack level for the current function.  This should be called
whenever we allocate or deallocate dynamic stack space.   

References cfun, global_options, UI_SJLJ, update_nonlocal_goto_save_area(), and update_sjlj_context().

Referenced by allocate_dynamic_stack_space(), expand_call(), and expand_stack_restore().

◆ update_nonlocal_goto_save_area()

void update_nonlocal_goto_save_area ( void )
extern
Invoke emit_stack_save for the nonlocal_goto_save_area.   
Invoke emit_stack_save on the nonlocal_goto_save_area for the current
function.  This should be called whenever we allocate or deallocate
dynamic stack space.   

References build4(), cfun, emit_stack_save(), expand_expr(), EXPAND_WRITE, integer_one_node, NULL_RTX, NULL_TREE, SAVE_NONLOCAL, and TREE_TYPE.

Referenced by expand_function_start(), and record_new_stack_level().

◆ use_anchored_address()

rtx use_anchored_address ( rtx x)
extern
If X is a memory reference to a member of an object block, try rewriting
it to use an anchor instead.  Return the new memory reference on success
and the old one on failure.   

References CONST_INT_P, cse_not_expected, force_reg(), GET_CODE, GET_MODE, get_section_anchor(), INTVAL, MEM_P, NULL, offset, place_block_symbol(), plus_constant(), replace_equiv_address(), SYMBOL_REF_ANCHOR_P, SYMBOL_REF_BLOCK, SYMBOL_REF_BLOCK_OFFSET, SYMBOL_REF_HAS_BLOCK_INFO_P, SYMBOL_REF_TLS_MODEL, targetm, and XEXP.

Referenced by emit_move_insn(), emit_move_multi_word(), expand_expr_constant(), expand_expr_real_1(), and validize_mem().

◆ validize_mem()

rtx validize_mem ( rtx ref)
extern
Return a memory reference like MEMREF, but which is known to have a
valid address.   
Convert a mem ref into one with a valid memory address.
Pass through anything else unchanged.   

References GET_MODE, MEM_ADDR_SPACE, MEM_P, memory_address_addr_space_p(), replace_equiv_address(), use_anchored_address(), and XEXP.

Referenced by assign_parm_adjust_entry_rtl(), assign_parm_setup_block(), assign_parm_setup_reg(), assign_parm_setup_stack(), calculate_table_based_CRC(), compress_float_constant(), emit_library_call_value_1(), emit_move_insn(), emit_push_insn(), emit_stack_probe(), emit_stack_restore(), emit_stack_save(), expand_asm_stmt(), expand_builtin_setjmp_setup(), expand_constructor(), expand_expr_real_1(), get_arg_pointer_save_area(), get_builtin_sync_mem(), init_set_costs(), load_register_parameters(), move_block_to_reg(), and store_one_arg().
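
A closing sketch of the typical pattern (some_addr is a hypothetical address rtx).

/* Hypothetical fragment: a MEM built from an arbitrary address may not
   satisfy the target's addressing constraints; validize_mem returns an
   equivalent MEM with a known-valid address (and passes non-MEM rtxes
   through unchanged).  */
rtx mem = gen_rtx_MEM (SImode, some_addr);
mem = validize_mem (mem);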