GCC Middle and Back End API Reference
emit-rtl.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "optabs.h"
#include "predict.h"
#include "rtx-vector-builder.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "gimplify.h"
#include "gt-emit-rtl.h"
Include dependency graph for emit-rtl.cc:

Data Structures

struct  const_int_hasher
 
struct  const_wide_int_hasher
 
struct  const_poly_int_hasher
 
struct  reg_attr_hasher
 
struct  const_double_hasher
 
struct  const_fixed_hasher
 

Macros

#define initial_regno_reg_rtx   (this_target_rtl->x_initial_regno_reg_rtx)
 
#define cur_insn_uid   (crtl->emit.x_cur_insn_uid)
 
#define cur_debug_insn_uid   (crtl->emit.x_cur_debug_insn_uid)
 
#define first_label_num   (crtl->emit.x_first_label_num)
 

Functions

static void set_used_decls (tree)
 
static void mark_label_nuses (rtx)
 
static rtx lookup_const_double (rtx)
 
static rtx lookup_const_fixed (rtx)
 
static rtx gen_const_vector (machine_mode, int)
 
static void copy_rtx_if_shared_1 (rtx *orig)
 
bool mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
 
static void set_mem_attrs (rtx mem, mem_attrs *attrs)
 
static reg_attrs * get_reg_attrs (tree decl, poly_int64 offset)
 
rtx gen_blockage (void)
 
void set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
 
rtx init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
 
rtx gen_raw_REG (machine_mode mode, unsigned int regno)
 
rtx_expr_list * gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
 
rtx_insn_list * gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
 
rtx_insn * gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn, basic_block bb, rtx pattern, int location, int code, rtx reg_notes)
 
rtx gen_rtx_CONST_INT (machine_mode mode, HOST_WIDE_INT arg)
 
rtx gen_int_mode (poly_int64 c, machine_mode mode)
 
rtx const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
 
rtx const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
 
double_int rtx_to_double_int (const_rtx cst)
 
static rtx immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
 
rtx immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
 
rtx immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
 
rtx gen_rtx_REG (machine_mode mode, unsigned int regno)
 
rtx gen_rtx_MEM (machine_mode mode, rtx addr)
 
rtx gen_const_mem (machine_mode mode, rtx addr)
 
rtx gen_frame_mem (machine_mode mode, rtx addr)
 
rtx gen_tmp_stack_mem (machine_mode mode, rtx addr)
 
bool validate_subreg (machine_mode omode, machine_mode imode, const_rtx reg, poly_uint64 offset)
 
rtx gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
 
rtx gen_lowpart_SUBREG (machine_mode mode, rtx reg)
 
rtx gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc, enum var_init_status status)
 
rtvec gen_rtvec (int n,...)
 
rtvec gen_rtvec_v (int n, rtx *argp)
 
rtvec gen_rtvec_v (int n, rtx_insn **argp)
 
poly_int64 byte_lowpart_offset (machine_mode outer_mode, machine_mode inner_mode)
 
poly_int64 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode, poly_uint64 offset)
 
poly_int64 subreg_memory_offset (const_rtx x)
 
rtx gen_reg_rtx (machine_mode mode)
 
bool reg_is_parm_p (rtx reg)
 
static void update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
 
rtx gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno, poly_int64 offset)
 
rtx gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
 
void adjust_reg_mode (rtx reg, machine_mode mode)
 
void set_reg_attrs_from_value (rtx reg, rtx x)
 
rtx gen_reg_rtx_and_attrs (rtx x)
 
void set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
 
void set_reg_attrs_for_decl_rtl (tree t, rtx x)
 
void set_decl_rtl (tree t, rtx x)
 
void set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
 
void mark_user_reg (rtx reg)
 
void mark_reg_pointer (rtx reg, int align)
 
int max_reg_num (void)
 
int max_label_num (void)
 
int get_first_label_num (void)
 
void maybe_set_first_label_num (rtx_code_label *x)
 
void maybe_set_max_label_num (rtx_code_label *x)
 
rtx gen_lowpart_common (machine_mode mode, rtx x)
 
rtx gen_highpart (machine_mode mode, rtx x)
 
rtx gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
 
poly_uint64 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
 
poly_uint64 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
 
bool subreg_lowpart_p (const_rtx x)
 
rtx operand_subword (rtx op, poly_uint64 offset, int validate_address, machine_mode mode)
 
rtx operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
 
bool mem_expr_equal_p (const_tree expr1, const_tree expr2)
 
int get_mem_align_offset (rtx mem, unsigned int align)
 
void set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, poly_int64 bitpos)
 
void set_mem_attributes (rtx ref, tree t, int objectp)
 
void set_mem_alias_set (rtx mem, alias_set_type set)
 
void set_mem_addr_space (rtx mem, addr_space_t addrspace)
 
void set_mem_align (rtx mem, unsigned int align)
 
void set_mem_expr (rtx mem, tree expr)
 
void set_mem_offset (rtx mem, poly_int64 offset)
 
void clear_mem_offset (rtx mem)
 
void set_mem_size (rtx mem, poly_int64 size)
 
void clear_mem_size (rtx mem)
 
static rtx change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate, bool inplace)
 
rtx change_address (rtx memref, machine_mode mode, rtx addr)
 
rtx adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset, int validate, int adjust_address, int adjust_object, poly_int64 size)
 
rtx adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr, poly_int64 offset, int validate)
 
rtx offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
 
rtx replace_equiv_address (rtx memref, rtx addr, bool inplace)
 
rtx replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
 
rtx force_reload_address (rtx mem)
 
rtx widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
 
tree get_spill_slot_decl (bool force_build_p)
 
void set_mem_attrs_for_spill (rtx mem)
 
rtx_code_label * gen_label_rtx (void)
 
void set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
 
static void unshare_all_rtl_1 (rtx_insn *insn)
 
void unshare_all_rtl_again (rtx_insn *insn)
 
void unshare_all_rtl (void)
 
static void verify_rtx_sharing (rtx orig, rtx insn)
 
static void reset_insn_used_flags (rtx insn)
 
static void reset_all_used_flags (void)
 
static void verify_insn_sharing (rtx insn)
 
DEBUG_FUNCTION void verify_rtl_sharing (void)
 
void unshare_all_rtl_in_chain (rtx_insn *insn)
 
rtx copy_rtx_if_shared (rtx orig)
 
static void mark_used_flags (rtx x, int flag)
 
void reset_used_flags (rtx x)
 
void set_used_flags (rtx x)
 
rtx make_safe_from (rtx x, rtx other)
 
rtx_insn * get_last_insn_anywhere (void)
 
rtx_insn * get_first_nonnote_insn (void)
 
rtx_insn * get_last_nonnote_insn (void)
 
int get_max_insn_count (void)
 
rtx_insn * next_insn (rtx_insn *insn)
 
rtx_insn * previous_insn (rtx_insn *insn)
 
rtx_insn * next_nonnote_insn (rtx_insn *insn)
 
rtx_insn * next_nondebug_insn (rtx_insn *insn)
 
rtx_insn * prev_nonnote_insn (rtx_insn *insn)
 
rtx_insn * prev_nondebug_insn (rtx_insn *insn)
 
rtx_insn * next_nonnote_nondebug_insn (rtx_insn *insn)
 
rtx_insn * next_nonnote_nondebug_insn_bb (rtx_insn *insn)
 
rtx_insn * prev_nonnote_nondebug_insn (rtx_insn *insn)
 
rtx_insn * prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
 
rtx_insn * next_real_insn (rtx_insn *insn)
 
rtx_insn * prev_real_insn (rtx_insn *insn)
 
rtx_insn * next_real_nondebug_insn (rtx uncast_insn)
 
rtx_insn * prev_real_nondebug_insn (rtx_insn *insn)
 
rtx_call_insn * last_call_insn (void)
 
bool active_insn_p (const rtx_insn *insn)
 
rtx_insn * next_active_insn (rtx_insn *insn)
 
rtx_insn * prev_active_insn (rtx_insn *insn)
 
static int find_auto_inc (const_rtx x, const_rtx reg)
 
rtx_insn * try_split (rtx pat, rtx_insn *trial, int last)
 
rtx_insn * make_insn_raw (rtx pattern)
 
static rtx_insn * make_debug_insn_raw (rtx pattern)
 
static rtx_insn * make_jump_insn_raw (rtx pattern)
 
static rtx_insn * make_call_insn_raw (rtx pattern)
 
static rtx_note * make_note_raw (enum insn_note subtype)
 
static void link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
 
void add_insn (rtx_insn *insn)
 
static void add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
 
static void add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
 
void add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
 
void add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
 
void set_insn_deleted (rtx_insn *insn)
 
void remove_insn (rtx_insn *insn)
 
void add_function_usage_to (rtx call_insn, rtx call_fusage)
 
void delete_insns_since (rtx_insn *from)
 
void reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
 
void reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
 
static rtx_insn * emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last, basic_block bb, rtx_insn *(*make_raw)(rtx))
 
rtx_insn * emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
 
rtx_jump_insn * emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
 
rtx_insn * emit_call_insn_before_noloc (rtx x, rtx_insn *before)
 
rtx_insn * emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
 
rtx_barrier * emit_barrier_before (rtx_insn *before)
 
rtx_code_label * emit_label_before (rtx_code_label *label, rtx_insn *before)
 
static rtx_insn * emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
 
static rtx_insn * emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb, rtx_insn *(*make_raw)(rtx))
 
rtx_insn * emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
 
rtx_jump_insn * emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
 
rtx_insn * emit_call_insn_after_noloc (rtx x, rtx_insn *after)
 
rtx_insn * emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
 
rtx_barrier * emit_barrier_after (rtx_insn *after)
 
rtx_insn * emit_label_after (rtx_insn *label, rtx_insn *after)
 
static bool note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
 
rtx_note * emit_note_after (enum insn_note subtype, rtx_insn *after)
 
rtx_note * emit_note_before (enum insn_note subtype, rtx_insn *before)
 
static rtx_insn * emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc, rtx_insn *(*make_raw)(rtx))
 
static rtx_insn * emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns, rtx_insn *(*make_raw)(rtx))
 
rtx_insn * emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
 
rtx_insn * emit_insn_after (rtx pattern, rtx_insn *after)
 
rtx_jump_insn * emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
 
rtx_jump_insn * emit_jump_insn_after (rtx pattern, rtx_insn *after)
 
rtx_insn * emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
 
rtx_insn * emit_call_insn_after (rtx pattern, rtx_insn *after)
 
rtx_insn * emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
 
rtx_insn * emit_debug_insn_after (rtx pattern, rtx_insn *after)
 
static rtx_insn * emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc, bool insnp, rtx_insn *(*make_raw)(rtx))
 
static rtx_insn * emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns, bool insnp, rtx_insn *(*make_raw)(rtx))
 
rtx_insn * emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
 
rtx_insn * emit_insn_before (rtx pattern, rtx_insn *before)
 
rtx_jump_insn * emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
 
rtx_jump_insn * emit_jump_insn_before (rtx pattern, rtx_insn *before)
 
rtx_insn * emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
 
rtx_insn * emit_call_insn_before (rtx pattern, rtx_insn *before)
 
rtx_insn * emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
 
rtx_insn * emit_debug_insn_before (rtx pattern, rtx_insn *before)
 
rtx_insn * emit_insn (rtx x)
 
rtx_insn * emit_debug_insn (rtx x)
 
rtx_insn * emit_jump_insn (rtx x)
 
rtx_insn * emit_likely_jump_insn (rtx x)
 
rtx_insn * emit_unlikely_jump_insn (rtx x)
 
rtx_insn * emit_call_insn (rtx x)
 
rtx_code_label * emit_label (rtx uncast_label)
 
rtx_jump_table_data * emit_jump_table_data (rtx table)
 
rtx_barrier * emit_barrier (void)
 
rtx_note * emit_note_copy (rtx_note *orig)
 
rtx_note * emit_note (enum insn_note kind)
 
rtx_insn * emit_clobber (rtx x)
 
rtx_insn * gen_clobber (rtx x)
 
rtx_insn * emit_use (rtx x)
 
rtx_insn * gen_use (rtx x)
 
rtx set_for_reg_notes (rtx insn)
 
rtx set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
 
rtx set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
 
rtx_insn * emit (rtx x, bool allow_barrier_p)
 
void start_sequence (void)
 
void push_to_sequence (rtx_insn *first)
 
void push_to_sequence2 (rtx_insn *first, rtx_insn *last)
 
void push_topmost_sequence (void)
 
void pop_topmost_sequence (void)
 
void end_sequence (void)
 
bool in_sequence_p (void)
 
static void init_virtual_regs (void)
 
rtx copy_insn_1 (rtx orig)
 
rtx copy_insn (rtx insn)
 
rtx_insn * copy_delay_slot_insn (rtx_insn *insn)
 
void init_emit (void)
 
wide_int const_vector_int_elt (const_rtx x, unsigned int i)
 
rtx const_vector_elt (const_rtx x, unsigned int i)
 
bool valid_for_const_vector_p (machine_mode, rtx x)
 
rtx gen_const_vec_duplicate (machine_mode mode, rtx elt)
 
rtx gen_vec_duplicate (machine_mode mode, rtx x)
 
bool const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
 
rtx gen_const_vec_series (machine_mode mode, rtx base, rtx step)
 
rtx gen_vec_series (machine_mode mode, rtx base, rtx step)
 
rtx gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
 
void init_emit_regs (void)
 
void init_derived_machine_modes (void)
 
void init_emit_once (void)
 
rtx_insn * emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
 
rtx gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
 
void insn_locations_init (void)
 
void insn_locations_finalize (void)
 
void set_curr_insn_location (location_t location)
 
location_t curr_insn_location (void)
 
void set_insn_locations (rtx_insn *insn, location_t loc)
 
tree insn_scope (const rtx_insn *insn)
 
int insn_line (const rtx_insn *insn)
 
const char * insn_file (const rtx_insn *insn)
 
expanded_location insn_location (const rtx_insn *insn)
 
bool need_atomic_barrier_p (enum memmodel model, bool pre)
 
rtx gen_int_shift_amount (machine_mode, poly_int64 value)
 

Variables

struct target_rtl default_target_rtl
 
scalar_int_mode byte_mode
 
scalar_int_mode word_mode
 
scalar_int_mode ptr_mode
 
struct rtl_data x_rtl
 
rtx * regno_reg_rtx
 
static int label_num = 1
 
rtx const_tiny_rtx [4][(int) MAX_MACHINE_MODE]
 
rtx const_true_rtx
 
REAL_VALUE_TYPE dconst0
 
REAL_VALUE_TYPE dconst1
 
REAL_VALUE_TYPE dconst2
 
REAL_VALUE_TYPE dconstm0
 
REAL_VALUE_TYPE dconstm1
 
REAL_VALUE_TYPE dconsthalf
 
REAL_VALUE_TYPE dconstinf
 
REAL_VALUE_TYPE dconstninf
 
FIXED_VALUE_TYPE fconst0 [MAX_FCONST0]
 
FIXED_VALUE_TYPE fconst1 [MAX_FCONST1]
 
rtx const_int_rtx [MAX_SAVED_CONST_INT *2+1]
 
rtx pc_rtx
 
rtx ret_rtx
 
rtx simple_return_rtx
 
rtx_insn * invalid_insn_rtx
 
static hash_table< const_int_hasher > * const_int_htab
 
static hash_table< const_wide_int_hasher > * const_wide_int_htab
 
static hash_table< const_poly_int_hasher > * const_poly_int_htab
 
static hash_table< reg_attr_hasher > * reg_attrs_htab
 
static hash_table< const_double_hasher > * const_double_htab
 
static hash_table< const_fixed_hasher > * const_fixed_htab
 
profile_probability split_branch_probability
 
static tree spill_slot_decl
 
static struct sequence_stack * free_sequence_stack
 
static rtx copy_insn_scratch_in [MAX_RECOG_OPERANDS]
 
static rtx copy_insn_scratch_out [MAX_RECOG_OPERANDS]
 
static int copy_insn_n_scratches
 
static rtvec orig_asm_operands_vector
 
static rtvec copy_asm_operands_vector
 
static rtvec orig_asm_constraints_vector
 
static rtvec copy_asm_constraints_vector
 
static rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]
 
location_t prologue_location
 
location_t epilogue_location
 
static location_t curr_location
 

Macro Definition Documentation

◆ cur_debug_insn_uid

#define cur_debug_insn_uid   (crtl->emit.x_cur_debug_insn_uid)

◆ cur_insn_uid

#define cur_insn_uid   (crtl->emit.x_cur_insn_uid)

◆ first_label_num

#define first_label_num   (crtl->emit.x_first_label_num)

◆ initial_regno_reg_rtx

#define initial_regno_reg_rtx   (this_target_rtl->x_initial_regno_reg_rtx)

Referenced by init_emit(), and init_emit_regs().

Function Documentation

◆ active_insn_p()

◆ add_function_usage_to()

void add_function_usage_to ( rtx call_insn,
rtx call_fusage )
Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, gcc_assert, ggc_alloc(), and XEXP.

Referenced by emit_call_1(), and expand_builtin_apply().
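
A minimal usage sketch, assuming call_insn is an existing CALL_INSN and reg a hard register whose use should be recorded; use_reg is declared in expr.h, not in this file:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, reg);                    /* build the USE chain       */
     add_function_usage_to (call_insn, call_fusage); /* attach it to the call     */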

◆ add_insn()

◆ add_insn_after()

void add_insn_after ( rtx_insn * insn,
rtx_insn * after,
basic_block bb )
Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
If BB is NULL, an attempt is made to infer the bb from before.

This and the next function should be the only functions called
to insert an insn once delay slots have been filled since only
they know how to update a SEQUENCE.  

References add_insn_after_nobb(), BARRIER_P, BB_END, BLOCK_FOR_INSN(), df_insn_rescan(), ggc_alloc(), INSN_P, NOTE_INSN_BASIC_BLOCK_P, and set_block_for_insn().

Referenced by delete_from_delay_slot(), emit_barrier_after(), emit_delay_sequence(), emit_label_after(), emit_note_after(), emit_pattern_after_noloc(), make_return_insns(), and relax_delay_slots().

◆ add_insn_after_nobb()

static void add_insn_after_nobb ( rtx_insn * insn,
rtx_insn * after )
static
Add INSN into the doubly-linked list after insn AFTER.   

References gcc_assert, get_current_sequence(), ggc_alloc(), sequence_stack::last, link_insn_into_chain(), sequence_stack::next, NEXT_INSN(), and NULL.

Referenced by add_insn_after(), and emit_note_after().

◆ add_insn_before()

void add_insn_before ( rtx_insn * insn,
rtx_insn * before,
basic_block bb )
Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
If BB is NULL, an attempt is made to infer the bb from before.

This and the previous function should be the only functions called
to insert an insn once delay slots have been filled since only
they know how to update a SEQUENCE.  

References add_insn_before_nobb(), BARRIER_P, BB_HEAD, BLOCK_FOR_INSN(), df_insn_rescan(), gcc_assert, ggc_alloc(), INSN_P, NOTE_INSN_BASIC_BLOCK_P, and set_block_for_insn().

Referenced by emit_barrier_before(), emit_label_before(), emit_note_before(), and emit_pattern_before_noloc().

◆ add_insn_before_nobb()

static void add_insn_before_nobb ( rtx_insn * insn,
rtx_insn * before )
static
Add INSN into the doubly-linked list before insn BEFORE.   

References sequence_stack::first, gcc_assert, get_current_sequence(), ggc_alloc(), link_insn_into_chain(), sequence_stack::next, NULL, and PREV_INSN().

Referenced by add_insn_before(), and emit_note_before().

◆ adjust_address_1()

rtx adjust_address_1 ( rtx memref,
machine_mode mode,
poly_int64 offset,
int validate,
int adjust_address,
int adjust_object,
poly_int64 size )
Return a memory reference like MEMREF, but with its mode changed
to MODE and its address offset by OFFSET bytes.  If VALIDATE is
nonzero, the memory address is forced to be valid.
If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
and the caller is responsible for adjusting MEMREF base register.
If ADJUST_OBJECT is zero, the underlying object associated with the
memory reference is left unchanged and the caller is responsible for
dealing with it.  Otherwise, if the new memory reference is outside
the underlying object, even partially, then the object is dropped.
SIZE, if nonzero, is the size of an access in cases where MODE
has no inherent size.   

References adjust_address, change_address_1(), copy_rtx(), gcc_assert, get_address_mode(), GET_CODE, get_mem_attrs(), GET_MODE, GET_MODE_ALIGNMENT, ggc_alloc(), known_eq, maybe_gt, memory_address_addr_space_p(), MIN, mode_mem_attrs, NULL_TREE, offset, attrs::offset, plus_constant(), set_mem_attrs(), mem_attrs::size, targetm, trunc_int_for_mode(), and XEXP.

Referenced by adjust_automodify_address_1(), store_expr(), and widen_memory_access().
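
Callers normally reach this function through the adjust_address macro in emit-rtl.h. A minimal sketch, assuming mem is an existing DImode MEM being split into its two word halves:

     rtx lo = adjust_address (mem, SImode, 0);   /* bytes 0..3, MEM_ATTRS updated */
     rtx hi = adjust_address (mem, SImode, 4);   /* bytes 4..7                    */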

◆ adjust_automodify_address_1()

rtx adjust_automodify_address_1 ( rtx memref,
machine_mode mode,
rtx addr,
poly_int64 offset,
int validate )
Return a memory reference like MEMREF, but with its mode changed
to MODE and its address changed to ADDR, which is assumed to be
MEMREF offset by OFFSET bytes.  If VALIDATE is
nonzero, the memory address is forced to be valid.   

References adjust_address_1(), change_address_1(), ggc_alloc(), and offset.

◆ adjust_reg_mode()

void adjust_reg_mode ( rtx reg,
machine_mode mode )
Adjust REG in-place so that it has mode MODE.  It is assumed that the
new register is a (possibly paradoxical) lowpart of the old one.   

References byte_lowpart_offset(), GET_MODE, PUT_MODE(), and update_reg_offset().

Referenced by subst_mode(), try_combine(), and undo_to_marker().

◆ byte_lowpart_offset()

poly_int64 byte_lowpart_offset ( machine_mode outer_mode,
machine_mode inner_mode )
Return the number of bytes between the start of an OUTER_MODE
in-memory value and the start of an INNER_MODE in-memory value,
given that the former is a lowpart of the latter.  It may be a
paradoxical lowpart, in which case the offset will be negative
on big-endian targets.   

References paradoxical_subreg_p(), and subreg_lowpart_offset().

Referenced by adjust_reg_mode(), alter_subreg(), gen_lowpart_for_combine(), gen_lowpart_general(), gen_lowpart_if_possible(), replace_reg_with_saved_mem(), rtl_for_decl_location(), rtx_equal_for_field_assignment_p(), set_reg_attrs_for_decl_rtl(), set_reg_attrs_from_value(), simplify_immed_subreg(), track_loc_p(), var_lowpart(), and vt_add_function_parameter().
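
As a concrete illustration (the SImode/DImode pair is an assumption, not taken from this file):

     poly_int64 off = byte_lowpart_offset (SImode, DImode);
     /* 0 on a little-endian target, 4 on a big-endian target.  */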

◆ change_address()

rtx change_address ( rtx memref,
machine_mode mode,
rtx addr )

◆ change_address_1()

static rtx change_address_1 ( rtx memref,
machine_mode mode,
rtx addr,
int validate,
bool inplace )
static
Return a memory reference like MEMREF, but with its mode changed to MODE
and its address changed to ADDR.  (VOIDmode means don't change the mode.
NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
returned memory location is required to be valid.  INPLACE is true if any
changes can be made directly to MEMREF or false if MEMREF must be treated
as immutable.

The memory attributes are not changed.   

References gcc_assert, gen_rtx_MEM(), GET_MODE, ggc_alloc(), lra_in_progress, MEM_ADDR_SPACE, MEM_COPY_ATTRIBUTES, MEM_P, memory_address_addr_space(), memory_address_addr_space_p(), reload_completed, reload_in_progress, rtx_equal_p(), and XEXP.

Referenced by adjust_address_1(), adjust_automodify_address_1(), change_address(), offset_address(), replace_equiv_address(), and replace_equiv_address_nv().

◆ clear_mem_offset()

◆ clear_mem_size()

void clear_mem_size ( rtx mem)
Clear the size of MEM.   

References attrs, get_mem_attrs(), and set_mem_attrs().

Referenced by merge_memattrs().

◆ const_double_from_real_value()

◆ const_fixed_from_fixed_value()

rtx const_fixed_from_fixed_value ( FIXED_VALUE_TYPE value,
machine_mode mode )
Return a CONST_FIXED rtx for a fixed-point value specified by
VALUE in mode MODE.   

References ggc_alloc(), lookup_const_fixed(), PUT_MODE(), and rtx_alloc().

◆ const_vec_series_p_1()

bool const_vec_series_p_1 ( const_rtx x,
rtx * base_out,
rtx * step_out )
A subroutine of const_vec_series_p that handles the case in which:

  (GET_CODE (X) == CONST_VECTOR
   && CONST_VECTOR_NPATTERNS (X) == 1
   && !CONST_VECTOR_DUPLICATE_P (X))

is known to hold.   

References CONST0_RTX, CONST_VECTOR_ELT, CONST_VECTOR_ENCODED_ELT, CONST_VECTOR_NUNITS, CONST_VECTOR_STEPPED_P, GET_MODE, GET_MODE_CLASS, GET_MODE_INNER, ggc_alloc(), rtx_equal_p(), and simplify_binary_operation().

Referenced by const_vec_series_p().

◆ const_vector_elt()

◆ const_vector_int_elt()

wide_int const_vector_int_elt ( const_rtx x,
unsigned int i )
Return the value of element I of CONST_VECTOR X as a wide_int.   

References wi::add(), CONST_VECTOR_ENCODED_ELT, const_vector_encoded_nelts(), CONST_VECTOR_NPATTERNS, CONST_VECTOR_STEPPED_P, count, GET_MODE, GET_MODE_INNER, ggc_alloc(), i, wi::sub(), and XVECLEN.

Referenced by const_vector_elt().

◆ copy_delay_slot_insn()

rtx_insn * copy_delay_slot_insn ( rtx_insn * insn)
Return a copy of INSN that can be used in a SEQUENCE delay slot,
on the assumption that INSN itself remains in its original place.   

References copy_rtx(), cur_insn_uid, ggc_alloc(), and INSN_UID().

Referenced by fill_simple_delay_slots(), fill_slots_from_thread(), and steal_delay_list_from_target().

◆ copy_insn()

rtx copy_insn ( rtx insn)
Create a new copy of an rtx.
This function differs from copy_rtx in that it handles SCRATCHes and
ASM_OPERANDs properly.
INSN doesn't really have to be a full INSN; it could be just the
pattern.   

References copy_asm_constraints_vector, copy_asm_operands_vector, copy_insn_1(), copy_insn_n_scratches, orig_asm_constraints_vector, and orig_asm_operands_vector.

Referenced by bypass_block(), compare_and_jump_seq(), do_remat(), eliminate_partially_redundant_load(), eliminate_regs_in_insn(), emit_copy_of_insn_after(), lra_process_new_insns(), and try_optimize_cfg().
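
A minimal sketch, assuming insn is an existing instruction whose pattern must be re-emitted without sharing:

     rtx pat = copy_insn (PATTERN (insn));   /* SCRATCHes/ASM_OPERANDs copied, not shared */
     emit_insn (pat);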

◆ copy_insn_1()

rtx copy_insn_1 ( rtx orig)
Recursively create a new copy of an rtx for copy_insn.
This function differs from copy_rtx in that it handles SCRATCHes and
ASM_OPERANDs properly.
Normally, this function is not used directly; use copy_insn as front end.
However, you could first copy an insn pattern with copy_insn and then use
this function afterwards to properly copy any REG_NOTEs containing
SCRATCHes.   

References ASM_OPERANDS_INPUT_CONSTRAINT_VEC, ASM_OPERANDS_INPUT_VEC, CASE_CONST_ANY, copy_asm_constraints_vector, copy_asm_operands_vector, copy_insn_1(), copy_insn_n_scratches, copy_insn_scratch_in, copy_insn_scratch_out, gcc_assert, gcc_unreachable, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), HARD_REGISTER_NUM_P, i, INSN_P, NULL, orig_asm_constraints_vector, orig_asm_operands_vector, ORIGINAL_REGNO, REG_P, REGNO, rtvec_alloc(), RTX_CODE, RTX_FLAG, shallow_copy_rtx(), shared_const_p(), XEXP, XVEC, XVECEXP, and XVECLEN.

Referenced by copy_insn(), copy_insn_1(), duplicate_reg_note(), eliminate_regs_in_insn(), and gcse_emit_move_after().

◆ copy_rtx_if_shared()

rtx copy_rtx_if_shared ( rtx orig)
Mark ORIG as in use, and return a copy of it if it was already in use.
Recursively does the same for subexpressions.  Uses
copy_rtx_if_shared_1 to reduce stack space.   

References copy_rtx_if_shared_1().

Referenced by emit_notes_in_bb(), try_combine(), unshare_all_rtl(), unshare_all_rtl_1(), and unshare_all_rtl_in_chain().

◆ copy_rtx_if_shared_1()

static void copy_rtx_if_shared_1 ( rtx * orig1)
static
Mark *ORIG1 as in use, and set it to a copy of it if it was already in
use.  Recursively does the same for subexpressions.   

References CASE_CONST_ANY, copy_rtx_if_shared_1(), gen_rtvec_v(), GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), HARD_REGISTER_NUM_P, i, NULL, ORIGINAL_REGNO, REG_P, REGNO, RTX_FLAG, shallow_copy_rtx(), shared_const_p(), XEXP, XVEC, XVECEXP, and XVECLEN.

Referenced by copy_rtx_if_shared(), and copy_rtx_if_shared_1().

◆ curr_insn_location()

◆ delete_insns_since()

◆ emit()

rtx_insn * emit ( rtx x,
bool allow_barrier_p )
Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
following barrier if the instruction needs one and if ALLOW_BARRIER_P
is true.

If X is a label, it is simply added into the insn chain.   

References any_uncondjump_p(), classify_insn(), emit_barrier(), emit_call_insn(), emit_debug_insn(), emit_insn(), emit_jump_insn(), emit_label(), gcc_unreachable, GET_CODE, and ggc_alloc().

Referenced by ensure_regno(), and gen_reg_rtx().
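
A sketch of the dispatch described above; dest and src are placeholder rtxes of matching modes:

     /* A plain SET pattern is classified by classify_insn as an ordinary INSN.  */
     rtx_insn *insn = emit (gen_rtx_SET (dest, src), true);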

◆ emit_barrier()

◆ emit_barrier_after()

◆ emit_barrier_before()

rtx_barrier * emit_barrier_before ( rtx_insn * before)
Make an insn of code BARRIER
and output it before the insn BEFORE.   

References add_insn_before(), cur_insn_uid, ggc_alloc(), INSN_UID(), NULL, and rtx_alloc().

◆ emit_call_insn()

rtx_insn * emit_call_insn ( rtx x)
Make an insn of code CALL_INSN with pattern X
and add it to the end of the doubly-linked list.   

References add_insn(), emit_insn(), gcc_unreachable, GET_CODE, ggc_alloc(), and make_call_insn_raw().

Referenced by emit().

◆ emit_call_insn_after()

rtx_insn * emit_call_insn_after ( rtx pattern,
rtx_insn * after )
Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.   

References emit_pattern_after(), ggc_alloc(), and make_call_insn_raw().

Referenced by emit_copy_of_insn_after().

◆ emit_call_insn_after_noloc()

rtx_insn * emit_call_insn_after_noloc ( rtx x,
rtx_insn * after )
Make an instruction with body X and code CALL_INSN
and output it after the instruction AFTER.   

References emit_pattern_after_noloc(), ggc_alloc(), make_call_insn_raw(), and NULL.

◆ emit_call_insn_after_setloc()

rtx_insn * emit_call_insn_after_setloc ( rtx pattern,
rtx_insn * after,
location_t loc )
Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_after_setloc(), ggc_alloc(), and make_call_insn_raw().

◆ emit_call_insn_before()

rtx_insn * emit_call_insn_before ( rtx pattern,
rtx_insn * before )
Like emit_call_insn_before_noloc,
but set insn_location according to BEFORE.   

References emit_pattern_before(), ggc_alloc(), and make_call_insn_raw().

◆ emit_call_insn_before_noloc()

rtx_insn * emit_call_insn_before_noloc ( rtx x,
rtx_insn * before )
Make an instruction with body X and code CALL_INSN
and output it before the instruction BEFORE.   

References emit_pattern_before_noloc(), ggc_alloc(), make_call_insn_raw(), and NULL.

◆ emit_call_insn_before_setloc()

rtx_insn * emit_call_insn_before_setloc ( rtx pattern,
rtx_insn * before,
location_t loc )
Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_before_setloc(), ggc_alloc(), and make_call_insn_raw().

◆ emit_clobber()

◆ emit_copy_of_insn_after()

◆ emit_debug_insn()

rtx_insn * emit_debug_insn ( rtx x)
Make an insn of code DEBUG_INSN with pattern X
and add it to the end of the doubly-linked list.   

References add_insn(), gcc_unreachable, GET_CODE, get_last_insn(), ggc_alloc(), last, make_debug_insn_raw(), sequence_stack::next, NEXT_INSN(), and NULL_RTX.

Referenced by emit(), and expand_gimple_basic_block().

◆ emit_debug_insn_after()

rtx_insn * emit_debug_insn_after ( rtx pattern,
rtx_insn * after )
Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.   

References emit_pattern_after(), ggc_alloc(), and make_debug_insn_raw().

Referenced by dead_debug_insert_temp(), and emit_copy_of_insn_after().

◆ emit_debug_insn_after_noloc()

rtx_insn * emit_debug_insn_after_noloc ( rtx x,
rtx_insn * after )
Make an instruction with body X and code DEBUG_INSN
and output it after the instruction AFTER.   

References emit_pattern_after_noloc(), ggc_alloc(), make_debug_insn_raw(), and NULL.

◆ emit_debug_insn_after_setloc()

rtx_insn * emit_debug_insn_after_setloc ( rtx pattern,
rtx_insn * after,
location_t loc )
Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_after_setloc(), ggc_alloc(), and make_debug_insn_raw().

◆ emit_debug_insn_before()

rtx_insn * emit_debug_insn_before ( rtx pattern,
rtx_insn * before )
Like emit_debug_insn_before_noloc,
but set insn_location according to BEFORE.   

References emit_pattern_before(), ggc_alloc(), and make_debug_insn_raw().

Referenced by avoid_complex_debug_insns(), dead_debug_insert_temp(), dead_debug_promote_uses(), delete_trivially_dead_insns(), and propagate_for_debug_subst().

◆ emit_debug_insn_before_noloc()

rtx_insn * emit_debug_insn_before_noloc ( rtx x,
rtx_insn * before )
Make an instruction with body X and code DEBUG_INSN
and output it before the instruction BEFORE.   

References emit_pattern_before_noloc(), ggc_alloc(), make_debug_insn_raw(), and NULL.

◆ emit_debug_insn_before_setloc()

rtx_insn * emit_debug_insn_before_setloc ( rtx pattern,
rtx_insn * before,
location_t loc )
Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_before_setloc(), ggc_alloc(), and make_debug_insn_raw().

◆ emit_insn()

rtx_insn * emit_insn ( rtx x)
Take X and emit it at the end of the doubly-linked
INSN list.

Returns the last insn emitted.   

References add_insn(), gcc_unreachable, GET_CODE, get_last_insn(), ggc_alloc(), last, make_insn_raw(), sequence_stack::next, NEXT_INSN(), and NULL_RTX.

Referenced by allocate_dynamic_stack_space(), anti_adjust_stack_and_probe_stack_clash(), asan_clear_shadow(), assign_parm_setup_reg(), assign_parms(), base_to_reg(), check_and_process_move(), compute_can_copy(), convert_mode_scalar(), curr_insn_transform(), default_speculation_safe_value(), do_compare_and_jump(), do_remat(), emit(), emit_add2_insn(), emit_add3_insn(), address_reload_context::emit_autoinc(), emit_call_1(), emit_call_insn(), emit_clobber(), emit_delay_sequence(), emit_inc_dec_insn_before(), emit_input_reload_insns(), emit_insn_if_valid_for_reload_1(), emit_move_ccmode(), emit_move_insn_1(), emit_move_list(), emit_move_multi_word(), emit_move_via_integer(), emit_output_reload_insns(), emit_stack_probe(), emit_stack_restore(), emit_stack_save(), emit_use(), expand_absneg_bit(), expand_asm_loc(), expand_asm_memory_blockage(), expand_asm_reg_clobber_mem_blockage(), expand_asm_stmt(), expand_atomic_fetch_op(), expand_binop(), expand_binop_directly(), expand_builtin_apply(), expand_builtin_feclear_feraise_except(), expand_builtin_fegetround(), expand_builtin_goacc_parlevel_id_size(), expand_builtin_int_roundingfn(), expand_builtin_int_roundingfn_2(), expand_builtin_longjmp(), expand_builtin_mathfn_3(), expand_builtin_mathfn_ternary(), expand_builtin_nonlocal_goto(), expand_builtin_prefetch(), expand_builtin_return(), expand_builtin_setjmp_receiver(), expand_builtin_setjmp_setup(), expand_builtin_trap(), expand_call(), expand_ccmp_expr(), expand_clrsb_using_clz(), expand_compare_and_swap_loop(), expand_cond_expr_using_cmove(), expand_copysign_bit(), expand_ctz(), expand_DIVMOD(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_popcount(), expand_dw2_landing_pad_for_region(), expand_eh_return(), expand_expr_divmod(), expand_expr_real_1(), expand_expr_real_2(), expand_ffs(), expand_function_end(), expand_GOACC_DIM_POS(), expand_GOACC_DIM_SIZE(), expand_GOMP_SIMT_LANE(), expand_mem_thread_fence(), expand_memory_blockage(), expand_POPCOUNT(), expand_sdiv_pow2(), expand_UNIQUE(), expand_unop(), expand_unop_direct(), expand_vector_broadcast(), expmed_mult_highpart_optab(), find_shift_sequence(), gen_cond_trap(), gen_reload(), gen_reload_chain_without_interm_reg_p(), move_by_pieces_d::generate(), store_by_pieces_d::generate(), inc_for_reload(), pieces_addr::increment_address(), inline_string_cmp(), insert_insn_on_edge(), insert_move_for_subreg(), ira(), lra_emit_move(), lra_process_new_insns(), make_prologue_seq(), make_split_prologue_seq(), match_reload(), maybe_emit_unop_insn(), maybe_expand_insn(), maybe_optimize_mod_cmp(), maybe_optimize_pow2p_mod_cmp(), move_block_from_reg(), move_block_to_reg(), noce_convert_multiple_sets_1(), noce_emit_all_but_last(), noce_emit_cmove(), noce_emit_insn(), noce_emit_move_insn(), noce_emit_store_flag(), prepare_call_address(), prepare_cmp_insn(), prepare_copy_insn(), prepend_insn_to_edge(), probe_stack_range(), process_addr_reg(), process_address_1(), remove_inheritance_pseudos(), stack_protect_epilogue(), stack_protect_prologue(), store_constructor(), and thread_prologue_and_epilogue_insns().

◆ emit_insn_after()

◆ emit_insn_after_1()

static rtx_insn * emit_insn_after_1 ( rtx_insn * first,
rtx_insn * after,
basic_block bb )
static

◆ emit_insn_after_noloc()

rtx_insn * emit_insn_after_noloc ( rtx x,
rtx_insn * after,
basic_block bb )
Make X be output after the insn AFTER and set the BB of insn.  If
BB is NULL, an attempt is made to infer the BB from AFTER.   

References emit_pattern_after_noloc(), ggc_alloc(), and make_insn_raw().

Referenced by cfg_layout_merge_blocks(), commit_one_edge_insertion(), emit_nop_for_unique_locus_between(), expand_gimple_basic_block(), fixup_reorder_chain(), insert_insn_end_basic_block(), insert_insn_start_basic_block(), and lra_process_new_insns().

◆ emit_insn_after_setloc()

rtx_insn * emit_insn_after_setloc ( rtx pattern,
rtx_insn * after,
location_t loc )
Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_after_setloc(), ggc_alloc(), and make_insn_raw().

Referenced by peep2_attempt(), and try_split().

◆ emit_insn_before()

◆ emit_insn_before_noloc()

rtx_insn * emit_insn_before_noloc ( rtx x,
rtx_insn * before,
basic_block bb )
Make X be output before the instruction BEFORE.   

References emit_pattern_before_noloc(), ggc_alloc(), and make_insn_raw().

Referenced by commit_one_edge_insertion(), insert_insn_end_basic_block(), and lra_process_new_insns().

◆ emit_insn_before_setloc()

◆ emit_jump_insn()

◆ emit_jump_insn_after()

rtx_jump_insn * emit_jump_insn_after ( rtx pattern,
rtx_insn * after )
Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.   

References emit_pattern_after(), ggc_alloc(), and make_jump_insn_raw().

Referenced by add_labels_and_missing_jumps(), doloop_modify(), emit_copy_of_insn_after(), find_cond_trap(), and handle_simple_exit().

◆ emit_jump_insn_after_noloc()

rtx_jump_insn * emit_jump_insn_after_noloc ( rtx x,
rtx_insn * after )
Make an insn of code JUMP_INSN with body X
and output it after the insn AFTER.   

References emit_pattern_after_noloc(), ggc_alloc(), make_jump_insn_raw(), and NULL.

Referenced by try_redirect_by_replacing_jump().

◆ emit_jump_insn_after_setloc()

rtx_jump_insn * emit_jump_insn_after_setloc ( rtx pattern,
rtx_insn * after,
location_t loc )
Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_after_setloc(), ggc_alloc(), and make_jump_insn_raw().

Referenced by force_nonfallthru_and_redirect().

◆ emit_jump_insn_before()

rtx_jump_insn * emit_jump_insn_before ( rtx pattern,
rtx_insn * before )
Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.   

References emit_pattern_before(), ggc_alloc(), and make_jump_insn_raw().

Referenced by cse_insn().

◆ emit_jump_insn_before_noloc()

rtx_jump_insn * emit_jump_insn_before_noloc ( rtx x,
rtx_insn * before )
Make an instruction with body X and code JUMP_INSN
and output it before the instruction BEFORE.   

References emit_pattern_before_noloc(), ggc_alloc(), make_jump_insn_raw(), and NULL.

◆ emit_jump_insn_before_setloc()

rtx_jump_insn * emit_jump_insn_before_setloc ( rtx pattern,
rtx_insn * before,
location_t loc )
Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.   

References emit_pattern_before_setloc(), ggc_alloc(), and make_jump_insn_raw().

◆ emit_jump_table_data()

rtx_jump_table_data * emit_jump_table_data ( rtx table)
Make an insn of code JUMP_TABLE_DATA
and add it to the end of the doubly-linked list.   

References add_insn(), BLOCK_FOR_INSN(), cur_insn_uid, ggc_alloc(), INSN_UID(), NULL, PATTERN(), rtx_alloc(), and table.

Referenced by emit_case_dispatch_table().

◆ emit_label()

rtx_code_label * emit_label ( rtx uncast_label)
Add the label LABEL to the end of the doubly-linked list.   

References add_insn(), cur_insn_uid, gcc_checking_assert, ggc_alloc(), and INSN_UID().

Referenced by allocate_dynamic_stack_space(), anti_adjust_stack_and_probe(), anti_adjust_stack_and_probe_stack_clash(), asan_clear_shadow(), asan_emit_stack_protection(), compare_by_pieces(), create_eh_forwarder_block(), do_compare_rtx_and_jump(), do_jump(), do_jump_1(), do_jump_by_parts_equality_rtx(), do_jump_by_parts_greater_rtx(), do_jump_by_parts_zero_rtx(), dw2_build_landing_pads(), emit(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_block_move_via_oriented_loop(), emit_case_dispatch_table(), emit_stack_clash_protection_probe_loop_end(), emit_stack_clash_protection_probe_loop_start(), emit_store_flag_force(), expand_abs(), expand_addsub_overflow(), expand_arith_overflow_result_store(), expand_asm_stmt(), expand_builtin_atomic_compare_exchange(), expand_builtin_strub_leave(), expand_builtin_strub_update(), expand_compare_and_swap_loop(), expand_copysign_absneg(), expand_divmod(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_shift(), expand_eh_return(), expand_expr_real_1(), expand_expr_real_2(), expand_ffs(), expand_fix(), expand_float(), expand_function_end(), expand_gimple_basic_block(), expand_label(), expand_mul_overflow(), expand_neg_overflow(), expand_sdiv_pow2(), expand_sjlj_dispatch_table(), expand_smod_pow2(), expand_vector_ubsan_overflow(), find_end_label(), fix_crossing_conditional_branches(), get_uncond_jump_length(), inline_string_cmp(), prepare_call_address(), probe_stack_range(), sjlj_emit_dispatch_table(), stack_protect_epilogue(), store_constructor(), store_expr(), and try_store_by_multiple_pieces().

◆ emit_label_after()

rtx_insn * emit_label_after ( rtx_insn * label,
rtx_insn * after )
Emit the label LABEL after the insn AFTER.   

References add_insn_after(), cur_insn_uid, gcc_checking_assert, ggc_alloc(), INSN_UID(), and NULL.

Referenced by find_end_label(), and get_label_before().

◆ emit_label_before()

rtx_code_label * emit_label_before ( rtx_code_label * label,
rtx_insn * before )
Emit the label LABEL before the insn BEFORE.   

References add_insn_before(), cur_insn_uid, gcc_checking_assert, ggc_alloc(), INSN_UID(), and NULL.

Referenced by block_label().

◆ emit_likely_jump_insn()

rtx_insn * emit_likely_jump_insn ( rtx x)
Make an insn of code JUMP_INSN with pattern X,
add a REG_BR_PROB note that indicates very likely probability,
and add it to the end of the doubly-linked list.   

References add_reg_br_prob_note(), emit_jump_insn(), and profile_probability::very_likely().

◆ emit_note()

◆ emit_note_after()

◆ emit_note_before()

◆ emit_note_copy()

rtx_note * emit_note_copy ( rtx_note * orig)
Emit a copy of note ORIG.   

References add_insn(), ggc_alloc(), make_note_raw(), NOTE_DATA, and NOTE_KIND.

Referenced by duplicate_insn_chain().

◆ emit_pattern_after()

static rtx_insn * emit_pattern_after ( rtx pattern,
rtx_insn * after,
bool skip_debug_insns,
rtx_insn *(*)(rtx) make_raw )
static
Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
any DEBUG_INSNs.   

References DEBUG_INSN_P, emit_pattern_after_noloc(), emit_pattern_after_setloc(), ggc_alloc(), INSN_LOCATION(), INSN_P, NULL, and PREV_INSN().

Referenced by emit_call_insn_after(), emit_debug_insn_after(), emit_insn_after(), and emit_jump_insn_after().

◆ emit_pattern_after_noloc()

◆ emit_pattern_after_setloc()

static rtx_insn * emit_pattern_after_setloc ( rtx pattern,
rtx_insn * after,
location_t loc,
rtx_insn *(*)(rtx) make_raw )
static
Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
MAKE_RAW indicates how to turn PATTERN into a real insn.   

References active_insn_p(), emit_pattern_after_noloc(), ggc_alloc(), INSN_LOCATION(), JUMP_TABLE_DATA_P, last, NEXT_INSN(), NULL, and NULL_RTX.

Referenced by emit_call_insn_after_setloc(), emit_debug_insn_after_setloc(), emit_insn_after_setloc(), emit_jump_insn_after_setloc(), and emit_pattern_after().

◆ emit_pattern_before()

static rtx_insn * emit_pattern_before ( rtx pattern,
rtx_insn * before,
bool skip_debug_insns,
bool insnp,
rtx_insn *(*)(rtx) make_raw )
static
Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
INSN as opposed to a JUMP_INSN, CALL_INSN, etc.   

References DEBUG_INSN_P, emit_pattern_before_noloc(), emit_pattern_before_setloc(), ggc_alloc(), INSN_LOCATION(), INSN_P, sequence_stack::next, NULL, and PREV_INSN().

Referenced by emit_call_insn_before(), emit_debug_insn_before(), emit_insn_before(), and emit_jump_insn_before().

◆ emit_pattern_before_noloc()

static rtx_insn * emit_pattern_before_noloc ( rtx x,
rtx_insn * before,
rtx_insn * last,
basic_block bb,
rtx_insn *(*)(rtx) make_raw )
static
Emit insn(s) of given code and pattern
at a specified place within the doubly-linked list.

All of the emit_foo global entry points accept an object
X which is either an insn list or a PATTERN of a single
instruction.

There are thus a few canonical ways to generate code and
emit it at a specific place in the instruction stream.  For
example, consider the instruction named SPOT and the fact that
we would like to emit some instructions before SPOT.  We might
do it like this:

     start_sequence ();
     ... emit the new instructions ...
     insns_head = get_insns ();
     end_sequence ();

     emit_insn_before (insns_head, SPOT);

It used to be common to generate SEQUENCE rtl instead, but that
is a relic of the past which no longer occurs.  The reason is that
SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
generated would almost certainly die right after it was created.   

References add_insn_before(), gcc_assert, gcc_unreachable, GET_CODE, ggc_alloc(), last, sequence_stack::next, NEXT_INSN(), and NULL_RTX.

Referenced by emit_call_insn_before_noloc(), emit_debug_insn_before_noloc(), emit_insn_before_noloc(), emit_jump_insn_before_noloc(), emit_pattern_before(), and emit_pattern_before_setloc().

◆ emit_pattern_before_setloc()

static rtx_insn * emit_pattern_before_setloc ( rtx pattern,
rtx_insn * before,
location_t loc,
bool insnp,
rtx_insn *(*)(rtx) make_raw )
static
Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
CALL_INSN, etc.   

References active_insn_p(), emit_pattern_before_noloc(), sequence_stack::first, get_insns(), ggc_alloc(), INSN_LOCATION(), JUMP_TABLE_DATA_P, last, NEXT_INSN(), NULL, NULL_RTX, and PREV_INSN().

Referenced by emit_call_insn_before_setloc(), emit_debug_insn_before_setloc(), emit_insn_before_setloc(), emit_jump_insn_before_setloc(), and emit_pattern_before().

◆ emit_unlikely_jump_insn()

rtx_insn * emit_unlikely_jump_insn ( rtx x)
Make an insn of code JUMP_INSN with pattern X,
add a REG_BR_PROB note that indicates very unlikely probability,
and add it to the end of the doubly-linked list.   

References add_reg_br_prob_note(), emit_jump_insn(), and profile_probability::very_unlikely().

◆ emit_use()

◆ end_sequence()

void end_sequence ( void )
After emitting to a sequence, restore previous saved state.

To get the contents of the sequence just made, you must call
`get_insns' *before* calling here.

If the compiler might have deferred popping arguments while
generating this sequence, and this sequence will not be immediately
inserted into the instruction stream, use do_pending_stack_adjust
before calling get_insns.  That will ensure that the deferred
pops are inserted into this sequence, and not into some random
location in the instruction stream.  See INHIBIT_DEFER_POP for more
information about deferred popping of arguments.   

References free_sequence_stack, get_current_sequence(), ggc_alloc(), sequence_stack::next, set_first_insn(), and set_last_insn().

Referenced by add_test(), asan_clear_shadow(), asan_emit_allocas_unpoison(), asan_emit_stack_protection(), assign_parm_setup_block(), assign_parm_setup_reg(), assign_parm_setup_stack(), assign_parms_unsplit_complex(), attempt_change(), check_and_process_move(), combine_reaching_defs(), combine_var_copies_in_loop_exit(), compare_and_jump_seq(), computation_cost(), compute_can_copy(), cond_move_process_if_block(), convert_mode_scalar(), curr_insn_transform(), do_remat(), doloop_modify(), dw2_build_landing_pads(), emit_common_heads_for_components(), emit_common_tails_for_components(), emit_delay_sequence(), emit_inc_dec_insn_before(), emit_initial_value_sets(), emit_input_reload_insns(), emit_move_list(), emit_move_multi_word(), emit_output_reload_insns(), emit_partition_copy(), end_ifcvt_sequence(), expand_absneg_bit(), expand_asm_stmt(), expand_atomic_fetch_op(), expand_binop(), expand_builtin_apply_args(), expand_builtin_int_roundingfn(), expand_builtin_int_roundingfn_2(), expand_builtin_mathfn_3(), expand_builtin_mathfn_ternary(), expand_builtin_return(), expand_builtin_saveregs(), expand_builtin_strlen(), expand_call(), expand_clrsb_using_clz(), expand_cond_expr_using_cmove(), expand_copysign_bit(), expand_ctz(), expand_DIVMOD(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_popcount(), expand_dummy_function_end(), expand_expr_divmod(), expand_expr_real_2(), expand_ffs(), expand_fix(), expand_fixed_convert(), expand_float(), expand_function_end(), expand_POPCOUNT(), expand_sdiv_pow2(), expand_twoval_binop_libfunc(), expand_unop(), expmed_mult_highpart_optab(), find_shift_sequence(), fix_crossing_unconditional_branches(), gen_call_used_regs_seq(), gen_clobber(), gen_cond_trap(), gen_move_insn(), gen_use(), get_arg_pointer_save_area(), get_uncond_jump_length(), hwasan_emit_untag_frame(), hwasan_frame_base(), inherit_in_ebb(), inherit_reload_reg(), init_set_costs(), initialize_uninitialized_regs(), inline_string_cmp(), insert_base_initialization(), insert_insn_on_edge(), insert_move_for_subreg(), insert_prologue_epilogue_for_components(), insert_value_copy_on_edge(), insert_var_expansion_initialization(), instantiate_virtual_regs_in_insn(), ira(), lra_process_new_insns(), make_epilogue_seq(), make_prologue_seq(), make_split_prologue_seq(), match_asm_constraints_1(), match_reload(), maybe_optimize_mod_cmp(), maybe_optimize_pow2p_mod_cmp(), noce_convert_multiple_sets(), noce_convert_multiple_sets_1(), noce_emit_cmove(), noce_emit_move_insn(), noce_emit_store_flag(), noce_process_if_block(), noce_try_abs(), noce_try_addcc(), noce_try_cmove(), noce_try_cmove_arith(), noce_try_cond_zero_arith(), noce_try_inverse_constants(), noce_try_minmax(), noce_try_sign_mask(), noce_try_store_flag(), noce_try_store_flag_constants(), noce_try_store_flag_mask(), pop_topmost_sequence(), prepare_copy_insn(), prepare_float_lib_cmp(), prepend_insn_to_edge(), process_addr_reg(), process_address_1(), process_invariant_for_inheritance(), record_store(), remove_inheritance_pseudos(), replace_read(), resolve_shift_zext(), resolve_simple_move(), rtl_lv_add_condition_to_bb(), sjlj_emit_dispatch_table(), sjlj_emit_function_enter(), sjlj_emit_function_exit(), sjlj_mark_call_sites(), split_iv(), thread_prologue_and_epilogue_insns(), try_emit_cmove_seq(), and unroll_loop_runtime_iterations().
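
The usual pairing with start_sequence and get_insns, sketched with a placeholder move (target and source are assumptions):

     start_sequence ();
     emit_move_insn (target, source);   /* emit into the detached sequence        */
     rtx_insn *seq = get_insns ();      /* grab the sequence before ending it     */
     end_sequence ();
     emit_insn (seq);                   /* splice the insns into the main chain   */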

◆ find_auto_inc()

static int find_auto_inc ( const_rtx x,
const_rtx reg )
static
Find a RTX_AUTOINC class rtx which matches REG.   

References FOR_EACH_SUBRTX, GET_CODE, GET_RTX_CLASS, ggc_alloc(), RTX_AUTOINC, rtx_equal_p(), and XEXP.

Referenced by try_split().

◆ force_reload_address()

rtx force_reload_address ( rtx mem)
Return a memory reference like MEM, but with the address reloaded into a
pseudo register.   

References address_reload_context::emit_autoinc(), force_reg(), GET_CODE, GET_MODE, GET_MODE_SIZE(), GET_RTX_CLASS, ggc_alloc(), replace_equiv_address(), RTX_AUTOINC, mem_attrs::size, and XEXP.

◆ gen_blockage()

rtx gen_blockage ( void )
Generate an empty ASM_INPUT, which is used to block attempts to schedule,
and to block register equivalences to be seen across this insn.   

References gen_rtx_ASM_INPUT, ggc_alloc(), and MEM_VOLATILE_P.

Referenced by anti_adjust_stack_and_probe_stack_clash(), expand_builtin_longjmp(), expand_builtin_nonlocal_goto(), expand_builtin_setjmp_receiver(), expand_function_end(), make_prologue_seq(), and probe_stack_range().
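
Typical use, as in the callers above, is to emit the blockage as an ordinary insn at the point that must not be crossed:

     emit_insn (gen_blockage ());   /* scheduling/equivalence barrier at this point */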

◆ gen_clobber()

rtx_insn * gen_clobber ( rtx x)
Return a sequence of insns to clobber lvalue X.   

References emit_clobber(), end_sequence(), get_insns(), and start_sequence().

Referenced by eliminate_regs_in_insn(), and find_reloads().

◆ gen_const_mem()

rtx gen_const_mem ( machine_mode mode,
rtx addr )
Generate a memory referring to non-trapping constant memory.   

References gen_rtx_MEM(), MEM_NOTRAP_P, and MEM_READONLY_P.

Referenced by assemble_trampoline_template(), build_constant_desc(), do_tablejump(), and force_const_mem().
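
A minimal sketch, assuming addr is a constant address such as a SYMBOL_REF:

     rtx tbl = gen_const_mem (Pmode, addr);
     /* MEM_READONLY_P and MEM_NOTRAP_P are set on the result.  */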

◆ gen_const_vec_duplicate()

rtx gen_const_vec_duplicate ( machine_mode mode,
rtx elt )
Generate a vector constant of mode MODE in which every element has
value ELT.   

References ggc_alloc().

Referenced by builtin_memset_read_str(), expand_vector_broadcast(), gen_const_vector(), gen_rtx_CONST_VECTOR(), gen_vec_duplicate(), relational_result(), and simplify_const_unary_operation().
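
A sketch assuming the target provides V4SImode:

     rtx ones = gen_const_vec_duplicate (V4SImode, const1_rtx);   /* {1, 1, 1, 1} */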

◆ gen_const_vec_series()

rtx gen_const_vec_series ( machine_mode mode,
rtx base,
rtx step )
Generate a vector constant of mode MODE in which element I has
the value BASE + I * STEP.   

References gcc_assert, GET_MODE_INNER, ggc_alloc(), i, simplify_gen_binary(), and valid_for_const_vector_p().

Referenced by gen_vec_series(), and simplify_context::simplify_binary_operation_1().
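
A sketch assuming the target provides V4SImode:

     rtx iota = gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);   /* {0, 1, 2, 3} */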

◆ gen_const_vector()

static rtx gen_const_vector ( machine_mode mode,
int constant )
static
Generate a new vector constant for mode MODE and constant value
CONSTANT.   

References const_tiny_rtx, DECIMAL_FLOAT_MODE_P, gcc_assert, gen_const_vec_duplicate(), and GET_MODE_INNER.

Referenced by init_emit_once().

◆ gen_frame_mem()

rtx gen_frame_mem ( machine_mode mode,
rtx addr )
Generate a MEM referring to fixed portions of the frame, e.g., register
save areas.   

References gen_rtx_MEM(), get_frame_alias_set(), MEM_NOTRAP_P, and set_mem_alias_set().

Referenced by expand_builtin_return_addr().
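
A sketch of a word-sized save slot; the offset from the hard frame pointer is illustrative only:

     rtx slot = gen_frame_mem (word_mode,
                               plus_constant (Pmode, hard_frame_pointer_rtx,
                                              -UNITS_PER_WORD));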

◆ gen_hard_reg_clobber()

rtx gen_hard_reg_clobber ( machine_mode mode,
unsigned int regno )

◆ gen_highpart()

◆ gen_highpart_mode()

rtx gen_highpart_mode ( machine_mode outermode,
machine_mode innermode,
rtx exp )
Like gen_highpart, but accept the mode of the EXP operand in case EXP can
be a VOIDmode constant.   

References exp(), gcc_assert, gen_highpart(), GET_MODE, ggc_alloc(), simplify_gen_subreg(), and subreg_highpart_offset().

◆ gen_int_mode()

rtx gen_int_mode ( poly_int64 c,
machine_mode mode )

References poly_int< N, C >::coeffs, poly_int< N, C >::from(), GEN_INT, GET_MODE_PRECISION(), ggc_alloc(), immed_wide_int_const(), poly_int< N, C >::is_constant(), SIGNED, and trunc_int_for_mode().

Referenced by add_args_size_note(), addr_offset_valid_p(), align_dynamic_address(), allocate_dynamic_stack_space(), anti_adjust_stack_and_probe(), asan_clear_shadow(), asan_emit_stack_protection(), assemble_real(), assign_parm_setup_block(), assign_parms(), autoinc_split(), builtin_memset_read_str(), canonicalize_address_mult(), canonicalize_condition(), cleanup_auto_inc_dec(), combine_set_extension(), combine_simplify_rtx(), convert_mode_scalar(), create_integer_operand(), cselib_hash_rtx(), default_memtag_untagged_pointer(), do_pending_stack_adjust(), do_tablejump(), dw2_asm_output_offset(), address_reload_context::emit_autoinc(), emit_call_1(), emit_library_call_value_1(), emit_move_complex(), emit_move_resolve_push(), emit_push_insn(), emit_store_flag(), emit_store_flag_int(), expand_atomic_test_and_set(), expand_binop(), expand_builtin_memset_args(), expand_call(), expand_ctz(), expand_debug_expr(), expand_divmod(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_mod(), expand_expr_real_1(), expand_expr_real_2(), expand_ffs(), expand_field_assignment(), expand_fix(), expand_HWASAN_CHOOSE_TAG(), expand_sdiv_pow2(), expand_SET_EDOM(), expand_shift_1(), expand_smod_pow2(), expand_stack_vars(), expand_unop(), expand_vector_ubsan_overflow(), expmed_mult_highpart(), extract_left_shift(), find_split_point(), asan_redzone_buffer::flush_redzone_payload(), for_each_inc_dec_find_inc_dec(), force_int_to_mode(), force_to_mode(), gen_int_shift_amount(), get_call_args(), get_dynamic_stack_base(), get_mode_bounds(), get_stored_val(), hwasan_emit_prologue(), hwasan_truncate_to_tag_size(), if_then_else_cond(), immed_double_const(), immed_wide_int_const_1(), inc_for_reload(), pieces_addr::increment_address(), init_caller_save(), init_one_dwarf_reg_size(), init_reload(), init_return_column_size(), insert_const_anchor(), instantiate_virtual_regs_in_insn(), iv_number_of_iterations(), make_compound_operation_int(), make_extraction(), make_field_assignment(), maybe_legitimize_operand(), mem_loc_descriptor(), move2add_use_add2_insn(), move2add_use_add3_insn(), multiplier_allowed_in_address_p(), native_decode_vector_rtx(), noce_try_bitop(), noce_try_cmove(), noce_try_store_flag_constants(), optimize_bitfield_assignment_op(), output_constant_pool_2(), plus_constant(), probe_stack_range(), push_block(), reload_cse_move2add(), round_trampoline_addr(), simplify_and_const_int(), simplify_and_const_int_1(), simplify_context::simplify_binary_operation_1(), simplify_compare_const(), simplify_comparison(), simplify_if_then_else(), simplify_set(), simplify_shift_const_1(), simplify_context::simplify_ternary_operation(), sjlj_mark_call_sites(), split_iv(), store_bit_field_using_insv(), store_constructor(), store_field(), store_one_arg(), try_store_by_multiple_pieces(), unroll_loop_constant_iterations(), unroll_loop_runtime_iterations(), update_reg_equal_equiv_notes(), validate_test_and_branch(), vec_perm_indices_to_rtx(), and widen_leading().

◆ gen_int_shift_amount()

◆ gen_label_rtx()

rtx_code_label * gen_label_rtx ( void )
Return a newly created CODE_LABEL rtx with a unique label number.   

References ggc_alloc(), label_num, NULL, and NULL_RTX.

Referenced by allocate_dynamic_stack_space(), anti_adjust_stack_and_probe(), anti_adjust_stack_and_probe_stack_clash(), asan_clear_shadow(), asan_emit_stack_protection(), block_label(), compare_by_pieces(), do_compare_rtx_and_jump(), do_jump(), do_jump_1(), do_jump_by_parts_equality_rtx(), do_jump_by_parts_greater_rtx(), do_jump_by_parts_zero_rtx(), dw2_build_landing_pads(), dw2_fix_up_crossing_landing_pad(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_block_move_via_oriented_loop(), emit_case_dispatch_table(), emit_stack_clash_protection_probe_loop_start(), emit_store_flag_force(), expand_abs(), expand_addsub_overflow(), expand_arith_overflow_result_store(), expand_asm_stmt(), expand_builtin_atomic_compare_exchange(), expand_builtin_eh_return(), expand_builtin_strub_leave(), expand_builtin_strub_update(), expand_compare_and_swap_loop(), expand_copysign_absneg(), expand_divmod(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_shift(), expand_eh_return(), expand_expr_real_1(), expand_expr_real_2(), expand_ffs(), expand_fix(), expand_float(), expand_function_start(), expand_mul_overflow(), expand_naked_return(), expand_neg_overflow(), expand_sdiv_pow2(), expand_sjlj_dispatch_table(), expand_smod_pow2(), expand_vector_ubsan_overflow(), find_end_label(), fix_crossing_conditional_branches(), get_label_before(), get_uncond_jump_length(), inline_string_cmp(), label_rtx(), label_rtx_for_bb(), prepare_call_address(), probe_stack_range(), sjlj_build_landing_pads(), sjlj_emit_dispatch_table(), sjlj_fix_up_crossing_landing_pad(), stack_protect_epilogue(), store_constructor(), store_expr(), and try_store_by_multiple_pieces().
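
A minimal sketch of typical expander usage (hypothetical, not from the source): create a join label, branch to it, and define it later.

  rtx_code_label *done = gen_label_rtx ();
  emit_jump (done);            /* unconditional jump; also emits a barrier  */
  /* ... insns that the jump skips ...  */
  emit_label (done);           /* define the label at the join point  */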

◆ gen_lowpart_common()

rtx gen_lowpart_common ( machine_mode mode,
rtx x )
Return a value representing some low-order bits of X, where the number
of low-order bits is given by MODE.  Note that no conversion is done
between floating-point and fixed-point values, rather, the bit
representation is returned.

This function handles the cases in common between gen_lowpart, below,
and two variants in cse.cc and combine.cc.  These are the cases that can
be safely handled at all points in the compilation.

If this is not a case we can handle, return 0.   

References CONST_DOUBLE_AS_FLOAT_P, CONST_INT_P, CONST_POLY_INT_P, CONST_SCALAR_INT_P, gcc_assert, gen_lowpart_common(), GET_CODE, GET_MODE, GET_MODE_SIZE(), ggc_alloc(), HOST_BITS_PER_DOUBLE_INT, HOST_BITS_PER_WIDE_INT, int_mode_for_size(), known_le, lowpart_subreg(), maybe_gt, REG_P, REGMODE_NATURAL_SIZE, opt_mode< T >::require(), SCALAR_FLOAT_MODE_P, and XEXP.

Referenced by combine_simplify_rtx(), do_output_reload(), expand_expr_real_1(), extract_low_bits(), force_to_mode(), gen_lowpart_common(), gen_lowpart_for_combine(), gen_lowpart_general(), gen_lowpart_if_possible(), move2add_use_add2_insn(), store_split_bit_field(), and strip_paradoxical_subreg().
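
A hedged sketch of the calling convention: the result must be checked for 0, since only the common cases are handled here.

  rtx d  = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_common (SImode, d);
  if (lo == 0)
    /* Not one of the common cases; fall back to the general hook,
       which may copy the operand into a fresh pseudo first.  */
    lo = gen_lowpart (SImode, d);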

◆ gen_lowpart_SUBREG()

◆ gen_raw_REG()

◆ gen_reg_rtx()

rtx gen_reg_rtx ( machine_mode mode)
Generate a REG rtx for a new pseudo register of mode MODE.
This pseudo is assigned the next sequential register number.   

References can_create_pseudo_p, crtl, emit(), gcc_assert, gen_raw_REG(), gen_reg_rtx(), generating_concat_p, GET_MODE_ALIGNMENT, GET_MODE_CLASS, GET_MODE_INNER, ggc_alloc(), min_align(), MINIMUM_ALIGNMENT, NULL, reg_rtx_no, regno_reg_rtx, and SUPPORTS_STACK_ALIGNMENT.

Referenced by allocate_basic_variable(), allocate_dynamic_stack_space(), asan_emit_stack_protection(), assign_parm_remove_parallels(), assign_parm_setup_block(), assign_parm_setup_reg(), assign_parm_setup_stack(), assign_temp(), avoid_likely_spilled_reg(), builtin_memset_gen_str(), builtin_memset_read_str(), compare_by_pieces(), compress_float_constant(), convert_mode_scalar(), convert_modes(), copy_blkmode_to_reg(), copy_to_mode_reg(), copy_to_reg(), copy_to_suggested_reg(), do_compare_and_jump(), do_jump_by_parts_zero_rtx(), do_store_flag(), do_tablejump(), doloop_optimize(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_conditional_add(), emit_conditional_move(), emit_conditional_move_1(), emit_conditional_neg_or_complement(), emit_cstore(), emit_group_load_1(), emit_group_store(), emit_libcall_block_1(), emit_library_call_value_1(), emit_push_insn(), emit_stack_save(), emit_store_flag_1(), emit_store_flag_force(), entry_register(), expand_abs(), expand_absneg_bit(), expand_addsub_overflow(), expand_asm_stmt(), expand_atomic_compare_and_swap(), expand_atomic_fetch_op(), expand_atomic_load(), expand_atomic_test_and_set(), expand_binop(), expand_builtin(), expand_builtin_apply(), expand_builtin_cexpi(), expand_builtin_eh_copy_values(), expand_builtin_eh_filter(), expand_builtin_eh_pointer(), expand_builtin_feclear_feraise_except(), expand_builtin_fegetround(), expand_builtin_goacc_parlevel_id_size(), expand_builtin_int_roundingfn(), expand_builtin_int_roundingfn_2(), expand_builtin_issignaling(), expand_builtin_mathfn_3(), expand_builtin_mathfn_ternary(), expand_builtin_powi(), expand_builtin_signbit(), expand_builtin_sincos(), expand_builtin_stpcpy_1(), expand_builtin_strlen(), expand_builtin_thread_pointer(), expand_call(), expand_ccmp_expr(), expand_compare_and_swap_loop(), expand_copysign_absneg(), expand_copysign_bit(), expand_divmod(), expand_DIVMOD(), expand_doubleword_bswap(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_mod(), expand_doubleword_popcount(), expand_doubleword_shift_condmove(), expand_expr_real_1(), expand_expr_real_2(), expand_fix(), expand_float(), expand_function_end(), expand_function_start(), expand_gimple_basic_block(), expand_GOMP_SIMT_ENTER_ALLOC(), expand_ifn_atomic_bit_test_and(), expand_ifn_atomic_op_fetch_cmp_0(), expand_mul_overflow(), expand_mult_highpart(), expand_neg_overflow(), expand_one_error_var(), expand_one_register_var(), expand_one_ssa_partition(), expand_parity(), expand_sdiv_pow2(), expand_sfix_optab(), expand_single_bit_test(), expand_smod_pow2(), expand_speculation_safe_value(), expand_stack_vars(), expand_twoval_binop(), expand_twoval_unop(), expand_UADDC(), expand_ubsan_result_store(), expand_unop(), expand_used_vars(), expand_var_during_unrolling(), expand_vec_perm_const(), expand_vec_perm_var(), expand_vec_set_optab_fn(), expand_vector_broadcast(), expand_vector_ubsan_overflow(), extract_bit_field_1(), extract_bit_field_using_extv(), extract_integral_bit_field(), find_shift_sequence(), fix_crossing_unconditional_branches(), force_not_mem(), force_operand(), force_reg(), gen_group_rtx(), gen_reg_rtx(), gen_reg_rtx_and_attrs(), gen_reg_rtx_offset(), get_dynamic_stack_base(), get_hard_reg_initial_val(), address_reload_context::get_reload_reg(), get_scratch_reg(), get_temp_reg(), inline_string_cmp(), ira_create_new_reg(), load_register_parameters(), lra(), lra_create_new_reg_with_unique_value(), make_more_copies(), make_safe_from(), maybe_emit_compare_and_swap_exchange_loop(), maybe_emit_group_store(), 
maybe_legitimize_operand(), maybe_optimize_fetch_op(), noce_convert_multiple_sets_1(), noce_emit_cmove(), noce_process_if_block(), noce_try_addcc(), noce_try_cmove_arith(), noce_try_cond_zero_arith(), noce_try_inverse_constants(), noce_try_sign_mask(), noce_try_store_flag_constants(), noce_try_store_flag_mask(), prepare_cmp_insn(), prepare_float_lib_cmp(), resolve_simple_move(), round_trampoline_addr(), split_iv(), store_bit_field_using_insv(), store_constructor(), store_field(), store_integral_bit_field(), store_one_arg(), store_unaligned_arguments_into_pseudos(), unroll_loop_runtime_iterations(), widen_bswap(), widen_leading(), and widen_operand().
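
A minimal sketch (hypothetical): fresh pseudos may only be created while can_create_pseudo_p () holds, i.e. before register allocation.

  gcc_assert (can_create_pseudo_p ());
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, gen_int_mode (42, SImode));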

◆ gen_reg_rtx_and_attrs()

rtx gen_reg_rtx_and_attrs ( rtx x)
Generate a REG rtx for a new pseudo register, copying the mode
and attributes from X.   

References gen_reg_rtx(), GET_MODE, and set_reg_attrs_from_value().

Referenced by build_store_vectors(), delete_store(), hoist_code(), move_invariant_reg(), and pre_delete().

◆ gen_reg_rtx_offset()

rtx gen_reg_rtx_offset ( rtx reg,
machine_mode mode,
int offset )
Generate a new pseudo-register with the same attributes as REG, but
with OFFSET added to the REG_OFFSET.   

References gen_reg_rtx(), offset, and update_reg_offset().

Referenced by decompose_register().

◆ gen_rtvec()

rtvec gen_rtvec ( int n,
... )

◆ gen_rtvec_v() [1/2]

◆ gen_rtvec_v() [2/2]

rtvec gen_rtvec_v ( int n,
rtx_insn ** argp )

References ggc_alloc(), i, NULL_RTVEC, and rtvec_alloc().

◆ gen_rtx_CONST_INT()

◆ gen_rtx_CONST_VECTOR()

rtx gen_rtx_CONST_VECTOR ( machine_mode mode,
rtvec v )
Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
all elements are zero, and the one vector when all elements are one.   

References gcc_assert, gen_const_vec_duplicate(), GET_MODE_NUNITS(), GET_NUM_ELEM, ggc_alloc(), i, known_eq, rtvec_all_equal_p(), and RTVEC_ELT.

Referenced by function_reader::consolidate_singletons(), simplify_context::simplify_binary_operation_1(), simplify_const_binary_operation(), and simplify_context::simplify_ternary_operation().
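
A hedged sketch, assuming the target provides V4SImode: a non-uniform constant vector; a uniform one would be canonicalized through gen_const_vec_duplicate instead.

  rtvec elts = gen_rtvec (4, GEN_INT (1), GEN_INT (2), GEN_INT (3), GEN_INT (4));
  rtx cst = gen_rtx_CONST_VECTOR (V4SImode, elts);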

◆ gen_rtx_EXPR_LIST()

rtx_expr_list * gen_rtx_EXPR_LIST ( machine_mode mode,
rtx expr,
rtx expr_list )
There are some RTL codes that require special attention; the generation
functions do the raw handling.  If you add to this list, modify
special_rtx in gengenrtl.cc as well.   

References ggc_alloc().

Referenced by alloc_EXPR_LIST(), assemble_external_libcall(), clobber_reg_mode(), emit_call_1(), emit_library_call_value_1(), emit_note_insn_var_location(), expand_call(), gen_group_rtx(), prepare_call_arguments(), use_reg_mode(), and vt_add_function_parameter().

◆ gen_rtx_INSN()

rtx_insn * gen_rtx_INSN ( machine_mode mode,
rtx_insn * prev_insn,
rtx_insn * next_insn,
basic_block bb,
rtx pattern,
int location,
int code,
rtx reg_notes )

◆ gen_rtx_INSN_LIST()

◆ gen_rtx_MEM()

rtx gen_rtx_MEM ( machine_mode mode,
rtx addr )

References ggc_alloc(), and MEM_ATTRS.

Referenced by asan_emit_stack_protection(), assign_parm_find_stack_rtl(), assign_parm_setup_block(), assign_parm_setup_reg(), assign_parms(), assign_stack_local_1(), assign_stack_temp_for_type(), change_address(), change_address_1(), compute_argument_addresses(), cselib_init(), decode_addr_const(), default_static_chain(), emit_call_1(), emit_library_call_value_1(), emit_move_change_mode(), emit_move_complex_push(), emit_push_insn(), emit_stack_probe(), emit_stack_restore(), expand_asm_loc(), expand_asm_memory_blockage(), expand_asm_reg_clobber_mem_blockage(), expand_asm_stmt(), expand_assignment(), expand_builtin_apply(), expand_builtin_atomic_compare_exchange(), expand_builtin_init_descriptor(), expand_builtin_init_dwarf_reg_sizes(), expand_builtin_init_trampoline(), expand_builtin_longjmp(), expand_builtin_nonlocal_goto(), expand_builtin_return(), expand_builtin_setjmp_setup(), expand_builtin_strlen(), expand_builtin_strub_leave(), expand_builtin_update_setjmp_buf(), expand_builtin_va_copy(), expand_call(), expand_debug_expr(), expand_expr_real_1(), expand_function_start(), expand_one_error_var(), expand_one_stack_var_at(), expand_SET_EDOM(), gen_const_mem(), gen_frame_mem(), gen_tmp_stack_mem(), get_builtin_sync_mem(), get_group_info(), get_memory_rtx(), get_spill_slot_decl(), init_caller_save(), init_expr_target(), init_fake_stack_mems(), init_reload(), init_set_costs(), initialize_argument_information(), make_decl_rtl(), noce_try_cmove_arith(), prepare_call_address(), prepare_call_arguments(), produce_memory_decl_rtl(), replace_pseudos_in(), rtl_for_decl_init(), rtl_for_decl_location(), scan_insn(), store_one_arg(), and vt_add_function_parameter().
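
A minimal sketch (hypothetical): gen_rtx_MEM leaves the memory attributes empty, so callers that care should use set_mem_attributes or one of the gen_*_mem wrappers above.

  rtx addr = plus_constant (Pmode, stack_pointer_rtx, 8);
  rtx slot = gen_rtx_MEM (word_mode, addr);   /* no MEM_EXPR or alias info yet  */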

◆ gen_rtx_REG()

rtx gen_rtx_REG ( machine_mode mode,
unsigned int regno )

References arg_pointer_rtx, cfun, fixed_regs, frame_pointer_needed, frame_pointer_rtx, gen_raw_REG(), ggc_alloc(), HARD_FRAME_POINTER_IS_FRAME_POINTER, HARD_FRAME_POINTER_REGNUM, hard_frame_pointer_rtx, INVALID_REGNUM, lra_in_progress, PIC_OFFSET_TABLE_REGNUM, pic_offset_table_rtx, reg_raw_mode, regno_reg_rtx, reload_completed, reload_in_progress, return_address_pointer_rtx, and stack_pointer_rtx.

Referenced by assign_parm_setup_block(), can_assign_to_reg_without_clobbers_p(), can_eliminate_compare(), can_reload_into(), canon_reg(), canonicalize_comparison(), change_zero_ext(), choose_reload_regs(), combine_reaching_defs(), combine_reloads(), combine_set_extension(), compute_can_copy(), cse_cc_succs(), cse_condition_code_reg(), cse_insn(), default_static_chain(), default_zero_call_used_regs(), do_output_reload(), do_reload(), emit_library_call_value_1(), emit_push_insn(), expand_asm_stmt(), expand_builtin_apply(), expand_builtin_apply_args_1(), expand_builtin_init_dwarf_reg_sizes(), expand_builtin_return(), expand_call(), expand_dw2_landing_pad_for_region(), expand_function_end(), find_and_remove_re(), find_dummy_reload(), find_reloads_address_1(), gen_hard_reg_clobber(), gen_reload(), gen_reload_chain_without_interm_reg_p(), get_hard_reg_initial_val(), init_caller_save(), init_elim_table(), init_elim_table(), init_emit_regs(), init_expr_target(), init_lower_subreg(), init_reload(), insert_restore(), insert_save(), load_register_parameters(), maybe_memory_address_addr_space_p(), maybe_select_cc_mode(), move2add_use_add3_insn(), move_block_from_reg(), move_block_to_reg(), peep2_find_free_register(), prefer_and_bit_test(), push_reload(), reload_adjust_reg_for_mode(), reload_combine_recognize_pattern(), reload_cse_regs_1(), reload_cse_simplify_operands(), reload_cse_simplify_set(), replace_reg_with_saved_mem(), result_vector(), set_reload_reg(), setup_prohibited_mode_move_regs(), simplify_set(), split_reg(), transform_ifelse(), try_combine(), try_eliminate_compare(), vt_add_function_parameter(), and zcur_select_mode_rtx().
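
A minimal sketch: for the well-known registers the function hands back the shared rtx (outside reload/LRA) rather than allocating a new one.

  rtx r0 = gen_rtx_REG (SImode, 0);                    /* hard register 0 in SImode  */
  rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);  /* same object as stack_pointer_rtx  */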

◆ gen_rtx_REG_offset()

rtx gen_rtx_REG_offset ( rtx reg,
machine_mode mode,
unsigned int regno,
poly_int64 offset )
Generate a register with same attributes as REG, but with OFFSET
added to the REG_OFFSET.   

References gen_raw_REG(), offset, and update_reg_offset().

Referenced by alter_subreg(), expand_debug_parm_decl(), simplify_context::simplify_subreg(), var_lowpart(), and vt_add_function_parameter().

◆ gen_rtx_SUBREG()

◆ gen_rtx_VAR_LOCATION()

◆ gen_tmp_stack_mem()

rtx gen_tmp_stack_mem ( machine_mode mode,
rtx addr )
Generate a MEM referring to a temporary use of the stack, not part
of the fixed stack frame.  For example, something which is pushed
by a target splitter.   

References cfun, gen_rtx_MEM(), get_frame_alias_set(), MEM_NOTRAP_P, and set_mem_alias_set().

◆ gen_use()

rtx_insn * gen_use ( rtx x)
Return a sequence of insns to use rvalue X.   

References emit_use(), end_sequence(), get_insns(), and start_sequence().

Referenced by rtl_flow_call_edges_add().

◆ gen_vec_duplicate()

rtx gen_vec_duplicate ( machine_mode mode,
rtx x )
Return a vector rtx of mode MODE in which every element has value X.
The result will be a constant if X is constant.   

References gen_const_vec_duplicate(), ggc_alloc(), and valid_for_const_vector_p().

Referenced by gen_vec_series(), simplify_context::simplify_binary_operation_1(), simplify_context::simplify_subreg(), and simplify_context::simplify_unary_operation_1().
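
A hedged sketch, assuming V4SImode exists on the target: a constant input yields a CONST_VECTOR, a variable input a (vec_duplicate ...) expression.

  rtx splat_c = gen_vec_duplicate (V4SImode, GEN_INT (7));           /* constant vector  */
  rtx splat_r = gen_vec_duplicate (V4SImode, gen_reg_rtx (SImode));  /* vec_duplicate rtx  */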

◆ gen_vec_series()

rtx gen_vec_series ( machine_mode mode,
rtx base,
rtx step )
Generate a vector of mode MODE in which element I has the value
BASE + I * STEP.  The result will be a constant if BASE and STEP
are both constants.   

References const0_rtx, gen_const_vec_series(), gen_vec_duplicate(), ggc_alloc(), and valid_for_const_vector_p().

Referenced by simplify_context::simplify_binary_operation_series(), and simplify_context::simplify_unary_operation_1().
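
A hedged sketch, again assuming V4SImode: the series {0, 1, 2, 3}, e.g. as the start of an index vector; constant BASE and STEP give a constant result.

  rtx iota = gen_vec_series (V4SImode, const0_rtx, const1_rtx);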

◆ get_first_label_num()

int get_first_label_num ( void )
Return first label number used in this function (if any were used).   

References first_label_num.

Referenced by compute_alignments(), init_eliminable_invariants(), and reload_combine().

◆ get_first_nonnote_insn()

rtx_insn * get_first_nonnote_insn ( void )
Return the first nonnote insn emitted in current sequence or current
function.  This routine looks inside SEQUENCEs.   

References GET_CODE, get_insns(), ggc_alloc(), next_insn(), NONJUMP_INSN_P, NOTE_P, and PATTERN().

◆ get_last_insn_anywhere()

rtx_insn * get_last_insn_anywhere ( void )
Emission of insns (adding them to the doubly-linked list).   
Return the last insn emitted, even if it is in a sequence now pushed.   

References get_current_sequence(), sequence_stack::last, and sequence_stack::next.

◆ get_last_nonnote_insn()

rtx_insn * get_last_nonnote_insn ( void )
Return the last nonnote insn emitted in current sequence or current
function.  This routine looks inside SEQUENCEs.   

References get_last_insn(), ggc_alloc(), NONJUMP_INSN_P, NOTE_P, PATTERN(), and previous_insn().

◆ get_max_insn_count()

int get_max_insn_count ( void )
Return the number of actual (non-debug) insns emitted in this
function.   

References cur_debug_insn_uid, cur_insn_uid, and ggc_alloc().

Referenced by alloc_hash_table(), and alloc_hash_table().

◆ get_mem_align_offset()

int get_mem_align_offset ( rtx mem,
unsigned int align )
Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
-1 if not known.   

References component_ref_field_offset(), DECL_ALIGN, DECL_FIELD_BIT_OFFSET, DECL_FIELD_CONTEXT, DECL_P, expr, gcc_assert, ggc_alloc(), INDIRECT_REF_P, MEM_EXPR, MEM_OFFSET, MEM_OFFSET_KNOWN_P, MEM_P, NULL_TREE, offset, poly_int_tree_p(), TREE_CODE, tree_fits_uhwi_p(), TREE_OPERAND, tree_to_uhwi(), TREE_TYPE, and TYPE_ALIGN.

◆ get_reg_attrs()

static reg_attrs * get_reg_attrs ( tree decl,
poly_int64 offset )
static
Allocate a new reg_attrs structure for DECL and OFFSET, and insert it into
the hash table if one identical to it is not already in the table.   

References attrs, ggc_alloc(), known_eq, offset, attrs::offset, and reg_attrs_htab.

Referenced by set_reg_attrs_for_decl_rtl(), set_reg_attrs_for_parm(), set_reg_attrs_from_value(), and update_reg_offset().

◆ get_spill_slot_decl()

◆ immed_double_const()

rtx immed_double_const ( HOST_WIDE_INT i0,
HOST_WIDE_INT i1,
machine_mode mode )
Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
of ints: I0 is the low-order word and I1 is the high-order word.
For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
implied upper bits are copies of the high bit of i1.  The value
itself is neither signed nor unsigned.  Do not use this routine for
non-integer modes; convert to REAL_VALUE_TYPE and use
const_double_from_real_value.   

References CONST_DOUBLE_FORMAT, CONST_DOUBLE_HIGH, CONST_DOUBLE_LOW, GEN_INT, gen_int_mode(), GET_MODE_BITSIZE(), ggc_alloc(), HOST_BITS_PER_WIDE_INT, i, i1, lookup_const_double(), PUT_MODE(), rtx_alloc(), and XWINT.

Referenced by immed_wide_int_const_1().

◆ immed_wide_int_const()

rtx immed_wide_int_const ( const poly_wide_int_ref & c,
machine_mode mode )

◆ immed_wide_int_const_1()

static rtx immed_wide_int_const_1 ( const wide_int_ref & v,
machine_mode mode )
static
Return an rtx constant for V, given that the constant has mode MODE.
The returned rtx will be a CONST_INT if V fits, otherwise it will be
a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
(if TARGET_SUPPORTS_WIDE_INT).   

References const_wide_int_alloc, CONST_WIDE_INT_ELT, CWI_PUT_NUM_ELEM, generic_wide_int< storage >::elt(), gcc_assert, gen_int_mode(), GET_MODE_PRECISION(), ggc_alloc(), HOST_BITS_PER_WIDE_INT, i, immed_double_const(), and PUT_MODE().

Referenced by immed_wide_int_const().

◆ in_sequence_p()

bool in_sequence_p ( void )
Return true if currently emitting into a sequence.   

References get_current_sequence(), and sequence_stack::next.

Referenced by expand_dummy_function_end(), expand_function_end(), leaf_function_p(), and resolve_simple_move().

◆ init_derived_machine_modes()

void init_derived_machine_modes ( void )

◆ init_emit()

◆ init_emit_once()

◆ init_emit_regs()

◆ init_raw_REG()

rtx init_raw_REG ( rtx x,
machine_mode mode,
unsigned int regno )
Initialize a fresh REG rtx with mode MODE and register REGNO.   

References NULL, ORIGINAL_REGNO, REG_ATTRS, and set_mode_and_regno().

Referenced by gen_raw_REG().

◆ init_virtual_regs()

◆ insn_file()

const char * insn_file ( const rtx_insn * insn)
Return source file of the statement that produced this insn.   

References INSN_LOCATION(), and LOCATION_FILE.

◆ insn_line()

int insn_line ( const rtx_insn * insn)
Return line number of the statement that produced this insn.   

References INSN_LOCATION(), and LOCATION_LINE.

◆ insn_location()

expanded_location insn_location ( const rtx_insn * insn)
Return expanded location of the statement that produced this insn.   

References expand_location(), and INSN_LOCATION().

Referenced by notice_source_line(), and rtx_writer::print_rtx_operand_code_i().

◆ insn_locations_finalize()

void insn_locations_finalize ( void )
At the end of emit stage, clear current location.   

References curr_location, epilogue_location, and UNKNOWN_LOCATION.

Referenced by expand_thunk().

◆ insn_locations_init()

void insn_locations_init ( void )
Allocate insn location datastructure.   

References curr_location, epilogue_location, prologue_location, and UNKNOWN_LOCATION.

Referenced by compile_file(), and expand_thunk().

◆ insn_scope()

tree insn_scope ( const rtx_insn * insn)
Return lexical scope block insn belongs to.   

References INSN_LOCATION(), and LOCATION_BLOCK.

Referenced by reemit_insn_block_notes().

◆ last_call_insn()

rtx_call_insn * last_call_insn ( void )
Return the last CALL_INSN in the current list, or 0 if there is none.
This routine does not look inside SEQUENCEs.   

References CALL_P, get_last_insn(), ggc_alloc(), and PREV_INSN().

Referenced by emit_call_1(), emit_library_call_value_1(), expand_builtin_apply(), and expand_call().

◆ link_insn_into_chain()

static void link_insn_into_chain ( rtx_insn * insn,
rtx_insn * prev,
rtx_insn * next )
inline static
Add INSN to the end of the doubly-linked list, between PREV and NEXT.
INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.   

References GET_CODE, ggc_alloc(), rtx_sequence::insn(), rtx_sequence::len(), sequence_stack::next, NONJUMP_INSN_P, NULL, PATTERN(), SET_NEXT_INSN(), and SET_PREV_INSN().

Referenced by add_insn(), add_insn_after_nobb(), and add_insn_before_nobb().

◆ lookup_const_double()

static rtx lookup_const_double ( rtx real)
static
CONST_DOUBLEs might be created from pairs of integers, or from
REAL_VALUE_TYPEs.  Also, their length is known only at run time,
so we cannot use gen_rtx_raw_CONST_DOUBLE.   
Determine whether REAL, a CONST_DOUBLE, already exists in the
hash table.  If so, return its counterpart; otherwise add it
to the hash table and return it.   

References const_double_htab, and ggc_alloc().

Referenced by const_double_from_real_value(), and immed_double_const().

◆ lookup_const_fixed()

static rtx lookup_const_fixed ( rtx fixed)
static
Determine whether FIXED, a CONST_FIXED, already exists in the
hash table.  If so, return its counterpart; otherwise add it
to the hash table and return it.   

References const_fixed_htab, and ggc_alloc().

Referenced by const_fixed_from_fixed_value().

◆ make_call_insn_raw()

◆ make_debug_insn_raw()

◆ make_insn_raw()

◆ make_jump_insn_raw()

◆ make_note_raw()

static rtx_note * make_note_raw ( enum insn_note subtype)
static
Like `make_insn_raw' but make a NOTE instead of an insn.   

References BLOCK_FOR_INSN(), cur_insn_uid, gcc_assert, ggc_alloc(), INSN_UID(), NOTE_DATA, NOTE_KIND, NULL, and rtx_alloc().

Referenced by emit_note(), emit_note_after(), emit_note_before(), and emit_note_copy().

◆ make_safe_from()

rtx make_safe_from ( rtx x,
rtx other )
Copy X if necessary so that it won't be altered by changes in OTHER.
Return X or the rtx for the pseudo reg the value of X was copied into.
OTHER must be valid as a SET_DEST.   

References CONSTANT_P, emit_move_insn(), gen_reg_rtx(), GET_CODE, GET_MODE, ggc_alloc(), MEM_P, reg_mentioned_p(), REG_P, REGNO, SUBREG_REG, and XEXP.

◆ mark_label_nuses()

static void mark_label_nuses ( rtx x)
static
Increment the label uses for all labels present in rtx.   

References GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, LABEL_NUSES, LABEL_P, label_ref_label(), mark_label_nuses(), XEXP, XVECEXP, and XVECLEN.

Referenced by mark_label_nuses(), and try_split().

◆ mark_reg_pointer()

◆ mark_used_flags()

static void mark_used_flags ( rtx x,
int flag )
static
Set the USED bit in X and its non-shareable subparts to FLAG.   

References CASE_CONST_ANY, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, mark_used_flags(), RTX_FLAG, XEXP, XVECEXP, and XVECLEN.

Referenced by mark_used_flags(), reset_used_flags(), and set_used_flags().

◆ mark_user_reg()

void mark_user_reg ( rtx reg)
Identify REG (which may be a CONCAT) as a user register.   

References gcc_assert, GET_CODE, ggc_alloc(), REG_P, REG_USERVAR_P, and XEXP.

Referenced by adjust_one_expanded_partition_var(), assign_parm_setup_reg(), and expand_one_register_var().

◆ max_label_num()

int max_label_num ( void )
Return 1 + the largest label number used so far in the current function.   

References label_num.

Referenced by compute_alignments(), grow_label_align(), init_eliminable_invariants(), reload_combine(), shorten_branches(), and update_alignments().

◆ max_reg_num()

int max_reg_num ( void )
Return 1 plus largest pseudo reg number used in the current function.   

References reg_rtx_no.

Referenced by allocate_reg_info(), assign_by_spills(), build_conflicts(), build_store_vectors(), calculate_equiv_gains(), cleanup_cfg(), combine_and_move_insns(), combine_split_insns(), compute_hash_table_work(), compute_store_table(), create_cands(), create_live_range_start_chains(), create_log_links(), cse_main(), cselib_init(), curr_insn_transform(), dead_or_predicable(), decompose_multiword_subregs(), df_grow_reg_info(), df_print_word_regset(), dump_reg_info(), expand_reg_data(), expand_reg_info(), expand_reg_info(), find_costs_and_classes(), find_moveable_pseudos(), fix_reg_equiv_init(), fwprop_done(), gcse_or_cprop_is_too_expensive(), grow_reg_equivs(), if_convert(), init_alias_analysis(), init_costs(), init_live_reload_and_inheritance_pseudos(), init_lives(), init_loop_tree_node(), init_reg_info(), init_regno_assign_info(), init_subregs_of_mode(), initiate_allocnos(), initiate_regno_cost_classes(), ira(), ira_emit(), ira_expand_reg_equiv(), ira_flattening(), ira_print_disposition(), ira_set_pseudo_classes(), ira_sort_regnos_for_alter_reg(), lra(), lra_assign(), lra_clear_live_ranges(), lra_coalesce(), lra_constrain_insn(), lra_constraints(), lra_create_live_ranges_1(), lra_create_new_reg_with_unique_value(), lra_emit_add(), lra_emit_move(), lra_final_code_change(), lra_init_equiv(), lra_need_for_scratch_reg_p(), lra_need_for_spills_p(), lra_remat(), lra_spill(), lra_split_hard_reg_for(), lra_update_reg_val_offset(), maybe_memory_address_addr_space_p(), print_live_ranges(), print_pseudo_costs(), propagate_allocno_info(), rebuild_regno_allocno_maps(), regno_clobbered_at_setjmp(), regstat_compute_calls_crossed(), regstat_compute_ri(), regstat_init_n_sets_and_refs(), remove_some_program_points_and_update_live_ranges(), remove_unnecessary_allocnos(), resize_reg_info(), rest_of_handle_combine(), rest_of_handle_cse(), rest_of_handle_cse2(), rest_of_handle_cse_after_global_opts(), run_rtl_passes(), setup_live_pseudos_and_spill_after_risky_transforms(), setup_min_max_allocno_live_range_point(), setup_preferred_alternate_classes_for_new_pseudos(), setup_reg_classes(), setup_reg_equiv_init(), spill_pseudos(), and try_combine().

◆ maybe_set_first_label_num()

void maybe_set_first_label_num ( rtx_code_label * x)
If the rtx for the label was created during the expansion of a nested
function, then first_label_num won't include this label number.
Fix that now so that array indices work later.   

References CODE_LABEL_NUMBER, and first_label_num.

Referenced by expand_label().

◆ maybe_set_max_label_num()

void maybe_set_max_label_num ( rtx_code_label * x)
For use by the RTL function loader, when mingling with normal
functions.
Ensure that label_num is greater than the label num of X, to avoid
duplicate labels in the generated assembler.   

References CODE_LABEL_NUMBER, and label_num.

Referenced by function_reader::parse_insn().

◆ mem_attrs_eq_p()

◆ mem_expr_equal_p()

bool mem_expr_equal_p ( const_tree expr1,
const_tree expr2 )
Returns true if both MEM_EXPR can be considered equal
and false otherwise.   

References ggc_alloc(), operand_equal_p(), and TREE_CODE.

Referenced by merge_memattrs().

◆ need_atomic_barrier_p()

bool need_atomic_barrier_p ( enum memmodel model,
bool pre )
Return true if memory model MODEL requires a pre-operation (release-style)
barrier or a post-operation (acquire-style) barrier.  While not universal,
this function matches behavior of several targets.   

References gcc_unreachable, ggc_alloc(), MEMMODEL_ACQ_REL, MEMMODEL_ACQUIRE, MEMMODEL_BASE_MASK, MEMMODEL_CONSUME, MEMMODEL_RELAXED, MEMMODEL_RELEASE, and MEMMODEL_SEQ_CST.
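
An illustrative sketch (not from the source) of how an expander might wrap a relaxed target operation to honour a stronger memory model:

  if (need_atomic_barrier_p (MEMMODEL_SEQ_CST, true))
    expand_mem_thread_fence (MEMMODEL_SEQ_CST);   /* release-style barrier before  */
  /* ... emit the relaxed atomic operation ...  */
  if (need_atomic_barrier_p (MEMMODEL_SEQ_CST, false))
    expand_mem_thread_fence (MEMMODEL_SEQ_CST);   /* acquire-style barrier after  */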

◆ next_active_insn()

◆ next_insn()

◆ next_nondebug_insn()

rtx_insn * next_nondebug_insn ( rtx_insn * insn)
Return the next insn after INSN that is not a DEBUG_INSN.  This
routine does not look inside SEQUENCEs.   

References DEBUG_INSN_P, and NEXT_INSN().

Referenced by constprop_register().

◆ next_nonnote_insn()

◆ next_nonnote_nondebug_insn()

rtx_insn * next_nonnote_nondebug_insn ( rtx_insn * insn)
Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs.   

References DEBUG_INSN_P, NEXT_INSN(), and NOTE_P.

Referenced by distribute_notes(), find_moveable_pseudos(), reload_cse_move2add(), rtl_verify_bb_layout(), rtx_renumbered_equal_p(), try_combine(), try_redirect_by_replacing_jump(), and update_cfg_for_uncondjump().

◆ next_nonnote_nondebug_insn_bb()

rtx_insn * next_nonnote_nondebug_insn_bb ( rtx_insn * insn)
Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
but stop the search before we enter another basic block.  This
routine does not look inside SEQUENCEs.   

References DEBUG_INSN_P, NEXT_INSN(), NOTE_INSN_BASIC_BLOCK_P, NOTE_P, and NULL.

Referenced by get_last_bb_insn(), and setup_sp_offset().

◆ next_real_insn()

rtx_insn * next_real_insn ( rtx_insn * insn)
Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
or 0, if there is none.  This routine does not look inside
SEQUENCEs.   

References INSN_P, and NEXT_INSN().

Referenced by decrease_live_ranges_number(), expand_gimple_stmt(), and mark_transaction_restart_calls().

◆ next_real_nondebug_insn()

rtx_insn * next_real_nondebug_insn ( rtx uncast_insn)
Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
or 0, if there is none.  This routine does not look inside
SEQUENCEs.   

References ggc_alloc(), NEXT_INSN(), and NONDEBUG_INSN_P.

Referenced by fill_simple_delay_slots(), and relax_delay_slots().

◆ note_outside_basic_block_p()

static bool note_outside_basic_block_p ( enum insn_note subtype,
bool on_bb_boundary_p )
static
Notes require a bit of special handling: Some notes need to have their
BLOCK_FOR_INSN set, others should never have it set, and some should
have it set or clear depending on the context.    
Return true iff a note of kind SUBTYPE should be emitted with routines
that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
caller is asked to emit a note before BB_HEAD, or after BB_END.   

References ggc_alloc().

Referenced by emit_note_after(), and emit_note_before().

◆ offset_address()

rtx offset_address ( rtx memref,
rtx offset,
unsigned HOST_WIDE_INT pow2 )
Return a memory reference like MEMREF, but whose address is changed by
adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
known to be in OFFSET (possibly 1).   

References attrs, change_address_1(), force_reg(), get_address_mode(), GET_CODE, get_mem_attrs(), GET_MODE, ggc_alloc(), memory_address_addr_space_p(), MIN, mode_mem_attrs, offset, pic_offset_table_rtx, set_mem_attrs(), simplify_gen_binary(), update_temp_slot_address(), and XEXP.

Referenced by expand_assignment(), expand_expr_real_1(), store_constructor(), and store_expr().

◆ operand_subword()

rtx operand_subword ( rtx op,
poly_uint64 offset,
int validate_address,
machine_mode mode )
Return subword OFFSET of operand OP.
 The word number, OFFSET, is interpreted as the word number starting
 at the low-order address.  OFFSET 0 is the low-order word if not
 WORDS_BIG_ENDIAN, otherwise it is the high-order word.

 If we cannot extract the required word, we return zero.  Otherwise,
 an rtx corresponding to the requested word will be returned.

 VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
 reload has completed, a valid address will always be returned.  After
 reload, if a valid address cannot be returned, we return zero.

 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
 it is the responsibility of the caller.

 MODE is the mode of OP in case it is a CONST_INT.

 ??? This is still rather broken for some cases.  The problem at the
 moment is that all callers of this function provide no 'goal mode' for
 us to work with, because they were written in a word-based SUBREG
 world.
 Use of this function can now be replaced by simplify_subreg in most
 cases.

References adjust_address_nv, const0_rtx, gcc_assert, GET_MODE, GET_MODE_SIZE(), ggc_alloc(), maybe_gt, MEM_ADDR_SPACE, MEM_P, offset, reload_completed, replace_equiv_address(), simplify_gen_subreg(), strict_memory_address_addr_space_p(), word_mode, and XEXP.

Referenced by convert_mode_scalar(), copy_blkmode_from_reg(), copy_blkmode_to_reg(), emit_move_multi_word(), expand_absneg_bit(), expand_binop(), expand_copysign_bit(), expand_doubleword_bswap(), expand_doubleword_mult(), expand_unop(), extract_integral_bit_field(), find_equiv_reg(), move_block_from_reg(), and operand_subword_force().
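
A minimal sketch for a target with 32-bit word_mode, where DImode spans two words (operand_subword_force is the variant that never fails):

  rtx d  = gen_reg_rtx (DImode);
  rtx w0 = operand_subword (d, 0, 1, DImode);   /* low-order word unless WORDS_BIG_ENDIAN  */
  rtx w1 = operand_subword (d, 1, 1, DImode);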

◆ operand_subword_force()

◆ pop_topmost_sequence()

void pop_topmost_sequence ( void )
After emitting to the outer-level insn chain, update the outer-level
insn chain, and restore the previous saved state.   

References end_sequence(), get_insns(), get_last_insn(), get_topmost_sequence(), and ggc_alloc().

Referenced by expand_builtin_apply_args(), expand_builtin_saveregs(), and get_arg_pointer_save_area().

◆ prev_active_insn()

rtx_insn * prev_active_insn ( rtx_insn * insn)
Find the last insn before INSN that really does something.  This routine
does not look inside SEQUENCEs.  After reload this also skips over
standalone USE and CLOBBER insn.   

References active_insn_p(), and PREV_INSN().

Referenced by delete_dead_insn(), merge_blocks_move_successor_nojumps(), and relax_delay_slots().

◆ prev_nondebug_insn()

rtx_insn * prev_nondebug_insn ( rtx_insn * insn)
Return the previous insn before INSN that is not a DEBUG_INSN.
This routine does not look inside SEQUENCEs.   

References DEBUG_INSN_P, and PREV_INSN().

Referenced by combine_and_move_insns(), doloop_condition_get(), expand_gimple_basic_block(), and try_forward_edges().

◆ prev_nonnote_insn()

rtx_insn * prev_nonnote_insn ( rtx_insn * insn)

◆ prev_nonnote_nondebug_insn()

rtx_insn * prev_nonnote_nondebug_insn ( rtx_insn * insn)
Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs.   

References DEBUG_INSN_P, NOTE_P, and PREV_INSN().

Referenced by canonicalize_condition(), cleanup_barriers(), noce_get_alt_condition(), noce_process_if_block(), noce_try_abs(), and reload_combine_recognize_pattern().

◆ prev_nonnote_nondebug_insn_bb()

rtx_insn * prev_nonnote_nondebug_insn_bb ( rtx_insn * insn)
Return the previous insn before INSN that is not a NOTE nor
DEBUG_INSN, but stop the search before we enter another basic
block.  This routine does not look inside SEQUENCEs.   

References DEBUG_INSN_P, NOTE_INSN_BASIC_BLOCK_P, NOTE_P, NULL, and PREV_INSN().

Referenced by find_bb_boundaries().

◆ prev_real_insn()

rtx_insn * prev_real_insn ( rtx_insn * insn)
Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
or 0, if there is none.  This routine does not look inside
SEQUENCEs.   

References INSN_P, and PREV_INSN().

Referenced by add_var_loc_to_decl(), and lra_delete_dead_insn().

◆ prev_real_nondebug_insn()

rtx_insn * prev_real_nondebug_insn ( rtx_insn * insn)
Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
or 0, if there is none.  This routine does not look inside
SEQUENCEs.   

References NONDEBUG_INSN_P, and PREV_INSN().

Referenced by move_deaths(), and try_head_merge_bb().

◆ previous_insn()

rtx_insn * previous_insn ( rtx_insn * insn)
Return the previous insn.  If it is a SEQUENCE, return the last insn
of the sequence.   

References ggc_alloc(), NONJUMP_INSN_P, PATTERN(), and PREV_INSN().

Referenced by get_last_nonnote_insn().

◆ push_to_sequence()

void push_to_sequence ( rtx_insn * first)
Set up the insn chain starting with FIRST as the current sequence,
saving the previously current one.  See the documentation for
start_sequence for more information about how to use this function.   

References sequence_stack::first, last, NEXT_INSN(), set_first_insn(), set_last_insn(), and start_sequence().

Referenced by asan_emit_allocas_unpoison(), curr_insn_transform(), emit_input_reload_insns(), emit_output_reload_insns(), expand_builtin_return(), insert_insn_on_edge(), insert_move_for_subreg(), match_reload(), process_addr_reg(), and process_address_1().
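
A minimal sketch (FIRST is a hypothetical, previously built insn chain): append further insns to it and pick up the updated chain.

  push_to_sequence (first);
  emit_move_insn (gen_reg_rtx (SImode), const0_rtx);  /* hypothetical extra insn  */
  first = get_insns ();
  end_sequence ();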

◆ push_to_sequence2()

void push_to_sequence2 ( rtx_insn * first,
rtx_insn * last )
Like push_to_sequence, but take the last insn as an argument to avoid
looping through the list.   

References sequence_stack::first, last, set_first_insn(), set_last_insn(), and start_sequence().

Referenced by assign_parm_setup_block(), assign_parm_setup_reg(), assign_parm_setup_stack(), assign_parms_unsplit_complex(), and expand_asm_stmt().

◆ push_topmost_sequence()

void push_topmost_sequence ( void )
Set up the outer-level insn chain
as the current sequence, saving the previously current one.   

References get_topmost_sequence(), ggc_alloc(), set_first_insn(), set_last_insn(), and start_sequence().

Referenced by expand_builtin_apply_args(), expand_builtin_saveregs(), and get_arg_pointer_save_area().
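
A sketch modeled on get_arg_pointer_save_area: emit a previously built sequence SEQ (hypothetical here) at function scope from inside a nested sequence.

  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();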

◆ reg_is_parm_p()

bool reg_is_parm_p ( rtx reg)
Return TRUE if REG is a PARM_DECL, FALSE otherwise.   

References gcc_assert, ggc_alloc(), REG_EXPR, REG_P, and TREE_CODE.

Referenced by ira_build_conflicts().

◆ remove_insn()

void remove_insn ( rtx_insn * insn)
Unlink INSN from the insn chain.

This function knows how to handle sequences.

This function does not invalidate data flow information associated with
INSN (i.e. does not call df_insn_delete).  That makes this function
usable for only disconnecting an insn from the chain, and re-emit it
elsewhere later.

To later insert INSN elsewhere in the insn chain via add_insn and
similar functions, PREV_INSN and NEXT_INSN must be nullified by
the caller.  Nullifying them here breaks many insn chain walks.

To really delete an insn and related DF information, use delete_insn.   

References BARRIER_P, BB_END, BB_HEAD, BLOCK_FOR_INSN(), sequence_stack::first, gcc_assert, GET_CODE, get_current_sequence(), ggc_alloc(), rtx_sequence::insn(), sequence_stack::last, rtx_sequence::len(), sequence_stack::next, NEXT_INSN(), NONJUMP_INSN_P, NOTE_P, PATTERN(), PREV_INSN(), SET_NEXT_INSN(), and SET_PREV_INSN().

Referenced by delete_insn(), emit_delay_sequence(), and resolve_simple_move().
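
A hedged sketch of the detach-and-reinsert pattern described above (INSN and AFTER are hypothetical):

  remove_insn (insn);
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);   /* re-link INSN right after AFTER  */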

◆ reorder_insns()

◆ reorder_insns_nobb()

void reorder_insns_nobb ( rtx_insn * from,
rtx_insn * to,
rtx_insn * after )
This function is deprecated; please use sequences instead.

Move a consecutive bunch of insns to a different place in the chain.
The insns to be moved are those between FROM and TO.
They are moved to a new position after the insn AFTER.
AFTER must not be FROM or TO or any insn in between.

This function does not know about SEQUENCEs and hence should not be
called after delay-slot filling has been done.   

References gcc_assert, get_insns(), get_last_insn(), ggc_alloc(), NEXT_INSN(), PREV_INSN(), set_first_insn(), set_last_insn(), SET_NEXT_INSN(), and SET_PREV_INSN().

Referenced by cleanup_barriers(), create_basic_block_structure(), delete_insn(), find_bb_boundaries(), maybe_duplicate_computed_goto(), merge_blocks_move_predecessor_nojumps(), merge_blocks_move_successor_nojumps(), reload_as_needed(), reorder_insns(), and rtl_merge_blocks().

◆ replace_equiv_address()

rtx replace_equiv_address ( rtx memref,
rtx addr,
bool inplace )
Return a memory reference like MEMREF, but with its address changed to
ADDR.  The caller is asserting that the actual piece of memory pointed
to is the same, just the form of the address is being changed, such as
by putting something into a register.  INPLACE is true if any changes
can be made directly to MEMREF or false if MEMREF must be treated as
immutable.   

References change_address_1(), ggc_alloc(), update_temp_slot_address(), and XEXP.

Referenced by emit_move_resolve_push(), expand_expr_real_1(), expand_movstr(), force_reload_address(), instantiate_virtual_regs_in_insn(), maybe_legitimize_operand_same_code(), operand_subword(), try_apply_stack_adjustment(), try_store_by_multiple_pieces(), use_anchored_address(), and validize_mem().
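
A minimal sketch (MEM is hypothetical): keep the memory attributes but put the equivalent address into a register, e.g. to satisfy an operand constraint.

  rtx reg_addr = force_reg (Pmode, XEXP (mem, 0));
  mem = replace_equiv_address (mem, reg_addr);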

◆ replace_equiv_address_nv()

◆ reset_all_used_flags()

static void reset_all_used_flags ( void )
static
Go through all the RTL insn bodies and clear all the USED bits.   

References gcc_assert, GET_CODE, get_insns(), ggc_alloc(), i, INSN_P, NEXT_INSN(), NULL, PATTERN(), REG_NOTES, reset_insn_used_flags(), XVECEXP, and XVECLEN.

Referenced by verify_rtl_sharing().

◆ reset_insn_used_flags()

static void reset_insn_used_flags ( rtx insn)
static
Reset used-flags for INSN.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, gcc_assert, INSN_P, PATTERN(), REG_NOTES, and reset_used_flags().

Referenced by reset_all_used_flags().

◆ reset_used_flags()

void reset_used_flags ( rtx x)
Clear all the USED bits in X to allow copy_rtx_if_shared to be used
to look for shared sub-parts.   

References mark_used_flags().

Referenced by doloop_modify(), reset_insn_used_flags(), try_combine(), and unshare_all_rtl_again().

◆ rtx_to_double_int()

double_int rtx_to_double_int ( const_rtx cst)

◆ set_curr_insn_location()

◆ set_decl_incoming_rtl()

void set_decl_incoming_rtl ( tree t,
rtx x,
bool by_reference_p )
Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
if the ABI requires the parameter to be passed by reference.   

References DECL_INCOMING_RTL, ggc_alloc(), and set_reg_attrs_for_decl_rtl().

Referenced by assign_parms(), assign_parms_unsplit_complex(), and expand_function_start().

◆ set_decl_rtl()

void set_decl_rtl ( tree t,
rtx x )
Assign the RTX X to declaration T.   

References DECL_WRTL_CHECK, and set_reg_attrs_for_decl_rtl().

Referenced by initialize_argument_information().

◆ set_dst_reg_note()

◆ set_for_reg_notes()

rtx set_for_reg_notes ( rtx insn)
Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
Return the set in INSN that such notes describe, or NULL if the notes
have no meaning for INSN.   

References GET_CODE, ggc_alloc(), INSN_P, multiple_sets(), NULL_RTX, PATTERN(), REG_P, SET, SET_DEST, XEXP, and XVECEXP.

Referenced by add_equal_note(), forward_propagate_and_simplify(), reload_cse_simplify(), set_dst_reg_note(), set_unique_reg_note(), and try_fwprop_subst_pattern().

◆ set_insn_deleted()

void set_insn_deleted ( rtx_insn * insn)
Replace INSN with a deleted-instruction note.   

References df_insn_delete(), ggc_alloc(), INSN_P, NOTE_KIND, and PUT_CODE.

Referenced by find_moveable_pseudos(), and one_cprop_pass().

◆ set_insn_locations()

void set_insn_locations ( rtx_insn * insn,
location_t loc )
Set the location of the insn chain starting at INSN to LOC.   

References INSN_LOCATION(), INSN_P, and NEXT_INSN().

Referenced by emit_moves(), expand_function_end(), make_epilogue_seq(), make_prologue_seq(), make_split_prologue_seq(), and thread_prologue_and_epilogue_insns().

◆ set_mem_addr_space()

void set_mem_addr_space ( rtx mem,
addr_space_t addrspace )

◆ set_mem_alias_set()

◆ set_mem_align()

◆ set_mem_attributes()

void set_mem_attributes ( rtx ref,
tree t,
int objectp )

◆ set_mem_attributes_minus_bitpos()

◆ set_mem_attrs()

◆ set_mem_attrs_for_spill()

void set_mem_attrs_for_spill ( rtx mem)
Given MEM, a result from assign_stack_local, fill in the memory
attributes as appropriate for a register allocator spill slot.
These slots are not aliasable by other memory.  We arrange for
them all to use a single MEM_EXPR, so that the aliasing code can
work properly in the case of shared spill slots.   

References ADDR_SPACE_GENERIC, attrs, DECL_RTL, get_mem_attrs(), get_spill_slot_decl(), MEM_ALIAS_SET, MEM_NOTRAP_P, attrs::offset, set_mem_attrs(), strip_offset(), and XEXP.

Referenced by alter_reg(), and assign_mem_slot().

◆ set_mem_expr()

◆ set_mem_offset()

void set_mem_offset ( rtx mem,
poly_int64 offset )

◆ set_mem_size()

◆ set_mode_and_regno()

void set_mode_and_regno ( rtx x,
machine_mode mode,
unsigned int regno )

◆ set_new_first_and_last_insn()

void set_new_first_and_last_insn ( rtx_insn * first,
rtx_insn * last )
For procedure integration.   
Install new pointers to the first and last insns in the chain.
Also, set cur_insn_uid to one higher than the last UID in use.
Used for an inlined procedure after copying the insn chain.   

References cur_debug_insn_uid, cur_insn_uid, DEBUG_INSN_P, ggc_alloc(), INSN_UID(), last, MAX, MAY_HAVE_DEBUG_INSNS, NEXT_INSN(), set_first_insn(), and set_last_insn().

◆ set_reg_attrs_for_decl_rtl()

◆ set_reg_attrs_for_parm()

void set_reg_attrs_for_parm ( rtx parm_rtx,
rtx mem )
Set the register attributes for registers contained in PARM_RTX.
Use needed values from memory attributes of MEM.   

References GET_CODE, get_reg_attrs(), ggc_alloc(), i, INTVAL, MEM_EXPR, REG_ATTRS, REG_P, set_reg_attrs_from_value(), XEXP, XVECEXP, and XVECLEN.

Referenced by assign_parm_find_stack_rtl().

◆ set_reg_attrs_from_value()

void set_reg_attrs_from_value ( rtx reg,
rtx x )

◆ set_unique_reg_note()

◆ set_used_decls()

static void set_used_decls ( tree blk)
static
Go through all virtual stack slots of a function and mark them as
shared.  We never replace the DECL_RTLs themselves with a copy,
but expressions mentioned into a DECL_RTL cannot be shared with
expressions in the instruction stream.

Note that reload may convert pseudo registers into memories in-place.
Pseudo registers are always shared, but MEMs never are.  Thus if we
reset the used flags on MEMs in the instruction stream, we must set
them again on MEMs that appear in DECL_RTLs.   

References BLOCK_CHAIN, BLOCK_SUBBLOCKS, BLOCK_VARS, DECL_CHAIN, DECL_RTL, DECL_RTL_SET_P, ggc_alloc(), set_used_decls(), and set_used_flags().

Referenced by set_used_decls(), and unshare_all_rtl_again().

◆ set_used_flags()

void set_used_flags ( rtx x)
Set all the USED bits in X to allow copy_rtx_if_shared to be used
to look for shared sub-parts.   

References mark_used_flags().

Referenced by doloop_modify(), end_ifcvt_sequence(), noce_convert_multiple_sets(), noce_process_if_block(), set_used_decls(), and unshare_all_rtl_again().

◆ start_sequence()

void start_sequence ( void )
Begin emitting insns to a sequence.  If this sequence will contain
something that might cause the compiler to pop arguments to function
calls (because those pops have previously been deferred; see
INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
before calling this function.  That will ensure that the deferred
pops are not accidentally emitted in the middle of this sequence.   

References free_sequence_stack, get_current_sequence(), get_insns(), get_last_insn(), ggc_alloc(), sequence_stack::next, NULL, set_first_insn(), and set_last_insn().

Referenced by add_test(), asan_clear_shadow(), asan_emit_allocas_unpoison(), asan_emit_stack_protection(), assign_parm_setup_reg(), attempt_change(), check_and_process_move(), combine_reaching_defs(), combine_var_copies_in_loop_exit(), compare_and_jump_seq(), computation_cost(), compute_can_copy(), cond_move_process_if_block(), convert_mode_scalar(), curr_insn_transform(), do_remat(), doloop_modify(), dw2_build_landing_pads(), emit_common_heads_for_components(), emit_common_tails_for_components(), emit_delay_sequence(), emit_inc_dec_insn_before(), emit_initial_value_sets(), emit_move_list(), emit_move_multi_word(), emit_output_reload_insns(), emit_partition_copy(), expand_absneg_bit(), expand_asm_stmt(), expand_atomic_fetch_op(), expand_binop(), expand_builtin_apply_args(), expand_builtin_int_roundingfn(), expand_builtin_int_roundingfn_2(), expand_builtin_mathfn_3(), expand_builtin_mathfn_ternary(), expand_builtin_saveregs(), expand_builtin_strlen(), expand_call(), expand_clrsb_using_clz(), expand_cond_expr_using_cmove(), expand_copysign_bit(), expand_ctz(), expand_DIVMOD(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_popcount(), expand_expr_divmod(), expand_expr_real_2(), expand_ffs(), expand_fix(), expand_fixed_convert(), expand_float(), expand_function_end(), expand_POPCOUNT(), expand_sdiv_pow2(), expand_twoval_binop_libfunc(), expand_unop(), expmed_mult_highpart_optab(), find_shift_sequence(), fix_crossing_unconditional_branches(), gen_call_used_regs_seq(), gen_clobber(), gen_cond_trap(), gen_move_insn(), gen_use(), get_arg_pointer_save_area(), get_uncond_jump_length(), hwasan_emit_untag_frame(), hwasan_frame_base(), inherit_in_ebb(), inherit_reload_reg(), init_set_costs(), initialize_uninitialized_regs(), inline_string_cmp(), insert_base_initialization(), insert_insn_on_edge(), insert_move_for_subreg(), insert_prologue_epilogue_for_components(), insert_value_copy_on_edge(), insert_var_expansion_initialization(), instantiate_virtual_regs_in_insn(), ira(), lra_process_new_insns(), make_epilogue_seq(), make_prologue_seq(), make_split_prologue_seq(), match_asm_constraints_1(), match_reload(), maybe_optimize_mod_cmp(), maybe_optimize_pow2p_mod_cmp(), noce_convert_multiple_sets(), noce_emit_cmove(), noce_emit_move_insn(), noce_emit_store_flag(), noce_process_if_block(), noce_try_abs(), noce_try_addcc(), noce_try_bitop(), noce_try_cmove(), noce_try_cmove_arith(), noce_try_cond_zero_arith(), noce_try_ifelse_collapse(), noce_try_inverse_constants(), noce_try_minmax(), noce_try_move(), noce_try_sign_mask(), noce_try_store_flag(), noce_try_store_flag_constants(), noce_try_store_flag_mask(), prepare_copy_insn(), prepare_float_lib_cmp(), prepend_insn_to_edge(), process_addr_reg(), process_address_1(), process_invariant_for_inheritance(), push_to_sequence(), push_to_sequence2(), push_topmost_sequence(), record_store(), remove_inheritance_pseudos(), replace_read(), resolve_shift_zext(), resolve_simple_move(), rtl_lv_add_condition_to_bb(), sjlj_emit_dispatch_table(), sjlj_emit_function_enter(), sjlj_emit_function_exit(), sjlj_mark_call_sites(), split_iv(), thread_prologue_and_epilogue_insns(), try_emit_cmove_seq(), and unroll_loop_runtime_iterations().
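
The canonical pattern, sketched (WHERE is a hypothetical insertion point): collect insns into a detached sequence and splice them into the main chain afterwards.

  start_sequence ();
  emit_move_insn (gen_reg_rtx (SImode), const0_rtx);   /* insns go into the sequence  */
  rtx_insn *seq = get_insns ();
  end_sequence ();
  emit_insn_before (seq, where);                       /* splice the sequence in  */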

◆ subreg_lowpart_p()

◆ subreg_memory_offset() [1/2]

poly_int64 subreg_memory_offset ( const_rtx x)
Like the (outer_mode, inner_mode, offset) overload below, but return the
offset that the existing subreg X would have if SUBREG_REG (X) were stored
in memory.  The only significant thing about the current SUBREG_REG is its
mode.   

References GET_MODE, SUBREG_BYTE, subreg_memory_offset(), and SUBREG_REG.

◆ subreg_memory_offset() [2/2]

poly_int64 subreg_memory_offset ( machine_mode outer_mode,
machine_mode inner_mode,
poly_uint64 offset )
Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
from address X.  For paradoxical big-endian subregs this is a
negative value, otherwise it's the same as OFFSET.   

References gcc_assert, ggc_alloc(), known_eq, offset, paradoxical_subreg_p(), and subreg_lowpart_offset().

Referenced by simplify_context::simplify_binary_operation_1(), simplify_context::simplify_subreg(), store_bit_field_1(), subreg_memory_offset(), and undefined_operand_subword_p().

◆ subreg_size_highpart_offset()

poly_uint64 subreg_size_highpart_offset ( poly_uint64 outer_bytes,
poly_uint64 inner_bytes )
Return the SUBREG_BYTE for a highpart subreg whose outer mode has
OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.   

References gcc_assert, ggc_alloc(), known_ge, and subreg_size_offset_from_lsb().

Referenced by subreg_highpart_offset().

◆ subreg_size_lowpart_offset()

poly_uint64 subreg_size_lowpart_offset ( poly_uint64 outer_bytes,
poly_uint64 inner_bytes )
Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.   

References gcc_checking_assert, ggc_alloc(), maybe_gt, and subreg_size_offset_from_lsb().

Referenced by alter_reg(), assign_mem_slot(), maybe_mode_change(), subreg_get_info(), and subreg_lowpart_offset().

◆ try_split()

◆ unshare_all_rtl()

◆ unshare_all_rtl_1()

static void unshare_all_rtl_1 ( rtx_insn * insn)
static
Go through all the RTL insn bodies and copy any invalid shared
structure.  This routine should only be called once.   

References copy_rtx_if_shared(), FOR_EACH_VEC_SAFE_ELT, i, stack_slot_list, and unshare_all_rtl_in_chain().

Referenced by unshare_all_rtl(), and unshare_all_rtl_again().

◆ unshare_all_rtl_again()

void unshare_all_rtl_again ( rtx_insn * insn)
Go through all the RTL insn bodies and copy any invalid shared
structure, again.  This is a fairly expensive thing to do so it
should be done sparingly.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, cfun, DECL_ARGUMENTS, DECL_CHAIN, DECL_INITIAL, DECL_RTL, FOR_EACH_VEC_SAFE_ELT, i, INSN_P, NEXT_INSN(), PATTERN(), REG_NOTES, reset_used_flags(), set_used_decls(), set_used_flags(), stack_slot_list, and unshare_all_rtl_1().

Referenced by lra(), and reload().

◆ unshare_all_rtl_in_chain()

void unshare_all_rtl_in_chain ( rtx_insn * insn)
Go through all the RTL insn bodies and copy any invalid shared structure.
Assumes the mark bits are cleared at entry.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, copy_rtx_if_shared(), INSN_P, NEXT_INSN(), PATTERN(), and REG_NOTES.

Referenced by add_test(), doloop_modify(), end_ifcvt_sequence(), noce_convert_multiple_sets(), noce_process_if_block(), try_split(), unroll_loop_runtime_iterations(), and unshare_all_rtl_1().

◆ update_reg_offset()

static void update_reg_offset ( rtx new_rtx,
rtx reg,
poly_int64 offset )
static
Update NEW_RTX with the same attributes as REG, but with OFFSET added
to the REG_OFFSET.   

References get_reg_attrs(), offset, REG_ATTRS, REG_EXPR, and REG_OFFSET.

Referenced by adjust_reg_mode(), gen_reg_rtx_offset(), gen_rtx_REG_offset(), and set_reg_attrs_from_value().

◆ valid_for_const_vector_p()

◆ validate_subreg()

◆ verify_insn_sharing()

static void verify_insn_sharing ( rtx insn)
static
Verify sharing in INSN.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, gcc_assert, INSN_P, PATTERN(), REG_NOTES, and verify_rtx_sharing().

Referenced by verify_rtl_sharing().

◆ verify_rtl_sharing()

DEBUG_FUNCTION void verify_rtl_sharing ( void )
Go through all the RTL insn bodies and check that there is no unexpected
sharing in between the subexpressions.   

References GET_CODE, get_insns(), ggc_alloc(), i, INSN_P, NEXT_INSN(), PATTERN(), reset_all_used_flags(), timevar_pop(), timevar_push(), verify_insn_sharing(), XVECEXP, and XVECLEN.

Referenced by execute_function_todo().

◆ verify_rtx_sharing()

static void verify_rtx_sharing ( rtx orig,
rtx insn )
static

◆ widen_memory_access()

rtx widen_memory_access ( rtx memref,
machine_mode mode,
poly_int64 offset )
Return a memory reference like MEMREF, but with its mode widened to
MODE and offset by OFFSET.  This would be used by targets that e.g.
cannot issue QImode memory operations and have to use SImode memory
operations plus masking logic.   

References adjust_address_1(), attrs, component_ref_field_offset(), DECL_FIELD_BIT_OFFSET, DECL_P, DECL_SIZE_UNIT, get_mem_attrs(), GET_MODE_SIZE(), ggc_alloc(), known_ge, NULL_TREE, offset, attrs::offset, poly_int_tree_p(), set_mem_attrs(), mem_attrs::size, wi::to_poly_offset(), TREE_CODE, TREE_OPERAND, and tree_to_uhwi().
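
A hedged sketch, where BYTE_MEM is a hypothetical QImode reference on a target that only has word-sized loads:

  rtx word_ref = widen_memory_access (byte_mem, SImode, 0);
  /* WORD_REF now has SImode and widened attributes; the caller is
     responsible for the masking/shifting logic.  */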

Variable Documentation

◆ byte_mode

◆ const_double_htab

hash_table<const_double_hasher>* const_double_htab
static

◆ const_fixed_htab

hash_table<const_fixed_hasher>* const_fixed_htab
static

◆ const_int_htab

hash_table<const_int_hasher>* const_int_htab
static

◆ const_int_rtx

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1]
We make one copy of (const_int C) where C is in
[- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
to save space during the compilation and simplify comparisons of
integers.   
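
As an illustration of the sharing (hedged; the exact range is governed by MAX_SAVED_CONST_INT):

    /* Small constants come back as the same cached rtx, so pointer
       comparison is enough.  */
    rtx two = GEN_INT (2);
    gcc_assert (two == const2_rtx);
    gcc_assert (GEN_INT (2) == two);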

Referenced by gen_rtx_CONST_INT(), and init_emit_once().

◆ const_poly_int_htab

hash_table<const_poly_int_hasher>* const_poly_int_htab
static

◆ const_tiny_rtx

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE]
We record floating-point CONST_DOUBLEs in each floating-point mode for
the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
is set only for MODE_INT and MODE_VECTOR_INT modes.   
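
This table backs the CONST0_RTX/CONST1_RTX/CONST2_RTX/CONSTM1_RTX macros from rtl.h; an illustrative use:

    rtx fzero = CONST0_RTX (DFmode);   /* floating-point 0.0 in DFmode     */
    rtx ione  = CONST1_RTX (SImode);   /* const1_rtx for integer modes     */
    rtx m1    = CONSTM1_RTX (SImode);  /* constm1_rtx; integer modes only  */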

Referenced by gen_const_vector(), and init_emit_once().

◆ const_true_rtx

◆ const_wide_int_htab

hash_table<const_wide_int_hasher>* const_wide_int_htab
static

Referenced by init_emit_once().

◆ copy_asm_constraints_vector

rtvec copy_asm_constraints_vector
static

Referenced by copy_insn(), and copy_insn_1().

◆ copy_asm_operands_vector

rtvec copy_asm_operands_vector
static
When an insn is being copied by copy_insn_1, this is nonzero if we have
copied an ASM_OPERANDS.
In that case, it is the copied input-operand vector.   

Referenced by copy_insn(), and copy_insn_1().

◆ copy_insn_n_scratches

int copy_insn_n_scratches
static

Referenced by copy_insn(), and copy_insn_1().

◆ copy_insn_scratch_in

rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]
static
Used by copy_insn_1 to avoid copying SCRATCHes more than once.   

Referenced by copy_insn_1().

◆ copy_insn_scratch_out

rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS]
static

Referenced by copy_insn_1().

◆ curr_location

location_t curr_location
static
Holds the current location information and the last location information, so
that the data structures are built lazily, only when some instructions at a
given place are needed.   
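
A hedged usage sketch; loc is an assumed location_t obtained elsewhere (for example from a gimple statement during expansion):

    set_curr_insn_location (loc);   /* remember LOC for insns emitted next  */
    /* ... emit_insn (...), emit_move_insn (...), etc. ... */
    location_t where = curr_insn_location ();   /* read the current value   */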

Referenced by branch_prob(), curr_insn_location(), insn_locations_finalize(), insn_locations_init(), and set_curr_insn_location().

◆ dconst0

◆ dconst1

◆ dconst2

◆ dconsthalf

◆ dconstinf

◆ dconstm0

◆ dconstm1

◆ dconstninf

◆ default_target_rtl

struct target_rtl default_target_rtl
Emit RTL for the GCC expander.
Middle-to-low level generation of rtx code and insns.

This file contains support functions for creating rtl expressions
and manipulating them in the doubly-linked chain of insns.

The patterns of the insns are created by machine-dependent
routines in insn-emit.cc, which is generated automatically from
the machine description.  These routines make the individual rtx's
of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
which are automatically generated from rtl.def; what is machine
dependent is the kind of rtx's they make and what arguments they
use.   
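
For instance, a generated pattern builder ultimately boils down to calls of this shape (illustrative only; dest, op0 and op1 are assumed operands):

    rtx sum = gen_rtx_PLUS (SImode, op0, op1);  /* wraps gen_rtx_fmt_ee (PLUS, ...) */
    emit_insn (gen_rtx_SET (dest, sum));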

◆ epilogue_location

◆ fconst0

Record fixed-point constants 0 and 1.   

◆ fconst1

◆ free_sequence_stack

struct sequence_stack* free_sequence_stack
static
Space for free sequence stack entries.   
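
These entries are recycled by the usual sequence idiom; a hedged sketch in which dest, src and insn come from the surrounding pass:

    start_sequence ();                  /* pushes a sequence_stack entry          */
    emit_move_insn (dest, src);         /* insns accumulate in the new sequence   */
    rtx_insn *seq = get_insns ();
    end_sequence ();                    /* entry goes back to free_sequence_stack */
    emit_insn_before (seq, insn);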

Referenced by end_sequence(), and start_sequence().

◆ hard_reg_clobbers

rtx hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]
static

Referenced by gen_hard_reg_clobber().

◆ invalid_insn_rtx

rtx_insn* invalid_insn_rtx
Marker used for denoting an INSN, which should never be accessed (i.e.,
this pointer should normally never be dereferenced), but is required to be
distinct from NULL_RTX.  Currently used by the peephole2 pass.   

Referenced by init_emit_once().

◆ label_num

int label_num = 1
static
This is *not* reset after each function.  It gives each CODE_LABEL
in the entire compilation a unique label number.   
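
For illustration:

    rtx_code_label *l1 = gen_label_rtx ();
    rtx_code_label *l2 = gen_label_rtx ();
    /* Label numbers are unique across the whole compilation, not per function.  */
    gcc_assert (CODE_LABEL_NUMBER (l1) != CODE_LABEL_NUMBER (l2));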

Referenced by gen_label_rtx(), init_emit(), max_label_num(), and maybe_set_max_label_num().

◆ orig_asm_constraints_vector

rtvec orig_asm_constraints_vector
static
Likewise for the constraints vector.   

Referenced by copy_insn(), and copy_insn_1().

◆ orig_asm_operands_vector

rtvec orig_asm_operands_vector
static
When an insn is being copied by copy_insn_1, this is nonzero if we have
copied an ASM_OPERANDS.
In that case, it is the original input-operand vector.   

Referenced by copy_insn(), and copy_insn_1().

◆ pc_rtx

◆ prologue_location

◆ ptr_mode

scalar_int_mode ptr_mode

Referenced by asan_emit_allocas_unpoison(), asan_emit_stack_protection(), default_addr_space_pointer_mode(), default_emit_call_builtin___clear_cache(), default_valid_pointer_mode(), emit_block_op_via_libcall(), expand_asan_emit_allocas_unpoison(), expand_atomic_compare_and_swap(), expand_atomic_fetch_op(), expand_BITINTTOFLOAT(), expand_builtin_adjust_descriptor(), expand_builtin_alloca(), expand_builtin_apply(), expand_builtin_eh_copy_values(), expand_builtin_eh_pointer(), expand_builtin_extend_pointer(), expand_builtin_frob_return_addr(), expand_builtin_init_descriptor(), expand_builtin_memory_copy_args(), expand_builtin_memset_args(), expand_builtin_next_arg(), expand_builtin_sincos(), expand_builtin_stack_address(), expand_builtin_strncpy(), expand_builtin_strub_enter(), expand_builtin_strub_leave(), expand_builtin_strub_update(), expand_DIVMODBITINT(), expand_dw2_landing_pad_for_region(), expand_expr_addr_expr(), expand_expr_real_2(), expand_FLOATTOBITINT(), expand_HWASAN_ALLOCA_UNPOISON(), expand_HWASAN_MARK(), expand_HWASAN_SET_TAG(), expand_MULBITINT(), expand_omp_atomic_mutex(), expand_omp_atomic_pipeline(), fold_builtin_memcmp(), get_memory_rtx(), get_nl_goto_field(), get_section_anchor(), gimple_fold_builtin_memory_op(), gimple_load_first_char(), strlen_pass::handle_builtin_memcmp(), hwasan_emit_prologue(), hwasan_emit_untag_frame(), init_derived_machine_modes(), maybe_emit_call_builtin___clear_cache(), maybe_emit_sync_lock_test_and_set(), mem_loc_descriptor(), nonzero_bits1(), num_sign_bit_copies1(), prepare_call_address(), process_addr_reg(), push_block(), set_stack_check_libfunc(), simplify_context::simplify_unary_operation_1(), sjlj_emit_dispatch_table(), sjlj_emit_function_enter(), stack_protect_epilogue(), try_store_by_multiple_pieces(), and verify_gimple_assign_unary().

◆ reg_attrs_htab

hash_table<reg_attr_hasher>* reg_attrs_htab
static

◆ regno_reg_rtx

rtx* regno_reg_rtx
Indexed by pseudo register number, gives the rtx for that pseudo.
Allocated in parallel with regno_pointer_align.
FIXME: We could put it into the emit_status struct, but gengtype is not able
to deal with a length attribute nested in top-level structures.   
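
A hedged example of the mapping:

    rtx pseudo = gen_reg_rtx (SImode);   /* allocate a fresh pseudo          */
    gcc_assert (regno_reg_rtx[REGNO (pseudo)] == pseudo);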

Referenced by allocno_reload_assign(), allocnos_conflict_by_live_ranges_p(), alter_reg(), assign_by_spills(), assign_mem_slot(), assign_parm_setup_reg(), calculate_equiv_gains(), canon_reg(), combine_and_move_insns(), consolidate_reg(), cse_insn(), dead_debug_insert_temp(), decompose_multiword_subregs(), decompose_register(), df_bb_refs_collect(), df_entry_block_defs_collect(), df_exit_block_uses_collect(), df_get_call_refs(), df_insn_refs_collect(), df_ref_change_reg_with_loc_1(), df_ref_record(), df_set_dead_notes_for_mw(), df_set_unused_notes_for_mw(), distribute_notes(), do_input_reload(), dump_reg_info(), emit_reload_insns(), emit_status::ensure_regno_capacity(), equiv_can_be_consumed_p(), expand_asm_reg_clobber_mem_blockage(), find_costs_and_classes(), find_split_point(), free_after_compilation(), gen_reg_rtx(), gen_rtx_REG(), get_subreg_tracking_sizes(), inherit_in_ebb(), inherit_reload_reg(), init_alias_analysis(), init_emit(), init_virtual_regs(), ira_build_conflicts(), ira_emit(), lra_constraints(), lra_create_live_ranges_1(), lra_final_code_change(), move_deaths(), non_spilled_static_chain_regno_p(), process_alt_operands(), process_bb_lives(), record_operand_costs(), reload(), remove_inheritance_pseudos(), remove_init_insns(), replace_pseudos_in(), save_call_clobbered_regs(), setup_preferred_alternate_classes_for_new_pseudos(), setup_regno_cost_classes_by_aclass(), setup_save_areas(), simplify_operand_subreg(), simplify_set(), split_reg(), subst_mode(), substitute(), target_reinit(), try_combine(), undo_to_marker(), use_regs(), and zcur_select_mode_rtx().

◆ ret_rtx

◆ simple_return_rtx

◆ spill_slot_decl

tree spill_slot_decl
static
A fake decl that is used as the MEM_EXPR of spill slots.   

Referenced by get_spill_slot_decl().

◆ split_branch_probability

profile_probability split_branch_probability
Probability of the conditional branch currently being processed by try_split.   

Referenced by init_emit_regs(), and try_split().

◆ word_mode

scalar_int_mode word_mode

Referenced by adjust_bit_field_mem_for_reg(), anti_adjust_stack_and_probe_stack_clash(), assign_parm_setup_block(), build_word_mode_vector_type(), can_assign_to_reg_without_clobbers_p(), can_decompose_p(), compute_costs(), compute_splitting_shift(), convert_mode_scalar(), copy_blkmode_from_reg(), copy_blkmode_to_reg(), create_new_invariant(), decompose_register(), default_emutls_var_fields(), default_libgcc_cmp_return_mode(), default_libgcc_shift_count_mode(), default_preferred_simd_mode(), default_unwind_word_mode(), desired_pro_or_demotion_p(), do_jump_by_parts_equality_rtx(), do_jump_by_parts_greater_rtx(), do_jump_by_parts_zero_rtx(), do_output_reload(), doloop_optimize(), tree_switch_conversion::bit_test_cluster::emit(), emit_block_cmp_via_loop(), emit_block_move_via_loop(), emit_library_call_value_1(), emit_push_insn(), emit_stack_probe(), emit_store_flag_1(), emit_store_flag_force(), emit_store_flag_int(), emutls_common_1(), equiv_constant(), expand_absneg_bit(), expand_binop(), expand_builtin_signbit(), expand_copysign_absneg(), expand_copysign_bit(), expand_debug_expr(), expand_DIVMOD(), expand_doubleword_bswap(), expand_doubleword_clz_ctz_ffs(), expand_doubleword_mod(), expand_doubleword_mult(), expand_doubleword_parity(), expand_doubleword_popcount(), expand_doubleword_shift(), expand_doubleword_shift_condmove(), expand_expr_real_2(), expand_SET_EDOM(), expand_subword_shift(), expand_superword_shift(), expand_unop(), expand_vector_parallel(), expression_expensive_p(), extract_high_half(), extract_integral_bit_field(), extract_split_bit_field(), find_decomposable_shift_zext(), find_decomposable_subregs(), find_if_header(), find_reloads_address_1(), force_const_mem(), get_optab_extraction_insn(), get_traditional_extraction_insn(), strlen_pass::handle_builtin_memcmp(), init_caller_save(), init_derived_machine_modes(), init_expr_target(), init_lower_subreg(), init_reg_modes_target(), load_register_parameters(), lshift_cheap_p(), make_extraction(), mem_loc_descriptor(), move_block_from_reg(), move_block_to_reg(), operand_subword(), optimize_range_tests_to_bit_test(), push_reload(), reload_cse_regs_1(), reload_cse_simplify_operands(), reload_cse_simplify_set(), replace_read(), resolve_clobber(), resolve_shift_zext(), resolve_simple_move(), setup_prohibited_mode_move_regs(), simplify_and_const_int_1(), simplify_context::simplify_binary_operation_1(), simplify_const_binary_operation(), simplify_context::simplify_subreg(), simplify_while_replacing(), store_constructor(), store_integral_bit_field(), store_split_bit_field(), store_unaligned_arguments_into_pseudos(), try_combine(), valid_multiword_target_p(), validate_subreg(), and vectorizable_operation().

◆ x_rtl

Data structures maintained for the currently processed function in RTL form.