GCC Middle and Back End API Reference
rtlanal.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "rtlanal.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "addresses.h"
#include "rtl-iter.h"
#include "hard-reg-set.h"
#include "function-abi.h"
Include dependency graph for rtlanal.cc:

Data Structures

struct  set_of_data
 
struct  parms_set_data
 

Macros

#define cached_num_sign_bit_copies   sorry_i_am_preventing_exponential_behavior
 

Functions

static void set_of_1 (rtx, const_rtx, void *)
 
static bool covers_regno_p (const_rtx, unsigned int)
 
static bool covers_regno_no_parallel_p (const_rtx, unsigned int)
 
static bool computed_jump_p_1 (const_rtx)
 
static void parms_set (rtx, const_rtx, void *)
 
static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode, const_rtx, machine_mode, unsigned HOST_WIDE_INT)
 
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode, const_rtx, machine_mode, unsigned HOST_WIDE_INT)
 
static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode, const_rtx, machine_mode, unsigned int)
 
static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode, const_rtx, machine_mode, unsigned int)
 
bool rtx_unstable_p (const_rtx x)
 
bool rtx_varies_p (const_rtx x, bool for_alias)
 
static poly_int64 get_initial_register_offset (int from, int to)
 
static bool rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size, machine_mode mode, bool unaligned_mems)
 
bool rtx_addr_can_trap_p (const_rtx x)
 
bool contains_mem_rtx_p (rtx x)
 
bool nonzero_address_p (const_rtx x)
 
bool rtx_addr_varies_p (const_rtx x, bool for_alias)
 
rtx get_call_rtx_from (const rtx_insn *insn)
 
tree get_call_fndecl (const rtx_insn *insn)
 
HOST_WIDE_INT get_integer_term (const_rtx x)
 
rtx get_related_value (const_rtx x)
 
bool offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
 
void split_const (rtx x, rtx *base_out, rtx *offset_out)
 
rtx strip_offset (rtx x, poly_int64 *offset_out)
 
poly_int64 get_args_size (const_rtx x)
 
int count_occurrences (const_rtx x, const_rtx find, int count_dest)
 
bool unsigned_reg_p (rtx op)
 
bool reg_mentioned_p (const_rtx reg, const_rtx in)
 
bool no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
 
bool reg_used_between_p (const_rtx reg, const rtx_insn *from_insn, const rtx_insn *to_insn)
 
bool reg_referenced_p (const_rtx x, const_rtx body)
 
bool reg_set_between_p (const_rtx reg, const rtx_insn *from_insn, const rtx_insn *to_insn)
 
bool reg_set_p (const_rtx reg, const_rtx insn)
 
bool modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
 
bool modified_in_p (const_rtx x, const_rtx insn)
 
bool read_modify_subreg_p (const_rtx x)
 
const_rtx set_of (const_rtx pat, const_rtx insn)
 
rtx simple_regno_set (rtx pat, unsigned int regno)
 
void find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
 
void record_hard_reg_sets (rtx x, const_rtx pat, void *data)
 
void find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
 
void record_hard_reg_uses (rtx *px, void *data)
 
rtx single_set_2 (const rtx_insn *insn, const_rtx pat)
 
bool multiple_sets (const_rtx insn)
 
bool set_noop_p (const_rtx set)
 
bool noop_move_p (const rtx_insn *insn)
 
bool refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x, rtx *loc)
 
bool reg_overlap_mentioned_p (const_rtx x, const_rtx in)
 
void note_pattern_stores (const_rtx x, void(*fun)(rtx, const_rtx, void *), void *data)
 
void note_stores (const rtx_insn *insn, void(*fun)(rtx, const_rtx, void *), void *data)
 
void note_uses (rtx *pbody, void(*fun)(rtx *, void *), void *data)
 
bool dead_or_set_p (const rtx_insn *insn, const_rtx x)
 
bool dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno)
 
rtx find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
 
rtx find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
 
rtx find_reg_equal_equiv_note (const_rtx insn)
 
rtx find_constant_src (const rtx_insn *insn)
 
bool find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
 
bool find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
 
static bool int_reg_note_p (enum reg_note kind)
 
rtx alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
 
void add_reg_note (rtx insn, enum reg_note kind, rtx datum)
 
void add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
 
void add_args_size_note (rtx_insn *insn, poly_int64 value)
 
void add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
 
rtx duplicate_reg_note (rtx note)
 
void remove_note (rtx_insn *insn, const_rtx note)
 
bool remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan)
 
void remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
 
bool in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
 
void remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
 
bool volatile_insn_p (const_rtx x)
 
bool volatile_refs_p (const_rtx x)
 
bool side_effects_p (const_rtx x)
 
bool may_trap_p_1 (const_rtx x, unsigned flags)
 
bool may_trap_p (const_rtx x)
 
bool may_trap_or_fault_p (const_rtx x)
 
rtx replace_rtx (rtx x, rtx from, rtx to, bool all_regs)
 
void replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
 
void replace_label_in_insn (rtx_insn *insn, rtx_insn *old_label, rtx_insn *new_label, bool update_label_nuses)
 
bool rtx_referenced_p (const_rtx x, const_rtx body)
 
bool tablejump_p (const rtx_insn *insn, rtx_insn **labelp, rtx_jump_table_data **tablep)
 
rtx tablejump_casesi_pattern (const rtx_insn *insn)
 
bool computed_jump_p (const rtx_insn *insn)
 
static int for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
 
int for_each_inc_dec (rtx x, for_each_inc_dec_fn fn, void *data)
 
rtx regno_use_in (unsigned int regno, rtx x)
 
int commutative_operand_precedence (rtx op)
 
bool swap_commutative_operands_p (rtx x, rtx y)
 
bool auto_inc_p (const_rtx x)
 
bool loc_mentioned_in_p (rtx *loc, const_rtx in)
 
poly_uint64 subreg_size_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes, poly_uint64 subreg_byte)
 
poly_uint64 subreg_lsb (const_rtx x)
 
poly_uint64 subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes, poly_uint64 lsb_shift)
 
void subreg_get_info (unsigned int xregno, machine_mode xmode, poly_uint64 offset, machine_mode ymode, struct subreg_info *info)
 
unsigned int subreg_regno_offset (unsigned int xregno, machine_mode xmode, poly_uint64 offset, machine_mode ymode)
 
bool subreg_offset_representable_p (unsigned int xregno, machine_mode xmode, poly_uint64 offset, machine_mode ymode)
 
int simplify_subreg_regno (unsigned int xregno, machine_mode xmode, poly_uint64 offset, machine_mode ymode)
 
int lowpart_subreg_regno (unsigned int regno, machine_mode xmode, machine_mode ymode)
 
unsigned int subreg_regno (const_rtx x)
 
unsigned int subreg_nregs (const_rtx x)
 
unsigned int subreg_nregs_with_regno (unsigned int regno, const_rtx x)
 
rtx_insn * find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
 
bool keep_with_call_p (const rtx_insn *insn)
 
bool label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
 
int rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code, int opno, bool speed)
 
void get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno, struct full_rtx_costs *c)
 
int address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
 
int default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
 
unsigned HOST_WIDE_INT nonzero_bits (const_rtx x, machine_mode mode)
 
unsigned int num_sign_bit_copies (const_rtx x, machine_mode mode)
 
static bool nonzero_bits_binary_arith_p (const_rtx x)
 
static bool num_sign_bit_copies_binary_arith_p (const_rtx x)
 
int pattern_cost (rtx pat, bool speed)
 
int insn_cost (rtx_insn *insn, bool speed)
 
unsigned seq_cost (const rtx_insn *seq, bool speed)
 
rtx canonicalize_condition (rtx_insn *insn, rtx cond, int reverse, rtx_insn **earliest, rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
 
rtx get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode, int valid_at_insn_p)
 
static void init_num_sign_bit_copies_in_rep (void)
 
bool truncated_to_mode (machine_mode mode, const_rtx x)
 
static bool setup_reg_subrtx_bounds (unsigned int code)
 
void init_rtlanal (void)
 
bool constant_pool_constant_p (rtx x)
 
int low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
 
scalar_int_mode get_address_mode (rtx mem)
 
void split_double (rtx value, rtx *first, rtx *second)
 
static bool lsb_bitfield_op_p (rtx x)
 
rtx * strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
 
static bool binary_scale_code_p (enum rtx_code code)
 
static rtx * get_base_term (rtx *inner)
 
static rtx * get_index_term (rtx *inner)
 
static void set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
 
static void set_address_base (struct address_info *info, rtx *loc, rtx *inner)
 
static void set_address_index (struct address_info *info, rtx *loc, rtx *inner)
 
static void set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
 
static void decompose_incdec_address (struct address_info *info)
 
static void decompose_automod_address (struct address_info *info)
 
static rtx ** extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
 
static int baseness (rtx x, machine_mode mode, addr_space_t as, enum rtx_code outer_code, enum rtx_code index_code)
 
static void decompose_normal_address (struct address_info *info)
 
void decompose_address (struct address_info *info, rtx *loc, machine_mode mode, addr_space_t as, enum rtx_code outer_code)
 
void decompose_lea_address (struct address_info *info, rtx *loc)
 
void decompose_mem_address (struct address_info *info, rtx x)
 
void update_address (struct address_info *info)
 
HOST_WIDE_INT get_index_scale (const struct address_info *info)
 
enum rtx_code get_index_code (const struct address_info *info)
 
bool contains_symbol_ref_p (const_rtx x)
 
bool contains_symbolic_reference_p (const_rtx x)
 
bool contains_constant_pool_address_p (const_rtx x)
 
bool tls_referenced_p (const_rtx x)
 
void add_auto_inc_notes (rtx_insn *insn, rtx x)
 
bool register_asm_p (const_rtx x)
 
bool vec_series_highpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
 
bool vec_series_lowpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
 
bool contains_paradoxical_subreg_p (rtx x)
 

Variables

rtx_subrtx_bound_info rtx_all_subrtx_bounds [NUM_RTX_CODE]
 
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds [NUM_RTX_CODE]
 
static unsigned int num_sign_bit_copies_in_rep [MAX_MODE_INT+1][MAX_MODE_INT+1]
 

Macro Definition Documentation

◆ cached_num_sign_bit_copies

#define cached_num_sign_bit_copies   sorry_i_am_preventing_exponential_behavior
We let num_sign_bit_copies recur into nonzero_bits as that is useful.
We don't let nonzero_bits recur into num_sign_bit_copies, because that
is less useful.  We can't allow both, because that results in exponential
run time recursion.  There is a nullstone testcase that triggered
this.  This macro avoids accidental uses of num_sign_bit_copies.   

Referenced by cached_num_sign_bit_copies(), num_sign_bit_copies(), and num_sign_bit_copies1().

Function Documentation

◆ add_args_size_note()

void add_args_size_note ( rtx_insn * insn,
poly_int64 value )
Add a REG_ARGS_SIZE note to INSN with value VALUE.   

References add_reg_note(), find_reg_note(), gcc_checking_assert, gen_int_mode(), ggc_alloc(), and NULL_RTX.

Referenced by adjust_stack_1(), emit_call_1(), expand_builtin_trap(), and fixup_args_size_notes().

◆ add_auto_inc_notes()

void add_auto_inc_notes ( rtx_insn * insn,
rtx x )
Process recursively X of INSN and add REG_INC notes if necessary.   

References add_auto_inc_notes(), add_reg_note(), auto_inc_p(), GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, XEXP, XVECEXP, and XVECLEN.

Referenced by add_auto_inc_notes(), peep2_attempt(), reload(), and update_inc_notes().

◆ add_int_reg_note()

void add_int_reg_note ( rtx_insn * insn,
enum reg_note kind,
int datum )
Add an integer register note with kind KIND and datum DATUM to INSN.   

References gcc_checking_assert, ggc_alloc(), int_reg_note_p(), and REG_NOTES.

Referenced by add_reg_br_prob_note(), and add_shallow_copy_of_reg_note().

◆ add_reg_note()

◆ add_shallow_copy_of_reg_note()

void add_shallow_copy_of_reg_note ( rtx_insn * insn,
rtx note )
Add a register note like NOTE to INSN.   

References add_int_reg_note(), add_reg_note(), GET_CODE, ggc_alloc(), REG_NOTE_KIND, XEXP, and XINT.

Referenced by distribute_notes().

◆ address_cost()

int address_cost ( rtx x,
machine_mode mode,
addr_space_t as,
bool speed )
Return cost of address expression X.
Expect that X is properly formed address reference.

SPEED parameter specify whether costs optimized for speed or size should
be returned.   

References memory_address_addr_space_p(), and targetm.

Referenced by computation_cost(), create_new_invariant(), force_expr_to_var_cost(), get_address_cost(), get_address_cost_ainc(), preferred_mem_scale_factor(), should_replace_address(), and try_replace_in_use().

◆ alloc_reg_note()

rtx alloc_reg_note ( enum reg_note kind,
rtx datum,
rtx list )
Allocate a register note with kind KIND and datum DATUM.  LIST is
stored as the pointer to the next register note.   

References alloc_EXPR_LIST(), alloc_INSN_LIST(), gcc_checking_assert, ggc_alloc(), int_reg_note_p(), and PUT_REG_NOTE_KIND.

Referenced by add_reg_note(), distribute_notes(), duplicate_reg_note(), eliminate_regs_1(), lra_eliminate_regs_1(), move_deaths(), recog_for_combine_1(), and try_combine().

◆ auto_inc_p()

bool auto_inc_p ( const_rtx x)
Return true if X is an autoincrement side effect and the register is
not the stack pointer.   

References GET_CODE, ggc_alloc(), stack_pointer_rtx, and XEXP.

Referenced by add_auto_inc_notes().

◆ baseness()

static int baseness ( rtx x,
machine_mode mode,
addr_space_t as,
enum rtx_code outer_code,
enum rtx_code index_code )
static
Evaluate the likelihood of X being a base or index value, returning
positive if it is likely to be a base, negative if it is likely to be
an index, and 0 if we can't tell.  Make the magnitude of the return
value reflect the amount of confidence we have in the answer.

MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.   

References ggc_alloc(), HARD_REGISTER_P, MEM_P, MEM_POINTER, ok_for_base_p_1(), REG_P, REG_POINTER, and REGNO.

Referenced by decompose_normal_address().

◆ binary_scale_code_p()

static bool binary_scale_code_p ( enum rtx_code code)
static
Return true if CODE applies some kind of scale.  The scaled value is
the first operand and the scale is the second.   

References ggc_alloc().

Referenced by get_index_term().

◆ cached_nonzero_bits()

static unsigned HOST_WIDE_INT cached_nonzero_bits ( const_rtx x,
scalar_int_mode mode,
const_rtx known_x,
machine_mode known_mode,
unsigned HOST_WIDE_INT known_ret )
static
The function cached_nonzero_bits is a wrapper around nonzero_bits1.
It avoids exponential behavior in nonzero_bits1 when X has
identical subexpressions on the first or the second level.   

References cached_nonzero_bits(), ggc_alloc(), nonzero_bits1(), nonzero_bits_binary_arith_p(), and XEXP.

Referenced by cached_nonzero_bits(), nonzero_bits(), and nonzero_bits1().

◆ cached_num_sign_bit_copies()

static unsigned int cached_num_sign_bit_copies ( const_rtx x,
scalar_int_mode mode,
const_rtx known_x,
machine_mode known_mode,
unsigned int known_ret )
static
The function cached_num_sign_bit_copies is a wrapper around
num_sign_bit_copies1.  It avoids exponential behavior in
num_sign_bit_copies1 when X has identical subexpressions on the
first or the second level.   

References cached_num_sign_bit_copies, ggc_alloc(), num_sign_bit_copies1(), num_sign_bit_copies_binary_arith_p(), and XEXP.

◆ canonicalize_condition()

rtx canonicalize_condition ( rtx_insn * insn,
rtx cond,
int reverse,
rtx_insn ** earliest,
rtx want_reg,
int allow_cc_mode,
int valid_at_insn_p )
Given an insn INSN and condition COND, return the condition in a
canonical form to simplify testing by callers.  Specifically:

(1) The code will always be a comparison operation (EQ, NE, GT, etc.).
(2) Both operands will be machine operands.
(3) If an operand is a constant, it will be the second operand.
(4) (LE x const) will be replaced with (LT x <const+1>) and similarly
    for GE, GEU, and LEU.

If the condition cannot be understood, or is an inequality floating-point
comparison which needs to be reversed, 0 will be returned.

If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.

If EARLIEST is nonzero, it is a pointer to a place where the earliest
insn used in locating the condition was found.  If a replacement test
of the condition is desired, it should be placed in front of that
insn and we will be sure that the inputs are still valid.

If WANT_REG is nonzero, we wish the condition to be relative to that
register, if possible.  Therefore, do not canonicalize the condition
further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
to be a compare to a CC mode register.

If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
and at INSN.   

References BLOCK_FOR_INSN(), COMPARISON_P, CONST0_RTX, CONST_INT_P, const_val, CONSTANT_P, FIND_REG_INC_NOTE, gen_int_mode(), GET_CODE, GET_MODE, GET_MODE_CLASS, GET_MODE_MASK, GET_MODE_PRECISION(), GET_RTX_CLASS, ggc_alloc(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT_1U, INTVAL, modified_between_p(), modified_in_p(), NONJUMP_INSN_P, NULL_RTX, prev_nonnote_nondebug_insn(), REAL_VALUE_NEGATIVE, REAL_VALUE_TYPE, REG_P, reg_set_p(), reversed_comparison_code(), RTX_COMM_COMPARE, RTX_COMPARE, rtx_equal_p(), SCALAR_FLOAT_MODE_P, SET, SET_DEST, set_of(), SET_SRC, STORE_FLAG_VALUE, swap_condition(), val_signbit_known_set_p(), and XEXP.

Referenced by get_condition(), noce_get_alt_condition(), and noce_get_condition().

◆ commutative_operand_precedence()

int commutative_operand_precedence ( rtx op)
Return a value indicating whether OP, an operand of a commutative
operation, is preferred as the first or second operand.  The more
positive the value, the stronger the preference for being the first
operand.   

References avoid_constant_pool_reference(), GET_CODE, GET_RTX_CLASS, ggc_alloc(), MEM_P, MEM_POINTER, OBJECT_P, REG_P, REG_POINTER, RTX_BIN_ARITH, RTX_COMM_ARITH, RTX_CONST_OBJ, RTX_EXTRA, RTX_OBJ, RTX_UNARY, and SUBREG_REG.

Referenced by compare_address_parts(), simplify_plus_minus_op_data_cmp(), swap_commutative_operands_p(), and swap_commutative_operands_with_target().

◆ computed_jump_p()

bool computed_jump_p ( const rtx_insn * insn)
Return true if INSN is an indirect jump (aka computed jump).

Tablejumps and casesi insns are not considered indirect jumps;
we can recognize them by a (use (label_ref)).   

References computed_jump_p_1(), GET_CODE, ggc_alloc(), i, JUMP_LABEL, JUMP_P, NULL, set_of_data::pat, PATTERN(), pc_rtx, SET, SET_DEST, SET_SRC, XEXP, XVECEXP, and XVECLEN.

Referenced by bypass_conditional_jumps(), create_trace_edges(), default_invalid_within_doloop(), duplicate_computed_gotos(), fix_crossing_unconditional_branches(), make_edges(), patch_jump_insn(), reorder_basic_blocks_simple(), try_crossjump_bb(), and try_head_merge_bb().

◆ computed_jump_p_1()

static bool computed_jump_p_1 ( const_rtx x)
static
A subroutine of computed_jump_p, return true if X contains a REG or MEM or
constant that is not in the constant pool and not in the condition
of an IF_THEN_ELSE.   

References CASE_CONST_ANY, computed_jump_p_1(), CONSTANT_POOL_ADDRESS_P, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, XEXP, XVECEXP, and XVECLEN.

Referenced by computed_jump_p(), and computed_jump_p_1().

◆ constant_pool_constant_p()

bool constant_pool_constant_p ( rtx x)
Check whether this is a constant pool constant.   

References avoid_constant_pool_reference(), and CONST_DOUBLE_P.

Referenced by get_inv_cost().

◆ contains_constant_pool_address_p()

bool contains_constant_pool_address_p ( const_rtx x)
Return true if RTL X contains a constant pool address.   

References CONSTANT_POOL_ADDRESS_P, FOR_EACH_SUBRTX, and SYMBOL_REF_P.

◆ contains_mem_rtx_p()

bool contains_mem_rtx_p ( rtx x)
Return true if X contains a MEM subrtx.   

References FOR_EACH_SUBRTX, and MEM_P.

Referenced by bb_valid_for_noce_process_p(), prune_expressions(), and try_fwprop_subst_pattern().

◆ contains_paradoxical_subreg_p()

bool contains_paradoxical_subreg_p ( rtx x)
Return true if X contains a paradoxical subreg.   

References FOR_EACH_SUBRTX_VAR, ggc_alloc(), paradoxical_subreg_p(), and SUBREG_P.

Referenced by forward_propagate_and_simplify(), try_fwprop_subst_pattern(), and try_replace_reg().

◆ contains_symbol_ref_p()

bool contains_symbol_ref_p ( const_rtx x)
Return true if RTL X contains a SYMBOL_REF.   

References FOR_EACH_SUBRTX, and SYMBOL_REF_P.

Referenced by lra_constraints(), scan_one_insn(), and track_expr_p().

◆ contains_symbolic_reference_p()

bool contains_symbolic_reference_p ( const_rtx x)
Return true if RTL X contains a SYMBOL_REF or LABEL_REF.   

References FOR_EACH_SUBRTX, GET_CODE, ggc_alloc(), and SYMBOL_REF_P.

Referenced by simplify_context::simplify_binary_operation_1().

◆ count_occurrences()

int count_occurrences ( const_rtx x,
const_rtx find,
int count_dest )
Return the number of places FIND appears within X.  If COUNT_DEST is
zero, we do not count occurrences inside the destination of a SET.   

References CASE_CONST_ANY, count, count_occurrences(), find(), GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, MEM_P, rtx_equal_p(), SET, SET_DEST, SET_SRC, XEXP, XVECEXP, and XVECLEN.

Referenced by count_occurrences(), delete_output_reload(), emit_input_reload_insns(), find_inc(), record_value_for_reg(), and reload_as_needed().

◆ covers_regno_no_parallel_p()

static bool covers_regno_no_parallel_p ( const_rtx dest,
unsigned int test_regno )
static
Return TRUE iff DEST is a register or subreg of a register, is a
complete rather than read-modify-write destination, and contains
register TEST_REGNO.   

References END_REGNO(), GET_CODE, ggc_alloc(), read_modify_subreg_p(), REG_P, REGNO, and SUBREG_REG.

Referenced by covers_regno_p(), and simple_regno_set().

◆ covers_regno_p()

static bool covers_regno_p ( const_rtx dest,
unsigned int test_regno )
static
Like covers_regno_no_parallel_p, but also handles PARALLELs where
any member matches the covers_regno_no_parallel_p criteria.   

References covers_regno_no_parallel_p(), GET_CODE, ggc_alloc(), i, NULL_RTX, XEXP, XVECEXP, and XVECLEN.

Referenced by dead_or_set_regno_p().

◆ dead_or_set_p()

bool dead_or_set_p ( const rtx_insn * insn,
const_rtx x )
Return true if X's old contents don't survive after INSN.
This will be true if X is a register and X dies in INSN or because
INSN entirely sets X.

"Entirely set" means set directly and not through a SUBREG, or
ZERO_EXTRACT, so no trace of the old contents remains.
Likewise, REG_INC does not count.

REG may be a hard or pseudo reg.  Renumbering is not taken into account,
but for this use that makes no difference, since regs don't overlap
during their lifetimes.  Therefore, this function may be used
at any time after deaths have been computed.

If REG is a hard reg that occupies multiple machine registers, this
function will only return true if each of those registers will be replaced
by INSN.   

References dead_or_set_regno_p(), END_REGNO(), gcc_assert, i, REG_P, and REGNO.

Referenced by decrease_live_ranges_number(), distribute_notes(), do_input_reload(), emit_input_reload_insns(), find_single_use(), set_nonzero_bits_and_sign_copies(), and try_combine().

◆ dead_or_set_regno_p()

bool dead_or_set_regno_p ( const rtx_insn * insn,
unsigned int test_regno )
Utility function for dead_or_set_p to check an individual register.  

References CALL_P, COND_EXEC_CODE, covers_regno_p(), find_regno_fusage(), find_regno_note(), GET_CODE, ggc_alloc(), i, PATTERN(), SET, SET_DEST, XVECEXP, and XVECLEN.

Referenced by dead_or_set_p(), distribute_notes(), and move_deaths().

◆ decompose_address()

void decompose_address ( struct address_info * info,
rtx * loc,
machine_mode mode,
addr_space_t as,
enum rtx_code outer_code )
Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
or VOIDmode if not known.  AS is the address space associated with LOC.
OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.   

References decompose_automod_address(), decompose_incdec_address(), decompose_normal_address(), GET_CODE, ggc_alloc(), and strip_address_mutations().

Referenced by decompose_lea_address(), decompose_mem_address(), and update_address().

◆ decompose_automod_address()

static void decompose_automod_address ( struct address_info * info)
static
INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
of INFO accordingly.   

References CONSTANT_P, gcc_assert, gcc_checking_assert, GET_CODE, ggc_alloc(), rtx_equal_p(), set_address_base(), set_address_disp(), set_address_index(), strip_address_mutations(), and XEXP.

Referenced by decompose_address().

◆ decompose_incdec_address()

static void decompose_incdec_address ( struct address_info * info)
static
INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
rest of INFO accordingly.   

References gcc_checking_assert, ggc_alloc(), set_address_base(), and XEXP.

Referenced by decompose_address().

◆ decompose_lea_address()

void decompose_lea_address ( struct address_info * info,
rtx * loc )
Describe address operand LOC in INFO.   

References ADDR_SPACE_GENERIC, decompose_address(), and ggc_alloc().

Referenced by process_address_1(), and satisfies_address_constraint_p().

◆ decompose_mem_address()

void decompose_mem_address ( struct address_info * info,
rtx x )
Describe the address of MEM X in INFO.   

References decompose_address(), gcc_assert, GET_MODE, ggc_alloc(), MEM_ADDR_SPACE, MEM_P, and XEXP.

Referenced by process_address_1(), and satisfies_memory_constraint_p().

◆ decompose_normal_address()

static void decompose_normal_address ( struct address_info * info)
static
INFO->INNER describes a normal, non-automodified address.
Fill in the rest of INFO accordingly.   

References baseness(), CONSTANT_P, extract_plus_operands(), gcc_assert, get_base_term(), GET_CODE, get_index_term(), ggc_alloc(), set_address_base(), set_address_disp(), set_address_index(), set_address_segment(), and strip_address_mutations().

Referenced by decompose_address().

◆ default_address_cost()

int default_address_cost ( rtx x,
machine_mode ,
addr_space_t ,
bool speed )
If the target doesn't override, compute the cost as with arithmetic.   

References ggc_alloc(), and rtx_cost().

◆ duplicate_reg_note()

rtx duplicate_reg_note ( rtx note)
Duplicate NOTE and return the copy.   

References alloc_reg_note(), copy_insn_1(), GET_CODE, ggc_alloc(), NULL_RTX, REG_NOTE_KIND, XEXP, and XINT.

Referenced by emit_copy_of_insn_after().

◆ extract_plus_operands()

static rtx ** extract_plus_operands ( rtx * loc,
rtx ** ptr,
rtx ** end )
static
Treat *LOC as a tree of PLUS operands and store pointers to the summed
values in [PTR, END).  Return a pointer to the end of the used array.   

References end(), extract_plus_operands(), gcc_assert, GET_CODE, ggc_alloc(), and XEXP.

Referenced by decompose_normal_address(), and extract_plus_operands().

◆ find_all_hard_reg_sets()

void find_all_hard_reg_sets ( const rtx_insn * insn,
HARD_REG_SET * pset,
bool implicit )
Examine INSN, and compute the set of hard registers written by it.
Store it in *PSET.  Should only be called after reload.

IMPLICIT is true if we should include registers that are fully-clobbered
by calls.  This should be used with caution, since it doesn't include
partially-clobbered registers.   

References CALL_P, CLEAR_HARD_REG_SET, function_abi::full_reg_clobbers(), ggc_alloc(), insn_callee_abi(), note_stores(), NULL, record_hard_reg_sets(), REG_NOTE_KIND, REG_NOTES, and XEXP.

Referenced by collect_fn_hard_reg_usage().

◆ find_all_hard_regs()

void find_all_hard_regs ( const_rtx x,
HARD_REG_SET * pset )
Add all hard register in X to *PSET.   

References add_to_hard_reg_set(), FOR_EACH_SUBRTX, GET_MODE, ggc_alloc(), REG_P, and REGNO.

Referenced by record_hard_reg_uses().

◆ find_constant_src()

rtx find_constant_src ( const rtx_insn * insn)
Check whether INSN is a single_set whose source is known to be
equivalent to a constant.  Return that constant if so, otherwise
return null.   

References avoid_constant_pool_reference(), CONSTANT_P, find_reg_equal_equiv_note(), ggc_alloc(), NULL_RTX, SET_SRC, single_set(), and XEXP.

◆ find_first_parameter_load()

rtx_insn * find_first_parameter_load ( rtx_insn * call_insn,
rtx_insn * boundary )
Look backward for first parameter to be loaded.
Note that loads of all parameters will not necessarily be
found if CSE has eliminated some of them (e.g., an argument
to the outer function is passed down as a parameter).
Do not skip BOUNDARY.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, CLEAR_HARD_REG_SET, gcc_assert, GET_CODE, ggc_alloc(), INSN_P, LABEL_P, note_stores(), parms_set(), PREV_INSN(), REG_P, REGNO, SET_HARD_REG_BIT, STATIC_CHAIN_REG_P, and XEXP.

Referenced by insert_insn_end_basic_block(), and sjlj_mark_call_sites().

◆ find_reg_equal_equiv_note()

◆ find_reg_fusage()

bool find_reg_fusage ( const_rtx insn,
enum rtx_code code,
const_rtx datum )
Return true if DATUM, or any overlap of DATUM, of kind CODE is found
in the CALL_INSN_FUNCTION_USAGE information of INSN.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, END_REGNO(), find_regno_fusage(), gcc_assert, GET_CODE, ggc_alloc(), i, REG_P, REGNO, rtx_equal_p(), and XEXP.

Referenced by can_combine_p(), decrease_live_ranges_number(), distribute_links(), distribute_notes(), no_conflict_move_test(), push_reload(), reg_set_p(), and reg_used_between_p().

◆ find_reg_note()

rtx find_reg_note ( const_rtx insn,
enum reg_note kind,
const_rtx datum )
Return the reg-note of kind KIND in insn INSN, if there is one.
If DATUM is nonzero, look for one whose datum is DATUM.   

References gcc_checking_assert, ggc_alloc(), INSN_P, REG_NOTE_KIND, REG_NOTES, and XEXP.

Referenced by add_args_size_note(), add_insn_allocno_copies(), add_reg_br_prob_note(), add_store_equivs(), adjust_insn(), any_uncondjump_p(), attempt_change(), find_comparison_dom_walker::before_dom_children(), can_combine_p(), can_nonlocal_goto(), canonicalize_insn(), check_for_inc_dec(), check_for_inc_dec_1(), check_for_label_ref(), combine_and_move_insns(), combine_predictions_for_insn(), combine_stack_adjustments_for_block(), compute_outgoing_frequencies(), cond_exec_process_if_block(), control_flow_insn_p(), copy_reg_eh_region_note_backward(), copy_reg_eh_region_note_forward(), copyprop_hardreg_forward_1(), create_trace_edges(), cse_insn(), cselib_process_insn(), curr_insn_transform(), dead_or_predicable(), decrease_live_ranges_number(), deletable_insn_p(), delete_insn(), delete_unmarked_insns(), distribute_notes(), do_local_cprop(), do_output_reload(), dw2_fix_up_crossing_landing_pad(), emit_cmp_and_jump_insn_1(), emit_input_reload_insns(), emit_libcall_block_1(), expand_addsub_overflow(), expand_gimple_stmt(), expand_loc(), expand_mul_overflow(), expand_neg_overflow(), find_dummy_reload(), find_equiv_reg(), find_moveable_store(), find_reloads(), fixup_args_size_notes(), fixup_eh_region_note(), fixup_reorder_chain(), fixup_tail_calls(), force_move_args_size_note(), force_nonfallthru_and_redirect(), forward_propagate_and_simplify(), forward_propagate_into(), fp_setter_insn(), get_call_fndecl(), get_eh_region_and_lp_from_rtx(), hash_scan_set(), indirect_jump_optimize(), inherit_in_ebb(), init_alias_analysis(), init_eliminable_invariants(), init_elimination(), insn_stack_adjust_offset_pre_post(), ira_update_equiv_info_by_shuffle_insn(), label_is_jump_target_p(), lra_process_new_insns(), make_edges(), make_reg_eh_region_note_nothrow_nononlocal(), mark_jump_label_1(), mark_referenced_resources(), mark_set_resources(), match_reload(), maybe_merge_cfa_adjust(), maybe_move_args_size_note(), maybe_propagate_label_ref(), merge_if_block(), mostly_true_jump(), 
move_invariant_reg(), need_cmov_or_rewire(), need_fake_edge_p(), no_equiv(), noce_process_if_block(), notice_args_size(), old_insns_match_p(), outgoing_edges_match(), patch_jump_insn(), peep2_attempt(), process_alt_operands(), process_bb_lives(), process_bb_node_lives(), purge_dead_edges(), record_reg_classes(), record_set_data(), record_store(), redirect_jump_2(), redundant_insn(), reg_scan_mark_refs(), regstat_bb_compute_ri(), reload(), reload_as_needed(), resolve_simple_move(), rest_of_clean_state(), rtl_verify_edges(), save_call_clobbered_regs(), scan_insn(), scan_one_insn(), scan_trace(), set_unique_reg_note(), setup_reg_equiv(), setup_save_areas(), single_set_2(), sjlj_fix_up_crossing_landing_pad(), split_all_insns(), subst_reloads(), try_back_substitute_reg(), try_combine(), try_eliminate_compare(), try_fwprop_subst_pattern(), try_head_merge_bb(), try_split(), rtx_properties::try_to_add_insn(), update_br_prob_note(), update_equiv_regs(), and validate_equiv_mem().

◆ find_regno_fusage()

bool find_regno_fusage ( const_rtx insn,
enum rtx_code code,
unsigned int regno )
Return true if REGNO, or any overlap of REGNO, of kind CODE is found
in the CALL_INSN_FUNCTION_USAGE information of INSN.   

References CALL_INSN_FUNCTION_USAGE, CALL_P, END_REGNO(), GET_CODE, ggc_alloc(), REG_P, REGNO, and XEXP.

Referenced by dead_or_set_regno_p(), distribute_notes(), and find_reg_fusage().

◆ find_regno_note()

◆ for_each_inc_dec()

int for_each_inc_dec ( rtx x,
for_each_inc_dec_fn fn,
void * data )
Traverse *LOC looking for MEMs that have autoinc addresses.
For each such autoinc operation found, call FN, passing it
the innermost enclosing MEM, the operation itself, the RTX modified
by the operation, two RTXs (the second may be NULL) that, once
added, represent the value to be held by the modified RTX
afterwards, and DATA.  FN is to return 0 to continue the
traversal or any other value to have it returned to the caller of
for_each_inc_dec.   

References for_each_inc_dec_find_inc_dec(), FOR_EACH_SUBRTX_VAR, GET_CODE, GET_RTX_CLASS, ggc_alloc(), MEM_P, RTX_AUTOINC, and XEXP.

Referenced by check_for_inc_dec(), check_for_inc_dec_1(), cselib_record_sets(), stack_adjust_offset_pre_post(), and try_combine().

◆ for_each_inc_dec_find_inc_dec()

static int for_each_inc_dec_find_inc_dec ( rtx mem,
for_each_inc_dec_fn fn,
void * data )
static
MEM has a PRE/POST-INC/DEC/MODIFY address X.  Extract the operands of
the equivalent add insn and pass the result to FN, using DATA as the
final argument.   

References gcc_unreachable, gen_int_mode(), GET_CODE, GET_MODE, GET_MODE_SIZE(), ggc_alloc(), NULL, and XEXP.

Referenced by for_each_inc_dec().

◆ get_address_mode()

◆ get_args_size()

poly_int64 get_args_size ( const_rtx x)

◆ get_base_term()

static rtx * get_base_term ( rtx * inner)
static
If *INNER can be interpreted as a base, return a pointer to the inner term
(see address_info).  Return null otherwise.   

References GET_CODE, ggc_alloc(), MEM_P, REG_P, strip_address_mutations(), and XEXP.

Referenced by decompose_normal_address().

◆ get_call_fndecl()

tree get_call_fndecl ( const rtx_insn * insn)
Get the declaration of the function called by INSN.   

References find_reg_note(), ggc_alloc(), NULL_RTX, NULL_TREE, SYMBOL_REF_DECL, and XEXP.

Referenced by insn_callee_abi(), and self_recursive_call_p().

◆ get_call_rtx_from()

rtx get_call_rtx_from ( const rtx_insn * insn)
Return the CALL in X if there is one.   

References GET_CODE, ggc_alloc(), MEM_P, NULL_RTX, PATTERN(), SET, SET_SRC, XEXP, and XVECEXP.

Referenced by emit_call_1(), old_insns_match_p(), prepare_call_arguments(), and scan_insn().

◆ get_condition()

rtx get_condition ( rtx_insn * jump,
rtx_insn ** earliest,
int allow_cc_mode,
int valid_at_insn_p )
Given a jump insn JUMP, return the condition that will cause it to branch
to its JUMP_LABEL.  If the condition cannot be understood, or is an
inequality floating-point comparison which needs to be reversed, 0 will
be returned.

If EARLIEST is nonzero, it is a pointer to a place where the earliest
insn used in locating the condition was found.  If a replacement test
of the condition is desired, it should be placed in front of that
insn and we will be sure that the inputs are still valid.  If EARLIEST
is null, the returned condition will be valid at INSN.

If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
compare CC mode register.

VALID_AT_INSN_P is the same as for canonicalize_condition.   

References any_condjump_p(), canonicalize_condition(), GET_CODE, ggc_alloc(), JUMP_LABEL, JUMP_P, label_ref_label(), NULL_RTX, pc_set(), SET_SRC, and XEXP.

Referenced by bb_estimate_probability_locally(), check_simple_exit(), fis_get_condition(), simplify_using_initial_values(), and try_head_merge_bb().

◆ get_full_rtx_cost()

void get_full_rtx_cost ( rtx x,
machine_mode mode,
enum rtx_code outer,
int opno,
struct full_rtx_costs * c )
Fill in the structure C with information about both speed and size rtx
costs for X, which is operand OPNO in an expression with code OUTER.   

References rtx_cost(), full_rtx_costs::size, and full_rtx_costs::speed.

Referenced by get_full_set_rtx_cost(), and get_full_set_src_cost().

◆ get_index_code()

enum rtx_code get_index_code ( const struct address_info * info)
Return the "index code" of INFO, in the form required by
ok_for_base_p_1.   

References GET_CODE, and ggc_alloc().

Referenced by base_plus_disp_to_reg(), base_to_reg(), and process_address_1().

◆ get_index_scale()

HOST_WIDE_INT get_index_scale ( const struct address_info * info)
Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
more complicated than that.   

References CONST_INT_P, GET_CODE, ggc_alloc(), HOST_WIDE_INT_1, INTVAL, and XEXP.

Referenced by equiv_address_substitution(), index_part_to_reg(), and process_address_1().

◆ get_index_term()

static rtx * get_index_term ( rtx * inner)
static
If *INNER can be interpreted as an index, return a pointer to the inner term
(see address_info).  Return null otherwise.   

References binary_scale_code_p(), CONSTANT_P, GET_CODE, ggc_alloc(), MEM_P, REG_P, strip_address_mutations(), and XEXP.

Referenced by decompose_normal_address().

◆ get_initial_register_offset()

static poly_int64 get_initial_register_offset ( int from,
int to )
static
Compute an approximation for the offset between the register
FROM and TO for the current function, as it was at the start
of the routine.   

References crtl, epilogue_completed, get_frame_size(), get_initial_register_offset(), ggc_alloc(), HARD_FRAME_POINTER_REGNUM, i, and table.

Referenced by get_initial_register_offset(), and rtx_addr_can_trap_p_1().

◆ get_integer_term()

HOST_WIDE_INT get_integer_term ( const_rtx x)
Return the value of the integer term in X, if one is apparent;
otherwise return 0.
Only obvious integer terms are detected.
This is used in cse.cc with the `related_value' field.   

References CONST_INT_P, GET_CODE, ggc_alloc(), INTVAL, and XEXP.

Referenced by use_related_value().

◆ get_related_value()

rtx get_related_value ( const_rtx x)
If X is a constant, return the value sans apparent integer term;
otherwise return 0.
Only obvious integer terms are detected.   

References CONST_INT_P, GET_CODE, ggc_alloc(), and XEXP.

Referenced by insert_with_costs(), and use_related_value().

◆ in_insn_list_p()

bool in_insn_list_p ( const rtx_insn_list * listp,
const rtx_insn * node )
Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
return true if it is found.  A simple equality test is used to determine if
NODE matches.   

References ggc_alloc(), and XEXP.

Referenced by remove_node_from_insn_list().

◆ init_num_sign_bit_copies_in_rep()

static void init_num_sign_bit_copies_in_rep ( void )
static
Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
TARGET_MODE_REP_EXTENDED.

Note that we assume that the property of
TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
narrower than mode B.  I.e., if A is a mode narrower than B then in
order to be able to operate on it in mode B, mode A needs to
satisfy the requirements set by the representation of mode B.   

References FOR_EACH_MODE, FOR_EACH_MODE_IN_CLASS, FOR_EACH_MODE_UNTIL, gcc_assert, GET_MODE_PRECISION(), GET_MODE_WIDER_MODE(), ggc_alloc(), i, num_sign_bit_copies_in_rep, require(), and targetm.

Referenced by init_rtlanal().

◆ init_rtlanal()

◆ insn_cost()

int insn_cost ( rtx_insn * insn,
bool speed )
Calculate the cost of a single instruction.  A return value of zero
indicates an instruction pattern without a known cost.   

References PATTERN(), pattern_cost(), and targetm.

Referenced by bb_ok_for_noce_convert_multiple_sets(), canonicalize_comparison(), cheap_bb_rtx_cost_p(), combine_instructions(), combine_validate_cost(), find_shift_sequence(), output_asm_name(), rtl_account_profile_record(), and seq_cost().

◆ int_reg_note_p()

static bool int_reg_note_p ( enum reg_note kind)
static
Return true if KIND is an integer REG_NOTE.   

References ggc_alloc().

Referenced by add_int_reg_note(), and alloc_reg_note().

◆ keep_with_call_p()

bool keep_with_call_p ( const rtx_insn * insn)
Return true if we should avoid inserting code between INSN and preceding
call instruction.   

References fixed_regs, general_operand(), ggc_alloc(), i2, INSN_P, keep_with_call_p(), next_nonnote_insn(), NULL, REG_P, REGNO, SET_DEST, SET_SRC, single_set(), stack_pointer_rtx, and targetm.

Referenced by keep_with_call_p(), rtl_block_ends_with_call_p(), and rtl_flow_call_edges_add().

◆ label_is_jump_target_p()

bool label_is_jump_target_p ( const_rtx label,
const rtx_insn * jump_insn )
Return true if LABEL is a target of JUMP_INSN.  This applies only
to non-complex jumps.  That is, direct unconditional, conditional,
and tablejumps, but not computed jumps or returns.  It also does
not apply to the fallthru case of a conditional jump.   

References find_reg_note(), GET_NUM_ELEM, ggc_alloc(), i, JUMP_LABEL, NULL, RTVEC_ELT, table, tablejump_p(), and XEXP.

Referenced by cfg_layout_redirect_edge_and_branch(), check_for_label_ref(), find_reloads(), subst_reloads(), and try_optimize_cfg().

◆ loc_mentioned_in_p()

bool loc_mentioned_in_p ( rtx * loc,
const_rtx in )
Return true if IN contains a piece of rtl that has the address LOC.   

References GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, loc_mentioned_in_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by df_remove_dead_eq_notes(), loc_mentioned_in_p(), and remove_address_replacements().

◆ low_bitmask_len()

int low_bitmask_len ( machine_mode mode,
unsigned HOST_WIDE_INT m )
If M is a bitmask that selects a field of low-order bits within an item but
not the entire word, return the length of the field.  Return -1 otherwise.
M is used in machine mode MODE.   

References exact_log2(), GET_MODE_MASK, ggc_alloc(), and HWI_COMPUTABLE_MODE_P().

Referenced by try_widen_shift_mode().

◆ lowpart_subreg_regno()

int lowpart_subreg_regno ( unsigned int regno,
machine_mode xmode,
machine_mode ymode )
A wrapper around simplify_subreg_regno that uses subreg_lowpart_offset
(xmode, ymode) as the offset.   

References ggc_alloc(), offset, simplify_subreg_regno(), and subreg_lowpart_offset().

Referenced by gen_memset_value_from_prev().

◆ lsb_bitfield_op_p()

static bool lsb_bitfield_op_p ( rtx x)
static
Return true if X is a sign_extract or zero_extract from the least
significant bit.   

References GET_CODE, GET_MODE, GET_MODE_PRECISION(), GET_RTX_CLASS, ggc_alloc(), INTVAL, known_eq, RTX_BITFIELD_OPS, and XEXP.

Referenced by strip_address_mutations().

◆ may_trap_or_fault_p()

bool may_trap_or_fault_p ( const_rtx x)
Same as above, but additionally return true if evaluating rtx X might
cause a fault.  We define a fault for the purpose of this function as an
erroneous execution condition that cannot be encountered during the normal
execution of a valid program; the typical example is an unaligned memory
access on a strict alignment machine.  The compiler guarantees that it
doesn't generate code that will fault from a valid program, but this
guarantee doesn't mean anything for individual instructions.  Consider
the following example:

   struct S { int d; union { char *cp; int *ip; }; };

   int foo(struct S *s)
   {
     if (s->d == 1)
       return *s->ip;
     else
       return *s->cp;
   }

on a strict alignment machine.  In a valid program, foo will never be
invoked on a structure for which d is equal to 1 and the underlying
unique field of the union not aligned on a 4-byte boundary, but the
expression *s->ip might cause a fault if considered individually.

At the RTL level, potentially problematic expressions will almost always
verify may_trap_p; for example, the above dereference can be emitted as
(mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
However, suppose that foo is inlined in a caller that causes s->cp to
point to a local character variable and guarantees that s->d is not set
to 1; foo may have been effectively translated into pseudo-RTL as:

   if ((reg:SI) == 1)
     (set (reg:SI) (mem:SI (%fp - 7)))
   else
     (set (reg:QI) (mem:QI (%fp - 7)))

Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
memory reference to a stack slot, but it will certainly cause a fault
on a strict alignment machine.   

References may_trap_p_1().

Referenced by can_move_insns_across(), fill_simple_delay_slots(), fill_slots_from_thread(), find_invariant_insn(), noce_try_cmove_arith(), noce_try_sign_mask(), steal_delay_list_from_fallthrough(), steal_delay_list_from_target(), and update_equiv_regs().

◆ may_trap_p()

◆ may_trap_p_1()

bool may_trap_p_1 ( const_rtx x,
unsigned flags )
Return true if evaluating rtx X might cause a trap.
FLAGS controls how to consider MEMs.  A true value means the context
of the access may have changed from the original, such that the
address may have become invalid.   

References CASE_CONST_ANY, const0_rtx, CONST_VECTOR_DUPLICATE_P, CONST_VECTOR_ELT, CONST_VECTOR_ENCODED_ELT, CONSTANT_P, FLOAT_MODE_P, GET_CODE, GET_MODE, GET_MODE_NUNITS(), GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), HONOR_NANS(), HONOR_SNANS(), i, may_trap_p_1(), MEM_NOTRAP_P, MEM_SIZE, MEM_SIZE_KNOWN_P, MEM_VOLATILE_P, rtx_addr_can_trap_p_1(), stack_pointer_rtx, targetm, XEXP, XVECEXP, and XVECLEN.

Referenced by default_unspec_may_trap_p(), may_trap_or_fault_p(), may_trap_p(), and may_trap_p_1().

◆ modified_between_p()

bool modified_between_p ( const_rtx x,
const rtx_insn * start,
const rtx_insn * end )
Similar to reg_set_between_p, but check all registers in X.  Return false
only if none of them are modified between START and END.  Return true if
X contains a MEM; this routine does use memory aliasing.   

References CASE_CONST_ANY, end(), GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, MEM_READONLY_P, memory_modified_in_insn_p(), modified_between_p(), NEXT_INSN(), reg_set_between_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by can_combine_p(), canonicalize_condition(), check_cond_move_block(), cprop_jump(), cse_condition_code_reg(), find_call_crossed_cheap_reg(), find_moveable_pseudos(), modified_between_p(), no_conflict_move_test(), noce_process_if_block(), and try_combine().

◆ modified_in_p()

◆ multiple_sets()

bool multiple_sets ( const_rtx insn)

◆ no_labels_between_p()

bool no_labels_between_p ( const rtx_insn * beg,
const rtx_insn * end )
Return true if in between BEG and END, exclusive of BEG and END, there is
no CODE_LABEL insn.   

References end(), LABEL_P, and NEXT_INSN().

Referenced by fill_simple_delay_slots(), and relax_delay_slots().

◆ nonzero_address_p()

◆ nonzero_bits()

◆ nonzero_bits1()

◆ nonzero_bits_binary_arith_p()

static bool nonzero_bits_binary_arith_p ( const_rtx x)
inlinestatic
Return true if nonzero_bits1 might recurse into both operands
of X.   

References ARITHMETIC_P, GET_CODE, and ggc_alloc().

Referenced by cached_nonzero_bits().

◆ noop_move_p()

bool noop_move_p ( const rtx_insn * insn)
Return true if an insn consists only of SETs, each of which only sets a
value to itself.   

References COND_EXEC_CODE, GET_CODE, ggc_alloc(), i, INSN_CODE, NOOP_MOVE_INSN_CODE, set_of_data::pat, PATTERN(), SET, set_noop_p(), XVECEXP, and XVECLEN.

Referenced by copyprop_hardreg_forward_1(), delete_noop_moves(), delete_unmarked_insns(), distribute_notes(), and find_rename_reg().

◆ note_pattern_stores()

void note_pattern_stores ( const_rtx x,
void(*)(rtx, const_rtx, void *) fun,
void * data )
Call FUN on each register or MEM that is stored into or clobbered by X.
 (X would be the pattern of an insn).  DATA is an arbitrary pointer,
 ignored by note_stores, but passed to FUN.

 FUN receives three arguments:
 1. the REG, MEM or PC being stored in or clobbered,
 2. the SET or CLOBBER rtx that does the store,
 3. the pointer DATA provided to note_stores.

If the item being stored in or clobbered is a SUBREG of a hard register,
the SUBREG will be passed.   

References COND_EXEC_CODE, GET_CODE, ggc_alloc(), i, note_pattern_stores(), REG_P, REGNO, SET, SET_DEST, SUBREG_REG, XEXP, XVECEXP, and XVECLEN.

Referenced by cselib_record_sets(), note_pattern_stores(), note_stores(), reload(), set_of(), single_set_gcse(), try_combine(), and update_equiv_regs().

◆ note_stores()

◆ note_uses()

void note_uses ( rtx * pbody,
void(*)(rtx *, void *) fun,
void * data )
Like note_stores, but call FUN for each expression that is being
referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
FUN for each expression, not any interior subexpressions.  FUN receives a
pointer to the expression and the DATA passed to this function.

Note that this is not quite the same test as that done in reg_referenced_p
since that considers something as being referenced if it is being
partially set, while we do not.   

References ASM_OPERANDS_INPUT, ASM_OPERANDS_INPUT_LENGTH, COND_EXEC_CODE, COND_EXEC_TEST, GET_CODE, ggc_alloc(), i, MEM_P, note_uses(), PATTERN(), SET, SET_DEST, SET_SRC, TRAP_CONDITION, XEXP, XVECEXP, and XVECLEN.

Referenced by add_with_sets(), adjust_insn(), bypass_block(), combine_instructions(), copyprop_hardreg_forward_1(), cprop_insn(), find_call_stack_args(), insert_one_insn(), local_cprop_pass(), note_uses(), scan_insn(), try_shrink_wrapping(), and validate_replace_src_group().

◆ num_sign_bit_copies()

◆ num_sign_bit_copies1()

static unsigned int num_sign_bit_copies1 ( const_rtx x,
scalar_int_mode mode,
const_rtx known_x,
machine_mode known_mode,
unsigned int known_ret )
static

◆ num_sign_bit_copies_binary_arith_p()

static bool num_sign_bit_copies_binary_arith_p ( const_rtx x)
inlinestatic
See the macro definition above.   
Return true if num_sign_bit_copies1 might recurse into both operands
of X.   

References ARITHMETIC_P, GET_CODE, and ggc_alloc().

Referenced by cached_num_sign_bit_copies().

◆ offset_within_block_p()

bool offset_within_block_p ( const_rtx symbol,
HOST_WIDE_INT offset )
Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
to somewhere in the same object or object_block as SYMBOL.   

References CONSTANT_POOL_ADDRESS_P, GET_CODE, GET_MODE_SIZE(), get_pool_mode(), ggc_alloc(), int_size_in_bytes(), offset, SYMBOL_REF_BLOCK, SYMBOL_REF_BLOCK_OFFSET, SYMBOL_REF_DECL, SYMBOL_REF_HAS_BLOCK_INFO_P, and TREE_TYPE.

◆ parms_set()

static void parms_set ( rtx x,
const_rtx pat,
void * data )
static
Helper function for noticing stores to parameter registers.   

References CLEAR_HARD_REG_BIT, ggc_alloc(), parms_set_data::nregs, REG_P, REGNO, parms_set_data::regs, and TEST_HARD_REG_BIT.

Referenced by find_first_parameter_load().

◆ pattern_cost()

int pattern_cost ( rtx pat,
bool speed )
Calculate the rtx_cost of a single instruction pattern.  A return value of
zero indicates an instruction pattern without a known cost.   

References COSTS_N_INSNS, GET_CODE, GET_MODE, ggc_alloc(), i, NULL_RTX, SET, SET_DEST, SET_SRC, set_src_cost(), XVECEXP, and XVECLEN.

Referenced by bb_valid_for_noce_process_p(), and insn_cost().

◆ read_modify_subreg_p()

◆ record_hard_reg_sets()

void record_hard_reg_sets ( rtx x,
const_rtx pat,
void * data )
This function, called through note_stores, collects sets and
clobbers of hard registers in a HARD_REG_SET, which is pointed to
by DATA.   

References add_to_hard_reg_set(), GET_MODE, HARD_REGISTER_P, REG_P, and REGNO.

Referenced by assign_parm_setup_reg(), find_all_hard_reg_sets(), and try_shrink_wrapping().

◆ record_hard_reg_uses()

void record_hard_reg_uses ( rtx * px,
void * data )
Like record_hard_reg_sets, but called through note_uses.   

References find_all_hard_regs(), and ggc_alloc().

Referenced by try_shrink_wrapping().

◆ refers_to_regno_p()

bool refers_to_regno_p ( unsigned int regno,
unsigned int endregno,
const_rtx x,
rtx * loc )
Return true if register in range [REGNO, ENDREGNO)
appears either explicitly or implicitly in X
other than being stored into.

References contained within the substructure at LOC do not count.
LOC may be zero, meaning don't ignore anything.   

References END_REGNO(), GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, refers_to_regno_p(), REG_P, REGNO, RTX_CODE, SET, SET_DEST, SET_SRC, subreg_nregs(), SUBREG_REG, subreg_regno(), VIRTUAL_REGISTER_NUM_P, XEXP, XVECEXP, and XVECLEN.

Referenced by delete_output_reload(), df_get_call_refs(), distribute_notes(), move_insn_for_shrink_wrap(), refers_to_regno_p(), refers_to_regno_p(), reg_overlap_mentioned_p(), remove_invalid_refs(), and remove_invalid_subreg_refs().

◆ reg_mentioned_p()

◆ reg_overlap_mentioned_p()

bool reg_overlap_mentioned_p ( const_rtx x,
const_rtx in )
Return true if modifying X will affect IN.  If X is a register or a SUBREG,
we check if any register number in X conflicts with the relevant register
numbers.  If X is a constant, return false.  If X is a MEM, return true iff
IN contains a MEM (we don't bother checking for memory addresses that can't
conflict because we expect this to be a rare case).   

References CONSTANT_P, END_REGNO(), gcc_assert, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, MEM_P, refers_to_regno_p(), reg_mentioned_p(), reg_overlap_mentioned_p(), REGNO, subreg_nregs(), SUBREG_REG, subreg_regno(), XEXP, XVECEXP, and XVECLEN.

Referenced by add_equal_note(), add_removable_extension(), insn_propagation::apply_to_lvalue_1(), insn_propagation::apply_to_rvalue_1(), bb_valid_for_noce_process_p(), can_combine_p(), check_cond_move_block(), check_for_cc_cmp_clobbers(), check_invalid_inc_dec(), combinable_i3pat(), combine_reaching_defs(), conflicts_with_override(), convert_mode_scalar(), copyprop_hardreg_forward_1(), decrease_live_ranges_number(), delete_prior_computation(), distribute_links(), distribute_notes(), emit_move_complex_parts(), expand_absneg_bit(), expand_asm_stmt(), expand_atomic_compare_and_swap(), expand_binop(), expand_copysign_bit(), expand_expr_real_2(), expand_unop(), fill_slots_from_thread(), find_inc(), gen_reload(), init_alias_analysis(), match_asm_constraints_1(), need_cmov_or_rewire(), no_conflict_move_test(), noce_convert_multiple_sets_1(), noce_get_alt_condition(), noce_process_if_block(), noce_try_store_flag_constants(), process_bb_node_lives(), record_value_for_reg(), reg_overlap_mentioned_p(), reg_referenced_p(), reg_used_between_p(), reg_used_on_edge(), resolve_simple_move(), set_of_1(), subst(), try_combine(), validate_equiv_mem(), and validate_equiv_mem_from_store().

◆ reg_referenced_p()

◆ reg_set_between_p()

bool reg_set_between_p ( const_rtx reg,
const rtx_insn * from_insn,
const rtx_insn * to_insn )
Return true if register REG is set or clobbered in an insn between
FROM_INSN and TO_INSN (exclusive of those two).   

References ggc_alloc(), INSN_P, NEXT_INSN(), and reg_set_p().

Referenced by bb_valid_for_noce_process_p(), can_combine_p(), combine_reaching_defs(), distribute_notes(), eliminate_partially_redundant_load(), get_bb_avail_insn(), modified_between_p(), and try_combine().

◆ reg_set_p()

◆ reg_used_between_p()

bool reg_used_between_p ( const_rtx reg,
const rtx_insn * from_insn,
const rtx_insn * to_insn )
Return true if register REG is used in an insn between
FROM_INSN and TO_INSN (exclusive of those two).   

References CALL_P, find_reg_fusage(), ggc_alloc(), NEXT_INSN(), NONDEBUG_INSN_P, PATTERN(), and reg_overlap_mentioned_p().

Referenced by can_combine_p(), combine_reaching_defs(), eliminate_partially_redundant_load(), no_conflict_move_test(), and try_combine().

◆ register_asm_p()

bool register_asm_p ( const_rtx x)

◆ regno_use_in()

rtx regno_use_in ( unsigned int regno,
rtx x )
Searches X for any reference to REGNO, returning the rtx of the
reference found if any.  Otherwise, returns NULL_RTX.   

References GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, NULL_RTX, REG_P, REGNO, regno_use_in(), XEXP, XVECEXP, and XVECLEN.

Referenced by regno_use_in().

◆ remove_node_from_insn_list()

void remove_node_from_insn_list ( const rtx_insn * node,
rtx_insn_list ** listp )
Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
remove that entry from the list if it is found.

A simple equality test is used to determine if NODE matches.   

References gcc_checking_assert, ggc_alloc(), in_insn_list_p(), rtx_insn_list::insn(), rtx_insn_list::next(), NULL, and XEXP.

Referenced by delete_insn().

◆ remove_note()

◆ remove_reg_equal_equiv_notes()

bool remove_reg_equal_equiv_notes ( rtx_insn * insn,
bool no_rescan )
Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
If NO_RESCAN is false and any notes were removed, call
df_notes_rescan.  Return true if any note has been removed.   

References df_notes_rescan(), ggc_alloc(), REG_NOTE_KIND, REG_NOTES, and XEXP.

Referenced by adjust_for_new_dest(), reload_combine_recognize_pattern(), and try_apply_stack_adjustment().

◆ remove_reg_equal_equiv_notes_for_regno()

void remove_reg_equal_equiv_notes_for_regno ( unsigned int regno)
Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.   

References df, DF_REF_INSN, DF_REG_EQ_USE_CHAIN, find_reg_equal_equiv_note(), gcc_assert, ggc_alloc(), NULL, and remove_note().

Referenced by dead_or_predicable(), and remove_reg_equal_equiv_notes_for_defs().

◆ replace_label()

void replace_label ( rtx * loc,
rtx old_label,
rtx new_label,
bool update_label_nuses )
Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL.  Also track
the change in LABEL_NUSES if UPDATE_LABEL_NUSES.   

References CONSTANT_POOL_ADDRESS_P, copy_rtx(), FOR_EACH_SUBRTX_PTR, force_const_mem(), GET_CODE, GET_NUM_ELEM, get_pool_constant(), get_pool_mode(), ggc_alloc(), i, JUMP_LABEL, JUMP_P, JUMP_TABLE_DATA_P, LABEL_NUSES, PATTERN(), replace_label(), replace_rtx(), RTVEC_ELT, rtx_referenced_p(), XEXP, and XVEC.

Referenced by replace_label(), and replace_label_in_insn().

◆ replace_label_in_insn()

void replace_label_in_insn ( rtx_insn * insn,
rtx_insn * old_label,
rtx_insn * new_label,
bool update_label_nuses )

◆ replace_rtx()

rtx replace_rtx ( rtx x,
rtx from,
rtx to,
bool all_regs )
Replace any occurrence of FROM in X with TO.  The function does
not enter into CONST_DOUBLE for the replace.

Note that copying is not done so X must not be shared unless all copies
are to be modified.

ALL_REGS is true if we want to replace all REGs equal to FROM, not just
those pointer-equal ones.   

References CONST_SCALAR_INT_P, gcc_assert, GET_CODE, GET_MODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, REG_P, REGNO, replace_rtx(), simplify_subreg(), simplify_unary_operation(), SUBREG_BYTE, SUBREG_REG, XEXP, XVECEXP, and XVECLEN.

Referenced by find_split_point(), match_asm_constraints_1(), record_value_for_reg(), replace_label(), and replace_rtx().

◆ rtx_addr_can_trap_p()

bool rtx_addr_can_trap_p ( const_rtx x)
Return true if the use of X as an address in a MEM can cause a trap.   

References ggc_alloc(), and rtx_addr_can_trap_p_1().

Referenced by find_comparison_args().

◆ rtx_addr_can_trap_p_1()

◆ rtx_addr_varies_p()

bool rtx_addr_varies_p ( const_rtx x,
bool for_alias )
Return true if X refers to a memory location whose address
cannot be compared reliably with constant addresses,
or if X refers to a BLKmode memory object.
FOR_ALIAS is nonzero if we are called from alias analysis; if it is
zero, we are slightly more conservative.   

References GET_CODE, GET_MODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, rtx_addr_varies_p(), rtx_varies_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by rtx_addr_varies_p().

◆ rtx_cost()

int rtx_cost ( rtx x,
machine_mode mode,
enum rtx_code outer_code,
int opno,
bool speed )
Return an estimate of the cost of computing rtx X.
One use is in cse, to decide which expression to keep in the hash table.
Another is in rtl generation, to pick the cheapest way to multiply.
Other uses like the latter are expected in the future.

X appears as operand OPNO in an expression with code OUTER_CODE.
SPEED specifies whether costs optimized for speed or size should
be returned.   

References COSTS_N_INSNS, estimated_poly_value(), GET_CODE, GET_MODE, GET_MODE_SIZE(), GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, mode_size, rtx_cost(), SET, SET_DEST, SUBREG_REG, targetm, XEXP, XVECEXP, and XVECLEN.

Referenced by avoid_expensive_constant(), default_address_cost(), emit_conditional_move(), emit_store_flag(), emit_store_flag_int(), get_full_rtx_cost(), maybe_optimize_mod_cmp(), maybe_optimize_pow2p_mod_cmp(), notreg_cost(), prefer_and_bit_test(), prepare_cmp_insn(), rtx_cost(), set_rtx_cost(), and set_src_cost().

◆ rtx_referenced_p()

◆ rtx_unstable_p()

bool rtx_unstable_p ( const_rtx x)
Return true if the value of X is unstable
(would be different at a different point in the program).
The frame pointer, arg pointer, etc. are considered stable
(within one function) and so is anything marked `unchanging'.   

References arg_pointer_rtx, CASE_CONST_ANY, fixed_regs, frame_pointer_rtx, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), hard_frame_pointer_rtx, i, MEM_READONLY_P, MEM_VOLATILE_P, PIC_OFFSET_TABLE_REG_CALL_CLOBBERED, pic_offset_table_rtx, RTX_CODE, rtx_unstable_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by rtx_unstable_p().

◆ rtx_varies_p()

bool rtx_varies_p ( const_rtx x,
bool for_alias )
Return true if X has a value that can vary even between two
executions of the program.  false means X can be compared reliably
against certain constants or near-constants.
FOR_ALIAS is nonzero if we are called from alias analysis; if it is
zero, we are slightly more conservative.
The frame pointer and the arg pointer are considered constant.   

References arg_pointer_rtx, CASE_CONST_ANY, fixed_regs, frame_pointer_rtx, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), hard_frame_pointer_rtx, i, MEM_READONLY_P, MEM_VOLATILE_P, PIC_OFFSET_TABLE_REG_CALL_CLOBBERED, pic_offset_table_rtx, RTX_CODE, rtx_varies_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by equiv_init_movable_p(), equiv_init_varies_p(), init_alias_analysis(), make_memloc(), rtx_addr_varies_p(), rtx_varies_p(), and update_equiv_regs().

◆ seq_cost()

◆ set_address_base()

static void set_address_base ( struct address_info * info,
rtx * loc,
rtx * inner )
static
Set the base part of address INFO to LOC, given that INNER is the
unmutated value.   

References gcc_assert.

Referenced by decompose_automod_address(), decompose_incdec_address(), and decompose_normal_address().

◆ set_address_disp()

static void set_address_disp ( struct address_info * info,
rtx * loc,
rtx * inner )
static
Set the displacement part of address INFO to LOC, given that INNER
is the constant term.   

References gcc_assert.

Referenced by decompose_automod_address(), and decompose_normal_address().

◆ set_address_index()

static void set_address_index ( struct address_info * info,
rtx * loc,
rtx * inner )
static
Set the index part of address INFO to LOC, given that INNER is the
unmutated value.   

References gcc_assert.

Referenced by decompose_automod_address(), and decompose_normal_address().

◆ set_address_segment()

static void set_address_segment ( struct address_info * info,
rtx * loc,
rtx * inner )
static
Set the segment part of address INFO to LOC, given that INNER is the
unmutated value.   

References gcc_assert.

Referenced by decompose_normal_address().

◆ set_noop_p()

◆ set_of()

const_rtx set_of ( const_rtx pat,
const_rtx insn )
Give an INSN, return a SET or CLOBBER expression that does modify PAT
(either directly or via STRICT_LOW_PART and similar modifiers).   

References INSN_P, note_pattern_stores(), NULL_RTX, set_of_data::pat, PATTERN(), and set_of_1().

Referenced by canonicalize_condition(), check_cond_move_block(), end_ifcvt_sequence(), get_defs(), insn_valid_noce_process_p(), reg_set_p(), and reversed_comparison_code_parts().

◆ set_of_1()

static void set_of_1 ( rtx x,
const_rtx pat,
void * data1 )
static
Analyze RTL for GNU compiler.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.   
Forward declarations  

References ggc_alloc(), MEM_P, set_of_data::pat, reg_overlap_mentioned_p(), and rtx_equal_p().

Referenced by set_of().

◆ setup_reg_subrtx_bounds()

static bool setup_reg_subrtx_bounds ( unsigned int code)
static
Return true if RTX code CODE has a single sequence of zero or more
"e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
entry in that case.   

References count, rtx_subrtx_bound_info::count, gcc_checking_assert, GET_RTX_FORMAT, i, rtx_all_subrtx_bounds, and rtx_subrtx_bound_info::start.

Referenced by init_rtlanal().

◆ side_effects_p()

bool side_effects_p ( const_rtx x)
Similar to above, except that it also rejects register pre- and post-
incrementing.   

References CASE_CONST_ANY, GET_CODE, GET_MODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, MEM_VOLATILE_P, RTX_CODE, side_effects_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by add_insn_allocno_copies(), can_combine_p(), can_split_parallel_of_n_reg_sets(), check_cond_move_block(), combine_instructions(), combine_simplify_rtx(), copyprop_hardreg_forward_1(), count_reg_usage(), cse_insn(), cselib_add_permanent_equiv(), cselib_record_set(), delete_dead_insn(), delete_prior_computation(), delete_trivially_dead_insns(), distribute_notes(), eliminate_partially_redundant_loads(), emit_conditional_move_1(), expand_builtin_prefetch(), fill_slots_from_thread(), find_moveable_store(), find_split_point(), flow_find_cross_jump(), fold_rtx(), force_to_mode(), get_reload_reg(), if_then_else_cond(), interesting_dest_for_shprep(), invariant_p(), known_cond(), lra_coalesce(), lra_delete_dead_insn(), maybe_legitimize_operand_same_code(), noce_get_condition(), noce_operand_ok(), noce_process_if_block(), noce_try_sign_mask(), non_conflicting_reg_copy_p(), onlyjump_p(), recompute_constructor_flags(), record_set_data(), reload_cse_simplify_operands(), reload_cse_simplify_set(), resolve_simple_move(), rtl_can_remove_branch_p(), scan_one_insn(), set_live_p(), set_noop_p(), set_unique_reg_note(), side_effects_p(), simple_mem(), simplify_and_const_int_1(), simplify_context::simplify_binary_operation_1(), simplify_const_relational_operation(), simplify_context::simplify_distributive_operation(), simplify_if_then_else(), simplify_context::simplify_merge_mask(), simplify_context::simplify_relational_operation_1(), simplify_set(), simplify_shift_const_1(), simplify_context::simplify_ternary_operation(), simplify_context::simplify_unary_operation_1(), single_set_2(), store_expr(), thread_jump(), try_combine(), try_eliminate_compare(), try_merge_compare(), try_redirect_by_replacing_jump(), update_equiv_regs(), validate_equiv_mem(), and verify_constructor_flags().

◆ simple_regno_set()

rtx simple_regno_set ( rtx pat,
unsigned int regno )
Check whether instruction pattern PAT contains a SET with the following
properties:

- the SET is executed unconditionally; and
- either:
  - the destination of the SET is a REG that contains REGNO; or
  - both:
    - the destination of the SET is a SUBREG of such a REG; and
    - writing to the subreg clobbers all of the SUBREG_REG
      (in other words, read_modify_subreg_p is false).

If PAT does have a SET like that, return the set, otherwise return null.

This is intended to be an alternative to single_set for passes that
can handle patterns with multiple_sets.   

References covers_regno_no_parallel_p(), GET_CODE, ggc_alloc(), i, last, set_of_data::pat, SET, SET_DEST, simple_regno_set(), XVECEXP, and XVECLEN.

Referenced by forward_propagate_into(), and simple_regno_set().

◆ simplify_subreg_regno()

int simplify_subreg_regno ( unsigned int xregno,
machine_mode xmode,
poly_uint64 offset,
machine_mode ymode )

◆ single_set_2()

rtx single_set_2 ( const rtx_insn * insn,
const_rtx pat )
Given an INSN, return a SET expression if this insn has only a single SET.
It may also have CLOBBERs, USEs, or SETs whose outputs
will not be used, which we ignore.   

References find_reg_note(), GET_CODE, ggc_alloc(), i, NULL, NULL_RTX, set_of_data::pat, SET, SET_DEST, side_effects_p(), XVECEXP, and XVECLEN.

Referenced by single_set().

◆ split_const()

void split_const ( rtx x,
rtx * base_out,
rtx * offset_out )
Split X into a base and a constant offset, storing them in *BASE_OUT
and *OFFSET_OUT respectively.   

References const0_rtx, CONST_INT_P, GET_CODE, ggc_alloc(), and XEXP.

Referenced by insn_propagation::apply_to_rvalue_1(), and simplify_replace_fn_rtx().

◆ split_double()

void split_double ( rtx value,
rtx * first,
rtx * second )
Split up a CONST_DOUBLE or integer constant rtx
into two rtx's for single words,
storing in *FIRST the word that comes first in memory in the target
and in *SECOND the other.

TODO: This function needs to be rewritten to work on any size
integer.   

References BITS_PER_WORD, const0_rtx, CONST_DOUBLE_HIGH, CONST_DOUBLE_LOW, CONST_DOUBLE_P, CONST_DOUBLE_REAL_VALUE, CONST_INT_P, CONST_WIDE_INT_ELT, CONST_WIDE_INT_NUNITS, constm1_rtx, gcc_assert, GEN_INT, GET_CODE, GET_MODE, GET_MODE_CLASS, ggc_alloc(), HOST_BITS_PER_LONG, HOST_BITS_PER_WIDE_INT, INTVAL, and REAL_VALUE_TO_TARGET_DOUBLE.

Referenced by emit_group_load_1().

◆ strip_address_mutations()

rtx * strip_address_mutations ( rtx * loc,
enum rtx_code * outer_code )
Strip outer address "mutations" from LOC and return a pointer to the
inner value.  If OUTER_CODE is nonnull, store the code of the innermost
stripped expression there.

"Mutations" either convert between modes or apply some kind of
extension, truncation or alignment.   

References CONST_INT_P, GET_CODE, GET_RTX_CLASS, ggc_alloc(), lsb_bitfield_op_p(), OBJECT_P, RTX_UNARY, subreg_lowpart_p(), SUBREG_REG, and XEXP.

Referenced by decompose_address(), decompose_automod_address(), decompose_normal_address(), get_base_term(), and get_index_term().

◆ strip_offset()

rtx strip_offset ( rtx x,
poly_int64 * offset_out )
Express integer value X as some value Y plus a polynomial offset,
where Y is either const0_rtx, X or something within X (as opposed
to a new rtx).  Return the Y and store the offset in *OFFSET_OUT.   

References const0_rtx, GET_CODE, ggc_alloc(), poly_int_rtx_p(), and XEXP.

Referenced by avoid_constant_pool_reference(), delegitimize_mem_from_attrs(), dwarf2out_frame_debug_def_cfa(), find_args_size_adjust(), find_temp_slot_from_address(), indirect_operand(), set_mem_attrs_for_spill(), store_one_arg(), strip_offset_and_add(), vt_add_function_parameter(), and vt_canonicalize_addr().

◆ subreg_get_info()

void subreg_get_info ( unsigned int xregno,
machine_mode xmode,
poly_uint64 offset,
machine_mode ymode,
struct subreg_info * info )
Fill in information about a subreg of a hard register.
xregno - A regno of an inner hard subreg_reg (or what will become one).
xmode  - The mode of xregno.
offset - The byte offset.
ymode  - The mode of a top level SUBREG (or what may become one).
info   - Pointer to structure to fill in.

Rather than considering one particular inner register (and thus one
particular "outer" register) in isolation, this function really uses
XREGNO as a model for a sequence of isomorphic hard registers.  Thus the
function does not check whether adding INFO->offset to XREGNO gives
a valid hard register; even if INFO->offset + XREGNO is out of range,
there might be another register of the same type that is in range.
Likewise it doesn't check whether targetm.hard_regno_mode_ok accepts
the new register, since that can depend on things like whether the final
register number is even or odd.  Callers that want to check whether
this particular subreg can be replaced by a simple (reg ...) should
use simplify_subreg_regno.   

References count, gcc_assert, gcc_checking_assert, gcc_unreachable, GET_MODE_INNER, GET_MODE_NUNITS(), GET_MODE_SIZE(), ggc_alloc(), hard_regno_nregs(), HARD_REGNO_NREGS_HAS_PADDING, HARD_REGNO_NREGS_WITH_PADDING, known_eq, maybe_gt, subreg_info::nregs, offset, subreg_info::offset, REG_WORDS_BIG_ENDIAN, subreg_info::representable_p, subreg_lowpart_offset(), subreg_size_lowpart_offset(), and poly_int< N, C >::to_constant().

Referenced by rtx_renumbered_equal_p(), simplify_subreg_regno(), subreg_nregs_with_regno(), subreg_offset_representable_p(), subreg_regno_offset(), and true_regnum().

◆ subreg_lsb()

poly_uint64 subreg_lsb ( const_rtx x)
Given a subreg X, return the bit offset where the subreg begins
(counting from the least significant bit of the reg).   

References GET_MODE, SUBREG_BYTE, subreg_lsb_1(), and SUBREG_REG.

Referenced by expand_field_assignment().

◆ subreg_nregs()

◆ subreg_nregs_with_regno()

unsigned int subreg_nregs_with_regno ( unsigned int regno,
const_rtx x )
Return the number of registers that a subreg REG with REGNO
expression refers to.  This is a copy of the rtlanal.cc:subreg_nregs
changed so that the regno can be passed in.  

References GET_MODE, ggc_alloc(), subreg_info::nregs, SUBREG_BYTE, subreg_get_info(), and SUBREG_REG.

Referenced by subreg_nregs().

◆ subreg_offset_representable_p()

bool subreg_offset_representable_p ( unsigned int xregno,
machine_mode xmode,
poly_uint64 offset,
machine_mode ymode )
This function returns true when the offset is representable via
subreg_offset in the given regno.
xregno - A regno of an inner hard subreg_reg (or what will become one).
xmode  - The mode of xregno.
offset - The byte offset.
ymode  - The mode of a top level SUBREG (or what may become one).
RETURN - Whether the offset is representable.   

References ggc_alloc(), offset, subreg_info::representable_p, and subreg_get_info().

Referenced by validate_subreg().

◆ subreg_regno()

◆ subreg_regno_offset()

unsigned int subreg_regno_offset ( unsigned int xregno,
machine_mode xmode,
poly_uint64 offset,
machine_mode ymode )
This function returns the regno offset of a subreg expression.
xregno - A regno of an inner hard subreg_reg (or what will become one).
xmode  - The mode of xregno.
offset - The byte offset.
ymode  - The mode of a top level SUBREG (or what may become one).
RETURN - The regno offset which would be used.   

References ggc_alloc(), offset, subreg_info::offset, and subreg_get_info().

Referenced by add_stored_regs(), choose_reload_regs(), constrain_operands(), df_ref_record(), find_dummy_reload(), find_reloads(), find_reloads_address_1(), get_hard_regno(), go_through_subreg(), maybe_mode_change(), move2add_valid_value_p(), operands_match_p(), push_reload(), reg_overlap_mentioned_for_reload_p(), reload_combine_note_store(), subreg_regno(), and var_lowpart().

◆ subreg_size_lsb()

poly_uint64 subreg_size_lsb ( poly_uint64 outer_bytes,
poly_uint64 inner_bytes,
poly_uint64 subreg_byte )
Reinterpret a subreg as a bit extraction from an integer and return
the position of the least significant bit of the extracted value.
In other words, if the extraction were performed as a shift right
and mask, return the number of bits to shift right.

The outer value of the subreg has OUTER_BYTES bytes and starts at
byte offset SUBREG_BYTE within an inner value of INNER_BYTES bytes.   

References gcc_assert, gcc_checking_assert, ggc_alloc(), known_eq, known_le, and maybe_gt.

Referenced by native_decode_rtx(), native_encode_rtx(), and subreg_lsb_1().

◆ subreg_size_offset_from_lsb()

poly_uint64 subreg_size_offset_from_lsb ( poly_uint64 outer_bytes,
poly_uint64 inner_bytes,
poly_uint64 lsb_shift )
Return the subreg byte offset for a subreg whose outer value has
OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
there are LSB_SHIFT *bits* between the lsb of the outer value and the
lsb of the inner value.  This is the inverse of the calculation
performed by subreg_lsb_1 (which converts byte offsets to bit shifts).   

References gcc_checking_assert, ggc_alloc(), known_eq, and maybe_gt.

Referenced by native_decode_rtx(), subreg_offset_from_lsb(), subreg_size_highpart_offset(), and subreg_size_lowpart_offset().

◆ swap_commutative_operands_p()

◆ tablejump_casesi_pattern()

rtx tablejump_casesi_pattern ( const rtx_insn * insn)
For INSN known to satisfy tablejump_p, determine if it actually is a
CASESI.  Return the insn pattern if so, NULL_RTX otherwise.   

References GET_CODE, ggc_alloc(), NULL, NULL_RTX, pc_rtx, SET_DEST, SET_SRC, single_set(), and XEXP.

Referenced by create_trace_edges(), and patch_jump_insn().

◆ tablejump_p()

◆ tls_referenced_p()

bool tls_referenced_p ( const_rtx x)
Return true if X contains a thread-local symbol.   

References FOR_EACH_SUBRTX, GET_CODE, ggc_alloc(), SYMBOL_REF_TLS_MODEL, and targetm.

◆ truncated_to_mode()

bool truncated_to_mode ( machine_mode mode,
const_rtx x )
Suppose that truncation from the machine mode of X to MODE is not a
no-op.  See if there is anything special about X so that we can
assume it already contains a truncated value of MODE.   

References GET_MODE, num_sign_bit_copies(), num_sign_bit_copies_in_rep, REG_P, and rtl_hooks::reg_truncated_to_mode.

Referenced by simplify_context::simplify_unary_operation_1().

◆ unsigned_reg_p()

bool unsigned_reg_p ( rtx op)
Return TRUE if OP is a register or subreg of a register that
holds an unsigned quantity.  Otherwise, return FALSE.   

References GET_CODE, ggc_alloc(), REG_EXPR, REG_P, SUBREG_PROMOTED_SIGN, TREE_TYPE, and TYPE_UNSIGNED.

◆ update_address()

void update_address ( struct address_info * info)
Update INFO after a change to the address it describes.   

References decompose_address().

Referenced by equiv_address_substitution().

◆ vec_series_highpart_p()

bool vec_series_highpart_p ( machine_mode result_mode,
machine_mode op_mode,
rtx sel )
Return true if, for all OP of mode OP_MODE:

  (vec_select:RESULT_MODE OP SEL)

is equivalent to the highpart RESULT_MODE of OP.   

References GET_MODE_NUNITS(), ggc_alloc(), offset, rtvec_series_p(), targetm, XVEC, and XVECLEN.

Referenced by simplify_context::simplify_binary_operation_1().

◆ vec_series_lowpart_p()

bool vec_series_lowpart_p ( machine_mode result_mode,
machine_mode op_mode,
rtx sel )
Return true if, for all OP of mode OP_MODE:

  (vec_select:RESULT_MODE OP SEL)

is equivalent to the lowpart RESULT_MODE of OP.   

References GET_MODE_NUNITS(), ggc_alloc(), offset, rtvec_series_p(), targetm, XVEC, and XVECLEN.

Referenced by combine_simplify_rtx(), fold_rtx(), and simplify_context::simplify_binary_operation_1().

◆ volatile_insn_p()

bool volatile_insn_p ( const_rtx x)
Return true if X contains any volatile instructions.  These are instructions
which may cause unpredictable machine state, and thus no
instructions or register uses should be moved or combined across them.
This includes only volatile asms and UNSPEC_VOLATILE instructions.   

References CASE_CONST_ANY, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, MEM_VOLATILE_P, RTX_CODE, volatile_insn_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by can_combine_p(), can_move_insns_across(), dead_debug_insert_temp(), find_equiv_reg(), propagate_for_debug(), propagate_for_debug_subst(), reload_combine(), and volatile_insn_p().

◆ volatile_refs_p()

bool volatile_refs_p ( const_rtx x)
Return true if X contains any volatile memory references
UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.   

References CASE_CONST_ANY, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, MEM_VOLATILE_P, RTX_CODE, volatile_refs_p(), XEXP, XVECEXP, and XVECLEN.

Referenced by can_combine_p(), deletable_insn_p_1(), forward_propagate_into(), process_bb_lives(), scan_insn(), try_combine(), and volatile_refs_p().

Variable Documentation

◆ num_sign_bit_copies_in_rep

unsigned int num_sign_bit_copies_in_rep[MAX_MODE_INT+1][MAX_MODE_INT+1]
static
Truncation narrows the mode from SOURCE mode to DESTINATION mode.
If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
SIGN_EXTEND then while narrowing we also have to enforce the
representation and sign-extend the value to mode DESTINATION_REP.

If the value is already sign-extended to DESTINATION_REP mode we
can just switch to DESTINATION mode on it.  For each pair of
integral modes SOURCE and DESTINATION, when truncating from SOURCE
to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
contains the number of high-order bits in SOURCE that have to be
copies of the sign-bit so that we can do this mode-switch to
DESTINATION.   

Referenced by init_num_sign_bit_copies_in_rep(), and truncated_to_mode().

◆ rtx_all_subrtx_bounds

◆ rtx_nonconst_subrtx_bounds

rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE]

Referenced by init_rtlanal().