GCC Middle and Back End API Reference
jump.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "rtl-iter.h"
Include dependency graph for jump.cc:


static void init_label_info (rtx_insn *)
static void mark_all_labels (rtx_insn *)
static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool)
static void mark_jump_label_asm (rtx, rtx_insn *)
static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *)
static bool invert_exp_1 (rtx, rtx_insn *)
static void rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
void rebuild_jump_labels (rtx_insn *f)
void rebuild_jump_labels_chain (rtx_insn *chain)
static unsigned int cleanup_barriers (void)
rtl_opt_passmake_pass_cleanup_barriers (gcc::context *ctxt)
static void maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
enum rtx_code reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0, const_rtx arg1, const rtx_insn *insn)
enum rtx_code reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
rtx reversed_comparison (const_rtx exp, machine_mode mode)
enum rtx_code reverse_condition (enum rtx_code code)
enum rtx_code reverse_condition_maybe_unordered (enum rtx_code code)
enum rtx_code swap_condition (enum rtx_code code)
enum rtx_code unsigned_condition (enum rtx_code code)
enum rtx_code signed_condition (enum rtx_code code)
bool comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
bool simplejump_p (const rtx_insn *insn)
bool condjump_p (const rtx_insn *insn)
bool condjump_in_parallel_p (const rtx_insn *insn)
rtx pc_set (const rtx_insn *insn)
bool any_uncondjump_p (const rtx_insn *insn)
bool any_condjump_p (const rtx_insn *insn)
rtx condjump_label (const rtx_insn *insn)
bool returnjump_p (const rtx_insn *insn)
bool eh_returnjump_p (rtx_insn *insn)
bool onlyjump_p (const rtx_insn *insn)
bool jump_to_label_p (const rtx_insn *insn)
void mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
rtx_insndelete_related_insns (rtx uncast_insn)
void delete_for_peephole (rtx_insn *from, rtx_insn *to)
static rtx redirect_target (rtx x)
bool redirect_jump_1 (rtx_insn *jump, rtx nlabel)
bool redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
void redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused, int invert)
bool invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
bool invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
bool rtx_renumbered_equal_p (const_rtx x, const_rtx y)
int true_regnum (const_rtx x)
unsigned int reg_or_subregno (const_rtx reg)

Function Documentation

◆ any_condjump_p()

bool any_condjump_p ( const rtx_insn * insn)

◆ any_uncondjump_p()

bool any_uncondjump_p ( const rtx_insn * insn)
Return true when insn is an unconditional direct jump,
possibly bundled inside a PARALLEL, UNSPEC or UNSPEC_VOLATILE.
The instruction may have various other effects so before removing the jump
you must verify onlyjump_p.   

References find_reg_note(), GET_CODE, ggc_alloc(), NULL_RTX, pc_set(), and SET_SRC.

Referenced by add_test(), emit(), find_dead_or_set_registers(), follow_jumps(), rtl_block_empty_p(), rtl_tidy_fallthru_edge(), rtl_verify_edges(), and try_combine().

◆ cleanup_barriers()

static unsigned int cleanup_barriers ( void )
Some old code expects exactly one BARRIER as the NEXT_INSN of a
 non-fallthru insn.  This is not generally true, as multiple barriers
 may have crept in, or the BARRIER may be separated from the last
 real insn by one or more NOTEs.

 This simple pass moves barriers and removes duplicates so that the
 old code is happy.

References BARRIER_P, BB_END, BLOCK_FOR_INSN(), delete_insn(), end(), get_insns(), NEXT_INSN(), NULL, PREV_INSN(), prev_nonnote_nondebug_insn(), and reorder_insns_nobb().

◆ comparison_dominates_p()

bool comparison_dominates_p ( enum rtx_code code1,
enum rtx_code code2 )
Return true if CODE1 is more strict than CODE2, i.e., if the
truth of CODE1 implies the truth of CODE2.   

References ggc_alloc().

Referenced by condition_dominates_p(), fold_rtx(), known_cond(), and thread_jump().

◆ condjump_in_parallel_p()

bool condjump_in_parallel_p ( const rtx_insn * insn)
Return true if INSN is a (possibly) conditional jump inside a PARALLEL.

Use of this function is deprecated, since we need to support combined
branch and compare insns.  Use any_condjump_p instead whenever possible.   

References ANY_RETURN_P, GET_CODE, ggc_alloc(), PATTERN(), pc_rtx, SET, SET_DEST, SET_SRC, XEXP, and XVECEXP.

Referenced by dbr_schedule(), fill_eager_delay_slots(), fill_simple_delay_slots(), get_branch_condition(), get_jump_flags(), relax_delay_slots(), and reload_combine().

◆ condjump_label()

rtx condjump_label ( const rtx_insn * insn)
Return the label of a conditional jump.   

References GET_CODE, ggc_alloc(), NULL_RTX, pc_rtx, pc_set(), SET_SRC, and XEXP.

◆ condjump_p()

bool condjump_p ( const rtx_insn * insn)
Return true if INSN is a (possibly) conditional jump
and nothing more.

Use of this function is deprecated, since we need to support combined
branch and compare insns.  Use any_condjump_p instead whenever possible.   

References ANY_RETURN_P, GET_CODE, ggc_alloc(), PATTERN(), SET, SET_DEST, SET_SRC, and XEXP.

Referenced by cse_insn(), dbr_schedule(), fill_eager_delay_slots(), fill_simple_delay_slots(), get_jump_flags(), relax_delay_slots(), reload_combine(), and rtl_verify_bb_layout().

◆ delete_for_peephole()

void delete_for_peephole ( rtx_insn * from,
rtx_insn * to )
Delete a range of insns from FROM to TO, inclusive.
This is for the sake of peephole optimization, so assume
that whatever these insns do will still be done by a new
peephole insn that will replace them.   

References NEXT_INSN(), NOTE_P, PREV_INSN(), rtx_insn::set_deleted(), SET_NEXT_INSN(), and SET_PREV_INSN().

◆ delete_related_insns()

rtx_insn * delete_related_insns ( rtx uncast_insn)
Delete insn INSN from the chain of insns and update label ref counts
and delete insns now unreachable.

Returns the first insn after INSN that was not deleted.

Usage of this instruction is deprecated.  Use delete_insn instead and
subsequent cfg_cleanup pass to delete unreachable code if needed.   

References BARRIER_P, delete_insn(), delete_related_insns(), rtx_insn::deleted(), GET_CODE, GET_NUM_ELEM, ggc_alloc(), i, INSN_P, JUMP_LABEL, JUMP_TABLE_DATA_P, jump_to_label_p(), LABEL_NUSES, LABEL_P, NEXT_INSN(), NOTE_P, NULL, PATTERN(), PREV_INSN(), REG_NOTE_KIND, REG_NOTES, RTVEC_ELT, table, tablejump_p(), and XEXP.

Referenced by dbr_schedule(), delete_address_reloads(), delete_computation(), delete_from_delay_slot(), delete_related_insns(), delete_scheduled_jump(), fill_simple_delay_slots(), fill_slots_from_thread(), make_return_insns(), optimize_skip(), redirect_jump_2(), relax_delay_slots(), and try_merge_delay_insns().

◆ eh_returnjump_p()

bool eh_returnjump_p ( rtx_insn * insn)
Return true if INSN is a (possibly conditional) exception-handler (EH) return insn.   

References FOR_EACH_SUBRTX, GET_CODE, ggc_alloc(), JUMP_P, and PATTERN().

Referenced by thread_prologue_and_epilogue_insns().

◆ init_label_info()

static void init_label_info ( rtx_insn * f)
Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.
This is the pathetic reminder of old fame of the jump-optimization pass
of the compiler.  Now it contains basically a set of utility functions to
operate with jumps.

Each CODE_LABEL has a count of the times it is used
stored in the LABEL_NUSES internal field, and each JUMP_INSN
has one label that it refers to stored in the
JUMP_LABEL internal field.  With this we can detect labels that
become unused because of the deletion of all the jumps that
formerly used them.  The JUMP_LABEL info is sometimes looked
at by later passes.  For return insns, it contains either a
RETURN or a SIMPLE_RETURN rtx.

The subroutines redirect_jump and invert_jump are used
from other passes as well.   
Optimize jump y; x: ... y: jumpif... x?
Don't know if it is worth bothering with.   
Optimize two cases of conditional jump to conditional jump?
This can never delete any instruction or make anything dead,
or even change what is live at any point.
So perhaps let combiner do it.   
for remaining targets for JUMP_P.  Delete any REG_LABEL_OPERAND
notes whose labels don't occur in the insn any more.   

References ggc_alloc(), INSN_P, LABEL_NUSES, LABEL_P, LABEL_PRESERVE_P, NEXT_INSN(), PATTERN(), reg_mentioned_p(), REG_NOTE_KIND, REG_NOTES, remove_note(), and XEXP.

Referenced by rebuild_jump_labels_1().

◆ invert_exp_1()

static bool invert_exp_1 ( rtx x,
rtx_insn * insn )
Invert the jump condition X contained in jump insn INSN.  Accrue the
modifications into the change group.  Return true for success.   

References comp, GET_CODE, GET_MODE, ggc_alloc(), reversed_comparison_code(), RTX_CODE, validate_change(), and XEXP.

Referenced by invert_jump_1(), and redirect_jump_2().

◆ invert_jump()

bool invert_jump ( rtx_jump_insn * jump,
rtx nlabel,
int delete_unused )
Invert the condition of the jump JUMP, and make it jump to label
NLABEL instead of where it jumps now.  Return true if successful.   

References apply_change_group(), cancel_changes(), ggc_alloc(), invert_jump_1(), JUMP_LABEL, and redirect_jump_2().

Referenced by fix_up_fall_thru_edges(), fixup_reorder_chain(), optimize_skip(), relax_delay_slots(), set_edge_can_fallthru_flag(), try_optimize_cfg(), and try_simplify_condjump().

◆ invert_jump_1()

bool invert_jump_1 ( rtx_jump_insn * jump,
rtx nlabel )
Invert the condition of the jump JUMP, and make it jump to label
NLABEL instead of where it jumps now.  Accrue changes into the
change group.  Return false if we didn't see how to perform the
inversion and redirection.   

References gcc_assert, ggc_alloc(), invert_exp_1(), JUMP_LABEL, NULL, num_validated_changes(), pc_set(), redirect_jump_1(), and SET_SRC.

Referenced by dead_or_predicable(), and invert_jump().

◆ jump_to_label_p()

bool jump_to_label_p ( const rtx_insn * insn)
Return true iff INSN is a jump and its JUMP_LABEL is a label, not
NULL or a return.   


Referenced by delete_related_insns(), fill_simple_delay_slots(), fill_slots_from_thread(), insn_current_reference_address(), and make_return_insns().

◆ make_pass_cleanup_barriers()

rtl_opt_pass * make_pass_cleanup_barriers ( gcc::context * ctxt)

References ggc_alloc().

◆ mark_all_labels()

static void mark_all_labels ( rtx_insn * f)

◆ mark_jump_label()

void mark_jump_label ( rtx x,
rtx_insn * insn,
int in_mem )
Find all CODE_LABELs referred to in X, and increment their use
counts.  If INSN is a JUMP_INSN and there is at least one
CODE_LABEL referenced in INSN as a jump target, then store the last
one in JUMP_LABEL (INSN).  For a tablejump, this must be the label
for the ADDR_VEC.  Store any other jump targets as REG_LABEL_TARGET
notes.  If INSN is an INSN or a CALL_INSN or non-target operands of
a JUMP_INSN, and there is at least one CODE_LABEL referenced in
INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
For returnjumps, the JUMP_LABEL will also be set as appropriate.

Note that two labels separated by a loop-beginning note
must be kept distinct if we have not yet done loop-optimization,
because the gap between them is where loop-optimize
will want to move invariant code to.  CROSS_JUMP tells us
that loop-optimization is done with.   

References extract_asm_operands(), ggc_alloc(), JUMP_P, mark_jump_label_1(), mark_jump_label_asm(), NULL, and PATTERN().

Referenced by emit_copy_of_insn_after(), gen_reload(), mark_all_labels(), mark_jump_label_1(), move_insn_for_shrink_wrap(), try_combine(), and try_split().

◆ mark_jump_label_1()

static void mark_jump_label_1 ( rtx x,
rtx_insn * insn,
bool in_mem,
bool is_target )
Worker function for mark_jump_label.  IN_MEM is TRUE when X occurs
within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
jump-target; when the JUMP_LABEL field of INSN should be set or a
REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
note.   

References add_reg_note(), CONSTANT_POOL_ADDRESS_P, rtx_insn::deleted(), find_reg_note(), gcc_assert, GET_CODE, get_pool_constant(), GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, rtx_sequence::insn(), JUMP_LABEL, LABEL_NUSES, LABEL_P, label_ref_label(), LABEL_REF_NONLOCAL_P, rtx_sequence::len(), mark_jump_label(), mark_jump_label_1(), NOTE_KIND, NOTE_P, NULL, PATTERN(), RTX_CODE, set_label_ref_label(), XEXP, XVECEXP, and XVECLEN.

Referenced by mark_jump_label(), mark_jump_label_1(), mark_jump_label_asm(), and maybe_propagate_label_ref().

◆ mark_jump_label_asm()

static void mark_jump_label_asm ( rtx asmop,
rtx_insn * insn )
Worker function for mark_jump_label.  Handle asm insns specially.
In particular, output operands need not be considered so we can
avoid re-scanning the replicated asm_operand.  Also, the asm_labels
need to be considered targets.   


Referenced by mark_jump_label().

◆ maybe_propagate_label_ref()

static void maybe_propagate_label_ref ( rtx_insn * jump_insn,
rtx_insn * prev_nonjump_insn )
A subroutine of mark_all_labels.  Trivially propagate a simple label
load into a jump_insn that uses it.   

References find_reg_note(), gcc_assert, GET_CODE, ggc_alloc(), JUMP_LABEL, label_ref_label(), mark_jump_label_1(), NULL, pc_set(), rtx_equal_p(), SET_DEST, SET_SRC, single_set(), and XEXP.

Referenced by mark_all_labels().

◆ onlyjump_p()

◆ pc_set()

◆ rebuild_jump_labels()

void rebuild_jump_labels ( rtx_insn * f)
This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
instructions and jumping insns that have labels as operands
(e.g. cbranchsi4).   

References rebuild_jump_labels_1().

Referenced by break_superblocks(), cfg_layout_finalize(), indirect_jump_optimize(), function_reader::parse_function(), peephole2_optimize(), rest_of_handle_combine(), rest_of_handle_cse(), rest_of_handle_cse2(), rest_of_handle_cse_after_global_opts(), and rest_of_handle_gcse2().

◆ rebuild_jump_labels_1()

static void rebuild_jump_labels_1 ( rtx_insn * f,
bool count_forced )
Worker for rebuild_jump_labels and rebuild_jump_labels_chain.   

References FOR_EACH_VEC_SAFE_ELT, forced_labels, ggc_alloc(), i, init_label_info(), LABEL_NUSES, LABEL_P, mark_all_labels(), timevar_pop(), and timevar_push().

Referenced by rebuild_jump_labels(), and rebuild_jump_labels_chain().

◆ rebuild_jump_labels_chain()

void rebuild_jump_labels_chain ( rtx_insn * chain)
This function is like rebuild_jump_labels, but doesn't run over
forced_labels.  It can be used on insn chains that aren't the 
main function chain.   

References rebuild_jump_labels_1().

Referenced by commit_edge_insertions(), and commit_one_edge_insertion().

◆ redirect_exp_1()

static void redirect_exp_1 ( rtx * loc,
rtx olabel,
rtx nlabel,
rtx_insn * insn )
Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
NLABEL as a return.  Accrue modifications into the change group.   

References ANY_RETURN_P, GET_CODE, GET_RTX_FORMAT, GET_RTX_LENGTH, ggc_alloc(), i, label_ref_label(), PATTERN(), pc_rtx, redirect_exp_1(), redirect_target(), RTX_CODE, SET, SET_DEST, SET_SRC, validate_change(), XEXP, XVECEXP, and XVECLEN.

Referenced by redirect_exp_1(), redirect_jump_1(), and redirect_jump_2().

◆ redirect_jump()

bool redirect_jump ( rtx_jump_insn * jump,
rtx nlabel,
int delete_unused )
Make JUMP go to NLABEL instead of where it jumps now.  If the old
jump target label is unused as a result, it and the code following
it may be deleted.

Normally, NLABEL will be a label, but it may also be a RETURN rtx;
in that case we are to turn the jump into a (possibly conditional)
return insn.

The return value will be true if the change was made, false if it wasn't
(this can only occur when trying to produce return insns).   

References apply_change_group(), epilogue_completed, gcc_unreachable, ggc_alloc(), rtx_jump_insn::jump_label(), redirect_jump_1(), and redirect_jump_2().

Referenced by compare_and_jump_seq(), dbr_schedule(), fix_crossing_conditional_branches(), force_nonfallthru_and_redirect(), patch_jump_insn(), reorg_redirect_jump(), try_optimize_cfg(), and try_redirect_by_replacing_jump().

◆ redirect_jump_1()

bool redirect_jump_1 ( rtx_insn * jump,
rtx nlabel )
Make JUMP go to NLABEL instead of where it jumps now.  Accrue
the modifications into the change group.  Return false if we did
not see how to do that.   

References ASM_OPERANDS_LABEL, ASM_OPERANDS_LABEL_LENGTH, extract_asm_operands(), gcc_assert, GET_CODE, ggc_alloc(), JUMP_LABEL, NULL, NULL_RTX, num_validated_changes(), PATTERN(), redirect_exp_1(), and XVECEXP.

Referenced by dead_or_predicable(), invert_jump_1(), and redirect_jump().

◆ redirect_jump_2()

void redirect_jump_2 ( rtx_jump_insn * jump,
rtx olabel,
rtx nlabel,
int delete_unused,
int invert )
Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
NLABEL.  If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
count has dropped to zero.   

References ANY_RETURN_P, confirm_change_group(), CROSSING_JUMP_P, delete_related_insns(), find_reg_note(), gcc_assert, ggc_alloc(), INSN_UID(), invert_br_probabilities(), invert_exp_1(), JUMP_LABEL, LABEL_NUSES, NULL_RTX, redirect_exp_1(), remove_note(), and XEXP.

Referenced by dead_or_predicable(), invert_jump(), and redirect_jump().

◆ redirect_target()

static rtx redirect_target ( rtx x)
A helper function for redirect_exp_1; examines its input X and returns
either a LABEL_REF around a label, or a RETURN if X was NULL.   

References ANY_RETURN_P, ggc_alloc(), NULL_RTX, and ret_rtx.

Referenced by redirect_exp_1().

◆ reg_or_subregno()

unsigned int reg_or_subregno ( const_rtx reg)
Return regno of the register REG and handle subregs too.   

References gcc_assert, GET_CODE, ggc_alloc(), REG_P, REGNO, and SUBREG_REG.

Referenced by gen_reload(), ira_get_dup_out_num(), push_reload(), and try_combine().

◆ returnjump_p()

◆ reverse_condition()

enum rtx_code reverse_condition ( enum rtx_code code)
Given an rtx-code for a comparison, return the code for the negated
comparison.  If no such code exists, return UNKNOWN.

WATCH OUT!  reverse_condition is not safe to use on a jump that might
be acting on the results of an IEEE floating point comparison, because
of the special treatment of non-signaling nans in comparisons.
Use reversed_comparison_code instead.   

References gcc_unreachable, and ggc_alloc().

Referenced by do_compare_rtx_and_jump(), emit_store_flag_force(), emit_store_flag_int(), fold_rtx(), iv_number_of_iterations(), known_cond(), noce_get_condition(), reversed_comparison_code_parts(), and simplify_comparison().

◆ reverse_condition_maybe_unordered()

enum rtx_code reverse_condition_maybe_unordered ( enum rtx_code code)
Similar, but we're allowed to generate unordered comparisons, which
makes it safe for IEEE floating-point.  Of course, we have to recognize
that the target will support them too...   

References gcc_unreachable, and ggc_alloc().

Referenced by do_compare_rtx_and_jump(), emit_store_flag(), emit_store_flag_force(), prepare_float_lib_cmp(), and reversed_comparison_code_parts().

◆ reversed_comparison()

rtx reversed_comparison ( const_rtx exp,
machine_mode mode )
Return comparison with reversed code of EXP.
Return NULL_RTX in case we fail to do the reversal.   

References exp(), ggc_alloc(), NULL, NULL_RTX, reversed_comparison_code(), simplify_gen_relational(), and XEXP.

Referenced by simplify_context::simplify_binary_operation_1(), simplify_if_then_else(), and simplify_context::simplify_unary_operation_1().

◆ reversed_comparison_code()

◆ reversed_comparison_code_parts()

enum rtx_code reversed_comparison_code_parts ( enum rtx_code code,
const_rtx arg0,
const_rtx arg1,
const rtx_insn * insn )
Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
UNKNOWN may be returned in case we are having CC_MODE compare and we don't
know whether its source is floating point or integer comparison.  Machine
description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
to help this function avoid overhead in these cases.   

References CONST_INT_P, GET_CODE, GET_MODE, GET_MODE_CLASS, GET_RTX_CLASS, ggc_alloc(), HONOR_NANS(), LABEL_P, prev_nonnote_insn(), REG_P, REVERSE_CONDITION, reverse_condition(), reverse_condition_maybe_unordered(), REVERSIBLE_CC_MODE, RTX_COMM_COMPARE, RTX_COMPARE, rtx_equal_p(), SET, SET_DEST, set_of(), SET_SRC, and XEXP.

Referenced by combine_simplify_rtx(), emit_conditional_move(), record_jump_equiv(), and reversed_comparison_code().

◆ rtx_renumbered_equal_p()

◆ signed_condition()

enum rtx_code signed_condition ( enum rtx_code code)
Similarly, return the signed version of a comparison.   

References gcc_unreachable, and ggc_alloc().

Referenced by simplify_const_relational_operation().

◆ simplejump_p()

◆ swap_condition()

◆ true_regnum()

int true_regnum ( const_rtx x)
If X is a hard register or equivalent to one or a subregister of one,
return the hard register number.  If X is a pseudo register that was not
assigned a hard register, return the pseudo register number.  Otherwise,
return -1.  Any rtx is valid for X.   

References GET_CODE, GET_MODE, ggc_alloc(), lra_in_progress, subreg_info::offset, REG_P, reg_renumber, REGNO, subreg_info::representable_p, SUBREG_BYTE, subreg_get_info(), SUBREG_REG, and true_regnum().

Referenced by choose_reload_regs(), clear_reload_reg_in_use(), deallocate_reload_reg(), decompose(), find_equiv_reg(), find_reusable_reload(), reload_cse_simplify_operands(), reload_cse_simplify_set(), reload_reg_free_for_value_p(), set_reload_reg(), and true_regnum().

◆ unsigned_condition()

enum rtx_code unsigned_condition ( enum rtx_code code)
Given a comparison CODE, return the corresponding unsigned comparison.
If CODE is an equality comparison or already an unsigned comparison,
CODE is returned.   

References gcc_unreachable, and ggc_alloc().

Referenced by do_compare_rtx_and_jump(), emit_cmp_and_jump_insns(), emit_conditional_add(), emit_conditional_move(), emit_store_flag_1(), simplify_comparison(), and unsigned_condition_p().