GCC Middle and Back End API Reference
dse.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple-ssa.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "alias.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cselib.h"
#include "tree-pass.h"
#include "explow.h"
#include "expr.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "cfgcleanup.h"
#include "calls.h"
Include dependency graph for dse.cc:

Data Structures

class  store_info
 
class  read_info_type
 
struct  insn_info_type
 
struct  dse_bb_info_type
 
struct  group_info
 
struct  deferred_change
 
struct  invariant_group_base_hasher
 
struct  note_add_store_info
 

Macros

#define MAX_OFFSET   (64 * 1024)
 

Typedefs

typedef class read_info_type *read_info_t
 
typedef struct insn_info_type *insn_info_t
 
typedef struct dse_bb_info_type *bb_info_t
 

Functions

static unsigned HOST_WIDE_INT lowpart_bitmask (int n)
 
static void print_range (FILE *file, poly_int64 offset, poly_int64 width)
 
static group_info * get_group_info (rtx base)
 
static void dse_step0 (void)
 
static void free_store_info (insn_info_t insn_info)
 
static void note_add_store (rtx loc, const_rtx expr, void *data)
 
static int emit_inc_dec_insn_before (rtx mem, rtx op, rtx dest, rtx src, rtx srcoff, void *arg)
 
static bool check_for_inc_dec_1 (insn_info_t insn_info)
 
bool check_for_inc_dec (rtx_insn *insn)
 
static void delete_dead_store_insn (insn_info_t insn_info)
 
static bool local_variable_can_escape (tree decl)
 
static bool can_escape (tree expr)
 
static void set_usage_bits (group_info *group, poly_int64 offset, poly_int64 width, tree expr)
 
static void reset_active_stores (void)
 
static void free_read_records (bb_info_t bb_info)
 
static void add_wild_read (bb_info_t bb_info)
 
static void add_non_frame_wild_read (bb_info_t bb_info)
 
static bool const_or_frame_p (rtx x)
 
static bool canon_address (rtx mem, int *group_id, poly_int64 *offset, cselib_val **base)
 
static void clear_rhs_from_active_local_stores (void)
 
static void set_position_unneeded (store_info *s_info, int pos)
 
static void set_all_positions_unneeded (store_info *s_info)
 
static bool any_positions_needed_p (store_info *s_info)
 
static bool all_positions_needed_p (store_info *s_info, poly_int64 start, poly_int64 width)
 
static rtx get_stored_val (store_info *, machine_mode, poly_int64, poly_int64, basic_block, bool)
 
static int record_store (rtx body, bb_info_t bb_info)
 
static void dump_insn_info (const char *start, insn_info_t insn_info)
 
static rtx find_shift_sequence (poly_int64 access_size, store_info *store_info, machine_mode read_mode, poly_int64 shift, bool speed, bool require_cst)
 
static void look_for_hardregs (rtx x, const_rtx pat, void *data)
 
static bool replace_read (store_info *store_info, insn_info_t store_insn, read_info_t read_info, insn_info_t read_insn, rtx *loc)
 
static void check_mem_read_rtx (rtx *loc, bb_info_t bb_info, bool used_in_call=false)
 
static void check_mem_read_use (rtx *loc, void *data)
 
static bool get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
 
static bitmap copy_fixed_regs (const_bitmap in)
 
static void scan_insn (bb_info_t bb_info, rtx_insn *insn, int max_active_local_stores)
 
static void remove_useless_values (cselib_val *base)
 
static void dse_step1 (void)
 
static void dse_step2_init (void)
 
static bool dse_step2 (void)
 
static int get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
 
static void scan_stores (store_info *store_info, bitmap gen, bitmap kill)
 
static void scan_reads (insn_info_t insn_info, bitmap gen, bitmap kill)
 
static insn_info_t find_insn_before_first_wild_read (bb_info_t bb_info)
 
static void dse_step3_scan (basic_block bb)
 
static void dse_step3_exit_block_scan (bb_info_t bb_info)
 
static void mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
 
static void dse_step3 ()
 
static void dse_confluence_0 (basic_block bb)
 
static bool dse_confluence_n (edge e)
 
static bool dse_transfer_function (int bb_index)
 
static void dse_step4 (void)
 
static void dse_step5 (void)
 
static void dse_step6 (void)
 
static void dse_step7 (void)
 
static unsigned int rest_of_handle_dse (void)
 
rtl_opt_pass * make_pass_rtl_dse1 (gcc::context *ctxt)
 
rtl_opt_pass * make_pass_rtl_dse2 (gcc::context *ctxt)
 

Variables

static bitmap_obstack dse_bitmap_obstack
 
static struct obstack dse_obstack
 
static bitmap scratch = NULL
 
static object_allocator< store_info > cse_store_info_pool ("cse_store_info_pool")
 
static object_allocator< store_info > rtx_store_info_pool ("rtx_store_info_pool")
 
static object_allocator< read_info_type > read_info_type_pool ("read_info_pool")
 
static object_allocator< insn_info_type > insn_info_type_pool ("insn_info_pool")
 
static insn_info_t active_local_stores
 
static int active_local_stores_len
 
static object_allocator< dse_bb_info_type > dse_bb_info_type_pool ("bb_info_pool")
 
static bb_info_t * bb_table
 
static object_allocator< group_info > group_info_pool ("rtx_group_info_pool")
 
static int rtx_group_next_id
 
static vec< group_info * > rtx_group_vec
 
static object_allocator< deferred_change > deferred_change_pool ("deferred_change_pool")
 
static deferred_change * deferred_change_list = NULL
 
static bool stores_off_frame_dead_at_return
 
static int globally_deleted
 
static int locally_deleted
 
static bitmap all_blocks
 
static bitmap kill_on_calls
 
static unsigned int current_position
 
static hash_table< invariant_group_base_hasher > * rtx_group_table
 

Macro Definition Documentation

◆ MAX_OFFSET

#define MAX_OFFSET   (64 * 1024)
RTL dead store elimination.
   Copyright (C) 2005-2024 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.   
This file contains three techniques for performing Dead Store
Elimination (dse).

* The first technique performs dse locally on any base address.  It
is based on cselib, which is a local value numbering technique.
This technique is local to a basic block but deals with fairly
general addresses.

* The second technique performs dse globally but is restricted to
base addresses that are either constant or are relative to the
frame_pointer.

* The third technique (which is only done after register allocation)
processes the spill slots.  This differs from the second
technique because it takes advantage of the fact that spilling is
completely free from the effects of aliasing.

Logically, dse is a backwards dataflow problem.  A store can be
deleted if it cannot be reached in the backward direction by any
use of the value being stored.  However, the local technique uses a
forwards scan of the basic block because cselib requires that the
block be processed in that order.

The pass is logically broken into 7 steps:

0) Initialization.

1) The local algorithm, as well as scanning the insns for the two
global algorithms.

2) Analysis to see if the global algorithms are necessary.  In the case
of stores based on a constant address, there must be at least two
stores to that address, to make it possible to delete some of the
stores.  In the case of stores off of the frame or spill related
stores, only one store to an address is necessary because those
stores die at the end of the function.

3) Set up the global dataflow equations based on processing the
info parsed in the first step.

4) Solve the dataflow equations.

5) Delete the insns that the global analysis has indicated are
unnecessary.

6) Delete insns that store the same value as a preceding store,
where the earlier store couldn't be eliminated.

7) Cleanup.

This step uses cselib and canon_rtx to build the largest expression
possible for each address.  This pass is a forwards pass through
each basic block.  From the point of view of the global technique,
the first pass could examine a block in either direction.  The
forwards ordering is to accommodate cselib.

We make a simplifying assumption: addresses fall into four broad
categories:

1) base has rtx_varies_p == false, offset is constant.
2) base has rtx_varies_p == false, offset variable.
3) base has rtx_varies_p == true, offset constant.
4) base has rtx_varies_p == true, offset variable.

The local passes are able to process all 4 kinds of addresses.  The
global pass only handles 1).
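
As a rough illustration of the four categories (a hypothetical C fragment,
not taken from the pass itself):

  static int a[16];

  void
  f (int i, int *p)
  {
    a[0] = 1;   /* 1) invariant base, constant offset  */
    a[i] = 2;   /* 2) invariant base, variable offset  */
    p[3] = 3;   /* 3) varying base, constant offset    */
    p[i] = 4;   /* 4) varying base, variable offset    */
  }

Only stores of the first kind are candidates for the global problem; the
local, cselib-based technique can look at all four.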

The global problem is formulated as follows:

  A store, S1, to address A, where A is not relative to the stack
  frame, can be eliminated if all paths from S1 to the end of the
  function contain another store to A before a read to A.

  If the address A is relative to the stack frame, a store S2 to A
  can be eliminated if there are no paths from S2 that reach the
  end of the function that read A before another store to A.  In
  this case S2 can be deleted if there are paths from S2 to the
  end of the function that have no reads or writes to A.  This
  second case allows stores to the stack frame to be deleted that
  would otherwise die when the function returns.  This cannot be
  done if stores_off_frame_dead_at_return is not true.  See the doc
  for that variable for when this variable is false.

  The global problem is formulated as a backwards set union
  dataflow problem where the stores are the gens and reads are the
  kills.  Set union problems are rare and require some special
  handling given our representation of bitmaps.  A straightforward
  implementation requires a lot of bitmaps filled with 1s.
  These are expensive and cumbersome in our bitmap formulation so
  care has been taken to avoid large vectors filled with 1s.  See
  the comments in bb_info and in the dataflow confluence functions
  for details.

There are two places for further enhancements to this algorithm:

1) The original dse which was embedded in a pass called flow also
did local address forwarding.  For example in

A <- r100
... <- A

flow would replace the right hand side of the second insn with a
reference to r100.  Most of the information is available to add this
to this pass.  It has not done it because it is a lot of work in
the case that either r100 is assigned to between the first and
second insn and/or the second insn is a load of part of the value
stored by the first insn.

insn 5 in gcc.c-torture/compile/990203-1.c simple case.
insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

2) The cleaning up of spill code is quite profitable.  It currently
depends on reading tea leaves and chicken entrails left by reload.
This pass depends on reload creating a singleton alias set for each
spill slot and telling the next dse pass which of these alias sets
are the singletons.  Rather than analyze the addresses of the
spills, dse's spill processing just does analysis of the loads and
stores that use those alias sets.  There are three cases where this
falls short:

  a) Reload sometimes creates the slot for one mode of access, and
  then inserts loads and/or stores for a smaller mode.  In this
  case, the current code just punts on the slot.  The proper thing
  to do is to back out and use one bit vector position for each
  byte of the entity associated with the slot.  This depends on
  KNOWING that reload always generates the accesses for each of the
  bytes in some canonical (read that easy to understand several
  passes after reload happens) way.

  b) Reload sometimes decides that the spill slot it allocated was not
  large enough for the mode and goes back and allocates more slots
  with the same mode and alias set.  The backout in this case is a
  little more graceful than (a).  In this case the slot is unmarked
  as being a spill slot and if the final address comes out to be based
  off the frame pointer, the global algorithm handles this slot.

  c) For any pass that may prespill, there is currently no
  mechanism to tell the dse pass that the slot being used has the
  special properties that reload uses.  It may be that all that is
  required is to have those passes make the same calls that reload
  does, assuming that the alias sets can be manipulated in the same
  way.   
There are limits to the size of constant offsets we model for the
global problem.  There are certainly test cases that exceed this
limit; however, it is unlikely that there are important programs
that really have constant offsets this size.   

Referenced by record_store(), and set_usage_bits().

Typedef Documentation

◆ bb_info_t

◆ insn_info_t

◆ read_info_t

Function Documentation

◆ add_non_frame_wild_read()

static void add_non_frame_wild_read ( bb_info_t bb_info)
static
Set the BB_INFO so that the last insn is marked as a wild read of
non-frame locations.   

References free_read_records(), ggc_alloc(), insn_info_type::non_frame_wild_read, and reset_active_stores().

Referenced by scan_insn().

◆ add_wild_read()

static void add_wild_read ( bb_info_t bb_info)
static
Set the BB_INFO so that the last insn is marked as a wild read.   

References free_read_records(), ggc_alloc(), reset_active_stores(), and insn_info_type::wild_read.

Referenced by check_mem_read_rtx(), record_store(), and scan_insn().

◆ all_positions_needed_p()

static bool all_positions_needed_p ( store_info * s_info,
poly_int64 start,
poly_int64 width )
inline static
Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
store are known to be needed.   

References bitmap_bit_p, gcc_assert, ggc_alloc(), i, poly_int< N, C >::is_constant(), lowpart_bitmask(), and UNLIKELY.

Referenced by check_mem_read_rtx(), and record_store().
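
For the common small-store representation, the check can be pictured as a
mask comparison.  This is an illustrative fragment only: it assumes the
positions_needed/small_bitmask members listed under record_store(), assumes
constant START/WIDTH, and ignores the large-bitmap case the real function
also handles.

  /* Bytes [start, start + width) are needed iff every corresponding
     bit is still set in the store's needed-byte mask.  */
  unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
  return (s_info->positions_needed.small_bitmask & mask) == mask;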

◆ any_positions_needed_p()

static bool any_positions_needed_p ( store_info * s_info)
inline static
Return TRUE if any bytes from S_INFO store are needed.   

References gcc_checking_assert, ggc_alloc(), HOST_WIDE_INT_0U, and UNLIKELY.

Referenced by record_store().

◆ can_escape()

static bool can_escape ( tree expr)
static
Return whether EXPR can possibly escape the current function scope.   

References DECL_EXTERNAL, DECL_P, get_base_address(), local_variable_can_escape(), may_be_aliased(), TREE_STATIC, and VAR_P.

Referenced by set_usage_bits().

◆ canon_address()

static bool canon_address ( rtx mem,
int * group_id,
poly_int64 * offset,
cselib_val ** base )
static
Take all reasonable action to put the address of MEM into the form
that we can do analysis on.

The gold standard is to get the address into the form: address +
OFFSET where address is something that rtx_varies_p considers a
constant.  When we can get the address in this form, we can do
global analysis on it.  Note that for constant bases, address is
not actually returned, only the group_id.  The address can be
obtained from that.

If that fails, we try cselib to get a value we can at least use
locally.  If that fails we return false.

The GROUP_ID is set to -1 for cselib bases and the index of the
group for non_varying bases.

References ADDR_SPACE_GENERIC_P, canon_rtx(), const_or_frame_p(), cselib_expand_value_rtx(), cselib_lookup(), dump_file, dump_flags, get_address_mode(), GET_CODE, get_group_info(), GET_MODE, ggc_alloc(), group_info::id, MEM_ADDR_SPACE, NULL, NULL_RTX, offset, print_dec(), print_inline_rtx(), scratch, strip_offset_and_add(), TDF_DETAILS, and XEXP.

Referenced by check_mem_read_rtx(), and record_store().

◆ check_for_inc_dec()

bool check_for_inc_dec ( rtx_insn * insn)
Entry point for postreload.  If you work on reload_cse, or you need this
anywhere else, consider if you can provide register liveness information
and add a parameter to this function so that it can be passed down in
insn_info.fixed_regs_live.   

References emit_inc_dec_insn_before(), find_reg_note(), insn_info_type::fixed_regs_live, for_each_inc_dec(), FOR_EACH_SUBRTX, GET_CODE, GET_RTX_CLASS, ggc_alloc(), insn_info_type::insn, NULL, NULL_RTX, PATTERN(), and RTX_AUTOINC.

Referenced by reload_cse_simplify().

◆ check_for_inc_dec_1()

static bool check_for_inc_dec_1 ( insn_info_t insn_info)
static
Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
is there, is split into a separate insn.
Return true on success (or if there was nothing to do), false on failure.   

References emit_inc_dec_insn_before(), find_reg_note(), for_each_inc_dec(), FOR_EACH_SUBRTX, GET_CODE, GET_RTX_CLASS, ggc_alloc(), insn_info_type::insn, NULL_RTX, PATTERN(), and RTX_AUTOINC.

Referenced by delete_dead_store_insn(), and dse_step5().

◆ check_mem_read_rtx()

◆ check_mem_read_use()

static void check_mem_read_use ( rtx * loc,
void * data )
static
A note_uses callback in which DATA points to the INSN_INFO, as for
check_mem_read_rtx.  Nullify the pointer if check_mem_read_rtx returns
true for any part of *LOC.

References check_mem_read_rtx(), FOR_EACH_SUBRTX_PTR, ggc_alloc(), and MEM_P.

Referenced by scan_insn().

◆ clear_rhs_from_active_local_stores()

static void clear_rhs_from_active_local_stores ( void )
static

◆ const_or_frame_p()

static bool const_or_frame_p ( rtx x)
static
Return true if X is a constant or one of the registers that behave
as a constant over the life of a function.  This is equivalent to
!rtx_varies_p for memory addresses.   

References arg_pointer_rtx, CONSTANT_P, fixed_regs, frame_pointer_rtx, GET_CODE, ggc_alloc(), hard_frame_pointer_rtx, and pic_offset_table_rtx.

Referenced by canon_address().
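
A rough sketch of the test described above, built from the referenced
registers and macros; the arg-pointer handling and other details are
assumptions, not a copy of the real function.

  static bool
  const_or_frame_p_sketch (rtx x)
  {
    if (CONSTANT_P (x))
      return true;

    if (GET_CODE (x) == REG)
      /* The frame, hard frame and PIC registers behave as constants
         over the life of a function; the arg pointer only does so
         when it is a fixed register.  */
      return (x == frame_pointer_rtx
              || x == hard_frame_pointer_rtx
              || x == pic_offset_table_rtx
              || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]));

    return false;
  }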

◆ copy_fixed_regs()

static bitmap copy_fixed_regs ( const_bitmap in)
static
Return a bitmap of the fixed registers contained in IN.   

References ALLOC_REG_SET, bitmap_and(), fixed_reg_set, ggc_alloc(), and NULL.

Referenced by scan_insn().

◆ delete_dead_store_insn()

◆ dse_confluence_0()

static void dse_confluence_0 ( basic_block bb)
static
Confluence function for blocks with no successors.  Create an out
set from the gen set of the exit block.  This block logically has
the exit block as a successor.   

References bb_table, BITMAP_ALLOC, bitmap_copy(), dse_bitmap_obstack, EXIT_BLOCK, dse_bb_info_type::gen, ggc_alloc(), and basic_block_def::index.

Referenced by dse_step4().

◆ dse_confluence_n()

static bool dse_confluence_n ( edge e)
static
Propagate the information from the in set of the dest of E to the
out set of the src of E.  If the various in or out sets are not
there, that means they are all ones.   

References bb_table, BITMAP_ALLOC, bitmap_and_into(), bitmap_copy(), dse_bitmap_obstack, and ggc_alloc().

Referenced by dse_step4().
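
An illustrative sketch of this confluence step, assembled from the
referenced helpers; the in/out bitmap field names are assumed from
dse_bb_info_type and the real function may differ in detail.

  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
        bitmap_and_into (src_info->out, dest_info->in);
      else
        {
          src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (src_info->out, dest_info->in);
        }
    }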

◆ dse_step0()

◆ dse_step1()

◆ dse_step2()

◆ dse_step2_init()

◆ dse_step3()

◆ dse_step3_exit_block_scan()

static void dse_step3_exit_block_scan ( bb_info_t bb_info)
static
Set the gen set of the exit block, and also any block with no
successors that does not have a wild read.   

References bitmap_ior_into(), FOR_EACH_VEC_ELT, group_info::frame_related, ggc_alloc(), group_info::group_kill, i, group_info::process_globally, rtx_group_vec, and stores_off_frame_dead_at_return.

Referenced by dse_step3().

◆ dse_step3_scan()

static void dse_step3_scan ( basic_block bb)
static
Scan the insns in BB_INFO starting at PTR and going to the top of
the block in order to build the gen and kill sets for the block.
We start at ptr which may be the last insn in the block or may be
the first insn with a wild read.  In the latter case we are able to
skip the rest of the block because it just does not matter:
anything that happens is hidden by the wild read.   

References bb_table, BITMAP_ALLOC, bitmap_clear(), BITMAP_FREE, dse_bitmap_obstack, find_insn_before_first_wild_read(), ggc_alloc(), basic_block_def::index, insn_info_type::insn, INSN_P, insn_info_type::prev_insn, scan_reads(), scan_stores(), and insn_info_type::store_rec.

Referenced by dse_step3().

◆ dse_step4()

◆ dse_step5()

◆ dse_step6()

◆ dse_step7()

◆ dse_transfer_function()

static bool dse_transfer_function ( int bb_index)
static
Propagate the info from the out to the in set of BB_INDEX's basic
  block.  There are three cases:

  1) The block has no kill set.  In this case the kill set is all
  ones.  It does not matter what the out set of the block is, none of
  the info can reach the top.  The only thing that reaches the top is
  the gen set and we just copy the set.

  2) There is a kill set but no out set and bb has successors.  In
  this case we just return. Eventually an out set will be created and
  it is better to wait than to create a set of ones.

  3) There is both a kill and out set.  We apply the obvious transfer
  function.

References bb_table, BITMAP_ALLOC, bitmap_copy(), bitmap_ior_and_compl(), dse_bitmap_obstack, and ggc_alloc().

Referenced by dse_step4().
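
For case 3 the transfer function is the usual gen/kill form,
in = gen | (out & ~kill), which maps directly onto the referenced
bitmap_ior_and_compl().  A sketch, with field names assumed from
dse_bb_info_type:

  /* Returns true if the in set changed, which is what the dataflow
     solver needs to know.  */
  return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                               bb_info->out, bb_info->kill);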

◆ dump_insn_info()

static void dump_insn_info ( const char * start,
insn_info_t insn_info )
static

◆ emit_inc_dec_insn_before()

◆ find_insn_before_first_wild_read()

static insn_info_t find_insn_before_first_wild_read ( bb_info_t bb_info)
static
Return the insn in BB_INFO before the first wild read or if there
are no wild reads in the block, return the last insn.   

References ggc_alloc(), NULL, insn_info_type::prev_insn, and insn_info_type::wild_read.

Referenced by dse_step3_scan().

◆ find_shift_sequence()

static rtx find_shift_sequence ( poly_int64 access_size,
store_info * store_info,
machine_mode read_mode,
poly_int64 shift,
bool speed,
bool require_cst )
static
If the modes are different and the value's source and target do not
line up, we need to extract the value from lower part of the rhs of
the store, shift it, and then put it into a form that can be shoved
into the read_insn.  This function generates a right SHIFT of a
value that is at least ACCESS_SIZE bytes wide of READ_MODE.  The
shift sequence is returned or NULL if we failed to find a
shift.   

References BITS_PER_WORD, store_info::const_rhs, CONSTANT_P, copy_rtx(), COSTS_N_INSNS, emit_insn(), emit_move_insn(), end_sequence(), expand_binop(), extract_low_bits(), FOR_EACH_MODE_IN_CLASS, gen_int_shift_amount(), gen_reg_rtx(), get_insns(), GET_MODE, GET_MODE_BITSIZE(), GET_MODE_SIZE(), ggc_alloc(), insn_cost(), INSN_P, known_le, store_info::mem, new_mode(), NEXT_INSN(), NULL, NULL_RTX, offset, OPTAB_DIRECT, store_info::rhs, set_src_cost(), shift, simplify_const_binary_operation(), simplify_gen_subreg(), simplify_subreg(), smallest_int_mode_for_size(), start_sequence(), subreg_lowpart_offset(), subreg_offset_from_lsb(), targetm, and TRULY_NOOP_TRUNCATION_MODES_P.

Referenced by get_stored_val().

◆ free_read_records()

static void free_read_records ( bb_info_t bb_info)
static
Free all READ_REC of the LAST_INSN of BB_INFO.   

References ggc_alloc(), read_info_type_pool, and insn_info_type::read_rec.

Referenced by add_non_frame_wild_read(), and add_wild_read().

◆ free_store_info()

◆ get_bitmap_index()

static int get_bitmap_index ( group_info * group_info,
HOST_WIDE_INT offset )
static
Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
there, return 0.   

References ggc_alloc(), offset, group_info::offset_map_n, group_info::offset_map_p, group_info::offset_map_size_n, and group_info::offset_map_size_p.

Referenced by dse_step5(), scan_reads(), and scan_stores().
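
A sketch of the lookup using the referenced offset maps (negative and
non-negative offsets are kept in separate arrays); details of the real
function may differ.

  static int
  get_bitmap_index_sketch (group_info *group_info, HOST_WIDE_INT offset)
  {
    if (offset < 0)
      {
        HOST_WIDE_INT offset_p = -offset;
        if (offset_p >= group_info->offset_map_size_n)
          return 0;
        return group_info->offset_map_n[offset_p];
      }

    if (offset >= group_info->offset_map_size_p)
      return 0;
    return group_info->offset_map_p[offset];
  }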

◆ get_call_args()

static bool get_call_args ( rtx call_insn,
tree fn,
rtx * args,
int nargs )
static
Get arguments passed to CALL_INSN.  Return TRUE if successful.
So far it only handles arguments passed in registers.   

References CALL_INSN_FUNCTION_USAGE, CONST_INT_P, cselib_expand_value_rtx(), gen_int_mode(), GET_CODE, GET_MODE, GET_MODE_SIZE(), ggc_alloc(), INTVAL, is_int_mode(), NULL_RTX, REG_P, REGNO, scratch, targetm, TREE_CHAIN, TREE_TYPE, TREE_VALUE, TYPE_ARG_TYPES, TYPE_MODE, void_list_node, and XEXP.

Referenced by scan_insn().

◆ get_group_info()

◆ get_stored_val()

static rtx get_stored_val ( store_info * store_info,
machine_mode read_mode,
poly_int64 read_offset,
poly_int64 read_width,
basic_block bb,
bool require_cst )
static
Helper function for replace_read and record_store.
Attempt to return a value of mode READ_MODE stored in STORE_INFO,
consisting of READ_WIDTH bytes starting from READ_OFFSET.  Return NULL
if not successful.  If REQUIRE_CST is true, return always constant.   

References const0_rtx, CONST_INT_P, store_info::const_rhs, CONSTANT_P, copy_rtx(), extract_low_bits(), find_shift_sequence(), gcc_assert, gen_int_mode(), gen_lowpart, GET_MODE, GET_MODE_BITSIZE(), GET_MODE_CLASS, GET_MODE_SIZE(), ggc_alloc(), HOST_BITS_PER_WIDE_INT, HOST_WIDE_INT_1, int_mode_for_mode(), INTVAL, poly_int< N, C >::is_constant(), known_le, store_info::mem, NULL_RTX, store_info::offset, optimize_bb_for_speed_p(), store_info::rhs, shift, targetm, VECTOR_MODE_P, and store_info::width.

Referenced by record_store(), and replace_read().

◆ local_variable_can_escape()

static bool local_variable_can_escape ( tree decl)
static
Return whether DECL, a local variable, can possibly escape the current
function scope.   

References cfun, ggc_alloc(), NULL, and TREE_ADDRESSABLE.

Referenced by can_escape().

◆ look_for_hardregs()

static void look_for_hardregs ( rtx x,
const_rtx pat,
void * data )
static
Call back for note_stores to find the hard regs set or clobbered by
insn.  Data is a bitmap of the hardregs set so far.   

References bitmap_set_range(), HARD_REGISTER_P, REG_NREGS, REG_P, REGNO, and regs_set.

Referenced by replace_read().

◆ lowpart_bitmask()

static unsigned HOST_WIDE_INT lowpart_bitmask ( int n)
static
Return a bitmask with the first N low bits set.   

References ggc_alloc(), HOST_BITS_PER_WIDE_INT, and HOST_WIDE_INT_M1U.

Referenced by all_positions_needed_p(), and record_store().
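
A minimal sketch consistent with the description and the referenced
macros; boundary behaviour for n == 0 or n == HOST_BITS_PER_WIDE_INT is
glossed over here.

  static unsigned HOST_WIDE_INT
  lowpart_bitmask_sketch (int n)
  {
    unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
    return mask >> (HOST_BITS_PER_WIDE_INT - n);
  }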

◆ make_pass_rtl_dse1()

rtl_opt_pass * make_pass_rtl_dse1 ( gcc::context * ctxt)

References ggc_alloc().

◆ make_pass_rtl_dse2()

rtl_opt_pass * make_pass_rtl_dse2 ( gcc::context * ctxt)

References ggc_alloc().

◆ mark_reachable_blocks()

static void mark_reachable_blocks ( sbitmap unreachable_blocks,
basic_block bb )
static
Find all of the blocks that are not backwards reachable from the
exit block or any block with no successors (BB).  These are the
infinite loops or infinite self loops.  These blocks will still
have their bits set in UNREACHABLE_BLOCKS.   

References bitmap_bit_p, bitmap_clear_bit(), FOR_EACH_EDGE, ggc_alloc(), basic_block_def::index, mark_reachable_blocks(), and basic_block_def::preds.

Referenced by dse_step3(), and mark_reachable_blocks().

◆ note_add_store()

static void note_add_store ( rtx loc,
const_rtx expr,
void * data )
static
Callback for emit_inc_dec_insn_before via note_stores.
Check if a register is clobbered which is live afterwards.   

References note_add_store_info::current, END_REGNO(), note_add_store_info::failure, note_add_store_info::first, note_add_store_info::fixed_regs_live, NEXT_INSN(), PATTERN(), REG_P, reg_referenced_p(), REGNO, and REGNO_REG_SET_P.

Referenced by emit_inc_dec_insn_before().

◆ print_range()

static void print_range ( FILE * file,
poly_int64 offset,
poly_int64 width )
static
Print offset range [OFFSET, OFFSET + WIDTH) to FILE.   

References ggc_alloc(), offset, print_dec(), and SIGNED.

Referenced by check_mem_read_rtx(), and record_store().
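
An illustrative sketch built from the referenced helpers (print_dec with
the SIGNED sign flag); the exact output format of the real function may
differ.

  fprintf (file, "[");
  print_dec (offset, file, SIGNED);
  fprintf (file, "..");
  print_dec (offset + width, file, SIGNED);
  fprintf (file, ")");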

◆ record_store()

static int record_store ( rtx body,
bb_info_t bb_info )
static
BODY is an instruction pattern that belongs to INSN.  Return 1 if
there is a candidate store, after adding it to the appropriate
local store group if so.   

References active_local_stores, active_local_stores_len, add_wild_read(), store_info::addrspace, all_positions_needed_p(), any_positions_needed_p(), BITMAP_ALLOC, BLOCK_FOR_INSN(), store_info::bmap, insn_info_type::cannot_delete, canon_address(), group_info::canon_base_addr, canon_output_dependence(), canon_rtx(), clear_rhs_from_active_local_stores(), const0_rtx, CONST_INT_P, store_info::const_rhs, CONSTANT_P, insn_info_type::contains_cselib_groups, store_info::count, store_info::cse_base, cse_store_info_pool, cselib_expand_value_rtx(), delete_dead_store_insn(), df_find_single_def_src(), dse_bitmap_obstack, dump_file, dump_flags, end_sequence(), find_reg_note(), FLOAT_MODE_P, get_address_mode(), GET_CODE, get_insns(), GET_MODE, GET_MODE_SIZE(), get_stored_val(), ggc_alloc(), store_info::group_id, HOST_BITS_PER_WIDE_INT, i, insn_info_type::insn, INSN_UID(), poly_int< N, C >::is_constant(), store_info::is_large, store_info::is_set, known_eq, store_info::large, last, lowpart_bitmask(), MAX, MAX_OFFSET, may_be_sp_based_p(), store_info::mem, store_info::mem_addr, MEM_ADDR_SPACE, MEM_EXPR, MEM_P, MEM_SIZE, MEM_SIZE_KNOWN_P, MEM_VOLATILE_P, mems_same_for_tbaa_p(), MIN, store_info::next, insn_info_type::next_local_store, NULL, NULL_RTX, store_info::offset, offset, PATTERN(), plus_constant(), store_info::positions_needed, print_range(), store_info::redundant_reason, REG_P, reload_completed, store_info::rhs, rtx_equal_p(), rtx_group_vec, rtx_store_info_pool, scratch, SET, set_all_positions_unneeded(), SET_DEST, set_position_unneeded(), SET_SRC, set_usage_bits(), store_info::small_bitmask, insn_info_type::stack_pointer_based, start_sequence(), insn_info_type::store_rec, TDF_DETAILS, cselib_val::val_rtx, store_info::width, and XEXP.

Referenced by scan_insn().

◆ remove_useless_values()

static void remove_useless_values ( cselib_val * base)
static
Remove BASE from the set of active_local_stores.  This is a
callback from cselib that is used to get rid of the stores in
active_local_stores.   

References active_local_stores, active_local_stores_len, store_info::cse_base, free_store_info(), store_info::group_id, last, store_info::next, insn_info_type::next_local_store, NULL, and insn_info_type::store_rec.

◆ replace_read()

static bool replace_read ( store_info * store_info,
insn_info_t store_insn,
read_info_t read_info,
insn_info_t read_insn,
rtx * loc )
static
Take a sequence of:
  A <- r1
  ...
  ... <- A

and change it into
r2 <- r1
A <- r1
...
... <- r2

or

r3 <- extract (r1)
r3 <- r3 >> shift
r2 <- extract (r3)
... <- r2

or

r2 <- extract (r1)
... <- r2

Depending on the alignment and the mode of the store and
subsequent load.


The STORE_INFO and STORE_INSN are for the store and READ_INFO
and READ_INSN are for the read.  Return true if the replacement
went ok.   

References BITMAP_ALLOC, bitmap_and_into(), bitmap_empty_p(), BITMAP_FREE, BLOCK_FOR_INSN(), copy_to_mode_reg(), dbg_cnt(), deferred_change_list, deferred_change_pool, df_print_regset(), dump_file, dump_flags, emit_insn_before(), end_sequence(), FOR_EACH_SUBRTX, GET_CODE, get_insns(), GET_MODE, GET_MODE_NAME, GET_RTX_CLASS, get_stored_val(), ggc_alloc(), insn_invalid_p(), INSN_UID(), insns, look_for_hardregs(), store_info::mem, NEXT_INSN(), note_stores(), NULL_RTX, print_simple_rtl(), r, read_info_type_pool, reg_obstack, REG_P, regs_set, RTX_AUTOINC, shallow_copy_rtx(), start_sequence(), SUBREG_REG, TDF_DETAILS, this_insn, validate_change(), word_mode, and WORD_REGISTER_OPERATIONS.

Referenced by check_mem_read_rtx().

◆ reset_active_stores()

static void reset_active_stores ( void )
static

◆ rest_of_handle_dse()

static unsigned int rest_of_handle_dse ( void )
static
-------------------------------------------------------------------------
DSE
-------------------------------------------------------------------------  
Callback for running pass_rtl_dse.   

References CDI_DOMINATORS, cfun, cleanup_cfg(), delete_unreachable_blocks(), df_analyze(), DF_DEFER_INSN_RESCAN, DF_LR_RUN_DCE, df_note_add_problem(), df_set_flags(), dse_step0(), dse_step1(), dse_step2(), dse_step2_init(), dse_step3(), dse_step4(), dse_step5(), dse_step6(), dse_step7(), dump_file, dump_flags, free_dominance_info(), ggc_alloc(), globally_deleted, locally_deleted, purge_all_dead_edges(), and TDF_DETAILS.

◆ scan_insn()

◆ scan_reads()

◆ scan_stores()

static void scan_stores ( store_info * store_info,
bitmap gen,
bitmap kill )
static

◆ set_all_positions_unneeded()

static void set_all_positions_unneeded ( store_info * s_info)
inline static
Mark the whole store S_INFO as unneeded.   

References bitmap_set_range(), gcc_checking_assert, ggc_alloc(), HOST_WIDE_INT_0U, and UNLIKELY.

Referenced by record_store().

◆ set_position_unneeded()

static void set_position_unneeded ( store_info * s_info,
int pos )
inline static
Mark byte POS bytes from the beginning of store S_INFO as unneeded.   

References bitmap_set_bit, ggc_alloc(), HOST_WIDE_INT_1U, and UNLIKELY.

Referenced by record_store().

◆ set_usage_bits()

Variable Documentation

◆ active_local_stores

insn_info_t active_local_stores
static
The linked list of stores that are under consideration in this
basic block.   

Referenced by check_mem_read_rtx(), clear_rhs_from_active_local_stores(), dse_step1(), record_store(), remove_useless_values(), reset_active_stores(), and scan_insn().

◆ active_local_stores_len

◆ all_blocks

◆ bb_table

◆ cse_store_info_pool

object_allocator< store_info > cse_store_info_pool ("cse_store_info_pool")
static

◆ current_position

unsigned int current_position
static
The number of bits used in the global bitmaps.   

Referenced by dse_step2(), and gcov_write_length().

◆ deferred_change_list

deferred_change* deferred_change_list = NULL
static

Referenced by dse_step1(), and replace_read().

◆ deferred_change_pool

object_allocator< deferred_change > deferred_change_pool ("deferred_change_pool")
static

Referenced by dse_step1(), dse_step7(), and replace_read().

◆ dse_bb_info_type_pool

object_allocator< dse_bb_info_type > dse_bb_info_type_pool ("bb_info_pool")
static

Referenced by dse_step1(), and dse_step7().

◆ dse_bitmap_obstack

bitmap_obstack dse_bitmap_obstack
static
Obstack for the DSE dataflow bitmaps.  We don't want to put these
on the default obstack because these bitmaps can grow quite large
(~2GB for the small (!) test case of PR54146) and we'll hold on to
all that memory until the end of the compiler run.
As a bonus, delete_tree_live_info can destroy all the bitmaps by just
releasing the whole obstack.   

Referenced by dse_confluence_0(), dse_confluence_n(), dse_step0(), dse_step3(), dse_step3_scan(), dse_step7(), dse_transfer_function(), get_group_info(), and record_store().

◆ dse_obstack

struct obstack dse_obstack
static
Obstack for other data.  As for above: Kinda nice to be able to
throw it all away at the end in one big sweep.   

Referenced by dse_step0(), dse_step2_init(), and dse_step7().

◆ globally_deleted

int globally_deleted
static
Counter for stats.   

Referenced by dse_step0(), dse_step5(), and rest_of_handle_dse().

◆ group_info_pool

object_allocator< group_info > group_info_pool ("rtx_group_info_pool")
static

Referenced by dse_step7(), and get_group_info().

◆ insn_info_type_pool

object_allocator< insn_info_type > insn_info_type_pool ("insn_info_pool")
static

Referenced by dse_step7(), and scan_insn().

◆ kill_on_calls

bitmap kill_on_calls
static
Locations that are killed by calls in the global phase.   

Referenced by dse_step0(), dse_step2(), and scan_reads().

◆ locally_deleted

int locally_deleted
static

◆ read_info_type_pool

object_allocator< read_info_type > read_info_type_pool ("read_info_pool")
static

◆ rtx_group_next_id

int rtx_group_next_id
static
Index into the rtx_group_vec.   

Referenced by dse_step0(), and get_group_info().

◆ rtx_group_table

hash_table<invariant_group_base_hasher>* rtx_group_table
static
Tables of group_info structures, hashed by base value.   

Referenced by dse_step0(), dse_step1(), dse_step7(), and get_group_info().

◆ rtx_group_vec

◆ rtx_store_info_pool

object_allocator< store_info > rtx_store_info_pool ("rtx_store_info_pool")
static

◆ scratch

◆ stores_off_frame_dead_at_return

bool stores_off_frame_dead_at_return
static
This is true except if cfun->stdarg -- i.e. we cannot do
this for vararg functions because they play games with the frame.   

Referenced by dse_step0(), dse_step1(), dse_step2_init(), and dse_step3_exit_block_scan().