GCC Middle and Back End API Reference
tree-ssa-dse.cc File Reference
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
#include "tree-ssa-loop.h"
#include "tree-ssa-dse.h"
#include "builtins.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-eh.h"
#include "cfganal.h"
#include "cgraph.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "target.h"
#include "tree-ssa-loop-niter.h"
#include "cfgloop.h"
#include "tree-data-ref.h"
#include "internal-fn.h"
#include "tree-ssa.h"
Include dependency graph for tree-ssa-dse.cc:

Functions

static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *)
 
static bool initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok=false)
 
static bool valid_ao_ref_kill_for_dse (ao_ref *ref)
 
static bool valid_ao_ref_for_dse (ao_ref *ref)
 
static bool get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset, HOST_WIDE_INT *size)
 
static bool get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset, HOST_WIDE_INT *size)
 
static bool get_byte_range (ao_ref *copy, ao_ref *ref, bool kill, HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
 
static void clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
 
static void clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
 
static bool setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
 
static void compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail, gimple *stmt)
 
static void maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
 
static void maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
 
static void decrement_count (gimple *stmt, int decrement)
 
static void increment_start_addr (gimple *stmt, tree *where, int increment)
 
static void maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
 
static void maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
 
static bool live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
 
static bool check_name (tree, tree *idx, void *data)
 
static void dse_optimize_redundant_stores (gimple *stmt)
 
static bool contains_phi_arg (gphi *phi, tree arg)
 
static dse_store_status dse_classify_store (ao_ref *ref, gimple *stmt, bool byte_tracking_enabled, sbitmap live_bytes, bool *by_clobber_p, tree stop_at_vuse, int &cnt, bitmap visited)
 
dse_store_status dse_classify_store (ao_ref *ref, gimple *stmt, bool byte_tracking_enabled, sbitmap live_bytes, bool *by_clobber_p, tree stop_at_vuse)
 
void delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi, const char *type, bitmap need_eh_cleanup, bitmap need_ab_cleanup)
 
static bool dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
 
static void dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
 
gimple_opt_pass * make_pass_dse (gcc::context *ctxt)
 

Variables

static bitmap need_eh_cleanup
 
static bitmap need_ab_cleanup
 
static hash_map< gimple *, data_reference_p > * dse_stmt_to_dr_map
 

Function Documentation

◆ check_name()

static bool check_name ( tree ,
tree * idx,
void * data )
static
Callback for dse_classify_store calling for_each_index.  Verify that
indices are invariant in the loop with backedge PHI in basic-block DATA.   
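
For illustration (a hypothetical fragment, not from the GCC sources), a loop-varying index fails this check:

    for (int i = 0; i < n; i++)
      a[i] = 0;  /* 'i' is defined inside the loop, so the index is not
                    invariant and the store cannot be followed across
                    the backedge PHI */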

References CDI_DOMINATORS, dominated_by_p(), gimple_bb(), SSA_NAME_DEF_STMT, SSA_NAME_IS_DEFAULT_DEF, and TREE_CODE.

Referenced by dse_classify_store().

◆ clear_bytes_written_by()

static void clear_bytes_written_by ( sbitmap live_bytes,
gimple * stmt,
ao_ref * ref )
static
Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
address written by STMT must match the one found in REF, which must
have its base address previously initialized.

This routine must be conservative.  If we don't know the offset or
actual size written, assume nothing was written.   
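
A hedged illustration of the conservative rule (hypothetical fragment):

    char a[8];
    memset (a, 0, 8);      /* REF: bytes 0..7 of 'a' start out live */
    memset (a + i, 1, 2);  /* offset 'i' is unknown at compile time, so
                              conservatively nothing is cleared from
                              LIVE_BYTES */
    memset (a, 1, 4);      /* known offset and size: clears bytes 0..3 */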

References as_a(), clear_live_bytes_for_ref(), dyn_cast(), get_modref_function_summary(), initialize_ao_ref_for_dse(), kill, and modref_summary::kills.

Referenced by dse_classify_store().

◆ clear_live_bytes_for_ref()

static void clear_live_bytes_for_ref ( sbitmap live_bytes,
ao_ref * ref,
ao_ref * write )
static
Update LIVE_BYTES tracking REF for a write to WRITE:
verify that we have the same base memory address and that the
write has a known size and overlaps with REF.

References ao_ref::base, bitmap_clear_range(), get_byte_range(), OEP_ADDRESS_OF, operand_equal_p(), and valid_ao_ref_kill_for_dse().

Referenced by clear_bytes_written_by().

◆ compute_trims()

static void compute_trims ( ao_ref * ref,
sbitmap live,
int * trim_head,
int * trim_tail,
gimple * stmt )
static
Compute the number of stored bytes that we can trim from the head and
tail of REF.  LIVE is the bitmap of stores to REF that are still live.

Store the number of bytes trimmed from the head and tail in TRIM_HEAD
and TRIM_TAIL respectively.

STMT is the statement being trimmed and is used for debugging dump
output only.   
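
A worked example with hypothetical values:

    /* REF describes a 16-byte store and LIVE = {4..11}:
         *trim_head = 4   (bytes 0..3 are dead at the head)
         *trim_tail = 4   (bytes 12..15 are dead at the tail)
       so only the middle 8 bytes still need to be stored.  */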

References ao_ref_alignment(), ao_ref::base, bitmap_first_set_bit(), bitmap_last_set_bit(), compare_tree_int(), dump_file, dump_flags, i, poly_int< N, C >::is_constant(), known_eq, ao_ref::max_size, ao_ref::offset, wi::popcount(), print_gimple_stmt(), ao_ref::size, TDF_DETAILS, TREE_CODE, TREE_TYPE, and TYPE_SIZE_UNIT.

Referenced by maybe_trim_complex_store(), maybe_trim_constructor_store(), and maybe_trim_memstar_call().

◆ contains_phi_arg()

static bool contains_phi_arg ( gphi * phi,
tree arg )
static
Return whether PHI contains ARG as an argument.   

References gimple_phi_arg_def(), gimple_phi_num_args(), and i.

Referenced by dse_classify_store().

◆ decrement_count()

static void decrement_count ( gimple * stmt,
int decrement )
static
STMT is a memcpy, memmove or memset.  Decrement the number of bytes
copied/set by DECREMENT.   

References gcc_assert, gimple_call_arg_ptr(), TREE_CODE, TREE_INT_CST_LOW, TREE_TYPE, and wide_int_to_tree().

Referenced by maybe_trim_memstar_call().

◆ delete_dead_or_redundant_assignment()

void delete_dead_or_redundant_assignment ( gimple_stmt_iterator * gsi,
const char * type,
bitmap need_eh_cleanup,
bitmap need_ab_cleanup )

◆ delete_dead_or_redundant_call()

static void delete_dead_or_redundant_call ( gimple_stmt_iterator * gsi,
const char * type )
static
Dead and redundant store elimination
This file implements dead store elimination.

A dead store is a store into a memory location which will later be
overwritten by another store without any intervening loads.  In this
case the earlier store can be deleted or trimmed if the store
was partially dead.

A redundant store is a store into a memory location which stores
the exact same value as a prior store to the same memory location.
While this can often be handled by dead store elimination, removing
the redundant store is often better than removing or trimming the
dead store.

In our SSA + virtual operand world we use immediate uses of virtual
operands to detect these cases.  If a store's virtual definition
is used precisely once by a later store to the same location which
post dominates the first store, then the first store is dead.  If
the data stored is the same, then the second store is redundant.

The single use of the store's virtual definition ensures that
there are no intervening aliased loads and the requirement that
the second store post dominate the first ensures that if the earlier
store executes, then the later stores will execute before the function
exits.

It may help to think of this as first moving the earlier store to
the point immediately before the later store.  Again, the single
use of the virtual definition and the post-dominance relationship
ensure that such movement would be safe.  Clearly if there are
back to back stores, then the second makes the first dead.  If
the second store stores the same value, then the second store is
redundant.

Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
may also help in understanding this code since it discusses the
relationship between dead store and redundant load elimination.  In
fact, they are the same transformation applied to different views of
the CFG.   
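
For illustration (a hypothetical C fragment, not from the sources), both cases side by side:

    int x, y;
    x = 1;   /* dead store: overwritten below without an intervening read */
    x = 2;
    y = 0;
    y = 0;   /* redundant store: stores the same value as the prior store */
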
Delete a dead call at GSI, which is a mem* call of some kind.   

References bitmap_set_bit, dump_file, dump_flags, gimple_bb(), gimple_build_assign(), gimple_call_arg(), gimple_call_lhs(), gsi_remove(), gsi_replace(), gsi_stmt(), basic_block_def::index, need_eh_cleanup, print_gimple_stmt(), release_defs(), TDF_DETAILS, and unlink_stmt_vdef().

Referenced by dse_optimize_redundant_stores(), and dse_optimize_stmt().

◆ dse_classify_store() [1/2]

dse_store_status dse_classify_store ( ao_ref * ref,
gimple * stmt,
bool byte_tracking_enabled,
sbitmap live_bytes,
bool * by_clobber_p,
tree stop_at_vuse )

References dse_classify_store(), and visited.

◆ dse_classify_store() [2/2]

static dse_store_status dse_classify_store ( ao_ref * ref,
gimple * stmt,
bool byte_tracking_enabled,
sbitmap live_bytes,
bool * by_clobber_p,
tree stop_at_vuse,
int & cnt,
bitmap visited )
static

◆ dse_optimize_call()

◆ dse_optimize_redundant_stores()

static void dse_optimize_redundant_stores ( gimple * stmt)
static
STMT stores the value 0 into one or more memory locations
(via memset, empty constructor, calloc call, etc).

See if there is a subsequent store of the value 0 to one
or more of the same memory location(s).  If so, the subsequent
store is redundant and can be removed.

The subsequent stores could be via memset, empty constructors,
simple MEM stores, etc.   
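
A hypothetical fragment showing the pattern:

    char buf[32];
    memset (buf, 0, 32);  /* earlier store of zeros */
    buf[4] = 0;           /* redundant: re-stores 0 into an already
                             zeroed byte, so it can be removed */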

References alias_set_subset_of(), ao_ref_alias_set(), ao_ref_base_alias_set(), ao_ref_init(), BUILT_IN_NORMAL, DECL_FUNCTION_CODE(), delete_dead_or_redundant_assignment(), delete_dead_or_redundant_call(), FOR_EACH_IMM_USE_STMT, gcc_unreachable, gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_single_p(), gimple_call_arg(), gimple_call_builtin_p(), gimple_call_fndecl(), gimple_vdef(), gsi_for_stmt(), initialize_ao_ref_for_dse(), initializer_zerop(), integer_zerop(), is_gimple_assign(), is_gimple_call(), need_ab_cleanup, need_eh_cleanup, NULL, stmt_kills_ref_p(), ui, and valid_ao_ref_for_dse().

Referenced by dse_optimize_stmt().

◆ dse_optimize_stmt()

static void dse_optimize_stmt ( function * fun,
gimple_stmt_iterator * gsi,
sbitmap live_bytes )
static
Attempt to eliminate dead stores in the statement referenced by GSI.

A dead store is a store into a memory location which will later be
overwritten by another store without any intervening loads.  In this
case the earlier store can be deleted.

In our SSA + virtual operand world we use immediate uses of virtual
operands to detect dead stores.  If a store's virtual definition
is used precisely once by a later store to the same location which
post dominates the first store, then the first store is dead.   

References as_a(), BUILT_IN_NORMAL, function::can_delete_dead_exceptions, DECL_FUNCTION_CODE(), delete_dead_or_redundant_assignment(), delete_dead_or_redundant_call(), dse_classify_store(), dse_optimize_call(), dse_optimize_redundant_stores(), DSE_STORE_DEAD, DSE_STORE_LIVE, DSE_STORE_MAYBE_PARTIAL_DEAD, dump_file, dump_flags, gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_single_p(), gimple_call_arg(), gimple_call_builtin_p(), gimple_call_fndecl(), gimple_call_fntype(), gimple_call_internal_fn(), gimple_call_internal_p(), gimple_call_return_slot_opt_p(), gimple_call_set_lhs(), gimple_clobber_p(), gimple_has_side_effects(), gimple_has_volatile_ops(), gsi_stmt(), initialize_ao_ref_for_dse(), initializer_zerop(), integer_zerop(), is_gimple_assign(), is_gimple_call(), maybe_trim_memstar_call(), maybe_trim_partially_dead_store(), need_ab_cleanup, need_eh_cleanup, NULL_TREE, operand_equal_p(), poly_int_tree_p(), print_gimple_stmt(), setup_live_bytes_from_ref(), stmt_could_throw_p(), TDF_DETAILS, TREE_ADDRESSABLE, TREE_CODE, TREE_TYPE, TYPE_SIZE, and update_stmt().

◆ get_byte_aligned_range_contained_in_ref()

static bool get_byte_aligned_range_contained_in_ref ( ao_ref * ref,
poly_int64 * offset,
HOST_WIDE_INT * size )
static
Initialize OFFSET and SIZE to a range known to be contained in REF
where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
Return false if this is impossible.   

References end(), known_eq, known_gt, ao_ref::max_size, ao_ref::offset, offset, and ao_ref::size.

Referenced by get_byte_range().

◆ get_byte_aligned_range_containing_ref()

static bool get_byte_aligned_range_containing_ref ( ao_ref * ref,
poly_int64 * offset,
HOST_WIDE_INT * size )
static
Initialize OFFSET and SIZE to a range known to contain REF
where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
Return false if this is impossible.   

References end(), ao_ref::max_size, ao_ref::offset, and offset.

Referenced by get_byte_range().

◆ get_byte_range()

static bool get_byte_range ( ao_ref * copy,
ao_ref * ref,
bool kill,
HOST_WIDE_INT * ret_offset,
HOST_WIDE_INT * ret_size )
static
Compute the byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
inside REF.  If KILL is true, then COPY represents a kill and the byte range
needs to be fully contained in the bit range given by COPY.  If KILL is false
then the byte range returned must contain the range of COPY.   
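
A worked example with hypothetical bit ranges:

    /* Suppose COPY covers bits [12, 52).  With KILL == true the result
       must lie inside COPY: the largest byte-aligned subrange is bits
       [16, 48), i.e. *ret_offset = 2, *ret_size = 4 (bytes 2..5).
       With KILL == false the result must cover COPY: the smallest
       byte-aligned superrange is bits [8, 56), i.e. *ret_offset = 1,
       *ret_size = 6 (bytes 1..6).  */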

References get_byte_aligned_range_contained_in_ref(), get_byte_aligned_range_containing_ref(), and kill.

Referenced by clear_live_bytes_for_ref(), and live_bytes_read().

◆ increment_start_addr()

◆ initialize_ao_ref_for_dse()

static bool initialize_ao_ref_for_dse ( gimple * stmt,
ao_ref * write,
bool may_def_ok = false )
static
STMT is a statement that may write into memory.  Analyze it and
initialize WRITE to describe how STMT affects memory.  When
MAY_DEF_OK is true then the function initializes WRITE to what
the stmt may define.

Return TRUE if the statement was analyzed, FALSE otherwise.

It is always safe to return FALSE.  But typically better optimization
can be achieved by analyzing more statements.   
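
Hypothetical statements and a sketch of how WRITE would be initialized:

    *p = x;             /* ordinary store: WRITE is initialized from the
                           LHS of the assignment */
    memset (p, 0, 32);  /* recognized mem* builtin: WRITE is initialized
                           from the pointer argument and the length */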

References ao_ref_init(), ao_ref_init_from_ptr_and_size(), BUILT_IN_NORMAL, cfun, DECL_FUNCTION_CODE(), fold_build2, gimple_call_arg(), gimple_call_builtin_p(), gimple_call_fndecl(), gimple_call_internal_fn(), gimple_call_internal_p(), gimple_call_lhs(), gimple_get_lhs(), int_const_binop(), internal_fn_len_index(), internal_fn_stored_value_index(), is_gimple_call(), NULL_TREE, stmt_could_throw_p(), TREE_CODE, tree_fits_uhwi_p(), TREE_TYPE, and TYPE_SIZE_UNIT.

Referenced by clear_bytes_written_by(), dse_optimize_redundant_stores(), and dse_optimize_stmt().

◆ live_bytes_read()

static bool live_bytes_read ( ao_ref * use_ref,
ao_ref * ref,
sbitmap live )
static
Return TRUE if USE_REF reads bytes from LIVE, where LIVE is
derived from REF, a write reference.

While this routine may modify USE_REF, it is passed by value, not
by reference, so callers do not see those modifications.   

References bitmap_bit_in_range_p(), get_byte_range(), known_eq, and ao_ref::size.

Referenced by dse_classify_store().

◆ make_pass_dse()

gimple_opt_pass * make_pass_dse ( gcc::context * ctxt)

◆ maybe_trim_complex_store()

static void maybe_trim_complex_store ( ao_ref * ref,
sbitmap live,
gimple * stmt )
static
STMT initializes an object from COMPLEX_CST where one or more of the bytes
written may be dead stores.  REF is a representation of the memory written.
LIVE is the bitmap of stores to REF that are still live.

Attempt to rewrite STMT so that only the real or the imaginary part of the
object is actually stored.   
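
A hypothetical source-level illustration (GNU C complex extensions):

    _Complex double c;
    c = 1.0 + 2.0i;    /* stores both the real and imaginary parts */
    __imag__ c = 3.0;  /* makes the imaginary half of the first store
                          dead; it can be trimmed to store only
                          __real__ c = 1.0 */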

References build1(), compute_trims(), gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_set_lhs(), gimple_assign_set_rhs1(), known_ge, ao_ref::size, TREE_IMAGPART, TREE_REALPART, TREE_TYPE, and y.

Referenced by maybe_trim_partially_dead_store().

◆ maybe_trim_constructor_store()

static void maybe_trim_constructor_store ( ao_ref * ref,
sbitmap live,
gimple * stmt )
static
STMT initializes an object using a CONSTRUCTOR where one or more of the
bytes written are dead stores.  REF is a representation of the memory
written.  LIVE is the bitmap of stores to REF that are still live.

Attempt to rewrite STMT so that it writes fewer memory locations.

The most common case for getting here is a CONSTRUCTOR with no elements
being used to zero initialize an object.  We do not try to handle other
cases as those would force us to fully cover the object with the
CONSTRUCTOR node except for the components that are dead.   
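
The common pattern, as a hypothetical fragment:

    struct S { long a; long b; } s;
    s = (struct S) { };  /* empty CONSTRUCTOR: zero-initializes all of 's' */
    s.b = 42;            /* the bytes of 's.b' in the first store are dead;
                            the zeroing can be trimmed to cover only 's.a' */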

References build_array_type_nelts(), build_constructor(), build_fold_addr_expr, build_int_cst(), char_type_node, compute_trims(), CONSTRUCTOR_NELTS, count, exp(), fold_build2, gcc_assert, gimple_assign_lhs(), gimple_assign_rhs1(), gimple_assign_set_lhs(), gimple_assign_set_rhs1(), is_gimple_min_invariant(), NULL, reference_alias_ptr_type(), and ao_ref::size.

Referenced by maybe_trim_partially_dead_store().

◆ maybe_trim_memstar_call()

static void maybe_trim_memstar_call ( ao_ref * ref,
sbitmap live,
gimple * stmt )
static
STMT is a builtin call that writes bytes in bitmap ORIG; some bytes are dead
(ORIG & ~NEW) and need not be stored.  Try to rewrite STMT to reduce
the amount of data it actually writes.

Right now we only support trimming from the head or the tail of the
memory region.  In theory we could split the mem* call, but it's
likely of marginal value.   
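
A hypothetical head-trimming example:

    char buf[16];
    memset (buf, 0, 16);   /* bytes 0..7 of this store are dead ... */
    memcpy (buf, src, 8);  /* ... because they are overwritten here; the
                              memset can be rewritten as
                              memset (buf + 8, 0, 8) */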

References compute_trims(), DECL_FUNCTION_CODE(), decrement_count(), dump_file, dump_flags, get_range_strlen(), gimple_call_arg(), gimple_call_arg_ptr(), gimple_call_fndecl(), gimple_call_num_args(), gimple_call_set_arg(), increment_start_addr(), integer_all_onesp(), c_strlen_data::minlen, TDF_DETAILS, tree_fits_uhwi_p(), tree_to_uhwi(), TREE_TYPE, and wide_int_to_tree().

Referenced by dse_optimize_stmt().

◆ maybe_trim_partially_dead_store()

static void maybe_trim_partially_dead_store ( ao_ref * ref,
sbitmap live,
gimple * stmt )
static
STMT is a memory write where one or more bytes written are dead stores.
REF is a representation of the memory written.  LIVE is the bitmap of
stores to REF that are still live.

Attempt to rewrite STMT so that it writes fewer memory locations.  Right
now we only support trimming at the start or end of the memory region.
It's not clear how much there is to be gained by trimming from the middle
of the region.   

References gimple_assign_lhs(), gimple_assign_rhs_code(), is_gimple_assign(), maybe_trim_complex_store(), maybe_trim_constructor_store(), and TREE_CODE.

Referenced by dse_optimize_stmt().

◆ setup_live_bytes_from_ref()

static bool setup_live_bytes_from_ref ( ao_ref * ref,
sbitmap live_bytes )
static
REF is a memory write.  Extract relevant information from it and
initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
Otherwise return FALSE.   
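
For example (hypothetical size):

    /* For a valid 16-byte write, LIVE_BYTES becomes {0..15}: every
       stored byte starts out live and is cleared as later statements
       are found to overwrite it.  */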

References bitmap_clear(), bitmap_set_range(), ao_ref::max_size, ao_ref::offset, and valid_ao_ref_for_dse().

Referenced by dse_optimize_call(), and dse_optimize_stmt().

◆ valid_ao_ref_for_dse()

static bool valid_ao_ref_for_dse ( ao_ref * ref)
static
Given REF from the alias oracle, return TRUE if it is a valid
load or store memory reference for dead store elimination, false otherwise.

Unlike for valid_ao_ref_kill_for_dse, we can accept writes where max_size
is not the same as size, since we can conservatively handle the larger range.   

References ao_ref_base(), known_ge, ao_ref::max_size, and ao_ref::offset.

Referenced by dse_classify_store(), dse_optimize_redundant_stores(), and setup_live_bytes_from_ref().

◆ valid_ao_ref_kill_for_dse()

static bool valid_ao_ref_kill_for_dse ( ao_ref * ref)
static
Given REF from the alias oracle, return TRUE if it is a valid
kill memory reference for dead store elimination, false otherwise.

In particular, the reference must have a known base, known maximum
size, start at a byte offset and have a size that is one or more
bytes.   
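
Hypothetical examples of the distinction:

    memset (p, 0, 32);  /* known base, byte-aligned offset and size:
                           a valid kill reference */
    memset (p, 0, n);   /* size not known at compile time: rejected
                           as a kill reference */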

References ao_ref_base(), known_eq, known_ge, ao_ref::max_size, ao_ref::offset, and ao_ref::size.

Referenced by clear_live_bytes_for_ref().

Variable Documentation

◆ dse_stmt_to_dr_map

hash_map<gimple *, data_reference_p>* dse_stmt_to_dr_map
static
Hash map of the memory use in a GIMPLE assignment to its
data reference.  If NULL, data-ref analysis isn't used.   

Referenced by dse_classify_store().

◆ need_ab_cleanup

◆ need_eh_cleanup

bitmap need_eh_cleanup
static
Bitmap of blocks that have had EH statements cleaned.  We should
remove their dead edges eventually.   

Referenced by delete_dead_or_redundant_assignment(), delete_dead_or_redundant_call(), dse_optimize_call(), dse_optimize_redundant_stores(), and dse_optimize_stmt().