LCOV - code coverage report
Current view: top level - gcc - tree-ssa-dse.cc (source / functions)
Test: gcc.info                          Test Date: 2026-02-28 14:20:25
Lines: 98.2 % (770 of 784 hit)          Functions: 100.0 % (31 of 31 hit)

            Line data    Source code
       1              : /* Dead and redundant store elimination
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "rtl.h"
      25              : #include "tree.h"
      26              : #include "gimple.h"
      27              : #include "tree-pass.h"
      28              : #include "ssa.h"
      29              : #include "gimple-pretty-print.h"
      30              : #include "fold-const.h"
      31              : #include "gimple-iterator.h"
      32              : #include "tree-cfg.h"
      33              : #include "tree-dfa.h"
      34              : #include "tree-cfgcleanup.h"
      35              : #include "alias.h"
      36              : #include "tree-ssa-loop.h"
      37              : #include "tree-ssa-dse.h"
      38              : #include "builtins.h"
      39              : #include "gimple-fold.h"
      40              : #include "gimplify.h"
      41              : #include "tree-eh.h"
      42              : #include "cfganal.h"
      43              : #include "cgraph.h"
      44              : #include "ipa-modref-tree.h"
      45              : #include "ipa-modref.h"
      46              : #include "target.h"
      47              : #include "tree-ssa-loop-niter.h"
      48              : #include "cfgloop.h"
      49              : #include "tree-data-ref.h"
      50              : #include "internal-fn.h"
      51              : #include "tree-ssa.h"
      52              : 
      53              : /* This file implements dead store elimination.
      54              : 
      55              :    A dead store is a store into a memory location which will later be
      56              :    overwritten by another store without any intervening loads.  In this
       57              :    case the earlier store can be deleted, or trimmed if it was
       58              :    only partially dead.
      59              : 
      60              :    A redundant store is a store into a memory location which stores
      61              :    the exact same value as a prior store to the same memory location.
      62              :    While this can often be handled by dead store elimination, removing
      63              :    the redundant store is often better than removing or trimming the
      64              :    dead store.
      65              : 
      66              :    In our SSA + virtual operand world we use immediate uses of virtual
      67              :    operands to detect these cases.  If a store's virtual definition
      68              :    is used precisely once by a later store to the same location which
      69              :    post dominates the first store, then the first store is dead.  If
      70              :    the data stored is the same, then the second store is redundant.
      71              : 
      72              :    The single use of the store's virtual definition ensures that
      73              :    there are no intervening aliased loads and the requirement that
       74              :    the second store post dominate the first ensures that if the earlier
      75              :    store executes, then the later stores will execute before the function
      76              :    exits.
      77              : 
      78              :    It may help to think of this as first moving the earlier store to
      79              :    the point immediately before the later store.  Again, the single
      80              :    use of the virtual definition and the post-dominance relationship
      81              :    ensure that such movement would be safe.  Clearly if there are
       82              :    back to back stores, then the second makes the first dead.  If
      83              :    the second store stores the same value, then the second store is
      84              :    redundant.
      85              : 
      86              :    Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
      87              :    may also help in understanding this code since it discusses the
      88              :    relationship between dead store and redundant load elimination.  In
      89              :    fact, they are the same transformation applied to different views of
      90              :    the CFG.  */
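   For illustration (this example is not part of the file), the two cases
   described above in plain C:

     void
     example (int *p, int *q)
     {
       *p = 1;   /* Dead store: overwritten below with no intervening load,
                    so it can be deleted.  */
       *p = 2;   /* The later, killing store.  */

       *q = 7;   /* Earlier store.  */
       *q = 7;   /* Redundant store: writes the same value to the same
                    location, so this later store is removed instead.  */
     }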
      91              : 
      92              : static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
      93              : 
      94              : /* Bitmap of blocks that have had EH statements cleaned.  We should
      95              :    remove their dead edges eventually.  */
      96              : static bitmap need_eh_cleanup;
      97              : static bitmap need_ab_cleanup;
      98              : 
      99              : /* STMT is a statement that may write into memory.  Analyze it and
     100              :    initialize WRITE to describe how STMT affects memory.  When
     101              :    MAY_DEF_OK is true then the function initializes WRITE to what
     102              :    the stmt may define.
     103              : 
     104              :    Return TRUE if the statement was analyzed, FALSE otherwise.
     105              : 
      106              :    It is always safe to return FALSE.  But typically better optimization
     107              :    can be achieved by analyzing more statements.  */
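   For illustration (not part of the file; sizes are made up), the kinds of
   statements the function below can analyze, written as C rather than GIMPLE:

     void
     example (char *p, int *d, int x)
     {
       __builtin_memset (p, 0, 32);        /* WRITE from pointer P, size 32.  */
       void *q = __builtin_calloc (4, 8);  /* WRITE covers 32 zeroed bytes.  */
       *d = x;                             /* WRITE from the assignment LHS.  */
     }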
     108              : 
     109              : static bool
     110    232648865 : initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
     111              : {
     112              :   /* It's advantageous to handle certain mem* functions.  */
     113    232648865 :   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
     114              :     {
     115      5351120 :       switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
     116              :         {
     117      1291010 :         case BUILT_IN_MEMCPY:
     118      1291010 :         case BUILT_IN_MEMMOVE:
     119      1291010 :         case BUILT_IN_MEMSET:
     120      1291010 :         case BUILT_IN_MEMCPY_CHK:
     121      1291010 :         case BUILT_IN_MEMMOVE_CHK:
     122      1291010 :         case BUILT_IN_MEMSET_CHK:
     123      1291010 :         case BUILT_IN_STRNCPY:
     124      1291010 :         case BUILT_IN_STRNCPY_CHK:
     125      1291010 :           {
     126      1291010 :             tree size = gimple_call_arg (stmt, 2);
     127      1291010 :             tree ptr = gimple_call_arg (stmt, 0);
     128      1291010 :             ao_ref_init_from_ptr_and_size (write, ptr, size);
     129      1291010 :             return true;
     130              :           }
     131              : 
     132              :         /* A calloc call can never be dead, but it can make
     133              :            subsequent stores redundant if they store 0 into
     134              :            the same memory locations.  */
     135         3276 :         case BUILT_IN_CALLOC:
     136         3276 :           {
     137         3276 :             tree nelem = gimple_call_arg (stmt, 0);
     138         3276 :             tree selem = gimple_call_arg (stmt, 1);
     139         3276 :             tree lhs;
     140         3276 :             if (TREE_CODE (nelem) == INTEGER_CST
     141         2699 :                 && TREE_CODE (selem) == INTEGER_CST
     142         5783 :                 && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
     143              :               {
     144         2496 :                 tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
     145              :                                          nelem, selem);
     146         2496 :                 ao_ref_init_from_ptr_and_size (write, lhs, size);
     147         2496 :                 return true;
     148              :               }
     149              :           }
     150              : 
     151              :         default:
     152              :           break;
     153              :         }
     154              :     }
     155    227297745 :   else if (is_gimple_call (stmt)
     156    227297745 :            && gimple_call_internal_p (stmt))
     157              :     {
     158       416248 :       switch (gimple_call_internal_fn (stmt))
     159              :         {
     160         1335 :         case IFN_LEN_STORE:
     161         1335 :         case IFN_MASK_STORE:
     162         1335 :         case IFN_MASK_LEN_STORE:
     163         1335 :           {
     164         1335 :             internal_fn ifn = gimple_call_internal_fn (stmt);
     165         1335 :             int stored_value_index = internal_fn_stored_value_index (ifn);
     166         1335 :             int len_index = internal_fn_len_index (ifn);
     167         1335 :             if (ifn == IFN_LEN_STORE)
     168              :               {
     169            0 :                 tree len = gimple_call_arg (stmt, len_index);
     170            0 :                 tree bias = gimple_call_arg (stmt, len_index + 1);
     171            0 :                 if (tree_fits_uhwi_p (len))
     172              :                   {
     173            0 :                     ao_ref_init_from_ptr_and_size (write,
     174              :                                                    gimple_call_arg (stmt, 0),
     175              :                                                    int_const_binop (MINUS_EXPR,
     176              :                                                                     len, bias));
     177            0 :                     return true;
     178              :                   }
     179              :               }
     180              :             /* We cannot initialize a must-def ao_ref (in all cases) but we
     181              :                can provide a may-def variant.  */
     182         1335 :             if (may_def_ok)
     183              :               {
     184         1297 :                 ao_ref_init_from_ptr_and_range (
     185              :                   write, gimple_call_arg (stmt, 0), true, 0, -1,
     186         1297 :                   tree_to_poly_int64 (TYPE_SIZE (
     187              :                     TREE_TYPE (gimple_call_arg (stmt, stored_value_index)))));
     188         1297 :                 return true;
     189              :               }
     190              :             break;
     191              :           }
     192              :         default:;
     193              :         }
     194              :     }
     195    231354062 :   if (tree lhs = gimple_get_lhs (stmt))
     196              :     {
     197    217312039 :       if (TREE_CODE (lhs) != SSA_NAME
     198    217312039 :           && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
     199              :         {
     200    200081147 :           ao_ref_init (write, lhs);
     201    200081147 :           return true;
     202              :         }
     203              :     }
     204              :   return false;
     205              : }
     206              : 
     207              : /* Given REF from the alias oracle, return TRUE if it is a valid
     208              :    kill memory reference for dead store elimination, false otherwise.
     209              : 
     210              :    In particular, the reference must have a known base, known maximum
     211              :    size, start at a byte offset and have a size that is one or more
     212              :    bytes.  */
     213              : 
     214              : static bool
     215    168893213 : valid_ao_ref_kill_for_dse (ao_ref *ref)
     216              : {
     217    168893213 :   return (ao_ref_base (ref)
     218    168893213 :           && known_size_p (ref->max_size)
     219    168578756 :           && maybe_ne (ref->size, 0)
     220    168560678 :           && known_eq (ref->max_size, ref->size)
     221    336950441 :           && known_ge (ref->offset, 0));
     222              : }
     223              : 
     224              : /* Given REF from the alias oracle, return TRUE if it is a valid
     225              :    load or store memory reference for dead store elimination, false otherwise.
     226              : 
     227              :    Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
      228              :    is not the same as size since we can handle the larger range conservatively.  */
     229              : 
     230              : static bool
     231     37586629 : valid_ao_ref_for_dse (ao_ref *ref)
     232              : {
     233     37586629 :   return (ao_ref_base (ref)
     234     37586629 :           && known_size_p (ref->max_size)
     235     74692693 :           && known_ge (ref->offset, 0));
     236              : }
     237              : 
     238              : /* Initialize OFFSET and SIZE to a range known to contain REF
      239              :    where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
     240              :    Return false if this is impossible.  */
     241              : 
     242              : static bool
     243    104891248 : get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
     244              :                                        HOST_WIDE_INT *size)
     245              : {
     246            0 :   if (!known_size_p (ref->max_size))
     247              :     return false;
     248    104891248 :   *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
     249    104891248 :   poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
     250              :                                         BITS_PER_UNIT);
     251    104891248 :   return (end - *offset).is_constant (size);
     252              : }
     253              : 
      254              : /* Initialize OFFSET and SIZE to a range known to be contained in REF
     255              :    where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
     256              :    Return false if this is impossible.  */
     257              : 
     258              : static bool
     259     97860252 : get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
     260              :                                          HOST_WIDE_INT *size)
     261              : {
     262     97860252 :   if (!known_size_p (ref->size)
     263     97860252 :       || !known_eq (ref->size, ref->max_size))
     264              :     return false;
     265     97860252 :   *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
     266     97860252 :   poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
     267              :                                         BITS_PER_UNIT);
      268              :   /* For bit accesses we can get -1 here, but a zero-sized kill is also not
     269              :      useful.  */
     270     97860252 :   if (!known_gt (end, *offset))
     271              :     return false;
     272     97670418 :   return (end - *offset).is_constant (size);
     273              : }
     274              : 
      275              : /* Compute the byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
      276              :    inside REF.  If KILL is true, then COPY represents a kill and the byte range
      277              :    needs to be fully contained in the bit range given by COPY.  If KILL is false
     278              :    then the byte range returned must contain the range of COPY.  */
     279              : 
     280              : static bool
     281    101470667 : get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
     282              :                 HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
     283              : {
     284    101470667 :   HOST_WIDE_INT copy_size, ref_size;
     285    101470667 :   poly_int64 copy_offset, ref_offset;
     286    101470667 :   HOST_WIDE_INT diff;
     287              : 
     288              :   /* First translate from bits to bytes, rounding to bigger or smaller ranges
      289              :      as needed.  Kills always need to be rounded to smaller ranges while
     290              :      uses and stores to larger ranges.  */
     291    101470667 :   if (kill)
     292              :     {
     293     97860252 :       if (!get_byte_aligned_range_contained_in_ref (copy, &copy_offset,
     294              :                                                     &copy_size))
     295              :         return false;
     296              :     }
     297              :   else
     298              :     {
     299      3610415 :       if (!get_byte_aligned_range_containing_ref (copy, &copy_offset,
     300              :                                                   &copy_size))
     301              :         return false;
     302              :     }
     303              : 
     304    196560261 :   if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
     305              :       || !ordered_p (copy_offset, ref_offset))
     306              :     return false;
     307              : 
     308              :   /* Switch sizes from bits to bytes so we do not need to care about
     309              :      overflows.  Offset calculation needs to stay in bits until we compute
     310              :      the difference and can switch to HOST_WIDE_INT.  */
     311    101280833 :   copy_size /= BITS_PER_UNIT;
     312    101280833 :   ref_size /= BITS_PER_UNIT;
     313              : 
     314              :   /* If COPY starts before REF, then reset the beginning of
     315              :      COPY to match REF and decrease the size of COPY by the
     316              :      number of bytes removed from COPY.  */
     317    101280833 :   if (maybe_lt (copy_offset, ref_offset))
     318              :     {
     319      9276805 :       if (!(ref_offset - copy_offset).is_constant (&diff)
     320      9276805 :           || copy_size < diff / BITS_PER_UNIT)
     321              :         return false;
     322      2716847 :       copy_size -= diff / BITS_PER_UNIT;
     323      2716847 :       copy_offset = ref_offset;
     324              :     }
     325              : 
     326     94720875 :   if (!(copy_offset - ref_offset).is_constant (&diff)
     327     94720875 :       || ref_size <= diff / BITS_PER_UNIT)
     328              :     return false;
     329              : 
     330              :   /* If COPY extends beyond REF, chop off its size appropriately.  */
     331      6191239 :   HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;
     332              : 
     333      6191239 :   if (copy_size > limit)
     334      1134526 :     copy_size = limit;
     335      6191239 :   *ret_size = copy_size;
     336      6191239 :   if (!(copy_offset - ref_offset).is_constant (ret_offset))
     337              :     return false;
     338      6191239 :   *ret_offset /= BITS_PER_UNIT;
     339      6191239 :   return true;
     340              : }
     341              : 
      342              : /* Update LIVE_BYTES, which tracks REF, for the write WRITE:
      343              :    verify that both have the same base memory address and that the write
      344              :    has a known size and overlaps with REF.  */
     345              : static void
     346    168893213 : clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
     347              : {
     348    168893213 :   HOST_WIDE_INT start, size;
     349              : 
     350    168893213 :   if (valid_ao_ref_kill_for_dse (write)
     351    168056971 :       && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
     352    266753465 :       && get_byte_range (write, ref, true, &start, &size))
     353      2580824 :     bitmap_clear_range (live_bytes, start, size);
     354    168893213 : }
     355              : 
     356              : /* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
     357              :    address written by STMT must match the one found in REF, which must
     358              :    have its base address previously initialized.
     359              : 
     360              :    This routine must be conservative.  If we don't know the offset or
     361              :    actual size written, assume nothing was written.  */
     362              : 
     363              : static void
     364    182905606 : clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
     365              : {
     366    182905606 :   ao_ref write;
     367              : 
     368    182905606 :   if (gcall *call = dyn_cast <gcall *> (stmt))
     369              :     {
     370      5534766 :       bool interposed;
     371      5534766 :       modref_summary *summary = get_modref_function_summary (call, &interposed);
     372              : 
     373      5534766 :       if (summary && !interposed)
     374       486962 :         for (auto kill : summary->kills)
     375        66541 :           if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
     376        66517 :             clear_live_bytes_for_ref (live_bytes, ref, &write);
     377              :     }
     378    182905606 :   if (!initialize_ao_ref_for_dse (stmt, &write))
     379     14078910 :     return;
     380              : 
     381    168826696 :   clear_live_bytes_for_ref (live_bytes, ref, &write);
     382              : }
     383              : 
     384              : /* REF is a memory write.  Extract relevant information from it and
     385              :    initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
     386              :    Otherwise return FALSE.  */
     387              : 
     388              : static bool
     389     31173965 : setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
     390              : {
     391     31173965 :   HOST_WIDE_INT const_size;
     392     31173965 :   if (valid_ao_ref_for_dse (ref)
     393     30719278 :       && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
     394     30719278 :            - aligned_lower_bound (ref->offset,
     395     30719278 :                                   BITS_PER_UNIT)).is_constant (&const_size))
     396     30719278 :       && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
     397     61574653 :       && const_size > 1)
     398              :     {
     399     30400471 :       bitmap_clear (live_bytes);
     400     30400471 :       bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
     401     30400471 :       return true;
     402              :     }
     403              :   return false;
     404              : }
     405              : 
     406              : /* Compute the number of stored bytes that we can trim from the head and
     407              :    tail of REF.  LIVE is the bitmap of stores to REF that are still live.
     408              : 
     409              :    Store the number of bytes trimmed from the head and tail in TRIM_HEAD
     410              :    and TRIM_TAIL respectively.
     411              : 
     412              :    STMT is the statement being trimmed and is used for debugging dump
     413              :    output only.  */
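   For illustration (assumed values, not from a real run), the bitmap
   convention used below:

     /* An 8-byte store of which a later store killed bytes 0 and 1.
        LIVE, with bit 0 being the byte at ref->offset:  0 0 1 1 1 1 1 1
        first_live == 2, last_live == 7, so the function computes
        *trim_head == 2 and *trim_tail == 0.  */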
     414              : 
     415              : static void
     416      3460918 : compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
     417              :                gimple *stmt)
     418              : {
     419      3460918 :   *trim_head = 0;
     420      3460918 :   *trim_tail = 0;
     421              : 
     422              :   /* We use bitmaps biased such that ref->offset is contained in bit zero and
     423              :      the bitmap extends through ref->max_size, so we know that in the original
     424              :      bitmap bits 0 .. ref->max_size were true.  But we need to check that this
     425              :      covers the bytes of REF exactly.  */
     426      3460918 :   const unsigned int align = known_alignment (ref->offset);
     427      3460918 :   if ((align > 0 && align < BITS_PER_UNIT)
     428      3460918 :       || !known_eq (ref->size, ref->max_size))
     429        12388 :     return;
     430              : 
     431              :   /* Now identify how much, if any of the tail we can chop off.  */
     432      3448530 :   HOST_WIDE_INT const_size;
     433      3448530 :   int last_live = bitmap_last_set_bit (live);
     434      3448530 :   if (ref->size.is_constant (&const_size))
     435              :     {
     436      3448530 :       int last_orig = (const_size / BITS_PER_UNIT) - 1;
     437              :       /* We can leave inconvenient amounts on the tail as
     438              :          residual handling in mem* and str* functions is usually
     439              :          reasonably efficient.  */
     440      3448530 :       *trim_tail = last_orig - last_live;
     441              : 
     442              :       /* But don't trim away out of bounds accesses, as this defeats
     443              :          proper warnings.
     444              : 
     445              :          We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
     446              :          where TYPE_SIZE_UNIT is not a constant.  */
     447      3448530 :       if (*trim_tail
     448        10256 :           && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
     449        10256 :           && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
     450      3458785 :           && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
     451              :                                last_orig) <= 0)
     452          136 :         *trim_tail = 0;
     453              :     }
     454              : 
     455              :   /* Identify how much, if any of the head we can chop off.  */
     456      3448530 :   int first_orig = 0;
     457      3448530 :   int first_live = bitmap_first_set_bit (live);
     458      3448530 :   *trim_head = first_live - first_orig;
     459              : 
     460              :   /* If REF is aligned, try to maintain this alignment if it reduces
     461              :      the number of (power-of-two sized aligned) writes to memory.  */
     462      3448530 :   unsigned int align_bits;
     463      3448530 :   unsigned HOST_WIDE_INT bitpos;
     464      3359166 :   if ((*trim_head || *trim_tail)
     465        95160 :       && last_live - first_live >= 2
     466        94326 :       && ao_ref_alignment (ref, &align_bits, &bitpos)
     467        77212 :       && align_bits >= 32
     468        76864 :       && bitpos == 0
     469      3521645 :       && align_bits % BITS_PER_UNIT == 0)
     470              :     {
     471        73115 :       unsigned int align_units = align_bits / BITS_PER_UNIT;
     472        73115 :       if (align_units > 16)
     473              :         align_units = 16;
     474        75192 :       while ((first_live | (align_units - 1)) > (unsigned int)last_live)
     475         2077 :         align_units >>= 1;
     476              : 
     477        73115 :       if (*trim_head)
     478              :         {
     479        68447 :           unsigned int pos = first_live & (align_units - 1);
     480        75667 :           for (unsigned int i = 1; i <= align_units; i <<= 1)
     481              :             {
     482        75667 :               unsigned int mask = ~(i - 1);
     483        75667 :               unsigned int bytes = align_units - (pos & mask);
     484        75667 :               if (wi::popcount (bytes) <= 1)
     485              :                 {
     486        68447 :                   *trim_head &= mask;
     487        68447 :                   break;
     488              :                 }
     489              :             }
     490              :         }
     491              : 
     492        73115 :       if (*trim_tail)
     493              :         {
     494         7188 :           unsigned int pos = last_live & (align_units - 1);
     495        11313 :           for (unsigned int i = 1; i <= align_units; i <<= 1)
     496              :             {
     497        11313 :               int mask = i - 1;
     498        11313 :               unsigned int bytes = (pos | mask) + 1;
     499        11313 :               if ((last_live | mask) > (last_live + *trim_tail))
     500              :                 break;
     501        11313 :               if (wi::popcount (bytes) <= 1)
     502              :                 {
     503         7188 :                   unsigned int extra = (last_live | mask) - last_live;
     504         7188 :                   *trim_tail -= extra;
     505         7188 :                   break;
     506              :                 }
     507              :             }
     508              :         }
     509              :     }
     510              : 
     511      3448530 :   if ((*trim_head || *trim_tail) && dump_file && (dump_flags & TDF_DETAILS))
     512              :     {
     513           18 :       fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
     514              :                *trim_head, *trim_tail);
     515           18 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
     516           18 :       fprintf (dump_file, "\n");
     517              :     }
     518              : }
     519              : 
     520              : /* STMT initializes an object from COMPLEX_CST where one or more of the bytes
     521              :    written may be dead stores.  REF is a representation of the memory written.
     522              :    LIVE is the bitmap of stores to REF that are still live.
     523              : 
     524              :    Attempt to rewrite STMT so that only the real or the imaginary part of the
     525              :    object is actually stored.  */
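   For illustration (not part of the file; the rewritten form is only an
   approximation of the resulting GIMPLE), the case where the imaginary half
   of the object is dead:

     void
     example (_Complex double *c)
     {
       *c = __builtin_complex (1.0, 2.0);  /* If later code overwrites the
                                              imaginary half, only the real
                                              half stays live and the store is
                                              narrowed to roughly
                                              REALPART_EXPR <*c> = 1.0;  */
     }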
     526              : 
     527              : static void
     528         5518 : maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
     529              : {
     530         5518 :   int trim_head, trim_tail;
     531         5518 :   compute_trims (ref, live, &trim_head, &trim_tail, stmt);
     532              : 
     533              :   /* The amount of data trimmed from the head or tail must be at
     534              :      least half the size of the object to ensure we're trimming
     535              :      the entire real or imaginary half.  By writing things this
     536              :      way we avoid more O(n) bitmap operations.  */
     537         5518 :   if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
     538              :     {
     539              :       /* TREE_REALPART is live */
     540            2 :       tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
     541            2 :       tree y = gimple_assign_lhs (stmt);
     542            2 :       y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
     543            2 :       gimple_assign_set_lhs (stmt, y);
     544            2 :       gimple_assign_set_rhs1 (stmt, x);
     545              :     }
     546         5516 :   else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
     547              :     {
     548              :       /* TREE_IMAGPART is live */
     549            3 :       tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
     550            3 :       tree y = gimple_assign_lhs (stmt);
     551            3 :       y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
     552            3 :       gimple_assign_set_lhs (stmt, y);
     553            3 :       gimple_assign_set_rhs1 (stmt, x);
     554              :     }
     555              : 
     556              :   /* Other cases indicate parts of both the real and imag subobjects
     557              :      are live.  We do not try to optimize those cases.  */
     558         5518 : }
     559              : 
     560              : /* STMT initializes an object using a CONSTRUCTOR where one or more of the
     561              :    bytes written are dead stores.  REF is a representation of the memory
     562              :    written.  LIVE is the bitmap of stores to REF that are still live.
     563              : 
     564              :    Attempt to rewrite STMT so that it writes fewer memory locations.
     565              : 
     566              :    The most common case for getting here is a CONSTRUCTOR with no elements
     567              :    being used to zero initialize an object.  We do not try to handle other
     568              :    cases as those would force us to fully cover the object with the
     569              :    CONSTRUCTOR node except for the components that are dead.
     570              :    Also handles integer stores of 0 which can happen with memset/memcpy optimizations.  */
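   For illustration (not part of the file; SRC and the sizes are made up, and
   the narrowed form only approximates the resulting GIMPLE):

     struct S { char a[8]; char b[8]; };

     void
     example (struct S *s, const char *src)
     {
       *s = (struct S) { };              /* Empty-CONSTRUCTOR (zero) store.  */
       __builtin_memcpy (s->a, src, 8);  /* Kills bytes 0..7: head_trim == 8,
                                            so the zeroing store is narrowed
                                            to roughly
                                            MEM <char[8]> [(char *)s + 8B] = {};  */
     }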
     571              : 
     572              : static void
     573      3313301 : maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt, bool was_integer_cst)
     574              : {
     575      3313301 :   tree ctor = gimple_assign_rhs1 (stmt);
     576              : 
     577              :   /* This is the only case we currently handle.  It actually seems to
     578              :      catch most cases of actual interest.  */
     579      3793289 :   gcc_assert (was_integer_cst ? integer_zerop (ctor) : CONSTRUCTOR_NELTS (ctor) == 0);
     580              : 
     581      3313301 :   int head_trim = 0;
     582      3313301 :   int tail_trim = 0;
     583      3313301 :   compute_trims (ref, live, &head_trim, &tail_trim, stmt);
     584              : 
     585              :   /* Now we want to replace the constructor initializer
     586              :      with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
     587      3313301 :   if (head_trim || tail_trim)
     588              :     {
     589              :       /* We want &lhs for the MEM_REF expression.  */
     590        88571 :       tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
     591              : 
     592        88571 :       STRIP_USELESS_TYPE_CONVERSION (lhs_addr);
     593              : 
     594        88571 :       if (! is_gimple_min_invariant (lhs_addr))
     595        17301 :         return;
     596              : 
     597              :       /* The number of bytes for the new constructor.  */
     598        71270 :       poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
     599        71270 :       poly_int64 count = ref_bytes - head_trim - tail_trim;
     600              : 
     601              :       /* And the new type for the CONSTRUCTOR.  Essentially it's just
     602              :          a char array large enough to cover the non-trimmed parts of
     603              :          the original CONSTRUCTOR.  Note we want explicit bounds here
     604              :          so that we know how many bytes to clear when expanding the
     605              :          CONSTRUCTOR.  */
     606        71270 :       tree type = build_array_type_nelts (char_type_node, count);
     607              : 
     608              :       /* Build a suitable alias type rather than using alias set zero
     609              :          to avoid pessimizing.  */
     610        71270 :       tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
     611              : 
     612              :       /* Build a MEM_REF representing the whole accessed area, starting
     613              :          at the first byte not trimmed.  */
     614        71270 :       tree exp = fold_build2 (MEM_REF, type, lhs_addr,
     615              :                               build_int_cst (alias_type, head_trim));
     616              : 
     617              :       /* Now update STMT with a new RHS and LHS.  */
     618        71270 :       gimple_assign_set_lhs (stmt, exp);
     619        71270 :       gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
     620              :     }
     621              : }
     622              : 
     623              : /* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
     624              :    copied/set by DECREMENT.  */
     625              : static void
     626          764 : decrement_count (gimple *stmt, int decrement)
     627              : {
     628          764 :   tree *countp = gimple_call_arg_ptr (stmt, 2);
     629          764 :   gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
     630         1528 :   *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
     631          764 :                                                     - decrement));
     632          764 : }
     633              : 
     634              : static void
     635          703 : increment_start_addr (gimple *stmt, tree *where, int increment)
     636              : {
     637          703 :   if (tree lhs = gimple_call_lhs (stmt))
     638            6 :     if (where == gimple_call_arg_ptr (stmt, 0))
     639              :       {
     640            6 :         gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
     641            6 :         gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     642            6 :         gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
     643            6 :         gimple_call_set_lhs (stmt, NULL_TREE);
     644            6 :         update_stmt (stmt);
     645              :       }
     646              : 
     647          703 :   if (TREE_CODE (*where) == SSA_NAME)
     648              :     {
     649          197 :       tree tem = make_ssa_name (TREE_TYPE (*where));
     650          197 :       gassign *newop
     651          197 :         = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
     652          197 :                                build_int_cst (sizetype, increment));
     653          197 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     654          197 :       gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
     655          197 :       *where = tem;
     656          197 :       update_stmt (stmt);
     657          197 :       return;
     658              :     }
     659              : 
     660          506 :   *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
     661              :                                               *where,
     662              :                                               build_int_cst (ptr_type_node,
     663              :                                                              increment)));
     664          506 :   STRIP_USELESS_TYPE_CONVERSION (*where);
     665              : }
     666              : 
      667              : /* STMT is a builtin mem* call that writes into memory described by REF.
      668              :    LIVE is the bitmap of bytes of REF that are still live; the dead bytes
      669              :    need not be stored.  Try to rewrite STMT to reduce what it actually writes.
     670              : 
     671              :    Right now we only support trimming from the head or the tail of the
     672              :    memory region.  In theory we could split the mem* call, but it's
     673              :    likely of marginal value.  */
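   For illustration (not part of the file; sizes are made up), the head
   trimming performed below:

     void
     example (char *p, const char *src)
     {
       __builtin_memset (p, 0, 32);   /* Bytes 0..15 are killed below, so this
                                         call is rewritten to roughly
                                         __builtin_memset (p + 16, 0, 16);  */
       __builtin_memcpy (p, src, 16);
     }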
     674              : 
     675              : static void
     676       142099 : maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
     677              : {
     678       142099 :   int head_trim, tail_trim;
     679       142099 :   switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
     680              :     {
     681         4613 :     case BUILT_IN_STRNCPY:
     682         4613 :     case BUILT_IN_STRNCPY_CHK:
     683         4613 :       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
     684         4613 :       if (head_trim)
     685              :         {
     686              :           /* Head trimming of strncpy is only possible if we can
     687              :              prove all bytes we would trim are non-zero (or we could
     688              :              turn the strncpy into memset if there must be zero
     689              :              among the head trimmed bytes).  If we don't know anything
     690              :              about those bytes, the presence or absence of '\0' bytes
     691              :              in there will affect whether it acts for the non-trimmed
     692              :              bytes as memset or memcpy/strncpy.  */
     693           74 :           c_strlen_data lendata = { };
     694           74 :           int orig_head_trim = head_trim;
     695           74 :           tree srcstr = gimple_call_arg (stmt, 1);
     696           74 :           if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
     697           74 :               || !tree_fits_uhwi_p (lendata.minlen))
     698            8 :             head_trim = 0;
     699           66 :           else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
     700              :             {
     701           60 :               head_trim = tree_to_uhwi (lendata.minlen);
     702           60 :               if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
     703            0 :                 head_trim &= ~(UNITS_PER_WORD - 1);
     704              :             }
     705           74 :           if (orig_head_trim != head_trim
     706           68 :               && dump_file
     707           82 :               && (dump_flags & TDF_DETAILS))
     708            8 :             fprintf (dump_file,
     709              :                      "  Adjusting strncpy trimming to (head = %d,"
     710              :                      " tail = %d)\n", head_trim, tail_trim);
     711              :         }
     712         4613 :       goto do_memcpy;
     713              : 
     714        99733 :     case BUILT_IN_MEMCPY:
     715        99733 :     case BUILT_IN_MEMMOVE:
     716        99733 :     case BUILT_IN_MEMCPY_CHK:
     717        99733 :     case BUILT_IN_MEMMOVE_CHK:
     718        99733 :       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
     719              : 
     720       104346 :     do_memcpy:
     721              :       /* Tail trimming is easy, we can just reduce the count.  */
     722       104346 :       if (tail_trim)
     723           72 :         decrement_count (stmt, tail_trim);
     724              : 
     725              :       /* Head trimming requires adjusting all the arguments.  */
     726       104346 :       if (head_trim)
     727              :         {
     728              :           /* For __*_chk need to adjust also the last argument.  */
     729          121 :           if (gimple_call_num_args (stmt) == 4)
     730              :             {
     731           49 :               tree size = gimple_call_arg (stmt, 3);
     732           49 :               if (!tree_fits_uhwi_p (size))
     733              :                 break;
     734            7 :               if (!integer_all_onesp (size))
     735              :                 {
     736            7 :                   unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
     737            7 :                   if (sz < (unsigned) head_trim)
     738              :                     break;
     739            7 :                   tree arg = wide_int_to_tree (TREE_TYPE (size),
     740            7 :                                                sz - head_trim);
     741            7 :                   gimple_call_set_arg (stmt, 3, arg);
     742              :                 }
     743              :             }
     744           79 :           tree *dst = gimple_call_arg_ptr (stmt, 0);
     745           79 :           increment_start_addr (stmt, dst, head_trim);
     746           79 :           tree *src = gimple_call_arg_ptr (stmt, 1);
     747           79 :           increment_start_addr (stmt, src, head_trim);
     748           79 :           decrement_count (stmt, head_trim);
     749              :         }
     750              :       break;
     751              : 
     752        37753 :     case BUILT_IN_MEMSET:
     753        37753 :     case BUILT_IN_MEMSET_CHK:
     754        37753 :       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
     755              : 
     756              :       /* Tail trimming is easy, we can just reduce the count.  */
     757        37753 :       if (tail_trim)
     758           68 :         decrement_count (stmt, tail_trim);
     759              : 
     760              :       /* Head trimming requires adjusting all the arguments.  */
     761        37753 :       if (head_trim)
     762              :         {
     763              :           /* For __*_chk need to adjust also the last argument.  */
     764          545 :           if (gimple_call_num_args (stmt) == 4)
     765              :             {
     766            7 :               tree size = gimple_call_arg (stmt, 3);
     767            7 :               if (!tree_fits_uhwi_p (size))
     768              :                 break;
     769            7 :               if (!integer_all_onesp (size))
     770              :                 {
     771            7 :                   unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
     772            7 :                   if (sz < (unsigned) head_trim)
     773              :                     break;
     774            7 :                   tree arg = wide_int_to_tree (TREE_TYPE (size),
     775            7 :                                                sz - head_trim);
     776            7 :                   gimple_call_set_arg (stmt, 3, arg);
     777              :                 }
     778              :             }
     779          545 :           tree *dst = gimple_call_arg_ptr (stmt, 0);
     780          545 :           increment_start_addr (stmt, dst, head_trim);
     781          545 :           decrement_count (stmt, head_trim);
     782              :         }
     783              :       break;
     784              : 
     785              :     default:
     786              :       break;
     787              :     }
     788       142099 : }
     789              : 
     790              : /* STMT is a memory write where one or more bytes written are dead stores.
     791              :    REF is a representation of the memory written.  LIVE is the bitmap of
     792              :    stores to REF that are still live.
     793              : 
     794              :    Attempt to rewrite STMT so that it writes fewer memory locations.  Right
     795              :    now we only support trimming at the start or end of the memory region.
     796              :    It's not clear how much there is to be gained by trimming from the middle
     797              :    of the region.  */
     798              : 
     799              : static void
     800     25999906 : maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
     801              : {
     802     25999906 :   if (is_gimple_assign (stmt)
     803     25999906 :       && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
     804              :     {
     805     24688605 :       switch (gimple_assign_rhs_code (stmt))
     806              :         {
     807       479988 :         case CONSTRUCTOR:
     808       479988 :           maybe_trim_constructor_store (ref, live, stmt, false);
     809       479988 :           break;
     810         5518 :         case COMPLEX_CST:
     811         5518 :           maybe_trim_complex_store (ref, live, stmt);
     812         5518 :           break;
     813      8777209 :         case INTEGER_CST:
     814      8777209 :           if (integer_zerop (gimple_assign_rhs1 (stmt))
     815      8777209 :               && type_has_mode_precision_p (TREE_TYPE (gimple_assign_lhs (stmt))))
     816      2833313 :             maybe_trim_constructor_store (ref, live, stmt, true);
     817              :           break;
     818              :         default:
     819              :           break;
     820              :         }
     821              :     }
     822     25999906 : }
     823              : 
      824              : /* Return TRUE if USE_REF reads bytes from LIVE, where LIVE is
     825              :    derived from REF, a write reference.
     826              : 
     827              :    While this routine may modify USE_REF, it's passed by value, not
     828              :    location.  So callers do not see those modifications.  */
     829              : 
     830              : static bool
     831      3610415 : live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
     832              : {
     833              :   /* We have already verified that USE_REF and REF hit the same object.
     834              :      Now verify that there's actually an overlap between USE_REF and REF.  */
     835      3610415 :   HOST_WIDE_INT start, size;
     836      3610415 :   if (get_byte_range (use_ref, ref, false, &start, &size))
     837              :     {
     838              :       /* If USE_REF covers all of REF, then it will hit one or more
     839              :          live bytes.   This avoids useless iteration over the bitmap
     840              :          below.  */
     841      3610415 :       if (start == 0 && known_eq (size * 8, ref->size))
     842              :         return true;
     843              : 
     844              :       /* Now check if any of the remaining bits in use_ref are set in LIVE.  */
     845       983459 :       return bitmap_any_bit_in_range_p (live, start, (start + size - 1));
     846              :     }
     847              :   return true;
     848              : }
     849              : 
     850              : /* Callback for dse_classify_store calling for_each_index.  Verify that
     851              :    indices are invariant in the loop with backedge PHI in basic-block DATA.  */
     852              : 
     853              : static bool
     854      2656244 : check_name (tree, tree *idx, void *data)
     855              : {
     856      2656244 :   basic_block phi_bb = (basic_block) data;
     857      2656244 :   if (TREE_CODE (*idx) == SSA_NAME
     858      1782019 :       && !SSA_NAME_IS_DEFAULT_DEF (*idx)
     859      4313346 :       && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
     860              :                          phi_bb))
     861              :     return false;
     862              :   return true;
     863              : }
     864              : 
     865              : /* STMT stores the value 0 into one or more memory locations
     866              :    (via memset, empty constructor, calloc call, etc).
     867              : 
     868              :    See if there is a subsequent store of the value 0 to one
     869              :    or more of the same memory location(s).  If so, the subsequent
     870              :    store is redundant and can be removed.
     871              : 
     872              :    The subsequent stores could be via memset, empty constructors,
     873              :    simple MEM stores, etc.  */
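   For illustration (not part of the file), a redundant store of the kind
   this function removes:

     void *
     example (unsigned long n)
     {
       int *p = __builtin_calloc (n, sizeof (int));  /* Memory already zero.  */
       if (p)
         p[0] = 0;   /* Stores 0 into already-zeroed memory; redundant and
                        removed.  */
       return p;
     }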
     874              : 
     875              : static void
     876      4243133 : dse_optimize_redundant_stores (gimple *stmt)
     877              : {
     878      4243133 :   int cnt = 0;
     879              : 
     880              :   /* TBAA state of STMT, if it is a call it is effectively alias-set zero.  */
     881      4243133 :   alias_set_type earlier_set = 0;
     882      4243133 :   alias_set_type earlier_base_set = 0;
     883      4243133 :   if (is_gimple_assign (stmt))
     884              :     {
     885      4184936 :       ao_ref lhs_ref;
     886      4184936 :       ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
     887      4184936 :       earlier_set = ao_ref_alias_set (&lhs_ref);
     888      4184936 :       earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
     889              :     }
     890              : 
     891              :   /* We could do something fairly complex and look through PHIs
     892              :      like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
     893              :      the effort.
     894              : 
     895              :      Look at all the immediate uses of the VDEF (which are obviously
      896              :      dominated by STMT).  See if one or more of them stores 0 into the same
      897              :      memory locations as STMT; if so, remove those immediate use statements.
     898      4243133 :   tree defvar = gimple_vdef (stmt);
     899      4243133 :   imm_use_iterator ui;
     900      4243133 :   gimple *use_stmt;
     901     13753359 :   FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
     902              :     {
     903              :       /* Limit stmt walking.  */
     904      5288049 :       if (++cnt > param_dse_max_alias_queries_per_store)
     905              :         break;
     906              : 
     907              :       /* If USE_STMT stores 0 into one or more of the same locations
     908              :          as STMT and STMT would kill USE_STMT, then we can just remove
     909              :          USE_STMT.  */
     910      5288049 :       tree fndecl;
     911      5288049 :       if ((is_gimple_assign (use_stmt)
     912      3716334 :            && gimple_vdef (use_stmt)
     913      3055283 :            && (gimple_assign_single_p (use_stmt)
     914      3055283 :                && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
     915      7556475 :           || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
     916       158074 :               && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
     917       158074 :               && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
     918       136822 :                   || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
     919        21343 :               && integer_zerop (gimple_call_arg (use_stmt, 1))))
     920              :         {
     921      1466426 :           ao_ref write;
     922              : 
     923      1466426 :           if (!initialize_ao_ref_for_dse (use_stmt, &write))
     924              :             break;
     925              : 
     926      1445470 :           if (valid_ao_ref_for_dse (&write)
     927      1445470 :               && stmt_kills_ref_p (stmt, &write))
     928              :             {
     929         5448 :               gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
     930         5448 :               if (is_gimple_assign (use_stmt))
     931              :                 {
     932         5398 :                   ao_ref lhs_ref;
     933         5398 :                   ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
     934         5398 :                   if ((earlier_set == ao_ref_alias_set (&lhs_ref)
     935          718 :                        || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
     936              :                                                earlier_set))
     937         5556 :                       && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
     938          500 :                           || alias_set_subset_of
     939          500 :                                (ao_ref_base_alias_set (&lhs_ref),
     940              :                                                   earlier_base_set)))
     941         4748 :                     delete_dead_or_redundant_assignment (&gsi, "redundant",
     942              :                                                          need_eh_cleanup,
     943              :                                                          need_ab_cleanup);
     944              :                 }
     945           50 :               else if (is_gimple_call (use_stmt))
     946              :                 {
     947           50 :                   if ((earlier_set == 0
     948            8 :                        || alias_set_subset_of (0, earlier_set))
     949           50 :                       && (earlier_base_set == 0
     950            0 :                           || alias_set_subset_of (0, earlier_base_set)))
     951           42 :                     delete_dead_or_redundant_call (&gsi, "redundant");
     952              :                 }
     953              :               else
     954            0 :                 gcc_unreachable ();
     955              :             }
     956              :         }
     957      4243133 :     }
     958      4243133 : }
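                       : 
                       : /* A minimal illustration of the case handled above, using an assumed
                       :    example that is not part of this file: given
                       : 
                       :      struct S { int a, b; } s;
                       :      memset (&s, 0, sizeof s);   <- STMT, stores zero to all of s
                       :      s.b = 0;                    <- USE_STMT, stores zero again
                       : 
                       :    STMT kills every byte written by USE_STMT and both store zero, so the
                       :    second store is deleted as "redundant" (subject to the TBAA subset
                       :    checks above).  */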
     959              : 
     960              : /* Return whether PHI contains ARG as an argument.  */
     961              : 
     962              : static bool
     963      3954587 : contains_phi_arg (gphi *phi, tree arg)
     964              : {
     965     30249983 :   for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
     966     26525497 :     if (gimple_phi_arg_def (phi, i) == arg)
     967              :       return true;
     968              :   return false;
     969              : }
     970              : 
     971              : /* Hash map of the memory use in a GIMPLE assignment to its
     972              :    data reference.  If NULL, data-ref analysis isn't used.  */
     973              : static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;
     974              : 
     975              : /* A helper of dse_optimize_stmt.
     976              :    Given a store in STMT that writes to REF, classify it according to
     977              :    downstream uses and defs.  Sets *BY_CLOBBER_P to true if only clobber
     978              :    statements influenced the classification result.  CNT and VISITED are
     979              :    shared with recursive invocations.  Returns the classification.  */
     980              : 
     981              : static dse_store_status
     982     40791478 : dse_classify_store (ao_ref *ref, gimple *stmt,
     983              :                     bool byte_tracking_enabled, sbitmap live_bytes,
     984              :                     bool *by_clobber_p, tree stop_at_vuse, int &cnt,
     985              :                     bitmap visited)
     986              : {
     987     40791478 :   gimple *temp;
     988     40791478 :   std::unique_ptr<data_reference, void(*)(data_reference_p)>
     989     40791478 :     dra (nullptr, free_data_ref);
     990              : 
     991     40791478 :   if (by_clobber_p)
     992     40211823 :     *by_clobber_p = true;
     993              : 
     994              :   /* Find the first dominated statement that clobbers (part of) the
     995              :      memory stmt stores to with no intermediate statement that may use
     996              :      part of the memory stmt stores to.  That is, find a store that may
     997              :      prove stmt to be a dead store.  */
     998              :   temp = stmt;
     999    518362268 :   do
    1000              :     {
    1001    279576873 :       gimple *use_stmt;
    1002    279576873 :       imm_use_iterator ui;
    1003    279576873 :       bool fail = false;
    1004    279576873 :       tree defvar;
    1005              : 
    1006    279576873 :       if (gimple_code (temp) == GIMPLE_PHI)
    1007              :         {
    1008     19031166 :           defvar = PHI_RESULT (temp);
    1009     19031166 :           bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
    1010              :         }
    1011              :       else
    1012    521091414 :         defvar = gimple_vdef (temp);
    1013              : 
    1014    279576873 :       auto_vec<gimple *, 10> defs;
    1015    279576873 :       gphi *first_phi_def = NULL;
    1016    279576873 :       gphi *last_phi_def = NULL;
    1017              : 
    1018    279576873 :       auto_vec<tree, 10> worklist;
    1019    279576873 :       worklist.quick_push (defvar);
    1020              : 
    1021    283489725 :       do
    1022              :         {
    1023    283489725 :           defvar = worklist.pop ();
    1024              :           /* If we're instructed to stop walking at region boundary, do so.  */
    1025    283489725 :           if (defvar == stop_at_vuse)
    1026              :             return DSE_STORE_LIVE;
    1027              : 
    1028    283469944 :           use_operand_p usep;
    1029    889173181 :           FOR_EACH_IMM_USE_FAST (usep, ui, defvar)
    1030              :             {
    1031    351099612 :               use_stmt = USE_STMT (usep);
    1032              : 
    1033              :               /* Limit stmt walking.  */
    1034    351099612 :               if (++cnt > param_dse_max_alias_queries_per_store)
    1035              :                 {
    1036              :                   fail = true;
    1037              :                   break;
    1038              :                 }
    1039              : 
    1040              :               /* In simple cases we can look through PHI nodes, but we
    1041              :                  have to be careful with loops and with memory references
    1042              :                  containing operands that are also operands of PHI nodes.
    1043              :                  See gcc.c-torture/execute/20051110-*.c.  */
    1044    350839232 :               if (gphi *phi = dyn_cast <gphi *> (use_stmt))
    1045              :                 {
    1046              :                   /* Look through single-argument PHIs.  */
    1047     39232690 :                   if (gimple_phi_num_args (phi) == 1)
    1048      4636710 :                     worklist.safe_push (gimple_phi_result (phi));
    1049              :                   else
    1050              :                     {
    1051              :                       /* If we visit this PHI by following a backedge then we
    1052              :                          have to make sure ref->ref only refers to SSA names
    1053              :                          that are invariant with respect to the loop
    1054              :                          represented by this PHI node.  We handle irreducible
    1055              :                          regions by relying on backedge marking and identifying
    1056              :                          the head of the (sub-)region.  */
    1057     34595980 :                       edge e = gimple_phi_arg_edge
    1058     34595980 :                                  (phi, PHI_ARG_INDEX_FROM_USE (usep));
    1059     34595980 :                       if (e->flags & EDGE_DFS_BACK)
    1060              :                         {
    1061      3212831 :                           basic_block rgn_head
    1062      3212831 :                             = nearest_common_dominator (CDI_DOMINATORS,
    1063              :                                                         gimple_bb (phi),
    1064              :                                                         e->src);
    1065      3212831 :                           if (!for_each_index (ref->ref
    1066              :                                                ? &ref->ref : &ref->base,
    1067              :                                                check_name, rgn_head))
    1068      1487344 :                             return DSE_STORE_LIVE;
    1069              :                         }
    1070              :                       /* If we already visited this PHI ignore it for further
    1071              :                          processing.  But note we have to check each incoming
    1072              :                          edge above.  */
    1073     66217272 :                       if (!bitmap_bit_p (visited,
    1074     33108636 :                                          SSA_NAME_VERSION (PHI_RESULT (phi))))
    1075              :                         {
    1076     24594781 :                           defs.safe_push (phi);
    1077     24594781 :                           if (!first_phi_def)
     1078     20760489 :                             first_phi_def = phi;
    1079              :                           last_phi_def = phi;
    1080              :                         }
    1081              :                     }
    1082              :                 }
    1083              :               /* If the statement is a use the store is not dead.  */
    1084    311606542 :               else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
    1085              :                 {
    1086     27128830 :                   if (dse_stmt_to_dr_map
    1087      5947162 :                       && ref->ref
    1088     32970788 :                       && is_gimple_assign (use_stmt))
    1089              :                     {
    1090      1191808 :                       if (!dra)
    1091      1187233 :                         dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
    1092              :                                                     false, false));
    1093      1191808 :                       bool existed_p;
    1094      1191808 :                       data_reference_p &drb
    1095      1191808 :                         = dse_stmt_to_dr_map->get_or_insert (use_stmt,
    1096              :                                                              &existed_p);
    1097      1191808 :                       if (!existed_p)
    1098       721809 :                         drb = create_data_ref (NULL, NULL,
    1099              :                                                gimple_assign_rhs1 (use_stmt),
    1100              :                                                use_stmt, false, false);
    1101      1191808 :                       if (!dr_may_alias_p (dra.get (), drb, NULL))
    1102              :                         {
    1103        16576 :                           if (gimple_vdef (use_stmt))
    1104           18 :                             defs.safe_push (use_stmt);
    1105         8288 :                           continue;
    1106              :                         }
    1107              :                     }
    1108              : 
    1109              :                   /* Handle common cases where we can easily build an ao_ref
    1110              :                      structure for USE_STMT and in doing so we find that the
    1111              :                      references hit non-live bytes and thus can be ignored.
    1112              : 
    1113              :                      TODO: We can also use modref summary to handle calls.  */
    1114     27120542 :                   if (byte_tracking_enabled
    1115     27120542 :                       && is_gimple_assign (use_stmt))
    1116              :                     {
    1117      4967194 :                       ao_ref use_ref;
    1118      4967194 :                       ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
    1119      4967194 :                       if (valid_ao_ref_for_dse (&use_ref)
    1120      4947167 :                           && operand_equal_p (use_ref.base, ref->base,
    1121              :                                               OEP_ADDRESS_OF)
    1122      8577609 :                           && !live_bytes_read (&use_ref, ref, live_bytes))
    1123              :                         {
    1124              :                           /* If this is a store, remember it as we possibly
    1125              :                              need to walk the defs uses.  */
    1126         3894 :                           if (gimple_vdef (use_stmt))
    1127          345 :                             defs.safe_push (use_stmt);
    1128         1947 :                           continue;
    1129              :                         }
    1130              :                     }
    1131              : 
    1132              :                   fail = true;
    1133              :                   break;
    1134              :                 }
    1135              :               /* We have visited ourselves already so ignore STMT for the
    1136              :                  purpose of chaining.  */
    1137    284477712 :               else if (use_stmt == stmt)
    1138              :                 ;
    1139              :               /* If this is a store, remember it as we possibly need to walk the
    1140              :                  defs uses.  */
    1141    891015983 :               else if (gimple_vdef (use_stmt))
    1142    244736419 :                 defs.safe_push (use_stmt);
    1143      1487344 :             }
    1144              :         }
    1145    563965200 :       while (!fail && !worklist.is_empty ());
    1146              : 
    1147    278069748 :       if (fail)
    1148              :         {
    1149              :           /* STMT might be partially dead and we may be able to reduce
    1150              :              how many memory locations it stores into.  */
    1151     27378975 :           if (byte_tracking_enabled && !gimple_clobber_p (stmt))
    1152     24689678 :             return DSE_STORE_MAYBE_PARTIAL_DEAD;
    1153              :           return DSE_STORE_LIVE;
    1154              :         }
    1155              : 
    1156              :       /* If we didn't find any definition this means the store is dead
     1157              :          if it isn't a store to globally reachable memory.  In this case
    1158              :          just pretend the stmt makes itself dead.  Otherwise fail.  */
    1159    250690773 :       if (defs.is_empty ())
    1160              :         {
    1161      2502083 :           if (ref_may_alias_global_p (ref, false))
    1162              :             {
    1163        39655 :               basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (defvar));
    1164              :               /* Assume that BUILT_IN_UNREACHABLE and BUILT_IN_UNREACHABLE_TRAP
    1165              :                  do not need to keep (global) memory side-effects live.
    1166              :                  We do not have virtual operands on BUILT_IN_UNREACHABLE
     1167              :                  but we can do poor man's reachability when the last
    1168              :                  definition we want to elide is in the block that ends
    1169              :                  in such a call.  */
    1170        39655 :               if (EDGE_COUNT (def_bb->succs) == 0)
    1171        54616 :                 if (gcall *last = dyn_cast <gcall *> (*gsi_last_bb (def_bb)))
    1172          633 :                   if (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
    1173          633 :                       || gimple_call_builtin_p (last,
    1174              :                                                 BUILT_IN_UNREACHABLE_TRAP))
    1175              :                     {
    1176          431 :                       if (by_clobber_p)
    1177          431 :                         *by_clobber_p = false;
    1178          431 :                       return DSE_STORE_DEAD;
    1179              :                     }
    1180        39224 :               return DSE_STORE_LIVE;
    1181              :             }
    1182              : 
    1183      2462428 :           if (by_clobber_p)
    1184      2459045 :             *by_clobber_p = false;
    1185      2462428 :           return DSE_STORE_DEAD;
    1186              :         }
    1187              : 
    1188              :       /* Process defs and remove those we need not process further.  */
    1189    512349330 :       for (unsigned i = 0; i < defs.length ();)
    1190              :         {
    1191    264224985 :           gimple *def = defs[i];
    1192    264224985 :           gimple *use_stmt;
    1193    264224985 :           use_operand_p use_p;
    1194    264224985 :           tree vdef = (gimple_code (def) == GIMPLE_PHI
    1195    286764172 :                        ? gimple_phi_result (def) : gimple_vdef (def));
    1196    264224985 :           gphi *phi_def;
    1197              :           /* If the path to check starts with a kill we do not need to
    1198              :              process it further.
    1199              :              ???  With byte tracking we need only kill the bytes currently
    1200              :              live.  */
    1201    264224985 :           if (stmt_kills_ref_p (def, ref))
    1202              :             {
    1203      2431270 :               if (by_clobber_p && !gimple_clobber_p (def))
    1204       599408 :                 *by_clobber_p = false;
    1205      2431270 :               defs.unordered_remove (i);
    1206              :             }
    1207              :           /* If the path ends here we do not need to process it further.
    1208              :              This for example happens with calls to noreturn functions.  */
    1209    261793715 :           else if (has_zero_uses (vdef))
    1210              :             {
    1211              :               /* But if the store is to global memory it is definitely
    1212              :                  not dead.  */
    1213      2628939 :               if (ref_may_alias_global_p (ref, false))
    1214        64345 :                 return DSE_STORE_LIVE;
    1215      2564594 :               defs.unordered_remove (i);
    1216              :             }
    1217              :           /* In addition to kills we can remove defs whose only use
    1218              :              is another def in defs.  That can only ever be PHIs of which
    1219              :              we track two for simplicity reasons, the first and last in
     1220              :              {first,last}_phi_def (we fail for multiple PHIs anyway).
    1221              :              We can also ignore defs that feed only into
    1222              :              already visited PHIs.  */
    1223    259164776 :           else if (single_imm_use (vdef, &use_p, &use_stmt)
    1224    259164776 :                    && (use_stmt == first_phi_def
    1225    224793404 :                        || use_stmt == last_phi_def
    1226    224700097 :                        || (gimple_code (use_stmt) == GIMPLE_PHI
    1227     14271041 :                            && bitmap_bit_p (visited,
    1228     14271041 :                                             SSA_NAME_VERSION
    1229              :                                               (PHI_RESULT (use_stmt))))))
    1230              :             {
    1231      5687636 :               defs.unordered_remove (i);
    1232      5687636 :               if (def == first_phi_def)
    1233              :                 first_phi_def = NULL;
    1234      5004317 :               else if (def == last_phi_def)
    1235       238621 :                 last_phi_def = NULL;
    1236              :             }
    1237              :           /* If def is a PHI and one of its arguments is another PHI node still
    1238              :              in consideration we can defer processing it.  */
    1239    253477140 :           else if ((phi_def = dyn_cast <gphi *> (def))
    1240     21646309 :                    && ((last_phi_def
    1241     21646309 :                         && phi_def != last_phi_def
    1242      2088105 :                         && contains_phi_arg (phi_def,
    1243              :                                              gimple_phi_result (last_phi_def)))
    1244     21536520 :                        || (first_phi_def
    1245     21536520 :                            && phi_def != first_phi_def
    1246      1866482 :                            && contains_phi_arg
    1247      1866482 :                                 (phi_def, gimple_phi_result (first_phi_def)))))
    1248              :             {
    1249       230101 :               defs.unordered_remove (i);
    1250       230101 :               if (phi_def == first_phi_def)
    1251              :                 first_phi_def = NULL;
    1252       160136 :               else if (phi_def == last_phi_def)
    1253       238621 :                 last_phi_def = NULL;
    1254              :             }
    1255              :           else
    1256    253247039 :             ++i;
    1257              :         }
    1258              : 
    1259              :       /* If all defs kill the ref we are done.  */
    1260    288915823 :       if (defs.is_empty ())
    1261              :         return DSE_STORE_DEAD;
    1262              :       /* If more than one def survives we have to analyze multiple
    1263              :          paths.  We can handle this by recursing, sharing 'visited'
    1264              :          to avoid redundant work and limiting it by shared 'cnt'.
    1265              :          For now do not bother with byte-tracking in this case.  */
    1266    248435009 :       while (defs.length () > 1)
    1267              :         {
    1268      9595459 :           if (dse_classify_store (ref, defs.last (), false, NULL,
    1269              :                                   by_clobber_p, stop_at_vuse, cnt,
    1270              :                                   visited) != DSE_STORE_DEAD)
    1271              :             break;
    1272      5912836 :           byte_tracking_enabled = false;
    1273      5912836 :           defs.pop ();
    1274              :         }
    1275              :       /* If more than one def survives fail.  */
    1276    242522173 :       if (defs.length () > 1)
    1277              :         {
    1278              :           /* STMT might be partially dead and we may be able to reduce
    1279              :              how many memory locations it stores into.  */
    1280      3682623 :           if (byte_tracking_enabled && !gimple_clobber_p (stmt))
    1281      1520389 :             return DSE_STORE_MAYBE_PARTIAL_DEAD;
    1282              :           return DSE_STORE_LIVE;
    1283              :         }
    1284    238839550 :       temp = defs[0];
    1285              : 
    1286              :       /* Track partial kills.  */
    1287    238839550 :       if (byte_tracking_enabled)
    1288              :         {
    1289    182905606 :           clear_bytes_written_by (live_bytes, temp, ref);
    1290    182905606 :           if (bitmap_empty_p (live_bytes))
    1291              :             {
    1292        54155 :               if (by_clobber_p && !gimple_clobber_p (temp))
    1293        53993 :                 *by_clobber_p = false;
    1294        54155 :               return DSE_STORE_DEAD;
    1295              :             }
    1296              :         }
    1297    279576873 :     }
    1298              :   /* Continue walking until there are no more live bytes.  */
    1299              :   while (1);
    1300     40791478 : }
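                       : 
                       : /* Assumed illustrations (not part of this file) of the classification
                       :    results computed above:
                       : 
                       :      s.x = 1;             DSE_STORE_DEAD: overwritten by the next store
                       :      s.x = 2;             with no intervening read of s.x.
                       : 
                       :      memset (&s, 0, 8);   DSE_STORE_MAYBE_PARTIAL_DEAD: the later store
                       :      s.lo = v;            covers only part of the memset; byte tracking
                       :      use (s.hi);          records which bytes remain live so the caller
                       :                           can try to trim the memset to just those.
                       : 
                       :    Here s, s.lo, s.hi, v and use () are hypothetical names used only
                       :    for the illustration.  */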
    1301              : 
    1302              : dse_store_status
    1303     31196019 : dse_classify_store (ao_ref *ref, gimple *stmt,
    1304              :                     bool byte_tracking_enabled, sbitmap live_bytes,
    1305              :                     bool *by_clobber_p, tree stop_at_vuse)
    1306              : {
    1307     31196019 :   int cnt = 0;
    1308     31196019 :   auto_bitmap visited;
    1309     31196019 :   return dse_classify_store (ref, stmt, byte_tracking_enabled, live_bytes,
    1310     31196019 :                              by_clobber_p, stop_at_vuse, cnt, visited);
    1311     31196019 : }
    1312              : 
    1313              : 
     1314              : /* Delete a dead or redundant call at GSI, which is a mem* call of some kind.  */
    1315              : static void
    1316         6615 : delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
    1317              : {
    1318         6615 :   gimple *stmt = gsi_stmt (*gsi);
    1319         6615 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1320              :     {
    1321           18 :       fprintf (dump_file, "  Deleted %s call: ", type);
    1322           18 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1323           18 :       fprintf (dump_file, "\n");
    1324              :     }
    1325              : 
    1326         6615 :   basic_block bb = gimple_bb (stmt);
    1327         6615 :   tree lhs = gimple_call_lhs (stmt);
    1328         6615 :   if (lhs)
    1329              :     {
    1330         1216 :       tree ptr = gimple_call_arg (stmt, 0);
    1331         1216 :       gimple *new_stmt = gimple_build_assign (lhs, ptr);
    1332         1216 :       unlink_stmt_vdef (stmt);
    1333         1216 :       if (gsi_replace (gsi, new_stmt, true))
    1334          390 :         bitmap_set_bit (need_eh_cleanup, bb->index);
    1335              :     }
    1336              :   else
    1337              :     {
    1338              :       /* Then we need to fix the operand of the consuming stmt.  */
    1339         5399 :       unlink_stmt_vdef (stmt);
    1340              : 
    1341              :       /* Remove the dead store.  */
    1342         5399 :       if (gsi_remove (gsi, true))
    1343            0 :         bitmap_set_bit (need_eh_cleanup, bb->index);
    1344         5399 :       release_defs (stmt);
    1345              :     }
    1346         6615 : }
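                       : 
                       : /* For illustration (assumed example, not part of this file): a dead
                       :    call whose result is still used, such as
                       : 
                       :      q = memcpy (dst, src, n);
                       : 
                       :    is replaced by the plain copy  q = dst;  (the mem* functions return
                       :    their first argument), while a dead call without a result is simply
                       :    removed.  */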
    1347              : 
     1348              : /* Delete a dead or redundant store at GSI, which is a GIMPLE assignment.  */
    1349              : 
    1350              : void
    1351      2064762 : delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
    1352              :                                      const char *type,
    1353              :                                      bitmap need_eh_cleanup,
    1354              :                                      bitmap need_ab_cleanup)
    1355              : {
    1356      2064762 :   gimple *stmt = gsi_stmt (*gsi);
    1357      2064762 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1358              :     {
    1359          111 :       fprintf (dump_file, "  Deleted %s store: ", type);
    1360          111 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1361          111 :       fprintf (dump_file, "\n");
    1362              :     }
    1363              : 
    1364              :   /* Then we need to fix the operand of the consuming stmt.  */
    1365      2064762 :   unlink_stmt_vdef (stmt);
    1366              : 
    1367              :   /* Remove the dead store.  */
    1368      2064762 :   basic_block bb = gimple_bb (stmt);
    1369      2064762 :   if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
    1370            4 :     bitmap_set_bit (need_ab_cleanup, bb->index);
    1371      2064762 :   if (gsi_remove (gsi, true) && need_eh_cleanup)
    1372           91 :     bitmap_set_bit (need_eh_cleanup, bb->index);
    1373              : 
    1374              :   /* And release any SSA_NAMEs set in this statement back to the
    1375              :      SSA_NAME manager.  */
    1376      2064762 :   release_defs (stmt);
    1377      2064762 : }
    1378              : 
     1379              : /* Try to prove, using the modref summary, that all memory written to by a call
     1380              :    is dead and remove the call.  Assume that if the return value is written to
     1381              :    memory that store has already been proven dead.  */
    1382              : 
    1383              : static bool
    1384     17208403 : dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
    1385              : {
    1386     34229627 :   gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
    1387              : 
    1388     17022610 :   if (!stmt)
    1389              :     return false;
    1390              : 
    1391     17022610 :   tree callee = gimple_call_fndecl (stmt);
    1392              : 
    1393     17022610 :   if (!callee)
    1394              :     return false;
    1395              : 
    1396              :   /* Pure/const functions are optimized by normal DCE
    1397              :      or handled as store above.  */
    1398     16296005 :   int flags = gimple_call_flags (stmt);
    1399     16296005 :   if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
    1400           99 :       && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
    1401              :     return false;
    1402              : 
    1403     16296003 :   cgraph_node *node = cgraph_node::get (callee);
    1404     16296003 :   if (!node)
    1405              :     return false;
    1406              : 
    1407     16286299 :   if ((stmt_could_throw_p (cfun, stmt)
    1408      7132964 :        && !cfun->can_delete_dead_exceptions)
    1409     18678141 :       || ((gimple_call_flags (stmt) & ECF_NORETURN)
    1410      2098553 :           && gimple_call_ctrl_altering_p (stmt)))
    1411      6839668 :     return false;
    1412              : 
    1413              :   /* If return value is used the call is not dead.  */
    1414      9446631 :   tree lhs = gimple_call_lhs (stmt);
    1415      9446631 :   if (lhs && TREE_CODE (lhs) == SSA_NAME)
    1416              :     {
    1417      2368372 :       imm_use_iterator ui;
    1418      2368372 :       gimple *use_stmt;
    1419      4923212 :       FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
    1420      2507804 :         if (!is_gimple_debug (use_stmt))
    1421      2368372 :           return false;
    1422              :     }
    1423              : 
    1424              :   /* Verify that there are no side-effects except for return value
    1425              :      and memory writes tracked by modref.  */
    1426      7125295 :   modref_summary *summary = get_modref_function_summary (node);
    1427      7125295 :   if (!summary || !summary->try_dse)
    1428              :     return false;
    1429              : 
    1430        71999 :   bool by_clobber_p = false;
    1431              : 
    1432              :   /* Walk all memory writes and verify that they are dead.  */
    1433       217768 :   for (auto base_node : summary->stores->bases)
    1434       221035 :     for (auto ref_node : base_node->refs)
    1435       227091 :       for (auto access_node : ref_node->accesses)
    1436              :         {
    1437        74666 :           tree arg = access_node.get_call_arg (stmt);
    1438              : 
    1439        74666 :           if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
    1440        70613 :             return false;
    1441              : 
    1442        74665 :           if (integer_zerop (arg)
    1443        74676 :               && !targetm.addr_space.zero_address_valid
    1444           11 :                     (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
    1445           11 :             continue;
    1446              : 
    1447        74654 :           ao_ref ref;
    1448              : 
    1449        74654 :           if (!access_node.get_ao_ref (stmt, &ref))
    1450              :             return false;
    1451        74654 :           ref.ref_alias_set = ref_node->ref;
    1452        74654 :           ref.base_alias_set = base_node->base;
    1453              : 
    1454        74654 :           bool byte_tracking_enabled
    1455        74654 :               = setup_live_bytes_from_ref (&ref, live_bytes);
    1456        74654 :           enum dse_store_status store_status;
    1457              : 
    1458        74654 :           store_status = dse_classify_store (&ref, stmt,
    1459              :                                              byte_tracking_enabled,
    1460              :                                              live_bytes, &by_clobber_p);
    1461        74654 :           if (store_status != DSE_STORE_DEAD)
    1462              :             return false;
    1463              :         }
    1464         1386 :   delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
    1465              :                                        need_ab_cleanup);
    1466         1386 :   return true;
    1467              : }
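                       : 
                       : /* A sketch of the situation handled above, with assumed names that are
                       :    not part of this file: for
                       : 
                       :      set_flag (&local);   // modref records only the store through the arg
                       :      local = 1;           // overwrites it before any read
                       : 
                       :    every access in SUMMARY->stores classifies as DSE_STORE_DEAD, so the
                       :    whole call is removed, provided its return value is unused and the
                       :    summary's try_dse flag confirms there are no other side effects.  */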
    1468              : 
     1469              : /* Attempt to eliminate dead stores in the statement referenced by GSI.
    1470              : 
    1471              :    A dead store is a store into a memory location which will later be
    1472              :    overwritten by another store without any intervening loads.  In this
    1473              :    case the earlier store can be deleted.
    1474              : 
    1475              :    In our SSA + virtual operand world we use immediate uses of virtual
    1476              :    operands to detect dead stores.  If a store's virtual definition
    1477              :    is used precisely once by a later store to the same location which
    1478              :    post dominates the first store, then the first store is dead.  */
    1479              : 
    1480              : static void
    1481     54833327 : dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
    1482              : {
    1483     54833327 :   gimple *stmt = gsi_stmt (*gsi);
    1484              : 
    1485              :   /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
    1486     54833327 :   if (gimple_has_volatile_ops (stmt)
    1487     54833327 :       && (!gimple_clobber_p (stmt)
    1488      6342323 :           || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    1489     52697029 :     return;
    1490              : 
    1491     48276833 :   ao_ref ref;
    1492              :   /* If this is not a store we can still remove dead call using
    1493              :      modref summary.  Note we specifically allow ref to be initialized
    1494              :      to a conservative may-def since we are looking for followup stores
    1495              :      to kill all of it.  */
    1496     48276833 :   if (!initialize_ao_ref_for_dse (stmt, &ref, true))
    1497              :     {
    1498     17173049 :       dse_optimize_call (gsi, live_bytes);
    1499     17173049 :       return;
    1500              :     }
    1501              : 
    1502              :   /* We know we have virtual definitions.  We can handle assignments and
    1503              :      some builtin calls.  */
    1504     31103784 :   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
    1505     31103784 :       && !gimple_call_ctrl_altering_p (stmt))
    1506              :     {
    1507       460722 :       tree fndecl = gimple_call_fndecl (stmt);
    1508       460722 :       switch (DECL_FUNCTION_CODE (fndecl))
    1509              :         {
    1510       459032 :         case BUILT_IN_MEMCPY:
    1511       459032 :         case BUILT_IN_MEMMOVE:
    1512       459032 :         case BUILT_IN_STRNCPY:
    1513       459032 :         case BUILT_IN_MEMSET:
    1514       459032 :         case BUILT_IN_MEMCPY_CHK:
    1515       459032 :         case BUILT_IN_MEMMOVE_CHK:
    1516       459032 :         case BUILT_IN_STRNCPY_CHK:
    1517       459032 :         case BUILT_IN_MEMSET_CHK:
    1518       459032 :           {
    1519              :             /* Occasionally calls with an explicit length of zero
    1520              :                show up in the IL.  It's pointless to do analysis
    1521              :                on them, they're trivially dead.  */
    1522       459032 :             tree size = gimple_call_arg (stmt, 2);
    1523       459032 :             if (integer_zerop (size))
    1524              :               {
    1525           50 :                 delete_dead_or_redundant_call (gsi, "dead");
    1526           50 :                 return;
    1527              :               }
    1528              : 
    1529              :             /* If this is a memset call that initializes an object
    1530              :                to zero, it may be redundant with an earlier memset
    1531              :                or empty CONSTRUCTOR of a larger object.  */
    1532       458982 :             if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
    1533       361202 :                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
    1534       459279 :                 && integer_zerop (gimple_call_arg (stmt, 1)))
    1535        56507 :               dse_optimize_redundant_stores (stmt);
    1536              : 
    1537       458982 :             enum dse_store_status store_status;
    1538       458982 :             bool byte_tracking_enabled
    1539       458982 :               = setup_live_bytes_from_ref (&ref, live_bytes);
    1540       458982 :             store_status = dse_classify_store (&ref, stmt,
    1541              :                                                byte_tracking_enabled,
    1542              :                                                live_bytes);
    1543       458982 :             if (store_status == DSE_STORE_LIVE)
    1544              :               return;
    1545              : 
    1546       148622 :             if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
    1547              :               {
    1548       142099 :                 maybe_trim_memstar_call (&ref, live_bytes, stmt);
    1549       142099 :                 return;
    1550              :               }
    1551              : 
    1552         6523 :             if (store_status == DSE_STORE_DEAD)
    1553         6523 :               delete_dead_or_redundant_call (gsi, "dead");
    1554         6523 :             return;
    1555              :           }
    1556              : 
    1557         1690 :         case BUILT_IN_CALLOC:
    1558              :           /* We already know the arguments are integer constants.  */
    1559         1690 :           dse_optimize_redundant_stores (stmt);
    1560         1690 :           return;
    1561              : 
    1562              :         default:
    1563              :           return;
    1564              :         }
    1565              :     }
    1566     30643062 :   else if (is_gimple_call (stmt)
    1567     30643062 :            && gimple_call_internal_p (stmt))
    1568              :     {
    1569       107520 :       switch (gimple_call_internal_fn (stmt))
    1570              :         {
    1571         1297 :         case IFN_LEN_STORE:
    1572         1297 :         case IFN_MASK_STORE:
    1573         1297 :         case IFN_MASK_LEN_STORE:
    1574         1297 :           {
    1575         1297 :             enum dse_store_status store_status;
    1576         1297 :             store_status = dse_classify_store (&ref, stmt, false, live_bytes);
    1577         1297 :             if (store_status == DSE_STORE_DEAD)
    1578            0 :               delete_dead_or_redundant_call (gsi, "dead");
    1579         1297 :             return;
    1580              :           }
    1581              :         default:;
    1582              :         }
    1583              :     }
    1584              : 
    1585     30641765 :   bool by_clobber_p = false;
    1586              : 
    1587              :   /* Check if this statement stores zero to a memory location,
    1588              :      and if there is a subsequent store of zero to the same
    1589              :      memory location.  If so, remove the subsequent store.  */
    1590     30641765 :   if (gimple_assign_single_p (stmt)
    1591     30641765 :       && initializer_zerop (gimple_assign_rhs1 (stmt)))
    1592      4184936 :     dse_optimize_redundant_stores (stmt);
    1593              : 
    1594              :   /* Self-assignments are zombies.  */
    1595     30641765 :   if (is_gimple_assign (stmt)
    1596     59873309 :       && operand_equal_p (gimple_assign_rhs1 (stmt),
    1597     29231544 :                           gimple_assign_lhs (stmt), 0))
    1598              :     ;
    1599              :   else
    1600              :     {
    1601     30640329 :       bool byte_tracking_enabled
    1602     30640329 :           = setup_live_bytes_from_ref (&ref, live_bytes);
    1603     30640329 :       enum dse_store_status store_status;
    1604     30640329 :       store_status = dse_classify_store (&ref, stmt,
    1605              :                                          byte_tracking_enabled,
    1606              :                                          live_bytes, &by_clobber_p);
    1607     30640329 :       if (store_status == DSE_STORE_LIVE)
    1608              :         return;
    1609              : 
    1610     28195348 :       if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
    1611              :         {
    1612     25999906 :           maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
    1613     25999906 :           return;
    1614              :         }
    1615              :     }
    1616              : 
    1617              :   /* Now we know that use_stmt kills the LHS of stmt.  */
    1618              : 
    1619              :   /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
    1620              :      another clobber stmt.  */
    1621      2196878 :   if (gimple_clobber_p (stmt)
    1622      2196878 :       && !by_clobber_p)
    1623              :     return;
    1624              : 
    1625      2143463 :   if (is_gimple_call (stmt)
    1626      2143463 :       && (gimple_has_side_effects (stmt)
    1627        41455 :           || (stmt_could_throw_p (fun, stmt)
    1628            5 :               && !fun->can_delete_dead_exceptions)))
    1629              :     {
    1630              :       /* See if we can remove complete call.  */
    1631        35354 :       if (dse_optimize_call (gsi, live_bytes))
    1632              :         return;
    1633              :       /* Make sure we do not remove a return slot we cannot reconstruct
    1634              :          later.  */
    1635        35332 :       if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
    1636        35332 :           && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
    1637        14662 :               || !poly_int_tree_p
    1638        14662 :                     (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
    1639              :         return;
    1640        28189 :       if (dump_file && (dump_flags & TDF_DETAILS))
    1641              :         {
    1642            1 :           fprintf (dump_file, "  Deleted dead store in call LHS: ");
    1643            1 :           print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1644            1 :           fprintf (dump_file, "\n");
    1645              :         }
    1646        28189 :       gimple_call_set_lhs (stmt, NULL_TREE);
    1647        28189 :       update_stmt (stmt);
    1648              :     }
    1649      2108109 :   else if (!stmt_could_throw_p (fun, stmt)
    1650      2108109 :            || fun->can_delete_dead_exceptions)
    1651      2058285 :     delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
    1652              :                                          need_ab_cleanup);
    1653              : }
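                       : 
                       : /* Assumed examples (not part of this file) for some of the cases above:
                       : 
                       :      memset (p, 0, 0);        trivially dead: explicit zero length.
                       :      s = call ();  s = x;     if call () has side effects only the dead
                       :                               store to s is dropped, leaving  call ();
                       :      s = s;                   a self-assignment "zombie" is deleted
                       :                               without any classification walk.  */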
    1654              : 
    1655              : namespace {
    1656              : 
    1657              : const pass_data pass_data_dse =
    1658              : {
    1659              :   GIMPLE_PASS, /* type */
    1660              :   "dse", /* name */
    1661              :   OPTGROUP_NONE, /* optinfo_flags */
    1662              :   TV_TREE_DSE, /* tv_id */
    1663              :   ( PROP_cfg | PROP_ssa ), /* properties_required */
    1664              :   0, /* properties_provided */
    1665              :   0, /* properties_destroyed */
    1666              :   0, /* todo_flags_start */
    1667              :   0, /* todo_flags_finish */
    1668              : };
    1669              : 
    1670              : class pass_dse : public gimple_opt_pass
    1671              : {
    1672              : public:
    1673      1428610 :   pass_dse (gcc::context *ctxt)
    1674      2857220 :     : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
    1675              :   {}
    1676              : 
    1677              :   /* opt_pass methods: */
    1678      1142888 :   opt_pass * clone () final override { return new pass_dse (m_ctxt); }
    1679       285722 :   void set_pass_param (unsigned n, bool param) final override
    1680              :     {
    1681       285722 :       gcc_assert (n == 0);
    1682       285722 :       use_dr_analysis_p = param;
    1683       285722 :     }
    1684      5568844 :   bool gate (function *) final override { return flag_tree_dse != 0; }
    1685              :   unsigned int execute (function *) final override;
    1686              : 
    1687              : private:
    1688              :   bool use_dr_analysis_p;
    1689              : }; // class pass_dse
    1690              : 
    1691              : unsigned int
    1692      5544176 : pass_dse::execute (function *fun)
    1693              : {
    1694      5544176 :   unsigned todo = 0;
    1695      5544176 :   bool released_def = false;
    1696              : 
    1697      5544176 :   need_eh_cleanup = BITMAP_ALLOC (NULL);
    1698      5544176 :   need_ab_cleanup = BITMAP_ALLOC (NULL);
    1699      5544176 :   auto_sbitmap live_bytes (param_dse_max_object_size);
    1700      5544176 :   if (flag_expensive_optimizations && use_dr_analysis_p)
    1701       959460 :     dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;
    1702              : 
    1703      5544176 :   renumber_gimple_stmt_uids (fun);
    1704              : 
    1705      5544176 :   calculate_dominance_info (CDI_DOMINATORS);
    1706              : 
    1707              :   /* Dead store elimination is fundamentally a reverse program order walk.  */
    1708      5544176 :   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
    1709      5544176 :   auto_bitmap exit_bbs;
    1710      5544176 :   bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    1711      5544176 :   edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
    1712      5544176 :   int n = rev_post_order_and_mark_dfs_back_seme (fun, entry,
    1713              :                                                  exit_bbs, false, rpo, NULL);
    1714     52712792 :   for (int i = n; i != 0; --i)
    1715              :     {
    1716     47168616 :       basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
    1717     47168616 :       gimple_stmt_iterator gsi;
    1718              : 
    1719     94337232 :       for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    1720              :         {
    1721    335224219 :           gimple *stmt = gsi_stmt (gsi);
    1722              : 
    1723    459781178 :           if (gimple_vdef (stmt))
    1724     54833327 :             dse_optimize_stmt (fun, &gsi, live_bytes);
    1725    560781784 :           else if (def_operand_p
    1726    280390892 :                      def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
    1727              :             {
    1728              :               /* When we remove dead stores make sure to also delete trivially
    1729              :                  dead SSA defs.  */
    1730     63625497 :               if (has_zero_uses (DEF_FROM_PTR (def_p))
    1731      2091584 :                   && !gimple_has_side_effects (stmt)
    1732      2081530 :                   && !is_ctrl_altering_stmt (stmt)
    1733     65705339 :                   && (!stmt_could_throw_p (fun, stmt)
    1734        91355 :                       || fun->can_delete_dead_exceptions))
    1735              :                 {
    1736      1988606 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    1737              :                     {
    1738           11 :                       fprintf (dump_file, "  Deleted trivially dead stmt: ");
    1739           11 :                       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1740           11 :                       fprintf (dump_file, "\n");
    1741              :                     }
    1742      1988606 :                   if (gsi_remove (&gsi, true) && need_eh_cleanup)
    1743            2 :                     bitmap_set_bit (need_eh_cleanup, bb->index);
    1744      1988606 :                   release_defs (stmt);
    1745      1988606 :                   released_def = true;
    1746              :                 }
    1747              :             }
    1748    335224219 :           if (gsi_end_p (gsi))
    1749       580492 :             gsi = gsi_last_bb (bb);
    1750              :           else
    1751    717326808 :             gsi_prev (&gsi);
    1752              :         }
    1753     47168616 :       bool removed_phi = false;
    1754     66309310 :       for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
    1755              :         {
    1756     19140694 :           gphi *phi = si.phi ();
    1757     19140694 :           if (has_zero_uses (gimple_phi_result (phi)))
    1758              :             {
    1759       218585 :               if (dump_file && (dump_flags & TDF_DETAILS))
    1760              :                 {
    1761            0 :                   fprintf (dump_file, "  Deleted trivially dead PHI: ");
    1762            0 :                   print_gimple_stmt (dump_file, phi, 0, dump_flags);
    1763            0 :                   fprintf (dump_file, "\n");
    1764              :                 }
    1765       218585 :               remove_phi_node (&si, true);
    1766       218585 :               removed_phi = true;
    1767       218585 :               released_def = true;
    1768              :             }
    1769              :           else
    1770     18922109 :             gsi_next (&si);
    1771              :         }
    1772     47168616 :       if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
    1773              :         todo |= TODO_cleanup_cfg;
    1774              :     }
    1775      5544176 :   free (rpo);
    1776              : 
    1777              :   /* Removal of stores may make some EH edges dead.  Purge such edges from
    1778              :      the CFG as needed.  */
    1779      5544176 :   if (!bitmap_empty_p (need_eh_cleanup))
    1780              :     {
    1781          385 :       gimple_purge_all_dead_eh_edges (need_eh_cleanup);
    1782          385 :       todo |= TODO_cleanup_cfg;
    1783              :     }
    1784      5544176 :   if (!bitmap_empty_p (need_ab_cleanup))
    1785              :     {
    1786            4 :       gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
    1787            4 :       todo |= TODO_cleanup_cfg;
    1788              :     }
    1789              : 
    1790      5544176 :   BITMAP_FREE (need_eh_cleanup);
    1791      5544176 :   BITMAP_FREE (need_ab_cleanup);
    1792              : 
    1793      5544176 :   if (released_def)
    1794       591716 :     free_numbers_of_iterations_estimates (fun);
    1795              : 
    1796      5544176 :   if (flag_expensive_optimizations && use_dr_analysis_p)
    1797              :     {
    1798      1681269 :       for (auto i = dse_stmt_to_dr_map->begin ();
    1799      2403078 :            i != dse_stmt_to_dr_map->end (); ++i)
    1800       721809 :         free_data_ref ((*i).second);
    1801      1918920 :       delete dse_stmt_to_dr_map;
    1802       959460 :       dse_stmt_to_dr_map = NULL;
    1803              :     }
    1804              : 
    1805      5544176 :   return todo;
    1806      5544176 : }
    1807              : 
    1808              : } // anon namespace
    1809              : 
    1810              : gimple_opt_pass *
    1811       285722 : make_pass_dse (gcc::context *ctxt)
    1812              : {
    1813       285722 :   return new pass_dse (ctxt);
    1814              : }
        

Generated by: LCOV version 2.4-beta

The LCOV profile was generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite was run with the built compiler.