LCOV - code coverage report
Current view: top level - gcc - tree-ssa-dse.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 98.3 % 786 773
Test Date: 2026-04-20 14:57:17 Functions: 100.0 % 31 31
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Dead and redundant store elimination
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "rtl.h"
      25              : #include "tree.h"
      26              : #include "gimple.h"
      27              : #include "tree-pass.h"
      28              : #include "ssa.h"
      29              : #include "gimple-pretty-print.h"
      30              : #include "fold-const.h"
      31              : #include "gimple-iterator.h"
      32              : #include "tree-cfg.h"
      33              : #include "tree-dfa.h"
      34              : #include "tree-cfgcleanup.h"
      35              : #include "alias.h"
      36              : #include "tree-ssa-loop.h"
      37              : #include "tree-ssa-dse.h"
      38              : #include "builtins.h"
      39              : #include "gimple-fold.h"
      40              : #include "gimplify.h"
      41              : #include "tree-eh.h"
      42              : #include "cfganal.h"
      43              : #include "cgraph.h"
      44              : #include "ipa-modref-tree.h"
      45              : #include "ipa-modref.h"
      46              : #include "target.h"
      47              : #include "tree-ssa-loop-niter.h"
      48              : #include "cfgloop.h"
      49              : #include "tree-data-ref.h"
      50              : #include "internal-fn.h"
      51              : #include "tree-ssa.h"
      52              : 
      53              : /* This file implements dead store elimination.
      54              : 
      55              :    A dead store is a store into a memory location which will later be
      56              :    overwritten by another store without any intervening loads.  In this
      57              :    case the earlier store can be deleted or trimmed if the store
      58              :    was partially dead.
      59              : 
      60              :    A redundant store is a store into a memory location which stores
      61              :    the exact same value as a prior store to the same memory location.
      62              :    While this can often be handled by dead store elimination, removing
      63              :    the redundant store is often better than removing or trimming the
      64              :    dead store.
      65              : 
      66              :    In our SSA + virtual operand world we use immediate uses of virtual
      67              :    operands to detect these cases.  If a store's virtual definition
      68              :    is used precisely once by a later store to the same location which
      69              :    post dominates the first store, then the first store is dead.  If
      70              :    the data stored is the same, then the second store is redundant.
      71              : 
      72              :    The single use of the store's virtual definition ensures that
      73              :    there are no intervening aliased loads and the requirement that
       74              :    the second store post dominate the first ensures that if the earlier
      75              :    store executes, then the later stores will execute before the function
      76              :    exits.
      77              : 
      78              :    It may help to think of this as first moving the earlier store to
      79              :    the point immediately before the later store.  Again, the single
      80              :    use of the virtual definition and the post-dominance relationship
      81              :    ensure that such movement would be safe.  Clearly if there are
       82              :    back to back stores, then the second makes the first dead.  If
      83              :    the second store stores the same value, then the second store is
      84              :    redundant.
      85              : 
      86              :    Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
      87              :    may also help in understanding this code since it discusses the
      88              :    relationship between dead store and redundant load elimination.  In
      89              :    fact, they are the same transformation applied to different views of
      90              :    the CFG.  */
      91              : 
      92              : static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
      93              : 
      94              : /* Bitmap of blocks that have had EH statements cleaned.  We should
      95              :    remove their dead edges eventually.  */
      96              : static bitmap need_eh_cleanup;
      97              : static bitmap need_ab_cleanup;
      98              : 
      99              : /* STMT is a statement that may write into memory.  Analyze it and
     100              :    initialize WRITE to describe how STMT affects memory.  When
     101              :    MAY_DEF_OK is true then the function initializes WRITE to what
     102              :    the stmt may define.
     103              : 
     104              :    Return TRUE if the statement was analyzed, FALSE otherwise.
     105              : 
      106              :    It is always safe to return FALSE.  But typically better optimization
     107              :    can be achieved by analyzing more statements.  */
     108              : 
     109              : static bool
     110    235089626 : initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
     111              : {
     112              :   /* It's advantageous to handle certain mem* functions.  */
     113    235089626 :   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
     114              :     {
     115      5398220 :       switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
     116              :         {
     117      1314196 :         case BUILT_IN_MEMCPY:
     118      1314196 :         case BUILT_IN_MEMMOVE:
     119      1314196 :         case BUILT_IN_MEMSET:
     120      1314196 :         case BUILT_IN_MEMCPY_CHK:
     121      1314196 :         case BUILT_IN_MEMMOVE_CHK:
     122      1314196 :         case BUILT_IN_MEMSET_CHK:
     123      1314196 :         case BUILT_IN_STRNCPY:
     124      1314196 :         case BUILT_IN_STRNCPY_CHK:
     125      1314196 :           {
     126      1314196 :             tree size = gimple_call_arg (stmt, 2);
     127      1314196 :             tree ptr = gimple_call_arg (stmt, 0);
     128      1314196 :             ao_ref_init_from_ptr_and_size (write, ptr, size);
     129      1314196 :             return true;
     130              :           }
     131              : 
     132              :         /* A calloc call can never be dead, but it can make
     133              :            subsequent stores redundant if they store 0 into
     134              :            the same memory locations.  */
     135         3277 :         case BUILT_IN_CALLOC:
     136         3277 :           {
     137         3277 :             tree nelem = gimple_call_arg (stmt, 0);
     138         3277 :             tree selem = gimple_call_arg (stmt, 1);
     139         3277 :             tree lhs;
     140         3277 :             if (TREE_CODE (nelem) == INTEGER_CST
     141         2700 :                 && TREE_CODE (selem) == INTEGER_CST
     142         5785 :                 && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
     143              :               {
     144         2497 :                 tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
     145              :                                          nelem, selem);
     146         2497 :                 ao_ref_init_from_ptr_and_size (write, lhs, size);
     147         2497 :                 return true;
     148              :               }
     149              :           }
     150              : 
     151              :         default:
     152              :           break;
     153              :         }
     154              :     }
     155    229691406 :   else if (is_gimple_call (stmt)
     156    229691406 :            && gimple_call_internal_p (stmt))
     157              :     {
     158       434869 :       switch (gimple_call_internal_fn (stmt))
     159              :         {
     160         1335 :         case IFN_LEN_STORE:
     161         1335 :         case IFN_MASK_STORE:
     162         1335 :         case IFN_MASK_LEN_STORE:
     163         1335 :           {
     164         1335 :             internal_fn ifn = gimple_call_internal_fn (stmt);
     165         1335 :             int stored_value_index = internal_fn_stored_value_index (ifn);
     166         1335 :             int len_index = internal_fn_len_index (ifn);
     167         1335 :             if (ifn == IFN_LEN_STORE)
     168              :               {
     169            0 :                 tree len = gimple_call_arg (stmt, len_index);
     170            0 :                 tree bias = gimple_call_arg (stmt, len_index + 1);
     171            0 :                 if (tree_fits_uhwi_p (len))
     172              :                   {
     173            0 :                     ao_ref_init_from_ptr_and_size (write,
     174              :                                                    gimple_call_arg (stmt, 0),
     175              :                                                    int_const_binop (MINUS_EXPR,
     176              :                                                                     len, bias));
     177            0 :                     return true;
     178              :                   }
     179              :               }
     180              :             /* We cannot initialize a must-def ao_ref (in all cases) but we
     181              :                can provide a may-def variant.  */
     182         1335 :             if (may_def_ok)
     183              :               {
     184         1297 :                 ao_ref_init_from_ptr_and_range (
     185              :                   write, gimple_call_arg (stmt, 0), true, 0, -1,
     186         1297 :                   tree_to_poly_int64 (TYPE_SIZE (
     187              :                     TREE_TYPE (gimple_call_arg (stmt, stored_value_index)))));
     188         1297 :                 return true;
     189              :               }
     190              :             break;
     191              :           }
     192              :         default:;
     193              :         }
     194              :     }
     195    233771636 :   if (tree lhs = gimple_get_lhs (stmt))
     196              :     {
     197    219632259 :       if (TREE_CODE (lhs) != SSA_NAME
     198    219632259 :           && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
     199              :         {
     200    202429428 :           ao_ref_init (write, lhs);
     201    202429428 :           return true;
     202              :         }
     203              :     }
     204              :   return false;
     205              : }
     206              : 
     207              : /* Given REF from the alias oracle, return TRUE if it is a valid
     208              :    kill memory reference for dead store elimination, false otherwise.
     209              : 
     210              :    In particular, the reference must have a known base, known maximum
     211              :    size, start at a byte offset and have a size that is one or more
     212              :    bytes.  */
     213              : 
     214              : static bool
     215    171202970 : valid_ao_ref_kill_for_dse (ao_ref *ref)
     216              : {
     217    171202970 :   return (ao_ref_base (ref)
     218    171202970 :           && known_size_p (ref->max_size)
     219    170889814 :           && maybe_ne (ref->size, 0)
     220    170871738 :           && known_eq (ref->max_size, ref->size)
     221    341569045 :           && known_ge (ref->offset, 0));
     222              : }
     223              : 
     224              : /* Given REF from the alias oracle, return TRUE if it is a valid
     225              :    load or store memory reference for dead store elimination, false otherwise.
     226              : 
     227              :    Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
     228              :    is not same as size since we can handle conservatively the larger range.  */
     229              : 
     230              : static bool
     231     37625478 : valid_ao_ref_for_dse (ao_ref *ref)
     232              : {
     233     37625478 :   return (ao_ref_base (ref)
     234     37625478 :           && known_size_p (ref->max_size)
     235     74769989 :           && known_ge (ref->offset, 0));
     236              : }
     237              : 
     238              : /* Initialize OFFSET and SIZE to a range known to contain REF
      239              :    where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
     240              :    Return false if this is impossible.  */
     241              : 
     242              : static bool
     243    105471949 : get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
     244              :                                        HOST_WIDE_INT *size)
     245              : {
     246            0 :   if (!known_size_p (ref->max_size))
     247              :     return false;
     248    105471949 :   *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
     249    105471949 :   poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
     250              :                                         BITS_PER_UNIT);
     251    105471949 :   return (end - *offset).is_constant (size);
     252              : }
     253              : 
      254              : /* Initialize OFFSET and SIZE to a range known to be contained in REF
     255              :    where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
     256              :    Return false if this is impossible.  */
     257              : 
     258              : static bool
     259     98474505 : get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
     260              :                                          HOST_WIDE_INT *size)
     261              : {
     262     98474505 :   if (!known_size_p (ref->size)
     263     98474505 :       || !known_eq (ref->size, ref->max_size))
     264              :     return false;
     265     98474505 :   *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
     266     98474505 :   poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
     267              :                                         BITS_PER_UNIT);
     268              :   /* For bit accesses we can get -1 here, but also 0 sized kill is not
     269              :      useful.  */
     270     98474505 :   if (!known_gt (end, *offset))
     271              :     return false;
     272     98339309 :   return (end - *offset).is_constant (size);
     273              : }
     274              : 
      275              : /* Compute byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
      276              :    inside REF.  If KILL is true, then COPY represents a kill and the byte range
     277              :    needs to be fully contained in bit range given by COPY.  If KILL is false
     278              :    then the byte range returned must contain the range of COPY.  */
     279              : 
     280              : static bool
     281    102040825 : get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
     282              :                 HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
     283              : {
     284    102040825 :   HOST_WIDE_INT copy_size, ref_size;
     285    102040825 :   poly_int64 copy_offset, ref_offset;
     286    102040825 :   HOST_WIDE_INT diff;
     287              : 
     288              :   /* First translate from bits to bytes, rounding to bigger or smaller ranges
     289              :      as needed.  Kills needs to be always rounded to smaller ranges while
     290              :      uses and stores to larger ranges.  */
     291    102040825 :   if (kill)
     292              :     {
     293     98474505 :       if (!get_byte_aligned_range_contained_in_ref (copy, &copy_offset,
     294              :                                                     &copy_size))
     295              :         return false;
     296              :     }
     297              :   else
     298              :     {
     299      3566320 :       if (!get_byte_aligned_range_containing_ref (copy, &copy_offset,
     300              :                                                   &copy_size))
     301              :         return false;
     302              :     }
     303              : 
     304    197781826 :   if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
     305              :       || !ordered_p (copy_offset, ref_offset))
     306              :     return false;
     307              : 
     308              :   /* Switch sizes from bits to bytes so we do not need to care about
     309              :      overflows.  Offset calculation needs to stay in bits until we compute
     310              :      the difference and can switch to HOST_WIDE_INT.  */
     311    101905629 :   copy_size /= BITS_PER_UNIT;
     312    101905629 :   ref_size /= BITS_PER_UNIT;
     313              : 
     314              :   /* If COPY starts before REF, then reset the beginning of
     315              :      COPY to match REF and decrease the size of COPY by the
     316              :      number of bytes removed from COPY.  */
     317    101905629 :   if (maybe_lt (copy_offset, ref_offset))
     318              :     {
     319      9303414 :       if (!(ref_offset - copy_offset).is_constant (&diff)
     320      9303414 :           || copy_size < diff / BITS_PER_UNIT)
     321              :         return false;
     322      2704526 :       copy_size -= diff / BITS_PER_UNIT;
     323      2704526 :       copy_offset = ref_offset;
     324              :     }
     325              : 
     326     95306741 :   if (!(copy_offset - ref_offset).is_constant (&diff)
     327     95306741 :       || ref_size <= diff / BITS_PER_UNIT)
     328              :     return false;
     329              : 
     330              :   /* If COPY extends beyond REF, chop off its size appropriately.  */
     331      6164628 :   HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;
     332              : 
     333      6164628 :   if (copy_size > limit)
     334      1107975 :     copy_size = limit;
     335      6164628 :   *ret_size = copy_size;
     336      6164628 :   if (!(copy_offset - ref_offset).is_constant (ret_offset))
     337              :     return false;
     338      6164628 :   *ret_offset /= BITS_PER_UNIT;
     339      6164628 :   return true;
     340              : }
     341              : 
     342              : /* Update LIVE_BYTES tracking REF for write to WRITE:
     343              :    Verify we have the same base memory address, the write
     344              :    has a known size and overlaps with REF.  */
     345              : static void
     346    171202970 : clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
     347              : {
     348    171202970 :   HOST_WIDE_INT start, size;
     349              : 
     350    171202970 :   if (valid_ao_ref_kill_for_dse (write)
     351    170365826 :       && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
     352    269677475 :       && get_byte_range (write, ref, true, &start, &size))
     353      2598308 :     bitmap_clear_range (live_bytes, start, size);
     354    171202970 : }
     355              : 
     356              : /* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
     357              :    address written by STMT must match the one found in REF, which must
     358              :    have its base address previously initialized.
     359              : 
     360              :    This routine must be conservative.  If we don't know the offset or
     361              :    actual size written, assume nothing was written.  */
     362              : 
     363              : static void
     364    185172066 : clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
     365              : {
     366    185172066 :   ao_ref write;
     367              : 
     368    185172066 :   if (gcall *call = dyn_cast <gcall *> (stmt))
     369              :     {
     370      5594444 :       bool interposed;
     371      5594444 :       modref_summary *summary = get_modref_function_summary (call, &interposed);
     372              : 
     373      5594444 :       if (summary && !interposed)
     374       497474 :         for (auto kill : summary->kills)
     375        66873 :           if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
     376        66849 :             clear_live_bytes_for_ref (live_bytes, ref, &write);
     377              :     }
     378    185172066 :   if (!initialize_ao_ref_for_dse (stmt, &write))
     379     14035945 :     return;
     380              : 
     381    171136121 :   clear_live_bytes_for_ref (live_bytes, ref, &write);
     382              : }
     383              : 
     384              : /* REF is a memory write.  Extract relevant information from it and
     385              :    initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
     386              :    Otherwise return FALSE.  */
     387              : 
     388              : static bool
     389     31236275 : setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
     390              : {
     391     31236275 :   HOST_WIDE_INT const_size;
     392     31236275 :   if (valid_ao_ref_for_dse (ref)
     393     30781317 :       && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
     394     30781317 :            - aligned_lower_bound (ref->offset,
     395     30781317 :                                   BITS_PER_UNIT)).is_constant (&const_size))
     396     30781317 :       && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
     397     61697038 :       && const_size > 1)
     398              :     {
     399     30460546 :       bitmap_clear (live_bytes);
     400     30460546 :       bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
     401     30460546 :       return true;
     402              :     }
     403              :   return false;
     404              : }
     405              : 
     406              : /* Compute the number of stored bytes that we can trim from the head and
     407              :    tail of REF.  LIVE is the bitmap of stores to REF that are still live.
     408              : 
     409              :    Store the number of bytes trimmed from the head and tail in TRIM_HEAD
     410              :    and TRIM_TAIL respectively.
     411              : 
     412              :    STMT is the statement being trimmed and is used for debugging dump
     413              :    output only.  */
     414              : 
     415              : static void
     416      3466255 : compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
     417              :                gimple *stmt)
     418              : {
     419      3466255 :   *trim_head = 0;
     420      3466255 :   *trim_tail = 0;
     421              : 
     422              :   /* We use bitmaps biased such that ref->offset is contained in bit zero and
     423              :      the bitmap extends through ref->max_size, so we know that in the original
     424              :      bitmap bits 0 .. ref->max_size were true.  But we need to check that this
     425              :      covers the bytes of REF exactly.  */
     426      3466255 :   const unsigned int offset_align = known_alignment (ref->offset);
     427      3466255 :   const unsigned int size_align = known_alignment (ref->size);
     428      3466255 :   if ((offset_align > 0 && offset_align < BITS_PER_UNIT)
     429      3465966 :       || (size_align > 0 && size_align < BITS_PER_UNIT)
     430      6932221 :       || !known_eq (ref->size, ref->max_size))
     431        12369 :     return;
     432              : 
     433              :   /* Now identify how much, if any of the tail we can chop off.  */
     434      3453886 :   HOST_WIDE_INT const_size;
     435      3453886 :   int last_live = bitmap_last_set_bit (live);
     436      3453886 :   if (ref->size.is_constant (&const_size))
     437              :     {
     438      3453886 :       int last_orig = (const_size / BITS_PER_UNIT) - 1;
     439              :       /* We can leave inconvenient amounts on the tail as
     440              :          residual handling in mem* and str* functions is usually
     441              :          reasonably efficient.  */
     442      3453886 :       *trim_tail = last_orig - last_live;
     443              : 
     444              :       /* But don't trim away out of bounds accesses, as this defeats
     445              :          proper warnings.
     446              : 
     447              :          We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
     448              :          where TYPE_SIZE_UNIT is not a constant.  */
     449      3453886 :       if (*trim_tail
     450         9849 :           && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
     451         9849 :           && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
     452      3463734 :           && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
     453              :                                last_orig) <= 0)
     454          136 :         *trim_tail = 0;
     455              :     }
     456              : 
     457              :   /* Identify how much, if any of the head we can chop off.  */
     458      3453886 :   int first_orig = 0;
     459      3453886 :   int first_live = bitmap_first_set_bit (live);
     460      3453886 :   *trim_head = first_live - first_orig;
     461              : 
     462              :   /* If REF is aligned, try to maintain this alignment if it reduces
     463              :      the number of (power-of-two sized aligned) writes to memory.  */
     464      3453886 :   unsigned int align_bits;
     465      3453886 :   unsigned HOST_WIDE_INT bitpos;
     466      3364139 :   if ((*trim_head || *trim_tail)
     467        95162 :       && last_live - first_live >= 2
     468        94340 :       && ao_ref_alignment (ref, &align_bits, &bitpos)
     469        77246 :       && align_bits >= 32
     470        76898 :       && bitpos == 0
     471      3526983 :       && align_bits % BITS_PER_UNIT == 0)
     472              :     {
     473        73097 :       unsigned int align_units = align_bits / BITS_PER_UNIT;
     474        73097 :       if (align_units > 16)
     475              :         align_units = 16;
     476        75121 :       while ((first_live | (align_units - 1)) > (unsigned int)last_live)
     477         2024 :         align_units >>= 1;
     478              : 
     479        73097 :       if (*trim_head)
     480              :         {
     481        68810 :           unsigned int pos = first_live & (align_units - 1);
     482        76392 :           for (unsigned int i = 1; i <= align_units; i <<= 1)
     483              :             {
     484        76392 :               unsigned int mask = ~(i - 1);
     485        76392 :               unsigned int bytes = align_units - (pos & mask);
     486        76392 :               if (wi::popcount (bytes) <= 1)
     487              :                 {
     488        68810 :                   *trim_head &= mask;
     489        68810 :                   break;
     490              :                 }
     491              :             }
     492              :         }
     493              : 
     494        73097 :       if (*trim_tail)
     495              :         {
     496         6809 :           unsigned int pos = last_live & (align_units - 1);
     497        10102 :           for (unsigned int i = 1; i <= align_units; i <<= 1)
     498              :             {
     499        10102 :               int mask = i - 1;
     500        10102 :               unsigned int bytes = (pos | mask) + 1;
     501        10102 :               if ((last_live | mask) > (last_live + *trim_tail))
     502              :                 break;
     503        10102 :               if (wi::popcount (bytes) <= 1)
     504              :                 {
     505         6809 :                   unsigned int extra = (last_live | mask) - last_live;
     506         6809 :                   *trim_tail -= extra;
     507         6809 :                   break;
     508              :                 }
     509              :             }
     510              :         }
     511              :     }
     512              : 
     513      3453886 :   if ((*trim_head || *trim_tail) && dump_file && (dump_flags & TDF_DETAILS))
     514              :     {
     515           18 :       fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
     516              :                *trim_head, *trim_tail);
     517           18 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
     518           18 :       fprintf (dump_file, "\n");
     519              :     }
     520              : }
     521              : 
     522              : /* STMT initializes an object from COMPLEX_CST where one or more of the bytes
     523              :    written may be dead stores.  REF is a representation of the memory written.
     524              :    LIVE is the bitmap of stores to REF that are still live.
     525              : 
     526              :    Attempt to rewrite STMT so that only the real or the imaginary part of the
     527              :    object is actually stored.  */
     528              : 
     529              : static void
     530         5518 : maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
     531              : {
     532         5518 :   int trim_head, trim_tail;
     533         5518 :   compute_trims (ref, live, &trim_head, &trim_tail, stmt);
     534              : 
     535              :   /* The amount of data trimmed from the head or tail must be at
     536              :      least half the size of the object to ensure we're trimming
     537              :      the entire real or imaginary half.  By writing things this
     538              :      way we avoid more O(n) bitmap operations.  */
     539         5518 :   if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
     540              :     {
     541              :       /* TREE_REALPART is live */
     542            2 :       tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
     543            2 :       tree y = gimple_assign_lhs (stmt);
     544            2 :       y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
     545            2 :       gimple_assign_set_lhs (stmt, y);
     546            2 :       gimple_assign_set_rhs1 (stmt, x);
     547              :     }
     548         5516 :   else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
     549              :     {
     550              :       /* TREE_IMAGPART is live */
     551            3 :       tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
     552            3 :       tree y = gimple_assign_lhs (stmt);
     553            3 :       y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
     554            3 :       gimple_assign_set_lhs (stmt, y);
     555            3 :       gimple_assign_set_rhs1 (stmt, x);
     556              :     }
     557              : 
     558              :   /* Other cases indicate parts of both the real and imag subobjects
     559              :      are live.  We do not try to optimize those cases.  */
     560         5518 : }
     561              : 
     562              : /* STMT initializes an object using a CONSTRUCTOR where one or more of the
     563              :    bytes written are dead stores.  REF is a representation of the memory
     564              :    written.  LIVE is the bitmap of stores to REF that are still live.
     565              : 
     566              :    Attempt to rewrite STMT so that it writes fewer memory locations.
     567              : 
     568              :    The most common case for getting here is a CONSTRUCTOR with no elements
     569              :    being used to zero initialize an object.  We do not try to handle other
     570              :    cases as those would force us to fully cover the object with the
     571              :    CONSTRUCTOR node except for the components that are dead.
     572              :    Also handles integer stores of 0 which can happen with memset/memcpy optimizations.  */
     573              : 
     574              : static void
     575      3313015 : maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt, bool was_integer_cst)
     576              : {
     577      3313015 :   tree ctor = gimple_assign_rhs1 (stmt);
     578              : 
     579              :   /* This is the only case we currently handle.  It actually seems to
     580              :      catch most cases of actual interest.  */
     581      3788389 :   gcc_assert (was_integer_cst ? integer_zerop (ctor) : CONSTRUCTOR_NELTS (ctor) == 0);
     582              : 
     583      3313015 :   int head_trim = 0;
     584      3313015 :   int tail_trim = 0;
     585      3313015 :   compute_trims (ref, live, &head_trim, &tail_trim, stmt);
     586              : 
     587              :   /* Now we want to replace the constructor initializer
     588              :      with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
     589      3313015 :   if (head_trim || tail_trim)
     590              :     {
     591              :       /* We want &lhs for the MEM_REF expression.  */
     592        88530 :       tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
     593              : 
     594        88530 :       STRIP_USELESS_TYPE_CONVERSION (lhs_addr);
     595              : 
     596        88530 :       if (! is_gimple_min_invariant (lhs_addr))
     597        17250 :         return;
     598              : 
     599              :       /* The number of bytes for the new constructor.  */
     600        71280 :       poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
     601        71280 :       poly_int64 count = ref_bytes - head_trim - tail_trim;
     602              : 
     603              :       /* And the new type for the CONSTRUCTOR.  Essentially it's just
     604              :          a char array large enough to cover the non-trimmed parts of
     605              :          the original CONSTRUCTOR.  Note we want explicit bounds here
     606              :          so that we know how many bytes to clear when expanding the
     607              :          CONSTRUCTOR.  */
     608        71280 :       tree type = build_array_type_nelts (char_type_node, count);
     609              : 
     610              :       /* Build a suitable alias type rather than using alias set zero
     611              :          to avoid pessimizing.  */
     612        71280 :       tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
     613              : 
     614              :       /* Build a MEM_REF representing the whole accessed area, starting
     615              :          at the first byte not trimmed.  */
     616        71280 :       tree exp = fold_build2 (MEM_REF, type, lhs_addr,
     617              :                               build_int_cst (alias_type, head_trim));
     618              : 
     619              :       /* Now update STMT with a new RHS and LHS.  */
     620        71280 :       gimple_assign_set_lhs (stmt, exp);
     621        71280 :       gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
     622              :     }
     623              : }
     624              : 
     625              : /* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
     626              :    copied/set by DECREMENT.  */
     627              : static void
     628          766 : decrement_count (gimple *stmt, int decrement)
     629              : {
     630          766 :   tree *countp = gimple_call_arg_ptr (stmt, 2);
     631          766 :   gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
     632         1532 :   *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
     633          766 :                                                     - decrement));
     634          766 : }
     635              : 
     636              : static void
     637          705 : increment_start_addr (gimple *stmt, tree *where, int increment)
     638              : {
     639          705 :   if (tree lhs = gimple_call_lhs (stmt))
     640            6 :     if (where == gimple_call_arg_ptr (stmt, 0))
     641              :       {
     642            6 :         gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
     643            6 :         gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     644            6 :         gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
     645            6 :         gimple_call_set_lhs (stmt, NULL_TREE);
     646            6 :         update_stmt (stmt);
     647              :       }
     648              : 
     649          705 :   if (TREE_CODE (*where) == SSA_NAME)
     650              :     {
     651          198 :       tree tem = make_ssa_name (TREE_TYPE (*where));
     652          198 :       gassign *newop
     653          198 :         = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
     654          198 :                                build_int_cst (sizetype, increment));
     655          198 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     656          198 :       gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
     657          198 :       *where = tem;
     658          198 :       update_stmt (stmt);
     659          198 :       return;
     660              :     }
     661              : 
     662          507 :   *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
     663              :                                               *where,
     664              :                                               build_int_cst (ptr_type_node,
     665              :                                                              increment)));
     666          507 :   STRIP_USELESS_TYPE_CONVERSION (*where);
     667              : }
     668              : 
     669              : /* STMT is builtin call that writes bytes in bitmap ORIG, some bytes are dead
     670              :    (ORIG & ~NEW) and need not be stored.  Try to rewrite STMT to reduce
     671              :    the amount of data it actually writes.
     672              : 
     673              :    Right now we only support trimming from the head or the tail of the
     674              :    memory region.  In theory we could split the mem* call, but it's
     675              :    likely of marginal value.  */
     676              : 
static void
maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int head_trim, tail_trim;
  switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
    {
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
      if (head_trim)
        {
          /* Head trimming of strncpy is only possible if we can
             prove all bytes we would trim are non-zero (or we could
             turn the strncpy into memset if there must be zero
             among the head trimmed bytes).  If we don't know anything
             about those bytes, the presence or absence of '\0' bytes
             in there will affect whether it acts for the non-trimmed
             bytes as memset or memcpy/strncpy.  */
          c_strlen_data lendata = { };
          int orig_head_trim = head_trim;
          tree srcstr = gimple_call_arg (stmt, 1);
          if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
              || !tree_fits_uhwi_p (lendata.minlen))
            /* Unknown source length: no head trimming is safe.  */
            head_trim = 0;
          else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
            {
              /* Only the first MINLEN bytes are known non-zero; limit
                 the head trim to those.  */
              head_trim = tree_to_uhwi (lendata.minlen);
              /* If the original trim was word-aligned, keep the reduced
                 trim word-aligned too (may round it down to zero).  */
              if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
                head_trim &= ~(UNITS_PER_WORD - 1);
            }
          if (orig_head_trim != head_trim
              && dump_file
              && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "  Adjusting strncpy trimming to (head = %d,"
                     " tail = %d)\n", head_trim, tail_trim);
        }
      /* Fall through to the shared memcpy-style rewriting below.  */
      goto do_memcpy;

    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

    do_memcpy:
      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
        decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
        {
          /* For __*_chk need to adjust also the last argument.
             The breaks below abandon head trimming (leaving any tail
             trim already applied) when the object-size argument cannot
             be adjusted safely.  */
          if (gimple_call_num_args (stmt) == 4)
            {
              tree size = gimple_call_arg (stmt, 3);
              if (!tree_fits_uhwi_p (size))
                break;
              /* All-ones means "size unknown" and needs no update.  */
              if (!integer_all_onesp (size))
                {
                  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
                  if (sz < (unsigned) head_trim)
                    break;
                  tree arg = wide_int_to_tree (TREE_TYPE (size),
                                               sz - head_trim);
                  gimple_call_set_arg (stmt, 3, arg);
                }
            }
          /* Advance both pointers and shrink the count.  */
          tree *dst = gimple_call_arg_ptr (stmt, 0);
          increment_start_addr (stmt, dst, head_trim);
          tree *src = gimple_call_arg_ptr (stmt, 1);
          increment_start_addr (stmt, src, head_trim);
          decrement_count (stmt, head_trim);
        }
      break;

    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
        decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
        {
          /* For __*_chk need to adjust also the last argument.  */
          if (gimple_call_num_args (stmt) == 4)
            {
              tree size = gimple_call_arg (stmt, 3);
              if (!tree_fits_uhwi_p (size))
                break;
              if (!integer_all_onesp (size))
                {
                  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
                  if (sz < (unsigned) head_trim)
                    break;
                  tree arg = wide_int_to_tree (TREE_TYPE (size),
                                               sz - head_trim);
                  gimple_call_set_arg (stmt, 3, arg);
                }
            }
          /* Only the destination pointer needs advancing for memset.  */
          tree *dst = gimple_call_arg_ptr (stmt, 0);
          increment_start_addr (stmt, dst, head_trim);
          decrement_count (stmt, head_trim);
        }
      break;

    default:
      break;
    }
}
     791              : 
     792              : /* STMT is a memory write where one or more bytes written are dead stores.
     793              :    REF is a representation of the memory written.  LIVE is the bitmap of
     794              :    stores to REF that are still live.
     795              : 
     796              :    Attempt to rewrite STMT so that it writes fewer memory locations.  Right
     797              :    now we only support trimming at the start or end of the memory region.
     798              :    It's not clear how much there is to be gained by trimming from the middle
     799              :    of the region.  */
     800              : 
     801              : static void
     802     26058379 : maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
     803              : {
     804     26058379 :   if (is_gimple_assign (stmt)
     805     26058379 :       && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
     806              :     {
     807     24761443 :       switch (gimple_assign_rhs_code (stmt))
     808              :         {
     809       475374 :         case CONSTRUCTOR:
     810       475374 :           maybe_trim_constructor_store (ref, live, stmt, false);
     811       475374 :           break;
     812         5518 :         case COMPLEX_CST:
     813         5518 :           maybe_trim_complex_store (ref, live, stmt);
     814         5518 :           break;
     815      8790870 :         case INTEGER_CST:
     816      8790870 :           if (integer_zerop (gimple_assign_rhs1 (stmt))
     817      8790870 :               && type_has_mode_precision_p (TREE_TYPE (gimple_assign_lhs (stmt))))
     818      2837641 :             maybe_trim_constructor_store (ref, live, stmt, true);
     819              :           break;
     820              :         default:
     821              :           break;
     822              :         }
     823              :     }
     824     26058379 : }
     825              : 
     826              : /* Return TRUE if USE_REF reads bytes from LIVE where live is
     827              :    derived from REF, a write reference.
     828              : 
     829              :    While this routine may modify USE_REF, it's passed by value, not
     830              :    location.  So callers do not see those modifications.  */
     831              : 
     832              : static bool
     833      3566320 : live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
     834              : {
     835              :   /* We have already verified that USE_REF and REF hit the same object.
     836              :      Now verify that there's actually an overlap between USE_REF and REF.  */
     837      3566320 :   HOST_WIDE_INT start, size;
     838      3566320 :   if (get_byte_range (use_ref, ref, false, &start, &size))
     839              :     {
     840              :       /* If USE_REF covers all of REF, then it will hit one or more
     841              :          live bytes.   This avoids useless iteration over the bitmap
     842              :          below.  */
     843      3566320 :       if (start == 0 && known_eq (size * 8, ref->size))
     844              :         return true;
     845              : 
     846              :       /* Now check if any of the remaining bits in use_ref are set in LIVE.  */
     847       959621 :       return bitmap_any_bit_in_range_p (live, start, (start + size - 1));
     848              :     }
     849              :   return true;
     850              : }
     851              : 
     852              : /* Callback for dse_classify_store calling for_each_index.  Verify that
     853              :    indices are invariant in the loop with backedge PHI in basic-block DATA.  */
     854              : 
     855              : static bool
     856      2651443 : check_name (tree, tree *idx, void *data)
     857              : {
     858      2651443 :   basic_block phi_bb = (basic_block) data;
     859      2651443 :   if (TREE_CODE (*idx) == SSA_NAME
     860      1775605 :       && !SSA_NAME_IS_DEFAULT_DEF (*idx)
     861      4300429 :       && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
     862              :                          phi_bb))
     863              :     return false;
     864              :   return true;
     865              : }
     866              : 
     867              : /* STMT stores the value 0 into one or more memory locations
     868              :    (via memset, empty constructor, calloc call, etc).
     869              : 
     870              :    See if there is a subsequent store of the value 0 to one
     871              :    or more of the same memory location(s).  If so, the subsequent
     872              :    store is redundant and can be removed.
     873              : 
     874              :    The subsequent stores could be via memset, empty constructors,
     875              :    simple MEM stores, etc.  */
     876              : 
static void
dse_optimize_redundant_stores (gimple *stmt)
{
  /* Counter bounding the number of immediate uses we examine.  */
  int cnt = 0;

  /* TBAA state of STMT, if it is a call it is effectively alias-set zero.  */
  alias_set_type earlier_set = 0;
  alias_set_type earlier_base_set = 0;
  if (is_gimple_assign (stmt))
    {
      ao_ref lhs_ref;
      ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
      earlier_set = ao_ref_alias_set (&lhs_ref);
      earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
    }

  /* We could do something fairly complex and look through PHIs
     like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
     the effort.

     Look at all the immediate uses of the VDEF (which are obviously
     dominated by STMT).   See if one or more stores 0 into the same
     memory locations a STMT, if so remove the immediate use statements.  */
  tree defvar = gimple_vdef (stmt);
  imm_use_iterator ui;
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
    {
      /* Limit stmt walking.  */
      if (++cnt > param_dse_max_alias_queries_per_store)
        break;

      /* If USE_STMT stores 0 into one or more of the same locations
         as STMT and STMT would kill USE_STMT, then we can just remove
         USE_STMT.  */
      tree fndecl;
      /* USE_STMT qualifies if it is either a zero-storing assignment
	 with a VDEF, or a memset/memset_chk builtin storing zero.  */
      if ((is_gimple_assign (use_stmt)
           && gimple_vdef (use_stmt)
           && (gimple_assign_single_p (use_stmt)
               && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
          || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
              && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
              && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
              && integer_zerop (gimple_call_arg (use_stmt, 1))))
        {
          ao_ref write;

          if (!initialize_ao_ref_for_dse (use_stmt, &write))
            break;

          if (valid_ao_ref_for_dse (&write)
              && stmt_kills_ref_p (stmt, &write))
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              if (is_gimple_assign (use_stmt))
                {
                  ao_ref lhs_ref;
                  ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
                  /* Deleting USE_STMT must not lose TBAA information:
		     its alias sets have to match or refine STMT's.  */
                  if ((earlier_set == ao_ref_alias_set (&lhs_ref)
                       || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
                                               earlier_set))
                      && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
                          || alias_set_subset_of
                               (ao_ref_base_alias_set (&lhs_ref),
                                                  earlier_base_set)))
                    delete_dead_or_redundant_assignment (&gsi, "redundant",
                                                         need_eh_cleanup,
                                                         need_ab_cleanup);
                }
              else if (is_gimple_call (use_stmt))
                {
                  /* A call is effectively alias-set zero; it can only
		     be deleted when STMT's sets subsume that.  */
                  if ((earlier_set == 0
                       || alias_set_subset_of (0, earlier_set))
                      && (earlier_base_set == 0
                          || alias_set_subset_of (0, earlier_base_set)))
                  delete_dead_or_redundant_call (&gsi, "redundant");
                }
              else
                gcc_unreachable ();
            }
        }
    }
}
     961              : 
     962              : /* Return whether PHI contains ARG as an argument.  */
     963              : 
     964              : static bool
     965      4148143 : contains_phi_arg (gphi *phi, tree arg)
     966              : {
     967     31119711 :   for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
     968     27221182 :     if (gimple_phi_arg_def (phi, i) == arg)
     969              :       return true;
     970              :   return false;
     971              : }
     972              : 
/* Map from the memory use in a GIMPLE assignment to its analyzed
   data reference.  NULL when data-reference analysis is not used.  */
static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;
     976              : 
     977              : /* A helper of dse_optimize_stmt.
     978              :    Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
     979              :    according to downstream uses and defs.  Sets *BY_CLOBBER_P to true
     980              :    if only clobber statements influenced the classification result.
     981              :    Returns the classification.  */
     982              : 
     983              : static dse_store_status
     984     40945092 : dse_classify_store (ao_ref *ref, gimple *stmt,
     985              :                     bool byte_tracking_enabled, sbitmap live_bytes,
     986              :                     bool *by_clobber_p, tree stop_at_vuse, int &cnt,
     987              :                     bitmap visited)
     988              : {
     989     40945092 :   gimple *temp;
     990     40945092 :   std::unique_ptr<data_reference, void(*)(data_reference_p)>
     991     40945092 :     dra (nullptr, free_data_ref);
     992              : 
     993     40945092 :   if (by_clobber_p)
     994     40348288 :     *by_clobber_p = true;
     995              : 
     996              :   /* Find the first dominated statement that clobbers (part of) the
     997              :      memory stmt stores to with no intermediate statement that may use
     998              :      part of the memory stmt stores.  That is, find a store that may
     999              :      prove stmt to be a dead store.  */
    1000              :   temp = stmt;
    1001    524355798 :   do
    1002              :     {
    1003    282650445 :       gimple *use_stmt;
    1004    282650445 :       imm_use_iterator ui;
    1005    282650445 :       bool fail = false;
    1006    282650445 :       tree defvar;
    1007              : 
    1008    282650445 :       if (gimple_code (temp) == GIMPLE_PHI)
    1009              :         {
    1010     19057126 :           defvar = PHI_RESULT (temp);
    1011     19057126 :           bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
    1012              :         }
    1013              :       else
    1014    527186638 :         defvar = gimple_vdef (temp);
    1015              : 
    1016    282650445 :       auto_vec<gimple *, 10> defs;
    1017    282650445 :       gphi *first_phi_def = NULL;
    1018    282650445 :       gphi *last_phi_def = NULL;
    1019              : 
    1020    282650445 :       auto_vec<tree, 10> worklist;
    1021    282650445 :       worklist.quick_push (defvar);
    1022              : 
    1023    286559341 :       do
    1024              :         {
    1025    286559341 :           defvar = worklist.pop ();
    1026              :           /* If we're instructed to stop walking at region boundary, do so.  */
    1027    286559341 :           if (defvar == stop_at_vuse)
    1028              :             return DSE_STORE_LIVE;
    1029              : 
    1030    286539519 :           use_operand_p usep;
    1031    898742301 :           FOR_EACH_IMM_USE_FAST (usep, ui, defvar)
    1032              :             {
    1033    354597733 :               use_stmt = USE_STMT (usep);
    1034              : 
    1035              :               /* Limit stmt walking.  */
    1036    354597733 :               if (++cnt > param_dse_max_alias_queries_per_store)
    1037              :                 {
    1038              :                   fail = true;
    1039              :                   break;
    1040              :                 }
    1041              : 
    1042              :               /* In simple cases we can look through PHI nodes, but we
    1043              :                  have to be careful with loops and with memory references
    1044              :                  containing operands that are also operands of PHI nodes.
    1045              :                  See gcc.c-torture/execute/20051110-*.c.  */
    1046    354329371 :               if (gphi *phi = dyn_cast <gphi *> (use_stmt))
    1047              :                 {
    1048              :                   /* Look through single-argument PHIs.  */
    1049     39369094 :                   if (gimple_phi_num_args (phi) == 1)
    1050      4640274 :                     worklist.safe_push (gimple_phi_result (phi));
    1051              :                   else
    1052              :                     {
    1053              :                       /* If we visit this PHI by following a backedge then we
    1054              :                          have to make sure ref->ref only refers to SSA names
    1055              :                          that are invariant with respect to the loop
    1056              :                          represented by this PHI node.  We handle irreducible
    1057              :                          regions by relying on backedge marking and identifying
    1058              :                          the head of the (sub-)region.  */
    1059     34728820 :                       edge e = gimple_phi_arg_edge
    1060     34728820 :                                  (phi, PHI_ARG_INDEX_FROM_USE (usep));
    1061     34728820 :                       if (e->flags & EDGE_DFS_BACK)
    1062              :                         {
    1063      3203558 :                           basic_block rgn_head
    1064      3203558 :                             = nearest_common_dominator (CDI_DOMINATORS,
    1065              :                                                         gimple_bb (phi),
    1066              :                                                         e->src);
    1067      3203558 :                           if (!for_each_index (ref->ref
    1068              :                                                ? &ref->ref : &ref->base,
    1069              :                                                check_name, rgn_head))
    1070      1480263 :                             return DSE_STORE_LIVE;
    1071              :                         }
    1072              :                       /* If we already visited this PHI ignore it for further
    1073              :                          processing.  But note we have to check each incoming
    1074              :                          edge above.  */
    1075     66497114 :                       if (!bitmap_bit_p (visited,
    1076     33248557 :                                          SSA_NAME_VERSION (PHI_RESULT (phi))))
    1077              :                         {
    1078     24742484 :                           defs.safe_push (phi);
    1079     24742484 :                           if (!first_phi_def)
    1080     20775406 :                             first_phi_def = phi;;
    1081              :                           last_phi_def = phi;
    1082              :                         }
    1083              :                     }
    1084              :                 }
    1085              :               /* If the statement is a use the store is not dead.  */
    1086    314960277 :               else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
    1087              :                 {
    1088     27196902 :                   if (dse_stmt_to_dr_map
    1089      5963545 :                       && ref->ref
    1090     33054077 :                       && is_gimple_assign (use_stmt))
    1091              :                     {
    1092      1190928 :                       if (!dra)
    1093      1185935 :                         dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
    1094              :                                                     false, false));
    1095      1190928 :                       bool existed_p;
    1096      1190928 :                       data_reference_p &drb
    1097      1190928 :                         = dse_stmt_to_dr_map->get_or_insert (use_stmt,
    1098              :                                                              &existed_p);
    1099      1190928 :                       if (!existed_p)
    1100       723580 :                         drb = create_data_ref (NULL, NULL,
    1101              :                                                gimple_assign_rhs1 (use_stmt),
    1102              :                                                use_stmt, false, false);
    1103      1190928 :                       if (!dr_may_alias_p (dra.get (), drb, NULL))
    1104              :                         {
    1105        18010 :                           if (gimple_vdef (use_stmt))
    1106           18 :                             defs.safe_push (use_stmt);
    1107         9005 :                           continue;
    1108              :                         }
    1109              :                     }
    1110              : 
    1111              :                   /* Handle common cases where we can easily build an ao_ref
    1112              :                      structure for USE_STMT and in doing so we find that the
    1113              :                      references hit non-live bytes and thus can be ignored.
    1114              : 
    1115              :                      TODO: We can also use modref summary to handle calls.  */
    1116     27187897 :                   if (byte_tracking_enabled
    1117     27187897 :                       && is_gimple_assign (use_stmt))
    1118              :                     {
    1119      4944202 :                       ao_ref use_ref;
    1120      4944202 :                       ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
    1121      4944202 :                       if (valid_ao_ref_for_dse (&use_ref)
    1122      4924068 :                           && operand_equal_p (use_ref.base, ref->base,
    1123              :                                               OEP_ADDRESS_OF)
    1124      8510522 :                           && !live_bytes_read (&use_ref, ref, live_bytes))
    1125              :                         {
    1126              :                           /* If this is a store, remember it as we possibly
    1127              :                              need to walk the defs uses.  */
    1128         4104 :                           if (gimple_vdef (use_stmt))
    1129          329 :                             defs.safe_push (use_stmt);
    1130         2052 :                           continue;
    1131              :                         }
    1132              :                     }
    1133              : 
    1134              :                   fail = true;
    1135              :                   break;
    1136              :                 }
    1137              :               /* We have visited ourselves already so ignore STMT for the
    1138              :                  purpose of chaining.  */
    1139    287763375 :               else if (use_stmt == stmt)
    1140              :                 ;
    1141              :               /* If this is a store, remember it as we possibly need to walk the
    1142              :                  defs uses.  */
    1143    901016897 :               else if (gimple_vdef (use_stmt))
    1144    247929759 :                 defs.safe_push (use_stmt);
    1145      1480263 :             }
    1146              :         }
    1147    570118512 :       while (!fail && !worklist.is_empty ());
    1148              : 
    1149    281150360 :       if (fail)
    1150              :         {
    1151              :           /* STMT might be partially dead and we may be able to reduce
    1152              :              how many memory locations it stores into.  */
    1153     27454207 :           if (byte_tracking_enabled && !gimple_clobber_p (stmt))
    1154     24708698 :             return DSE_STORE_MAYBE_PARTIAL_DEAD;
    1155              :           return DSE_STORE_LIVE;
    1156              :         }
    1157              : 
    1158              :       /* If we didn't find any definition this means the store is dead
    1159              :          if it isn't a store to global reachable memory.  In this case
    1160              :          just pretend the stmt makes itself dead.  Otherwise fail.  */
    1161    253696153 :       if (defs.is_empty ())
    1162              :         {
    1163      2485705 :           if (ref_may_alias_global_p (ref, false))
    1164              :             {
    1165        40096 :               basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (defvar));
    1166              :               /* Assume that BUILT_IN_UNREACHABLE and BUILT_IN_UNREACHABLE_TRAP
    1167              :                  do not need to keep (global) memory side-effects live.
    1168              :                  We do not have virtual operands on BUILT_IN_UNREACHABLE
    1169              :                  but we can do poor mans reachability when the last
    1170              :                  definition we want to elide is in the block that ends
    1171              :                  in such a call.  */
    1172        40096 :               if (EDGE_COUNT (def_bb->succs) == 0)
    1173        55196 :                 if (gcall *last = dyn_cast <gcall *> (*gsi_last_bb (def_bb)))
    1174          633 :                   if (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
    1175          633 :                       || gimple_call_builtin_p (last,
    1176              :                                                 BUILT_IN_UNREACHABLE_TRAP))
    1177              :                     {
    1178          431 :                       if (by_clobber_p)
    1179          431 :                         *by_clobber_p = false;
    1180          431 :                       return DSE_STORE_DEAD;
    1181              :                     }
    1182        39665 :               return DSE_STORE_LIVE;
    1183              :             }
    1184              : 
    1185      2445609 :           if (by_clobber_p)
    1186      2442231 :             *by_clobber_p = false;
    1187      2445609 :           return DSE_STORE_DEAD;
    1188              :         }
    1189              : 
    1190              :       /* Process defs and remove those we need not process further.  */
    1191    518650251 :       for (unsigned i = 0; i < defs.length ();)
    1192              :         {
    1193    267504241 :           gimple *def = defs[i];
    1194    267504241 :           gimple *use_stmt;
    1195    267504241 :           use_operand_p use_p;
    1196    267504241 :           tree vdef = (gimple_code (def) == GIMPLE_PHI
    1197    290173953 :                        ? gimple_phi_result (def) : gimple_vdef (def));
    1198    267504241 :           gphi *phi_def;
    1199              :           /* If the path to check starts with a kill we do not need to
    1200              :              process it further.
    1201              :              ???  With byte tracking we need only kill the bytes currently
    1202              :              live.  */
    1203    267504241 :           if (stmt_kills_ref_p (def, ref))
    1204              :             {
    1205      2423898 :               if (by_clobber_p && !gimple_clobber_p (def))
    1206       603566 :                 *by_clobber_p = false;
    1207      2423898 :               defs.unordered_remove (i);
    1208              :             }
    1209              :           /* If the path ends here we do not need to process it further.
    1210              :              This for example happens with calls to noreturn functions.  */
    1211    265080343 :           else if (has_zero_uses (vdef))
    1212              :             {
    1213              :               /* But if the store is to global memory it is definitely
    1214              :                  not dead.  */
    1215      2650395 :               if (ref_may_alias_global_p (ref, false))
    1216        64438 :                 return DSE_STORE_LIVE;
    1217      2585957 :               defs.unordered_remove (i);
    1218              :             }
    1219              :           /* In addition to kills we can remove defs whose only use
    1220              :              is another def in defs.  That can only ever be PHIs of which
    1221              :              we track two for simplicity reasons, the first and last in
    1222              :              {first,last}_phi_def (we fail for multiple PHIs anyways).
    1223              :              We can also ignore defs that feed only into
    1224              :              already visited PHIs.  */
    1225    262429948 :           else if (single_imm_use (vdef, &use_p, &use_stmt)
    1226    262429948 :                    && (use_stmt == first_phi_def
    1227    227860006 :                        || use_stmt == last_phi_def
    1228    227762055 :                        || (gimple_code (use_stmt) == GIMPLE_PHI
    1229     14348062 :                            && bitmap_bit_p (visited,
    1230     14348062 :                                             SSA_NAME_VERSION
    1231              :                                               (PHI_RESULT (use_stmt))))))
    1232              :             {
    1233      5627011 :               defs.unordered_remove (i);
    1234      5627011 :               if (def == first_phi_def)
    1235              :                 first_phi_def = NULL;
    1236      4980677 :               else if (def == last_phi_def)
    1237       239078 :                 last_phi_def = NULL;
    1238              :             }
    1239              :           /* If def is a PHI and one of its arguments is another PHI node still
    1240              :              in consideration we can defer processing it.  */
    1241    256802937 :           else if ((phi_def = dyn_cast <gphi *> (def))
    1242     21813796 :                    && ((last_phi_def
    1243     21813796 :                         && phi_def != last_phi_def
    1244      2195543 :                         && contains_phi_arg (phi_def,
    1245              :                                              gimple_phi_result (last_phi_def)))
    1246     21685213 :                        || (first_phi_def
    1247     21685213 :                            && phi_def != first_phi_def
    1248      1952600 :                            && contains_phi_arg
    1249      1952600 :                                 (phi_def, gimple_phi_result (first_phi_def)))))
    1250              :             {
    1251       249614 :               defs.unordered_remove (i);
    1252       249614 :               if (phi_def == first_phi_def)
    1253              :                 first_phi_def = NULL;
    1254       175372 :               else if (phi_def == last_phi_def)
    1255       239078 :                 last_phi_def = NULL;
    1256              :             }
    1257              :           else
    1258    256553323 :             ++i;
    1259              :         }
    1260              : 
    1261              :       /* If all defs kill the ref we are done.  */
    1262    292091102 :       if (defs.is_empty ())
    1263              :         return DSE_STORE_DEAD;
    1264              :       /* If more than one def survives we have to analyze multiple
    1265              :          paths.  We can handle this by recursing, sharing 'visited'
    1266              :          to avoid redundant work and limiting it by shared 'cnt'.
    1267              :          For now do not bother with byte-tracking in this case.  */
    1268    251446119 :       while (defs.length () > 1)
    1269              :         {
    1270      9686731 :           if (dse_classify_store (ref, defs.last (), false, NULL,
    1271              :                                   by_clobber_p, stop_at_vuse, cnt,
    1272              :                                   visited) != DSE_STORE_DEAD)
    1273              :             break;
    1274      5874942 :           byte_tracking_enabled = false;
    1275      5874942 :           defs.pop ();
    1276              :         }
    1277              :       /* If more than one def survives fail.  */
    1278    245571177 :       if (defs.length () > 1)
    1279              :         {
    1280              :           /* STMT might be partially dead and we may be able to reduce
    1281              :              how many memory locations it stores into.  */
    1282      3811789 :           if (byte_tracking_enabled && !gimple_clobber_p (stmt))
    1283      1565131 :             return DSE_STORE_MAYBE_PARTIAL_DEAD;
    1284              :           return DSE_STORE_LIVE;
    1285              :         }
    1286    241759388 :       temp = defs[0];
    1287              : 
    1288              :       /* Track partial kills.  */
    1289    241759388 :       if (byte_tracking_enabled)
    1290              :         {
    1291    185172066 :           clear_bytes_written_by (live_bytes, temp, ref);
    1292    185172066 :           if (bitmap_empty_p (live_bytes))
    1293              :             {
    1294        54035 :               if (by_clobber_p && !gimple_clobber_p (temp))
    1295        53873 :                 *by_clobber_p = false;
    1296        54035 :               return DSE_STORE_DEAD;
    1297              :             }
    1298              :         }
    1299    282650445 :     }
    1300              :   /* Continue walking until there are no more live bytes.  */
    1301              :   while (1);
    1302     40945092 : }
    1303              : 
    1304              : dse_store_status
    1305     31258361 : dse_classify_store (ao_ref *ref, gimple *stmt,
    1306              :                     bool byte_tracking_enabled, sbitmap live_bytes,
    1307              :                     bool *by_clobber_p, tree stop_at_vuse)
    1308              : {
    1309     31258361 :   int cnt = 0;
    1310     31258361 :   auto_bitmap visited;
    1311     31258361 :   return dse_classify_store (ref, stmt, byte_tracking_enabled, live_bytes,
    1312     31258361 :                              by_clobber_p, stop_at_vuse, cnt, visited);
    1313     31258361 : }
    1314              : 
    1315              : 
    1316              : /* Delete a dead call at GSI, which is mem* call of some kind.  */
    1317              : static void
    1318         6619 : delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
    1319              : {
    1320         6619 :   gimple *stmt = gsi_stmt (*gsi);
    1321         6619 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1322              :     {
    1323           18 :       fprintf (dump_file, "  Deleted %s call: ", type);
    1324           18 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1325           18 :       fprintf (dump_file, "\n");
    1326              :     }
    1327              : 
    1328         6619 :   basic_block bb = gimple_bb (stmt);
    1329         6619 :   tree lhs = gimple_call_lhs (stmt);
    1330         6619 :   if (lhs)
    1331              :     {
    1332         1216 :       tree ptr = gimple_call_arg (stmt, 0);
    1333         1216 :       gimple *new_stmt = gimple_build_assign (lhs, ptr);
    1334         1216 :       unlink_stmt_vdef (stmt);
    1335         1216 :       if (gsi_replace (gsi, new_stmt, true))
    1336          390 :         bitmap_set_bit (need_eh_cleanup, bb->index);
    1337              :     }
    1338              :   else
    1339              :     {
    1340              :       /* Then we need to fix the operand of the consuming stmt.  */
    1341         5403 :       unlink_stmt_vdef (stmt);
    1342              : 
    1343              :       /* Remove the dead store.  */
    1344         5403 :       if (gsi_remove (gsi, true))
    1345            3 :         bitmap_set_bit (need_eh_cleanup, bb->index);
    1346         5403 :       release_defs (stmt);
    1347              :     }
    1348         6619 : }
    1349              : 
    1350              : /* Delete a dead store at GSI, which is a gimple assignment. */
    1351              : 
    1352              : void
    1353      2055806 : delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
    1354              :                                      const char *type,
    1355              :                                      bitmap need_eh_cleanup,
    1356              :                                      bitmap need_ab_cleanup)
    1357              : {
    1358      2055806 :   gimple *stmt = gsi_stmt (*gsi);
    1359      2055806 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1360              :     {
    1361          111 :       fprintf (dump_file, "  Deleted %s store: ", type);
    1362          111 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1363          111 :       fprintf (dump_file, "\n");
    1364              :     }
    1365              : 
    1366              :   /* Then we need to fix the operand of the consuming stmt.  */
    1367      2055806 :   unlink_stmt_vdef (stmt);
    1368              : 
    1369              :   /* Remove the dead store.  */
    1370      2055806 :   basic_block bb = gimple_bb (stmt);
    1371      2055806 :   if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
    1372            4 :     bitmap_set_bit (need_ab_cleanup, bb->index);
    1373      2055806 :   if (gsi_remove (gsi, true) && need_eh_cleanup)
    1374           91 :     bitmap_set_bit (need_eh_cleanup, bb->index);
    1375              : 
    1376              :   /* And release any SSA_NAMEs set in this statement back to the
    1377              :      SSA_NAME manager.  */
    1378      2055806 :   release_defs (stmt);
    1379      2055806 : }
    1380              : 
/* Try to prove, using modref summary, that all memory written to by a call is
   dead and remove it.  Assume that if return value is written to memory
   it is already proved to be dead.

   GSI points at the candidate call; LIVE_BYTES is scratch storage for
   byte tracking.  Returns true iff the call was removed.  */

static bool
dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));

  /* Only direct calls with a known FNDECL can have a modref summary.  */
  if (!stmt)
    return false;

  tree callee = gimple_call_fndecl (stmt);

  if (!callee)
    return false;

  /* Pure/const functions are optimized by normal DCE
     or handled as store above.  */
  int flags = gimple_call_flags (stmt);
  if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
      && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
    return false;

  cgraph_node *node = cgraph_node::get (callee);
  if (!node)
    return false;

  /* Keep the call if deleting it could lose an EH side-effect, or if
     it is a control-altering noreturn call whose removal would change
     the CFG.  */
  if ((stmt_could_throw_p (cfun, stmt)
       && !cfun->can_delete_dead_exceptions)
      || ((gimple_call_flags (stmt) & ECF_NORETURN)
	  && gimple_call_ctrl_altering_p (stmt)))
    return false;

  /* If return value is used the call is not dead.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      imm_use_iterator ui;
      gimple *use_stmt;
      /* Debug uses do not keep the call live.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
	if (!is_gimple_debug (use_stmt))
	  return false;
    }

  /* Verify that there are no side-effects except for return value
     and memory writes tracked by modref.  */
  modref_summary *summary = get_modref_function_summary (node);
  if (!summary || !summary->try_dse)
    return false;

  bool by_clobber_p = false;

  /* Walk all memory writes and verify that they are dead.  */
  for (auto base_node : summary->stores->bases)
    for (auto ref_node : base_node->refs)
      for (auto access_node : ref_node->accesses)
	{
	  tree arg = access_node.get_call_arg (stmt);

	  /* We can only reason about stores through pointer
	     arguments of the call.  */
	  if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* A store through a null pointer in an address space where
	     address zero is invalid cannot happen; ignore it.  */
	  if (integer_zerop (arg)
	      && !targetm.addr_space.zero_address_valid
		    (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
	    continue;

	  ao_ref ref;

	  if (!access_node.get_ao_ref (stmt, &ref))
	    return false;
	  /* Refine the ao_ref with the alias sets recorded in the
	     modref summary nodes.  */
	  ref.ref_alias_set = ref_node->ref;
	  ref.base_alias_set = base_node->base;

	  bool byte_tracking_enabled
	      = setup_live_bytes_from_ref (&ref, live_bytes);
	  enum dse_store_status store_status;

	  /* Every tracked write must be proved fully dead; a single
	     live (or only partially dead) one keeps the call.  */
	  store_status = dse_classify_store (&ref, stmt,
					     byte_tracking_enabled,
					     live_bytes, &by_clobber_p);
	  if (store_status != DSE_STORE_DEAD)
	    return false;
	}
  delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
				       need_ab_cleanup);
  return true;
}
    1470              : 
    1471              : /* Attempt to eliminate dead stores in the statement referenced by BSI.
    1472              : 
    1473              :    A dead store is a store into a memory location which will later be
    1474              :    overwritten by another store without any intervening loads.  In this
    1475              :    case the earlier store can be deleted.
    1476              : 
    1477              :    In our SSA + virtual operand world we use immediate uses of virtual
    1478              :    operands to detect dead stores.  If a store's virtual definition
    1479              :    is used precisely once by a later store to the same location which
    1480              :    post dominates the first store, then the first store is dead.  */
    1481              : 
    1482              : static void
    1483     55053100 : dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
    1484              : {
    1485     55053100 :   gimple *stmt = gsi_stmt (*gsi);
    1486              : 
    1487              :   /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
    1488     55053100 :   if (gimple_has_volatile_ops (stmt)
    1489     55053100 :       && (!gimple_clobber_p (stmt)
    1490      6388933 :           || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    1491     52923490 :     return;
    1492              : 
    1493     48451603 :   ao_ref ref;
    1494              :   /* If this is not a store we can still remove dead call using
    1495              :      modref summary.  Note we specifically allow ref to be initialized
    1496              :      to a conservative may-def since we are looking for followup stores
    1497              :      to kill all of it.  */
    1498     48451603 :   if (!initialize_ao_ref_for_dse (stmt, &ref, true))
    1499              :     {
    1500     17285307 :       dse_optimize_call (gsi, live_bytes);
    1501     17285307 :       return;
    1502              :     }
    1503              : 
    1504              :   /* We know we have virtual definitions.  We can handle assignments and
    1505              :      some builtin calls.  */
    1506     31166296 :   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
    1507     31166296 :       && !gimple_call_ctrl_altering_p (stmt))
    1508              :     {
    1509       465855 :       tree fndecl = gimple_call_fndecl (stmt);
    1510       465855 :       switch (DECL_FUNCTION_CODE (fndecl))
    1511              :         {
    1512       464164 :         case BUILT_IN_MEMCPY:
    1513       464164 :         case BUILT_IN_MEMMOVE:
    1514       464164 :         case BUILT_IN_STRNCPY:
    1515       464164 :         case BUILT_IN_MEMSET:
    1516       464164 :         case BUILT_IN_MEMCPY_CHK:
    1517       464164 :         case BUILT_IN_MEMMOVE_CHK:
    1518       464164 :         case BUILT_IN_STRNCPY_CHK:
    1519       464164 :         case BUILT_IN_MEMSET_CHK:
    1520       464164 :           {
    1521              :             /* Occasionally calls with an explicit length of zero
    1522              :                show up in the IL.  It's pointless to do analysis
    1523              :                on them, they're trivially dead.  */
    1524       464164 :             tree size = gimple_call_arg (stmt, 2);
    1525       464164 :             if (integer_zerop (size))
    1526              :               {
    1527           50 :                 delete_dead_or_redundant_call (gsi, "dead");
    1528           50 :                 return;
    1529              :               }
    1530              : 
    1531              :             /* If this is a memset call that initializes an object
    1532              :                to zero, it may be redundant with an earlier memset
    1533              :                or empty CONSTRUCTOR of a larger object.  */
    1534       464114 :             if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
    1535       367984 :                  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
    1536       464417 :                 && integer_zerop (gimple_call_arg (stmt, 1)))
    1537        56632 :               dse_optimize_redundant_stores (stmt);
    1538              : 
    1539       464114 :             enum dse_store_status store_status;
    1540       464114 :             bool byte_tracking_enabled
    1541       464114 :               = setup_live_bytes_from_ref (&ref, live_bytes);
    1542       464114 :             store_status = dse_classify_store (&ref, stmt,
    1543              :                                                byte_tracking_enabled,
    1544              :                                                live_bytes);
    1545       464114 :             if (store_status == DSE_STORE_LIVE)
    1546              :               return;
    1547              : 
    1548       154249 :             if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
    1549              :               {
    1550       147722 :                 maybe_trim_memstar_call (&ref, live_bytes, stmt);
    1551       147722 :                 return;
    1552              :               }
    1553              : 
    1554         6527 :             if (store_status == DSE_STORE_DEAD)
    1555         6527 :               delete_dead_or_redundant_call (gsi, "dead");
    1556         6527 :             return;
    1557              :           }
    1558              : 
    1559         1691 :         case BUILT_IN_CALLOC:
    1560              :           /* We already know the arguments are integer constants.  */
    1561         1691 :           dse_optimize_redundant_stores (stmt);
    1562         1691 :           return;
    1563              : 
    1564              :         default:
    1565              :           return;
    1566              :         }
    1567              :     }
    1568     30700441 :   else if (is_gimple_call (stmt)
    1569     30700441 :            && gimple_call_internal_p (stmt))
    1570              :     {
    1571       113923 :       switch (gimple_call_internal_fn (stmt))
    1572              :         {
    1573         1297 :         case IFN_LEN_STORE:
    1574         1297 :         case IFN_MASK_STORE:
    1575         1297 :         case IFN_MASK_LEN_STORE:
    1576         1297 :           {
    1577         1297 :             enum dse_store_status store_status;
    1578         1297 :             store_status = dse_classify_store (&ref, stmt, false, live_bytes);
    1579         1297 :             if (store_status == DSE_STORE_DEAD)
    1580            0 :               delete_dead_or_redundant_call (gsi, "dead");
    1581         1297 :             return;
    1582              :           }
    1583              :         default:;
    1584              :         }
    1585              :     }
    1586              : 
    1587     30699144 :   bool by_clobber_p = false;
    1588              : 
    1589              :   /* Check if this statement stores zero to a memory location,
    1590              :      and if there is a subsequent store of zero to the same
    1591              :      memory location.  If so, remove the subsequent store.  */
    1592     30699144 :   if (gimple_assign_single_p (stmt)
    1593     30699144 :       && initializer_zerop (gimple_assign_rhs1 (stmt)))
    1594      4182337 :     dse_optimize_redundant_stores (stmt);
    1595              : 
    1596              :   /* Self-assignments are zombies.  */
    1597     30699144 :   if (is_gimple_assign (stmt)
    1598     60002207 :       && operand_equal_p (gimple_assign_rhs1 (stmt),
    1599     29303063 :                           gimple_assign_lhs (stmt), 0))
    1600              :     ;
    1601              :   else
    1602              :     {
    1603     30697711 :       bool byte_tracking_enabled
    1604     30697711 :           = setup_live_bytes_from_ref (&ref, live_bytes);
    1605     30697711 :       enum dse_store_status store_status;
    1606     30697711 :       store_status = dse_classify_store (&ref, stmt,
    1607              :                                          byte_tracking_enabled,
    1608              :                                          live_bytes, &by_clobber_p);
    1609     30697711 :       if (store_status == DSE_STORE_LIVE)
    1610              :         return;
    1611              : 
    1612     28247336 :       if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
    1613              :         {
    1614     26058379 :           maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
    1615     26058379 :           return;
    1616              :         }
    1617              :     }
    1618              : 
    1619              :   /* Now we know that use_stmt kills the LHS of stmt.  */
    1620              : 
    1621              :   /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
    1622              :      another clobber stmt.  */
    1623      2190390 :   if (gimple_clobber_p (stmt)
    1624      2190390 :       && !by_clobber_p)
    1625              :     return;
    1626              : 
    1627      2137317 :   if (is_gimple_call (stmt)
    1628      2137317 :       && (gimple_has_side_effects (stmt)
    1629        42240 :           || (stmt_could_throw_p (fun, stmt)
    1630            5 :               && !fun->can_delete_dead_exceptions)))
    1631              :     {
    1632              :       /* See if we can remove complete call.  */
    1633        38197 :       if (dse_optimize_call (gsi, live_bytes))
    1634              :         return;
    1635              :       /* Make sure we do not remove a return slot we cannot reconstruct
    1636              :          later.  */
    1637        38163 :       if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
    1638        38163 :           && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
    1639        14669 :               || !poly_int_tree_p
    1640        14669 :                     (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
    1641              :         return;
    1642        30490 :       if (dump_file && (dump_flags & TDF_DETAILS))
    1643              :         {
    1644            1 :           fprintf (dump_file, "  Deleted dead store in call LHS: ");
    1645            1 :           print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1646            1 :           fprintf (dump_file, "\n");
    1647              :         }
    1648        30490 :       gimple_call_set_lhs (stmt, NULL_TREE);
    1649        30490 :       update_stmt (stmt);
    1650              :     }
    1651      2099120 :   else if (!stmt_could_throw_p (fun, stmt)
    1652      2099120 :            || fun->can_delete_dead_exceptions)
    1653      2049292 :     delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
    1654              :                                          need_ab_cleanup);
    1655              : }
    1656              : 
    1657              : namespace {
    1658              : 
/* Static descriptor for the GIMPLE dead store elimination pass.  A single
   descriptor is shared by all instances of the pass (see clone () below);
   the pass requires a CFG and SSA form and claims no new properties.  */
const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
    1671              : 
    1672              : class pass_dse : public gimple_opt_pass
    1673              : {
    1674              : public:
    1675      1443875 :   pass_dse (gcc::context *ctxt)
    1676      2887750 :     : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
    1677              :   {}
    1678              : 
    1679              :   /* opt_pass methods: */
    1680      1155100 :   opt_pass * clone () final override { return new pass_dse (m_ctxt); }
    1681       288775 :   void set_pass_param (unsigned n, bool param) final override
    1682              :     {
    1683       288775 :       gcc_assert (n == 0);
    1684       288775 :       use_dr_analysis_p = param;
    1685       288775 :     }
    1686      5597745 :   bool gate (function *) final override { return flag_tree_dse != 0; }
    1687              :   unsigned int execute (function *) final override;
    1688              : 
    1689              : private:
    1690              :   bool use_dr_analysis_p;
    1691              : }; // class pass_dse
    1692              : 
/* Main entry point of the dead store elimination pass.  Walks all basic
   blocks of FUN in reverse program order, eliminating dead or redundant
   stores, trivially dead SSA definitions and trivially dead PHIs, and
   returns the TODO flags for any required CFG cleanup.  */

unsigned int
pass_dse::execute (function *fun)
{
  unsigned todo = 0;
  bool released_def = false;

  /* Blocks needing EH / abnormal edge purging, filled in as stores are
     removed; processed after the main walk below.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
  auto_sbitmap live_bytes (param_dse_max_object_size);
  /* Only build the stmt -> data reference map when the pass instance was
     parameterized to use DR analysis and expensive opts are enabled.  */
  if (flag_expensive_optimizations && use_dr_analysis_p)
    dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;

  renumber_gimple_stmt_uids (fun);

  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a reverse program order walk.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
  auto_bitmap exit_bbs;
  bitmap_set_bit (exit_bbs, EXIT_BLOCK);
  edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
  int n = rev_post_order_and_mark_dfs_back_seme (fun, entry,
                                                 exit_bbs, false, rpo, NULL);
  /* Iterate blocks in reverse RPO order, and within each block iterate
     statements backwards.  */
  for (int i = n; i != 0; --i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
      gimple_stmt_iterator gsi;

      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);

          /* Statements with a virtual definition are potential stores;
             hand them to the DSE machinery.  */
          if (gimple_vdef (stmt))
            dse_optimize_stmt (fun, &gsi, live_bytes);
          else if (def_operand_p
                     def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
            {
              /* When we remove dead stores make sure to also delete trivially
                 dead SSA defs.  */
              if (has_zero_uses (DEF_FROM_PTR (def_p))
                  && !gimple_has_side_effects (stmt)
                  && !is_ctrl_altering_stmt (stmt)
                  && (!stmt_could_throw_p (fun, stmt)
                      || fun->can_delete_dead_exceptions))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "  Deleted trivially dead stmt: ");
                      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                      fprintf (dump_file, "\n");
                    }
                  /* Removing a throwing stmt may make an EH edge dead.  */
                  if (gsi_remove (&gsi, true) && need_eh_cleanup)
                    bitmap_set_bit (need_eh_cleanup, bb->index);
                  release_defs (stmt);
                  released_def = true;
                }
            }
          /* The iterator may have been invalidated by a removal above;
             restart from the (new) last stmt in that case, otherwise step
             backwards.  */
          if (gsi_end_p (gsi))
            gsi = gsi_last_bb (bb);
          else
            gsi_prev (&gsi);
        }
      /* Sweep PHIs whose results are no longer used, which may have become
         dead because of removals above.  */
      bool removed_phi = false;
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
        {
          gphi *phi = si.phi ();
          if (has_zero_uses (gimple_phi_result (phi)))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "  Deleted trivially dead PHI: ");
                  print_gimple_stmt (dump_file, phi, 0, dump_flags);
                  fprintf (dump_file, "\n");
                }
              remove_phi_node (&si, true);
              removed_phi = true;
              released_def = true;
            }
          else
            gsi_next (&si);
        }
      if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
        todo |= TODO_cleanup_cfg;
    }
  free (rpo);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      todo |= TODO_cleanup_cfg;
    }
  if (!bitmap_empty_p (need_ab_cleanup))
    {
      gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
      todo |= TODO_cleanup_cfg;
    }

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  /* Deleting defs may invalidate cached niter estimates.  */
  if (released_def)
    free_numbers_of_iterations_estimates (fun);

  /* Release the data references accumulated during the walk and the map
     itself; the map is only live for the duration of one execution.  */
  if (flag_expensive_optimizations && use_dr_analysis_p)
    {
      for (auto i = dse_stmt_to_dr_map->begin ();
           i != dse_stmt_to_dr_map->end (); ++i)
        free_data_ref ((*i).second);
      delete dse_stmt_to_dr_map;
      dse_stmt_to_dr_map = NULL;
    }

  return todo;
}
    1809              : 
    1810              : } // anon namespace
    1811              : 
    1812              : gimple_opt_pass *
    1813       288775 : make_pass_dse (gcc::context *ctxt)
    1814              : {
    1815       288775 :   return new pass_dse (ctxt);
    1816              : }
        

Generated by: LCOV version 2.4-beta

The LCOV profile was generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite was run with the built compiler.