Branch data Line data Source code
1 : : /* Dead and redundant store elimination
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify
7 : : it under the terms of the GNU General Public License as published by
8 : : the Free Software Foundation; either version 3, or (at your option)
9 : : any later version.
10 : :
11 : : GCC is distributed in the hope that it will be useful,
12 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
13 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 : : GNU General Public License for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : #include "config.h"
21 : : #include "system.h"
22 : : #include "coretypes.h"
23 : : #include "backend.h"
24 : : #include "rtl.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "ssa.h"
29 : : #include "gimple-pretty-print.h"
30 : : #include "fold-const.h"
31 : : #include "gimple-iterator.h"
32 : : #include "tree-cfg.h"
33 : : #include "tree-dfa.h"
34 : : #include "tree-cfgcleanup.h"
35 : : #include "alias.h"
36 : : #include "tree-ssa-loop.h"
37 : : #include "tree-ssa-dse.h"
38 : : #include "builtins.h"
39 : : #include "gimple-fold.h"
40 : : #include "gimplify.h"
41 : : #include "tree-eh.h"
42 : : #include "cfganal.h"
43 : : #include "cgraph.h"
44 : : #include "ipa-modref-tree.h"
45 : : #include "ipa-modref.h"
46 : : #include "target.h"
47 : : #include "tree-ssa-loop-niter.h"
48 : : #include "cfgloop.h"
49 : : #include "tree-data-ref.h"
50 : : #include "internal-fn.h"
51 : : #include "tree-ssa.h"
52 : :
53 : : /* This file implements dead store elimination.
54 : :
55 : : A dead store is a store into a memory location which will later be
56 : : overwritten by another store without any intervening loads. In this
57 : : case the earlier store can be deleted or trimmed if the store
58 : : was partially dead.
59 : :
60 : : A redundant store is a store into a memory location which stores
61 : : the exact same value as a prior store to the same memory location.
62 : : While this can often be handled by dead store elimination, removing
63 : : the redundant store is often better than removing or trimming the
64 : : dead store.
65 : :
66 : : In our SSA + virtual operand world we use immediate uses of virtual
67 : : operands to detect these cases. If a store's virtual definition
68 : : is used precisely once by a later store to the same location which
69 : : post dominates the first store, then the first store is dead. If
70 : : the data stored is the same, then the second store is redundant.
71 : :
72 : : The single use of the store's virtual definition ensures that
73 : : there are no intervening aliased loads and the requirement that
74 : : the second store post dominate the first ensures that if the earlier
75 : : store executes, then the later store will execute before the function
76 : : exits.
77 : :
78 : : It may help to think of this as first moving the earlier store to
79 : : the point immediately before the later store. Again, the single
80 : : use of the virtual definition and the post-dominance relationship
81 : : ensure that such movement would be safe. Clearly if there are
82 : : back to back stores, then the second makes the first dead. If
83 : : the second store stores the same value, then the second store is
84 : : redundant.
85 : :
86 : : Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
87 : : may also help in understanding this code since it discusses the
88 : : relationship between dead store and redundant load elimination. In
89 : : fact, they are the same transformation applied to different views of
90 : : the CFG. */
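: :
: : /* For example (a GIMPLE sketch; names are illustrative):
: :
: : # .MEM_2 = VDEF <.MEM_1>
: : a = 1;
: : # .MEM_3 = VDEF <.MEM_2>
: : a = 2;
: :
: : If the store of 2 is the single use of .MEM_2 and post dominates
: : the store of 1, the first store is dead. Had both stores written
: : the value 1, the second store would instead be redundant. */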
91 : :
92 : : static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
93 : :
94 : : /* Bitmap of blocks that have had EH statements cleaned. We should
95 : : remove their dead edges eventually. */
96 : : static bitmap need_eh_cleanup;
97 : : static bitmap need_ab_cleanup;
98 : :
99 : : /* STMT is a statement that may write into memory. Analyze it and
100 : : initialize WRITE to describe how STMT affects memory. When
101 : : MAY_DEF_OK is true then the function initializes WRITE to what
102 : : the stmt may define.
103 : :
104 : : Return TRUE if the statement was analyzed, FALSE otherwise.
105 : :
106 : : It is always safe to return FALSE. But typically better optimziation
107 : : can be achieved by analyzing more statements. */
108 : :
109 : : static bool
110 : 242757690 : initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
111 : : {
112 : : /* It's advantageous to handle certain mem* functions. */
113 : 242757690 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
114 : : {
115 : 5339171 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
116 : : {
117 : 1345800 : case BUILT_IN_MEMCPY:
118 : 1345800 : case BUILT_IN_MEMMOVE:
119 : 1345800 : case BUILT_IN_MEMSET:
120 : 1345800 : case BUILT_IN_MEMCPY_CHK:
121 : 1345800 : case BUILT_IN_MEMMOVE_CHK:
122 : 1345800 : case BUILT_IN_MEMSET_CHK:
123 : 1345800 : case BUILT_IN_STRNCPY:
124 : 1345800 : case BUILT_IN_STRNCPY_CHK:
125 : 1345800 : {
126 : 1345800 : tree size = gimple_call_arg (stmt, 2);
127 : 1345800 : tree ptr = gimple_call_arg (stmt, 0);
128 : 1345800 : ao_ref_init_from_ptr_and_size (write, ptr, size);
129 : 1345800 : return true;
130 : : }
131 : :
132 : : /* A calloc call can never be dead, but it can make
133 : : subsequent stores redundant if they store 0 into
134 : : the same memory locations. */
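: : /* For instance (a sketch):
: : p = calloc (n, 4);
: : p[0] = 0; <-- redundant; calloc already zeroed it */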
135 : 3208 : case BUILT_IN_CALLOC:
136 : 3208 : {
137 : 3208 : tree nelem = gimple_call_arg (stmt, 0);
138 : 3208 : tree selem = gimple_call_arg (stmt, 1);
139 : 3208 : tree lhs;
140 : 3208 : if (TREE_CODE (nelem) == INTEGER_CST
141 : 2650 : && TREE_CODE (selem) == INTEGER_CST
142 : 5642 : && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
143 : : {
144 : 2431 : tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
145 : : nelem, selem);
146 : 2431 : ao_ref_init_from_ptr_and_size (write, lhs, size);
147 : 2431 : return true;
148 : : }
149 : : }
150 : :
151 : : default:
152 : : break;
153 : : }
154 : : }
155 : 237418519 : else if (is_gimple_call (stmt)
156 : 237418519 : && gimple_call_internal_p (stmt))
157 : : {
158 : 202333 : switch (gimple_call_internal_fn (stmt))
159 : : {
160 : 1258 : case IFN_LEN_STORE:
161 : 1258 : case IFN_MASK_STORE:
162 : 1258 : case IFN_MASK_LEN_STORE:
163 : 1258 : {
164 : 1258 : internal_fn ifn = gimple_call_internal_fn (stmt);
165 : 1258 : int stored_value_index = internal_fn_stored_value_index (ifn);
166 : 1258 : int len_index = internal_fn_len_index (ifn);
167 : 1258 : if (ifn == IFN_LEN_STORE)
168 : : {
169 : 0 : tree len = gimple_call_arg (stmt, len_index);
170 : 0 : tree bias = gimple_call_arg (stmt, len_index + 1);
171 : 0 : if (tree_fits_uhwi_p (len))
172 : : {
173 : 0 : ao_ref_init_from_ptr_and_size (write,
174 : : gimple_call_arg (stmt, 0),
175 : : int_const_binop (MINUS_EXPR,
176 : : len, bias));
177 : 0 : return true;
178 : : }
179 : : }
180 : : /* We cannot initialize a must-def ao_ref (in all cases) but we
181 : : can provide a may-def variant. */
182 : 1258 : if (may_def_ok)
183 : : {
184 : 1220 : ao_ref_init_from_ptr_and_range (
185 : : write, gimple_call_arg (stmt, 0), true, 0, -1,
186 : 1220 : tree_to_poly_int64 (TYPE_SIZE (
187 : : TREE_TYPE (gimple_call_arg (stmt, stored_value_index)))));
188 : 1220 : return true;
189 : : }
190 : : break;
191 : : }
192 : : default:;
193 : : }
194 : : }
195 : 241408239 : if (tree lhs = gimple_get_lhs (stmt))
196 : : {
197 : 227197734 : if (TREE_CODE (lhs) != SSA_NAME
198 : 227197734 : && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
199 : : {
200 : 209711735 : ao_ref_init (write, lhs);
201 : 209711735 : return true;
202 : : }
203 : : }
204 : : return false;
205 : : }
206 : :
207 : : /* Given REF from the alias oracle, return TRUE if it is a valid
208 : : kill memory reference for dead store elimination, false otherwise.
209 : :
210 : : In particular, the reference must have a known base, known maximum
211 : : size, start at a byte offset and have a size that is one or more
212 : : bytes. */
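: :
: : /* For example, a scalar store like x = 1; qualifies, whereas a write
: : of a runtime-variable length (e.g. a memset with a non-constant
: : size) does not, since its maximum size is unknown. */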
213 : :
214 : : static bool
215 : 176170525 : valid_ao_ref_kill_for_dse (ao_ref *ref)
216 : : {
217 : 176170525 : return (ao_ref_base (ref)
218 : 176170525 : && known_size_p (ref->max_size)
219 : 175855298 : && maybe_ne (ref->size, 0)
220 : 175838051 : && known_eq (ref->max_size, ref->size)
221 : 351481240 : && known_ge (ref->offset, 0));
222 : : }
223 : :
224 : : /* Given REF from the alias oracle, return TRUE if it is a valid
225 : : load or store memory reference for dead store elimination, false otherwise.
226 : :
227 : : Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
228 : : is not same as size since we can handle conservatively the larger range. */
229 : :
230 : : static bool
231 : 40185101 : valid_ao_ref_for_dse (ao_ref *ref)
232 : : {
233 : 40185101 : return (ao_ref_base (ref)
234 : 40185101 : && known_size_p (ref->max_size)
235 : 79933988 : && known_ge (ref->offset, 0));
236 : : }
237 : :
238 : : /* Initialize OFFSET and SIZE to a range known to contain REF
239 : : where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
240 : : Return false if this is impossible. */
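: :
: : /* For instance, a bit range with offset 3 and max_size 10 (bits
: : [3, 13)) yields *OFFSET = 0 and *SIZE = 16, i.e. the containing
: : range is the first two bytes. */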
241 : :
242 : : static bool
243 : 110006111 : get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
244 : : HOST_WIDE_INT *size)
245 : : {
246 : 0 : if (!known_size_p (ref->max_size))
247 : : return false;
248 : 110006111 : *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
249 : 110006111 : poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
250 : : BITS_PER_UNIT);
251 : 110006111 : return (end - *offset).is_constant (size);
252 : : }
253 : :
254 : : /* Initialize OFFSET and SIZE to a range known to be contained in REF
255 : : where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
256 : : Return false if this is impossible. */
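: :
: : /* For instance, a bit range with offset 3 and size 21 (bits [3, 24))
: : yields *OFFSET = 8 and *SIZE = 16, i.e. the contained range covers
: : the second and third bytes. */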
257 : :
258 : : static bool
259 : 102747859 : get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
260 : : HOST_WIDE_INT *size)
261 : : {
262 : 102747859 : if (!known_size_p (ref->size)
263 : 102747859 : || !known_eq (ref->size, ref->max_size))
264 : : return false;
265 : 102747859 : *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
266 : 102747859 : poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
267 : : BITS_PER_UNIT);
268 : : /* For bit accesses we can get -1 here, but a zero-sized kill is
269 : : not useful either. */
270 : 102747859 : if (!known_gt (end, *offset))
271 : : return false;
272 : 102456137 : return (end - *offset).is_constant (size);
273 : : }
274 : :
275 : : /* Compute byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
276 : : inside REF. If KILL is true, then COPY represents a kill and the byte range
277 : : needs to be fully contained in the bit range given by COPY. If KILL is false
278 : : then the byte range returned must contain the range of COPY. */
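: :
: : /* For instance, if REF covers bytes [0, 16) of a common base and COPY
: : is a kill covering bytes [4, 12), the result is *RET_OFFSET = 4 and
: : *RET_SIZE = 8, both in bytes relative to REF. */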
279 : :
280 : : static bool
281 : 106522846 : get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
282 : : HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
283 : : {
284 : 106522846 : HOST_WIDE_INT copy_size, ref_size;
285 : 106522846 : poly_int64 copy_offset, ref_offset;
286 : 106522846 : HOST_WIDE_INT diff;
287 : :
288 : : /* First translate from bits to bytes, rounding to bigger or smaller ranges
289 : : as needed. Kills need to always be rounded to smaller ranges while
290 : : uses and stores to larger ranges. */
291 : 106522846 : if (kill)
292 : : {
293 : 102747859 : if (!get_byte_aligned_range_contained_in_ref (copy, ©_offset,
294 : : ©_size))
295 : : return false;
296 : : }
297 : : else
298 : : {
299 : 3774987 : if (!get_byte_aligned_range_containing_ref (copy, ©_offset,
300 : : ©_size))
301 : : return false;
302 : : }
303 : :
304 : 203660497 : if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
305 : : || !ordered_p (copy_offset, ref_offset))
306 : : return false;
307 : :
308 : : /* Switch sizes from bits to bytes so we do not need to care about
309 : : overflows. Offset calculation needs to stay in bits until we compute
310 : : the difference and can switch to HOST_WIDE_INT. */
311 : 106231124 : copy_size /= BITS_PER_UNIT;
312 : 106231124 : ref_size /= BITS_PER_UNIT;
313 : :
314 : : /* If COPY starts before REF, then reset the beginning of
315 : : COPY to match REF and decrease the size of COPY by the
316 : : number of bytes removed from COPY. */
317 : 106231124 : if (maybe_lt (copy_offset, ref_offset))
318 : : {
319 : 9347239 : if (!(ref_offset - copy_offset).is_constant (&diff)
320 : 9347239 : || copy_size < diff / BITS_PER_UNIT)
321 : : return false;
322 : 2737726 : copy_size -= diff / BITS_PER_UNIT;
323 : 2737726 : copy_offset = ref_offset;
324 : : }
325 : :
326 : 99621611 : if (!(copy_offset - ref_offset).is_constant (&diff)
327 : 99621611 : || ref_size <= diff / BITS_PER_UNIT)
328 : : return false;
329 : :
330 : : /* If COPY extends beyond REF, chop off its size appropriately. */
331 : 9093473 : HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;
332 : :
333 : 9093473 : if (copy_size > limit)
334 : 1182599 : copy_size = limit;
335 : 9093473 : *ret_size = copy_size;
336 : 9093473 : if (!(copy_offset - ref_offset).is_constant (ret_offset))
337 : : return false;
338 : 9093473 : *ret_offset /= BITS_PER_UNIT;
339 : 9093473 : return true;
340 : : }
341 : :
342 : : /* Update LIVE_BYTES, which tracks REF, for the write WRITE:
343 : : verify we have the same base memory address and that the write
344 : : has a known size and overlaps with REF. */
345 : : static void
346 : 176170525 : clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
347 : : {
348 : 176170525 : HOST_WIDE_INT start, size;
349 : :
350 : 176170525 : if (valid_ao_ref_kill_for_dse (write)
351 : 175310460 : && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
352 : 278918384 : && get_byte_range (write, ref, true, &start, &size))
353 : 5318486 : bitmap_clear_range (live_bytes, start, size);
354 : 176170525 : }
355 : :
356 : : /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
357 : : address written by STMT must match the one found in REF, which must
358 : : have its base address previously initialized.
359 : :
360 : : This routine must be conservative. If we don't know the offset or
361 : : actual size written, assume nothing was written. */
362 : :
363 : : static void
364 : 190443363 : clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
365 : : {
366 : 190443363 : ao_ref write;
367 : :
368 : 190443363 : if (gcall *call = dyn_cast <gcall *> (stmt))
369 : : {
370 : 5498183 : bool interposed;
371 : 5498183 : modref_summary *summary = get_modref_function_summary (call, &interposed);
372 : :
373 : 5498183 : if (summary && !interposed)
374 : 434635 : for (auto kill : summary->kills)
375 : 53840 : if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
376 : 53816 : clear_live_bytes_for_ref (live_bytes, ref, &write);
377 : : }
378 : 190443363 : if (!initialize_ao_ref_for_dse (stmt, &write))
379 : 14326654 : return;
380 : :
381 : 176116709 : clear_live_bytes_for_ref (live_bytes, ref, &write);
382 : : }
383 : :
384 : : /* REF is a memory write. Extract relevant information from it and
385 : : initialize the LIVE_BYTES bitmap. If successful, return TRUE.
386 : : Otherwise return FALSE. */
387 : :
388 : : static bool
389 : 33533440 : setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
390 : : {
391 : 33533440 : HOST_WIDE_INT const_size;
392 : 33533440 : if (valid_ao_ref_for_dse (ref)
393 : 33120437 : && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
394 : 33120437 : - aligned_lower_bound (ref->offset,
395 : 33120437 : BITS_PER_UNIT)).is_constant (&const_size))
396 : 33120437 : && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
397 : 66254338 : && const_size > 1)
398 : : {
399 : 32720685 : bitmap_clear (live_bytes);
400 : 32720685 : bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
401 : 32720685 : return true;
402 : : }
403 : : return false;
404 : : }
405 : :
406 : : /* Compute the number of stored bytes that we can trim from the head and
407 : : tail of REF. LIVE is the bitmap of stores to REF that are still live.
408 : :
409 : : Store the number of bytes trimmed from the head and tail in TRIM_HEAD
410 : : and TRIM_TAIL respectively.
411 : :
412 : : STMT is the statement being trimmed and is used for debugging dump
413 : : output only. */
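: :
: : /* For example, if REF stores 16 bytes but only bytes [4, 12) remain
: : live, *TRIM_HEAD and *TRIM_TAIL are both 4; the alignment logic
: : below may then shrink the trims to keep aligned power-of-two
: : writes. */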
414 : :
415 : : static void
416 : 3537979 : compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
417 : : gimple *stmt)
418 : : {
419 : 3537979 : *trim_head = 0;
420 : 3537979 : *trim_tail = 0;
421 : :
422 : : /* We use bitmaps biased such that ref->offset is contained in bit zero and
423 : : the bitmap extends through ref->max_size, so we know that in the original
424 : : bitmap bits 0 .. ref->max_size were true. But we need to check that this
425 : : covers the bytes of REF exactly. */
426 : 3537979 : const unsigned int align = known_alignment (ref->offset);
427 : 3537979 : if ((align > 0 && align < BITS_PER_UNIT)
428 : 3537979 : || !known_eq (ref->size, ref->max_size))
429 : 13117 : return;
430 : :
431 : : /* Now identify how much, if any of the tail we can chop off. */
432 : 3524862 : HOST_WIDE_INT const_size;
433 : 3524862 : int last_live = bitmap_last_set_bit (live);
434 : 3524862 : if (ref->size.is_constant (&const_size))
435 : : {
436 : 3524862 : int last_orig = (const_size / BITS_PER_UNIT) - 1;
437 : : /* We can leave inconvenient amounts on the tail as
438 : : residual handling in mem* and str* functions is usually
439 : : reasonably efficient. */
440 : 3524862 : *trim_tail = last_orig - last_live;
441 : :
442 : : /* But don't trim away out of bounds accesses, as this defeats
443 : : proper warnings.
444 : :
445 : : We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
446 : : where TYPE_SIZE_UNIT is not a constant. */
447 : 3524862 : if (*trim_tail
448 : 11887 : && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
449 : 11887 : && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
450 : 3536748 : && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
451 : : last_orig) <= 0)
452 : 126 : *trim_tail = 0;
453 : : }
454 : :
455 : : /* Identify how much, if any of the head we can chop off. */
456 : 3524862 : int first_orig = 0;
457 : 3524862 : int first_live = bitmap_first_set_bit (live);
458 : 3524862 : *trim_head = first_live - first_orig;
459 : :
460 : : /* If REF is aligned, try to maintain this alignment if it reduces
461 : : the number of (power-of-two sized aligned) writes to memory. */
462 : 3524862 : unsigned int align_bits;
463 : 3524862 : unsigned HOST_WIDE_INT bitpos;
464 : 3436276 : if ((*trim_head || *trim_tail)
465 : 96284 : && last_live - first_live >= 2
466 : 95424 : && ao_ref_alignment (ref, &align_bits, &bitpos)
467 : 78490 : && align_bits >= 32
468 : 78144 : && bitpos == 0
469 : 3599384 : && align_bits % BITS_PER_UNIT == 0)
470 : : {
471 : 74522 : unsigned int align_units = align_bits / BITS_PER_UNIT;
472 : 74522 : if (align_units > 16)
473 : : align_units = 16;
474 : 77786 : while ((first_live | (align_units - 1)) > (unsigned int)last_live)
475 : 3264 : align_units >>= 1;
476 : :
477 : 74522 : if (*trim_head)
478 : : {
479 : 68054 : unsigned int pos = first_live & (align_units - 1);
480 : 75086 : for (unsigned int i = 1; i <= align_units; i <<= 1)
481 : : {
482 : 75086 : unsigned int mask = ~(i - 1);
483 : 75086 : unsigned int bytes = align_units - (pos & mask);
484 : 75086 : if (wi::popcount (bytes) <= 1)
485 : : {
486 : 68054 : *trim_head &= mask;
487 : 68054 : break;
488 : : }
489 : : }
490 : : }
491 : :
492 : 74522 : if (*trim_tail)
493 : : {
494 : 8772 : unsigned int pos = last_live & (align_units - 1);
495 : 15717 : for (unsigned int i = 1; i <= align_units; i <<= 1)
496 : : {
497 : 15717 : int mask = i - 1;
498 : 15717 : unsigned int bytes = (pos | mask) + 1;
499 : 15717 : if ((last_live | mask) > (last_live + *trim_tail))
500 : : break;
501 : 15717 : if (wi::popcount (bytes) <= 1)
502 : : {
503 : 8772 : unsigned int extra = (last_live | mask) - last_live;
504 : 8772 : *trim_tail -= extra;
505 : 8772 : break;
506 : : }
507 : : }
508 : : }
509 : : }
510 : :
511 : 3524862 : if ((*trim_head || *trim_tail) && dump_file && (dump_flags & TDF_DETAILS))
512 : : {
513 : 18 : fprintf (dump_file, " Trimming statement (head = %d, tail = %d): ",
514 : : *trim_head, *trim_tail);
515 : 18 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
516 : 18 : fprintf (dump_file, "\n");
517 : : }
518 : : }
519 : :
520 : : /* STMT initializes an object from COMPLEX_CST where one or more of the bytes
521 : : written may be dead stores. REF is a representation of the memory written.
522 : : LIVE is the bitmap of stores to REF that are still live.
523 : :
524 : : Attempt to rewrite STMT so that only the real or the imaginary part of the
525 : : object is actually stored. */
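: :
: : /* E.g. for c = __complex__ (1.0, 2.0); (a sketch in GIMPLE dump
: : syntax) where only the real half remains live, the store becomes
: : REALPART_EXPR <c> = 1.0;. */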
526 : :
527 : : static void
528 : 5442 : maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
529 : : {
530 : 5442 : int trim_head, trim_tail;
531 : 5442 : compute_trims (ref, live, &trim_head, &trim_tail, stmt);
532 : :
533 : : /* The amount of data trimmed from the head or tail must be at
534 : : least half the size of the object to ensure we're trimming
535 : : the entire real or imaginary half. By writing things this
536 : : way we avoid more O(n) bitmap operations. */
537 : 5442 : if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
538 : : {
539 : : /* TREE_REALPART is live */
540 : 2 : tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
541 : 2 : tree y = gimple_assign_lhs (stmt);
542 : 2 : y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
543 : 2 : gimple_assign_set_lhs (stmt, y);
544 : 2 : gimple_assign_set_rhs1 (stmt, x);
545 : : }
546 : 5440 : else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
547 : : {
548 : : /* TREE_IMAGPART is live */
549 : 3 : tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
550 : 3 : tree y = gimple_assign_lhs (stmt);
551 : 3 : y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
552 : 3 : gimple_assign_set_lhs (stmt, y);
553 : 3 : gimple_assign_set_rhs1 (stmt, x);
554 : : }
555 : :
556 : : /* Other cases indicate parts of both the real and imag subobjects
557 : : are live. We do not try to optimize those cases. */
558 : 5442 : }
559 : :
560 : : /* STMT initializes an object using a CONSTRUCTOR where one or more of the
561 : : bytes written are dead stores. REF is a representation of the memory
562 : : written. LIVE is the bitmap of stores to REF that are still live.
563 : :
564 : : Attempt to rewrite STMT so that it writes fewer memory locations.
565 : :
566 : : The most common case for getting here is a CONSTRUCTOR with no elements
567 : : being used to zero initialize an object. We do not try to handle other
568 : : cases as those would force us to fully cover the object with the
569 : : CONSTRUCTOR node except for the components that are dead.
570 : : Also handles integer stores of 0, which can arise from memset/memcpy optimizations. */
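: :
: : /* E.g. a 32-byte a = {}; whose first 16 bytes are later overwritten
: : is rewritten (roughly, in GIMPLE dump syntax) to
: : MEM <char[16]> [&a + 16] = {}; */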
571 : :
572 : : static void
573 : 3392407 : maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt, bool was_integer_cst)
574 : : {
575 : 3392407 : tree ctor = gimple_assign_rhs1 (stmt);
576 : :
577 : : /* This is the only case we currently handle. It actually seems to
578 : : catch most cases of actual interest. */
579 : 3872896 : gcc_assert (was_integer_cst ? integer_zerop (ctor) : CONSTRUCTOR_NELTS (ctor) == 0);
580 : :
581 : 3392407 : int head_trim = 0;
582 : 3392407 : int tail_trim = 0;
583 : 3392407 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
584 : :
585 : : /* Now we want to replace the constructor initializer
586 : : with memset (object + head_trim, 0, size - head_trim - tail_trim). */
587 : 3392407 : if (head_trim || tail_trim)
588 : : {
589 : : /* We want &lhs for the MEM_REF expression. */
590 : 89001 : tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
591 : :
592 : 89001 : STRIP_USELESS_TYPE_CONVERSION (lhs_addr);
593 : :
594 : 89001 : if (! is_gimple_min_invariant (lhs_addr))
595 : 17192 : return;
596 : :
597 : : /* The number of bytes for the new constructor. */
598 : 71809 : poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
599 : 71809 : poly_int64 count = ref_bytes - head_trim - tail_trim;
600 : :
601 : : /* And the new type for the CONSTRUCTOR. Essentially it's just
602 : : a char array large enough to cover the non-trimmed parts of
603 : : the original CONSTRUCTOR. Note we want explicit bounds here
604 : : so that we know how many bytes to clear when expanding the
605 : : CONSTRUCTOR. */
606 : 71809 : tree type = build_array_type_nelts (char_type_node, count);
607 : :
608 : : /* Build a suitable alias type rather than using alias set zero
609 : : to avoid pessimizing. */
610 : 71809 : tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
611 : :
612 : : /* Build a MEM_REF representing the whole accessed area, starting
613 : : at the first byte not trimmed. */
614 : 71809 : tree exp = fold_build2 (MEM_REF, type, lhs_addr,
615 : : build_int_cst (alias_type, head_trim));
616 : :
617 : : /* Now update STMT with a new RHS and LHS. */
618 : 71809 : gimple_assign_set_lhs (stmt, exp);
619 : 71809 : gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
620 : : }
621 : : }
622 : :
623 : : /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
624 : : copied/set by DECREMENT. */
625 : : static void
626 : 732 : decrement_count (gimple *stmt, int decrement)
627 : : {
628 : 732 : tree *countp = gimple_call_arg_ptr (stmt, 2);
629 : 732 : gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
630 : 1464 : *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
631 : 732 : - decrement));
632 : 732 : }
633 : :
634 : : static void
635 : 664 : increment_start_addr (gimple *stmt, tree *where, int increment)
636 : : {
637 : 664 : if (tree lhs = gimple_call_lhs (stmt))
638 : 6 : if (where == gimple_call_arg_ptr (stmt, 0))
639 : : {
640 : 6 : gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
641 : 6 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
642 : 6 : gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
643 : 6 : gimple_call_set_lhs (stmt, NULL_TREE);
644 : 6 : update_stmt (stmt);
645 : : }
646 : :
647 : 664 : if (TREE_CODE (*where) == SSA_NAME)
648 : : {
649 : 168 : tree tem = make_ssa_name (TREE_TYPE (*where));
650 : 168 : gassign *newop
651 : 168 : = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
652 : 168 : build_int_cst (sizetype, increment));
653 : 168 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
654 : 168 : gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
655 : 168 : *where = tem;
656 : 168 : update_stmt (stmt);
657 : 168 : return;
658 : : }
659 : :
660 : 496 : *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
661 : : *where,
662 : : build_int_cst (ptr_type_node,
663 : : increment)));
664 : 496 : STRIP_USELESS_TYPE_CONVERSION (*where);
665 : : }
666 : :
667 : : /* STMT is a builtin call that writes bytes in bitmap ORIG; some bytes are dead
668 : : (ORIG & ~NEW) and need not be stored. Try to rewrite STMT to reduce
669 : : the amount of data it actually writes.
670 : :
671 : : Right now we only support trimming from the head or the tail of the
672 : : memory region. In theory we could split the mem* call, but it's
673 : : likely of marginal value. */
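: :
: : /* E.g. if the last 16 bytes of memcpy (p, q, 32) are overwritten
: : before they can be read, the call becomes memcpy (p, q, 16); if
: : instead the first 16 bytes are dead it becomes
: : memcpy (p + 16, q + 16, 16). */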
674 : :
675 : : static void
676 : 140130 : maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
677 : : {
678 : 140130 : int head_trim, tail_trim;
679 : 140130 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
680 : : {
681 : 4612 : case BUILT_IN_STRNCPY:
682 : 4612 : case BUILT_IN_STRNCPY_CHK:
683 : 4612 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
684 : 4612 : if (head_trim)
685 : : {
686 : : /* Head trimming of strncpy is only possible if we can
687 : : prove all bytes we would trim are non-zero (or we could
688 : : turn the strncpy into memset if there must be zero
689 : : among the head trimmed bytes). If we don't know anything
690 : : about those bytes, the presence or absence of '\0' bytes
691 : : in there will affect whether it acts for the non-trimmed
692 : : bytes as memset or memcpy/strncpy. */
693 : 74 : c_strlen_data lendata = { };
694 : 74 : int orig_head_trim = head_trim;
695 : 74 : tree srcstr = gimple_call_arg (stmt, 1);
696 : 74 : if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
697 : 74 : || !tree_fits_uhwi_p (lendata.minlen))
698 : 8 : head_trim = 0;
699 : 66 : else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
700 : : {
701 : 60 : head_trim = tree_to_uhwi (lendata.minlen);
702 : 60 : if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
703 : 0 : head_trim &= ~(UNITS_PER_WORD - 1);
704 : : }
705 : 74 : if (orig_head_trim != head_trim
706 : 68 : && dump_file
707 : 82 : && (dump_flags & TDF_DETAILS))
708 : 8 : fprintf (dump_file,
709 : : " Adjusting strncpy trimming to (head = %d,"
710 : : " tail = %d)\n", head_trim, tail_trim);
711 : : }
712 : 4612 : goto do_memcpy;
713 : :
714 : 97831 : case BUILT_IN_MEMCPY:
715 : 97831 : case BUILT_IN_MEMMOVE:
716 : 97831 : case BUILT_IN_MEMCPY_CHK:
717 : 97831 : case BUILT_IN_MEMMOVE_CHK:
718 : 97831 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
719 : :
720 : 102443 : do_memcpy:
721 : : /* Tail trimming is easy, we can just reduce the count. */
722 : 102443 : if (tail_trim)
723 : 72 : decrement_count (stmt, tail_trim);
724 : :
725 : : /* Head trimming requires adjusting all the arguments. */
726 : 102443 : if (head_trim)
727 : : {
728 : : /* For __*_chk need to adjust also the last argument. */
729 : 110 : if (gimple_call_num_args (stmt) == 4)
730 : : {
731 : 49 : tree size = gimple_call_arg (stmt, 3);
732 : 49 : if (!tree_fits_uhwi_p (size))
733 : : break;
734 : 7 : if (!integer_all_onesp (size))
735 : : {
736 : 7 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
737 : 7 : if (sz < (unsigned) head_trim)
738 : : break;
739 : 7 : tree arg = wide_int_to_tree (TREE_TYPE (size),
740 : 7 : sz - head_trim);
741 : 7 : gimple_call_set_arg (stmt, 3, arg);
742 : : }
743 : : }
744 : 68 : tree *dst = gimple_call_arg_ptr (stmt, 0);
745 : 68 : increment_start_addr (stmt, dst, head_trim);
746 : 68 : tree *src = gimple_call_arg_ptr (stmt, 1);
747 : 68 : increment_start_addr (stmt, src, head_trim);
748 : 68 : decrement_count (stmt, head_trim);
749 : : }
750 : : break;
751 : :
752 : 37687 : case BUILT_IN_MEMSET:
753 : 37687 : case BUILT_IN_MEMSET_CHK:
754 : 37687 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
755 : :
756 : : /* Tail trimming is easy, we can just reduce the count. */
757 : 37687 : if (tail_trim)
758 : 64 : decrement_count (stmt, tail_trim);
759 : :
760 : : /* Head trimming requires adjusting all the arguments. */
761 : 37687 : if (head_trim)
762 : : {
763 : : /* For __*_chk need to adjust also the last argument. */
764 : 528 : if (gimple_call_num_args (stmt) == 4)
765 : : {
766 : 7 : tree size = gimple_call_arg (stmt, 3);
767 : 7 : if (!tree_fits_uhwi_p (size))
768 : : break;
769 : 7 : if (!integer_all_onesp (size))
770 : : {
771 : 7 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
772 : 7 : if (sz < (unsigned) head_trim)
773 : : break;
774 : 7 : tree arg = wide_int_to_tree (TREE_TYPE (size),
775 : 7 : sz - head_trim);
776 : 7 : gimple_call_set_arg (stmt, 3, arg);
777 : : }
778 : : }
779 : 528 : tree *dst = gimple_call_arg_ptr (stmt, 0);
780 : 528 : increment_start_addr (stmt, dst, head_trim);
781 : 528 : decrement_count (stmt, head_trim);
782 : : }
783 : : break;
784 : :
785 : : default:
786 : : break;
787 : : }
788 : 140130 : }
789 : :
790 : : /* STMT is a memory write where one or more bytes written are dead stores.
791 : : REF is a representation of the memory written. LIVE is the bitmap of
792 : : stores to REF that are still live.
793 : :
794 : : Attempt to rewrite STMT so that it writes fewer memory locations. Right
795 : : now we only support trimming at the start or end of the memory region.
796 : : It's not clear how much there is to be gained by trimming from the middle
797 : : of the region. */
798 : :
799 : : static void
800 : 26517972 : maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
801 : : {
802 : 26517972 : if (is_gimple_assign (stmt)
803 : 26517972 : && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
804 : : {
805 : 25217475 : switch (gimple_assign_rhs_code (stmt))
806 : : {
807 : 480489 : case CONSTRUCTOR:
808 : 480489 : maybe_trim_constructor_store (ref, live, stmt, false);
809 : 480489 : break;
810 : 5442 : case COMPLEX_CST:
811 : 5442 : maybe_trim_complex_store (ref, live, stmt);
812 : 5442 : break;
813 : 8929096 : case INTEGER_CST:
814 : 8929096 : if (integer_zerop (gimple_assign_rhs1 (stmt))
815 : 8929096 : && type_has_mode_precision_p (TREE_TYPE (gimple_assign_lhs (stmt))))
816 : 2911918 : maybe_trim_constructor_store (ref, live, stmt, true);
817 : : break;
818 : : default:
819 : : break;
820 : : }
821 : : }
822 : 26517972 : }
823 : :
824 : : /* Return TRUE if USE_REF reads bytes that are live according to LIVE,
825 : : where LIVE is derived from REF, a write reference.
826 : :
827 : : While this routine may modify USE_REF, it's passed by value, not
828 : : location. So callers do not see those modifications. */
829 : :
830 : : static bool
831 : 3774987 : live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
832 : : {
833 : : /* We have already verified that USE_REF and REF hit the same object.
834 : : Now verify that there's actually an overlap between USE_REF and REF. */
835 : 3774987 : HOST_WIDE_INT start, size;
836 : 3774987 : if (get_byte_range (use_ref, ref, false, &start, &size))
837 : : {
838 : : /* If USE_REF covers all of REF, then it will hit one or more
839 : : live bytes. This avoids useless iteration over the bitmap
840 : : below. */
841 : 3774987 : if (start == 0 && known_eq (size * 8, ref->size))
842 : : return true;
843 : :
844 : : /* Now check if any of the remaining bits in use_ref are set in LIVE. */
845 : 1042609 : return bitmap_any_bit_in_range_p (live, start, (start + size - 1));
846 : : }
847 : : return true;
848 : : }
849 : :
850 : : /* Callback for dse_classify_store calling for_each_index. Verify that
851 : : indices are invariant in the loop with backedge PHI in basic-block DATA. */
852 : :
853 : : static bool
854 : 2720026 : check_name (tree, tree *idx, void *data)
855 : : {
856 : 2720026 : basic_block phi_bb = (basic_block) data;
857 : 2720026 : if (TREE_CODE (*idx) == SSA_NAME
858 : 1742392 : && !SSA_NAME_IS_DEFAULT_DEF (*idx)
859 : 4342365 : && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
860 : : phi_bb))
861 : : return false;
862 : : return true;
863 : : }
864 : :
865 : : /* STMT stores the value 0 into one or more memory locations
866 : : (via memset, empty constructor, calloc call, etc).
867 : :
868 : : See if there is a subsequent store of the value 0 to one
869 : : or more of the same memory location(s). If so, the subsequent
870 : : store is redundant and can be removed.
871 : :
872 : : The subsequent stores could be via memset, empty constructors,
873 : : simple MEM stores, etc. */
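: :
: : /* E.g. given memset (p, 0, 32); followed by p[1] = 0; with no
: : intervening aliased loads, the second store is redundant and is
: : the one removed. */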
874 : :
875 : : static void
876 : 4356089 : dse_optimize_redundant_stores (gimple *stmt)
877 : : {
878 : 4356089 : int cnt = 0;
879 : :
880 : : /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
881 : 4356089 : alias_set_type earlier_set = 0;
882 : 4356089 : alias_set_type earlier_base_set = 0;
883 : 4356089 : if (is_gimple_assign (stmt))
884 : : {
885 : 4298291 : ao_ref lhs_ref;
886 : 4298291 : ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
887 : 4298291 : earlier_set = ao_ref_alias_set (&lhs_ref);
888 : 4298291 : earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
889 : : }
890 : :
891 : : /* We could do something fairly complex and look through PHIs
892 : : like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
893 : : the effort.
894 : :
895 : : Look at all the immediate uses of the VDEF (which are obviously
896 : : dominated by STMT). See if one or more stores 0 into the same
897 : : memory locations a STMT, if so remove the immediate use statements. */
898 : 4356089 : tree defvar = gimple_vdef (stmt);
899 : 4356089 : imm_use_iterator ui;
900 : 4356089 : gimple *use_stmt;
901 : 9784727 : FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
902 : : {
903 : : /* Limit stmt walking. */
904 : 5449703 : if (++cnt > param_dse_max_alias_queries_per_store)
905 : : break;
906 : :
907 : : /* If USE_STMT stores 0 into one or more of the same locations
908 : : as STMT and STMT would kill USE_STMT, then we can just remove
909 : : USE_STMT. */
910 : 5449703 : tree fndecl;
911 : 5449703 : if ((is_gimple_assign (use_stmt)
912 : 3804392 : && gimple_vdef (use_stmt)
913 : 3093344 : && (gimple_assign_single_p (use_stmt)
914 : 3093344 : && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
915 : 7766189 : || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
916 : 158499 : && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
917 : 158499 : && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
918 : 136878 : || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
919 : 21712 : && integer_zerop (gimple_call_arg (use_stmt, 1))))
920 : : {
921 : 1506257 : ao_ref write;
922 : :
923 : 1506257 : if (!initialize_ao_ref_for_dse (use_stmt, &write))
924 : : break;
925 : :
926 : 1485192 : if (valid_ao_ref_for_dse (&write)
927 : 1485192 : && stmt_kills_ref_p (stmt, &write))
928 : : {
929 : 4946 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
930 : 4946 : if (is_gimple_assign (use_stmt))
931 : : {
932 : 4897 : ao_ref lhs_ref;
933 : 4897 : ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
934 : 4897 : if ((earlier_set == ao_ref_alias_set (&lhs_ref)
935 : 512 : || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
936 : : earlier_set))
937 : 5381 : && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
938 : 816 : || alias_set_subset_of
939 : 816 : (ao_ref_base_alias_set (&lhs_ref),
940 : : earlier_base_set)))
941 : 4767 : delete_dead_or_redundant_assignment (&gsi, "redundant",
942 : : need_eh_cleanup,
943 : : need_ab_cleanup);
944 : : }
945 : 49 : else if (is_gimple_call (use_stmt))
946 : : {
947 : 49 : if ((earlier_set == 0
948 : 5 : || alias_set_subset_of (0, earlier_set))
949 : 49 : && (earlier_base_set == 0
950 : 0 : || alias_set_subset_of (0, earlier_base_set)))
951 : 44 : delete_dead_or_redundant_call (&gsi, "redundant");
952 : : }
953 : : else
954 : 0 : gcc_unreachable ();
955 : : }
956 : : }
957 : 4356089 : }
958 : 4356089 : }
959 : :
960 : : /* Return whether PHI contains ARG as an argument. */
961 : :
962 : : static bool
963 : 4125102 : contains_phi_arg (gphi *phi, tree arg)
964 : : {
965 : 29964266 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
966 : 26085823 : if (gimple_phi_arg_def (phi, i) == arg)
967 : : return true;
968 : : return false;
969 : : }
970 : :
971 : : /* Hash map of the memory use in a GIMPLE assignment to its
972 : : data reference. If NULL, data-ref analysis isn't used. */
973 : : static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;
974 : :
975 : : /* A helper of dse_optimize_stmt.
976 : : Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
977 : : according to downstream uses and defs. Sets *BY_CLOBBER_P to true
978 : : if only clobber statements influenced the classification result.
979 : : Returns the classification. */
980 : :
981 : : static dse_store_status
982 : 43681348 : dse_classify_store (ao_ref *ref, gimple *stmt,
983 : : bool byte_tracking_enabled, sbitmap live_bytes,
984 : : bool *by_clobber_p, tree stop_at_vuse, int &cnt,
985 : : bitmap visited)
986 : : {
987 : 43681348 : gimple *temp;
988 : 43681348 : std::unique_ptr<data_reference, void(*)(data_reference_p)>
989 : 43681348 : dra (nullptr, free_data_ref);
990 : :
991 : 43681348 : if (by_clobber_p)
992 : 43147094 : *by_clobber_p = true;
993 : :
994 : : /* Find the first dominated statement that clobbers (part of) the
995 : : memory stmt stores to with no intermediate statement that may use
996 : : part of the memory stmt stores. That is, find a store that may
997 : : prove stmt to be a dead store. */
998 : : temp = stmt;
999 : 541587076 : do
1000 : : {
1001 : 292634212 : gimple *use_stmt;
1002 : 292634212 : imm_use_iterator ui;
1003 : 292634212 : bool fail = false;
1004 : 292634212 : tree defvar;
1005 : :
1006 : 292634212 : if (gimple_code (temp) == GIMPLE_PHI)
1007 : : {
1008 : 19879136 : defvar = PHI_RESULT (temp);
1009 : 19879136 : bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
1010 : : }
1011 : : else
1012 : 545510152 : defvar = gimple_vdef (temp);
1013 : :
1014 : 292634212 : auto_vec<gimple *, 10> defs;
1015 : 292634212 : gphi *first_phi_def = NULL;
1016 : 292634212 : gphi *last_phi_def = NULL;
1017 : :
1018 : 292634212 : auto_vec<tree, 10> worklist;
1019 : 292634212 : worklist.quick_push (defvar);
1020 : :
1021 : 296807749 : do
1022 : : {
1023 : 296807749 : defvar = worklist.pop ();
1024 : : /* If we're instructed to stop walking at region boundary, do so. */
1025 : 296807749 : if (defvar == stop_at_vuse)
1026 : : return DSE_STORE_LIVE;
1027 : :
1028 : 296788789 : use_operand_p usep;
1029 : 634126429 : FOR_EACH_IMM_USE_FAST (usep, ui, defvar)
1030 : : {
1031 : 367336704 : use_stmt = USE_STMT (usep);
1032 : :
1033 : : /* Limit stmt walking. */
1034 : 367336704 : if (++cnt > param_dse_max_alias_queries_per_store)
1035 : : {
1036 : : fail = true;
1037 : : break;
1038 : : }
1039 : :
1040 : : /* In simple cases we can look through PHI nodes, but we
1041 : : have to be careful with loops and with memory references
1042 : : containing operands that are also operands of PHI nodes.
1043 : : See gcc.c-torture/execute/20051110-*.c. */
1044 : 367070485 : if (gphi *phi = dyn_cast <gphi *> (use_stmt))
1045 : : {
1046 : : /* Look through single-argument PHIs. */
1047 : 40855046 : if (gimple_phi_num_args (phi) == 1)
1048 : 4941486 : worklist.safe_push (gimple_phi_result (phi));
1049 : : else
1050 : : {
1051 : : /* If we visit this PHI by following a backedge then we
1052 : : have to make sure ref->ref only refers to SSA names
1053 : : that are invariant with respect to the loop
1054 : : represented by this PHI node. We handle irreducible
1055 : : regions by relying on backedge marking and identifying
1056 : : the head of the (sub-)region. */
1057 : 35913560 : edge e = gimple_phi_arg_edge
1058 : 35913560 : (phi, PHI_ARG_INDEX_FROM_USE (usep));
1059 : 35913560 : if (e->flags & EDGE_DFS_BACK)
1060 : : {
1061 : 3296057 : basic_block rgn_head
1062 : 3296057 : = nearest_common_dominator (CDI_DOMINATORS,
1063 : : gimple_bb (phi),
1064 : : e->src);
1065 : 3296057 : if (!for_each_index (ref->ref
1066 : : ? &ref->ref : &ref->base,
1067 : : check_name, rgn_head))
1068 : : return DSE_STORE_LIVE;
1069 : : }
1070 : : /* If we already visited this PHI ignore it for further
1071 : : processing. But note we have to check each incoming
1072 : : edge above. */
1073 : 68914174 : if (!bitmap_bit_p (visited,
1074 : 34457087 : SSA_NAME_VERSION (PHI_RESULT (phi))))
1075 : : {
1076 : 25524635 : defs.safe_push (phi);
1077 : 25524635 : if (!first_phi_def)
1078 : 21470816 : first_phi_def = phi;
1079 : : last_phi_def = phi;
1080 : : }
1081 : : }
1082 : : }
1083 : : /* If the statement is a use the store is not dead. */
1084 : 326215439 : else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
1085 : : {
1086 : 28291903 : if (dse_stmt_to_dr_map
1087 : 6239083 : && ref->ref
1088 : 34430726 : && is_gimple_assign (use_stmt))
1089 : : {
1090 : 1228429 : if (!dra)
1091 : 1224064 : dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
1092 : : false, false));
1093 : 1228429 : bool existed_p;
1094 : 1228429 : data_reference_p &drb
1095 : 1228429 : = dse_stmt_to_dr_map->get_or_insert (use_stmt,
1096 : : &existed_p);
1097 : 1228429 : if (!existed_p)
1098 : 747876 : drb = create_data_ref (NULL, NULL,
1099 : : gimple_assign_rhs1 (use_stmt),
1100 : : use_stmt, false, false);
1101 : 1228429 : if (!dr_may_alias_p (dra.get (), drb, NULL))
1102 : : {
1103 : 15926 : if (gimple_vdef (use_stmt))
1104 : 21 : defs.safe_push (use_stmt);
1105 : 7963 : continue;
1106 : : }
1107 : : }
1108 : :
1109 : : /* Handle common cases where we can easily build an ao_ref
1110 : : structure for USE_STMT and in doing so we find that the
1111 : : references hit non-live bytes and thus can be ignored.
1112 : :
1113 : : TODO: We can also use modref summary to handle calls. */
1114 : 28283940 : if (byte_tracking_enabled
1115 : 28283940 : && is_gimple_assign (use_stmt))
1116 : : {
1117 : 5166469 : ao_ref use_ref;
1118 : 5166469 : ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
1119 : 5166469 : if (valid_ao_ref_for_dse (&use_ref)
1120 : 5148873 : && operand_equal_p (use_ref.base, ref->base,
1121 : : OEP_ADDRESS_OF)
1122 : 8941456 : && !live_bytes_read (&use_ref, ref, live_bytes))
1123 : : {
1124 : : /* If this is a store, remember it as we possibly
1125 : : need to walk the defs uses. */
1126 : 15136 : if (gimple_vdef (use_stmt))
1127 : 991 : defs.safe_push (use_stmt);
1128 : 7568 : continue;
1129 : : }
1130 : : }
1131 : :
1132 : : fail = true;
1133 : : break;
1134 : : }
1135 : : /* We have visited ourselves already so ignore STMT for the
1136 : : purpose of chaining. */
1137 : 297923536 : else if (use_stmt == stmt)
1138 : : ;
1139 : : /* If this is a store, remember it as we possibly need to walk the
1140 : : defs uses. */
1141 : 933011356 : else if (gimple_vdef (use_stmt))
1142 : 256575764 : defs.safe_push (use_stmt);
1143 : : }
1144 : : }
1145 : 562122041 : while (!fail && !worklist.is_empty ());
1146 : :
1147 : 291158779 : if (fail)
1148 : : {
1149 : : /* STMT might be partially dead and we may be able to reduce
1150 : : how many memory locations it stores into. */
1151 : 28542591 : if (byte_tracking_enabled && !gimple_clobber_p (stmt))
1152 : 25240441 : return DSE_STORE_MAYBE_PARTIAL_DEAD;
1153 : : return DSE_STORE_LIVE;
1154 : : }
1155 : :
1156 : : /* If we didn't find any definition this means the store is dead
1157 : : if it isn't a store to globally reachable memory. In this case
1158 : : just pretend the stmt makes itself dead. Otherwise fail. */
1159 : 262616188 : if (defs.is_empty ())
1160 : : {
1161 : 2634862 : if (ref_may_alias_global_p (ref, false))
1162 : : {
1163 : 40658 : basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (defvar));
1164 : : /* Assume that BUILT_IN_UNREACHABLE and BUILT_IN_UNREACHABLE_TRAP
1165 : : do not need to keep (global) memory side-effects live.
1166 : : We do not have virtual operands on BUILT_IN_UNREACHABLE
1167 : : but we can do poor man's reachability when the last
1168 : : definition we want to elide is in the block that ends
1169 : : in such a call. */
1170 : 40658 : if (EDGE_COUNT (def_bb->succs) == 0)
1171 : 56004 : if (gcall *last = dyn_cast <gcall *> (*gsi_last_bb (def_bb)))
1172 : 694 : if (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
1173 : 694 : || gimple_call_builtin_p (last,
1174 : : BUILT_IN_UNREACHABLE_TRAP))
1175 : : {
1176 : 507 : if (by_clobber_p)
1177 : 507 : *by_clobber_p = false;
1178 : 507 : return DSE_STORE_DEAD;
1179 : : }
1180 : 40151 : return DSE_STORE_LIVE;
1181 : : }
1182 : :
1183 : 2594204 : if (by_clobber_p)
1184 : 2590524 : *by_clobber_p = false;
1185 : 2594204 : return DSE_STORE_DEAD;
1186 : : }
1187 : :
1188 : : /* Process defs and remove those we need not process further. */
1189 : 536826458 : for (unsigned i = 0; i < defs.length ();)
1190 : : {
1191 : 276910563 : gimple *def = defs[i];
1192 : 276910563 : gimple *use_stmt;
1193 : 276910563 : use_operand_p use_p;
1194 : 276910563 : tree vdef = (gimple_code (def) == GIMPLE_PHI
1195 : 300363259 : ? gimple_phi_result (def) : gimple_vdef (def));
1196 : 276910563 : gphi *phi_def;
1197 : : /* If the path to check starts with a kill we do not need to
1198 : : process it further.
1199 : : ??? With byte tracking we need only kill the bytes currently
1200 : : live. */
1201 : 276910563 : if (stmt_kills_ref_p (def, ref))
1202 : : {
1203 : 3366826 : if (by_clobber_p && !gimple_clobber_p (def))
1204 : 1083219 : *by_clobber_p = false;
1205 : 3366826 : defs.unordered_remove (i);
1206 : : }
1207 : : /* If the path ends here we do not need to process it further.
1208 : : This for example happens with calls to noreturn functions. */
1209 : 273543737 : else if (has_zero_uses (vdef))
1210 : : {
1211 : : /* But if the store is to global memory it is definitely
1212 : : not dead. */
1213 : 2781827 : if (ref_may_alias_global_p (ref, false))
1214 : 65431 : return DSE_STORE_LIVE;
1215 : 2716396 : defs.unordered_remove (i);
1216 : : }
1217 : : /* In addition to kills we can remove defs whose only use
1218 : : is another def in defs. That can only ever be PHIs of which
1219 : : we track two for simplicity reasons, the first and last in
1220 : : {first,last}_phi_def (we fail for multiple PHIs anyways).
1221 : : We can also ignore defs that feed only into
1222 : : already visited PHIs. */
1223 : 270761910 : else if (single_imm_use (vdef, &use_p, &use_stmt)
1224 : 270761910 : && (use_stmt == first_phi_def
1225 : 234985007 : || use_stmt == last_phi_def
1226 : 234874955 : || (gimple_code (use_stmt) == GIMPLE_PHI
1227 : 15201315 : && bitmap_bit_p (visited,
1228 : 15201315 : SSA_NAME_VERSION
1229 : : (PHI_RESULT (use_stmt))))))
1230 : : {
1231 : 6005273 : defs.unordered_remove (i);
1232 : 6005273 : if (def == first_phi_def)
1233 : : first_phi_def = NULL;
1234 : 5356825 : else if (def == last_phi_def)
1235 : 246896 : last_phi_def = NULL;
1236 : : }
1237 : : /* If def is a PHI and one of its arguments is another PHI node still
1238 : : in consideration we can defer processing it. */
1239 : 264756637 : else if ((phi_def = dyn_cast <gphi *> (def))
1240 : 22565625 : && ((last_phi_def
1241 : 22565625 : && phi_def != last_phi_def
1242 : 2189908 : && contains_phi_arg (phi_def,
1243 : : gimple_phi_result (last_phi_def)))
1244 : 22436576 : || (first_phi_def
1245 : 22436576 : && phi_def != first_phi_def
1246 : 1935194 : && contains_phi_arg
1247 : 1935194 : (phi_def, gimple_phi_result (first_phi_def)))))
1248 : : {
1249 : 246659 : defs.unordered_remove (i);
1250 : 246659 : if (phi_def == first_phi_def)
1251 : : first_phi_def = NULL;
1252 : 171792 : else if (phi_def == last_phi_def)
1253 : 246896 : last_phi_def = NULL;
1254 : : }
1255 : : else
1256 : 264509978 : ++i;
1257 : : }
1258 : :
1259 : : /* If all defs kill the ref we are done. */
1260 : 303597243 : if (defs.is_empty ())
1261 : : return DSE_STORE_DEAD;
1262 : : /* If more than one def survives we have to analyze multiple
1263 : : paths. We can handle this by recursing, sharing 'visited'
1264 : : to avoid redundant work and limiting it by shared 'cnt'.
1265 : : For now do not bother with byte-tracking in this case. */
1266 : 259522182 : while (defs.length () > 1)
1267 : : {
1268 : 10126761 : if (dse_classify_store (ref, defs.last (), false, NULL,
1269 : : by_clobber_p, stop_at_vuse, cnt,
1270 : : visited) != DSE_STORE_DEAD)
1271 : : break;
1272 : 6414173 : byte_tracking_enabled = false;
1273 : 6414173 : defs.pop ();
1274 : : }
1275 : : /* If more than one def survives fail. */
1276 : 253108009 : if (defs.length () > 1)
1277 : : {
1278 : : /* STMT might be partially dead and we may be able to reduce
1279 : : how many memory locations it stores into. */
1280 : 3712588 : if (byte_tracking_enabled && !gimple_clobber_p (stmt))
1281 : 1489753 : return DSE_STORE_MAYBE_PARTIAL_DEAD;
1282 : : return DSE_STORE_LIVE;
1283 : : }
1284 : 249395421 : temp = defs[0];
1285 : :
1286 : : /* Track partial kills. */
1287 : 249395421 : if (byte_tracking_enabled)
1288 : : {
1289 : 190443363 : clear_bytes_written_by (live_bytes, temp, ref);
1290 : 190443363 : if (bitmap_empty_p (live_bytes))
1291 : : {
1292 : 442557 : if (by_clobber_p && !gimple_clobber_p (temp))
1293 : 423548 : *by_clobber_p = false;
1294 : 442557 : return DSE_STORE_DEAD;
1295 : : }
1296 : : }
1297 : 292634212 : }
1298 : : /* Continue walking until there are no more live bytes. */
1299 : : while (1);
1300 : 43681348 : }
1301 : :
1302 : : dse_store_status
1303 : 33554587 : dse_classify_store (ao_ref *ref, gimple *stmt,
1304 : : bool byte_tracking_enabled, sbitmap live_bytes,
1305 : : bool *by_clobber_p, tree stop_at_vuse)
1306 : : {
1307 : 33554587 : int cnt = 0;
1308 : 33554587 : auto_bitmap visited;
1309 : 33554587 : return dse_classify_store (ref, stmt, byte_tracking_enabled, live_bytes,
1310 : 33554587 : by_clobber_p, stop_at_vuse, cnt, visited);
1311 : 33554587 : }
1312 : :
1313 : :
1314 : : /* Delete a dead call at GSI, which is a mem* call of some kind. */
1315 : : static void
1316 : 6482 : delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
1317 : : {
1318 : 6482 : gimple *stmt = gsi_stmt (*gsi);
1319 : 6482 : if (dump_file && (dump_flags & TDF_DETAILS))
1320 : : {
1321 : 18 : fprintf (dump_file, " Deleted %s call: ", type);
1322 : 18 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1323 : 18 : fprintf (dump_file, "\n");
1324 : : }
1325 : :
1326 : 6482 : basic_block bb = gimple_bb (stmt);
1327 : 6482 : tree lhs = gimple_call_lhs (stmt);
1328 : 6482 : if (lhs)
1329 : : {
1330 : 1217 : tree ptr = gimple_call_arg (stmt, 0);
1331 : 1217 : gimple *new_stmt = gimple_build_assign (lhs, ptr);
1332 : 1217 : unlink_stmt_vdef (stmt);
1333 : 1217 : if (gsi_replace (gsi, new_stmt, true))
1334 : 390 : bitmap_set_bit (need_eh_cleanup, bb->index);
1335 : : }
1336 : : else
1337 : : {
1338 : : /* Then we need to fix the operand of the consuming stmt. */
1339 : 5265 : unlink_stmt_vdef (stmt);
1340 : :
1341 : : /* Remove the dead store. */
1342 : 5265 : if (gsi_remove (gsi, true))
1343 : 0 : bitmap_set_bit (need_eh_cleanup, bb->index);
1344 : 5265 : release_defs (stmt);
1345 : : }
1346 : 6482 : }
1347 : :
1348 : : /* Delete a dead store at GSI, which is a gimple assignment. */
1349 : :
1350 : : void
1351 : 2378324 : delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
1352 : : const char *type,
1353 : : bitmap need_eh_cleanup,
1354 : : bitmap need_ab_cleanup)
1355 : : {
1356 : 2378324 : gimple *stmt = gsi_stmt (*gsi);
1357 : 2378324 : if (dump_file && (dump_flags & TDF_DETAILS))
1358 : : {
1359 : 112 : fprintf (dump_file, " Deleted %s store: ", type);
1360 : 112 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1361 : 112 : fprintf (dump_file, "\n");
1362 : : }
1363 : :
1364 : : /* Then we need to fix the operand of the consuming stmt. */
1365 : 2378324 : unlink_stmt_vdef (stmt);
1366 : :
1367 : : /* Remove the dead store. */
1368 : 2378324 : basic_block bb = gimple_bb (stmt);
1369 : 2378324 : if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
1370 : 4 : bitmap_set_bit (need_ab_cleanup, bb->index);
1371 : 2378324 : if (gsi_remove (gsi, true) && need_eh_cleanup)
1372 : 198 : bitmap_set_bit (need_eh_cleanup, bb->index);
1373 : :
1374 : : /* And release any SSA_NAMEs set in this statement back to the
1375 : : SSA_NAME manager. */
1376 : 2378324 : release_defs (stmt);
1377 : 2378324 : }
1378 : :
1379 : : /* Try to prove, using modref summary, that all memory written to by a call is
1380 : : dead and remove it. Assume that if the return value is written to memory
1381 : : it is already proved to be dead. */
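: :
: : /* A sketch of the situation handled here (hypothetical callee):
: :
: : void set (int *p) { *p = 1; } modref: the only store is *p
: : ...
: : set (&x);
: : x = 2; kills the only store of set ()
: :
: : Given a usable modref summary for set (), the call is deleted. */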
1382 : :
1383 : : static bool
1384 : 17386118 : dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
1385 : : {
1386 : 34590524 : gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1387 : :
1388 : 17205737 : if (!stmt)
1389 : : return false;
1390 : :
1391 : 17205737 : tree callee = gimple_call_fndecl (stmt);
1392 : :
1393 : 17205737 : if (!callee)
1394 : : return false;
1395 : :
1396 : : /* Pure/const functions are optimized by normal DCE
1397 : : or handled as store above. */
1398 : 16475753 : int flags = gimple_call_flags (stmt);
1399 : 16475753 : if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
1400 : 128 : && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
1401 : : return false;
1402 : :
1403 : 16475751 : cgraph_node *node = cgraph_node::get (callee);
1404 : 16475751 : if (!node)
1405 : : return false;
1406 : :
1407 : 16465601 : if ((stmt_could_throw_p (cfun, stmt)
1408 : 7288999 : && !cfun->can_delete_dead_exceptions)
1409 : 19016545 : || ((gimple_call_flags (stmt) & ECF_NORETURN)
1410 : 2105736 : && gimple_call_ctrl_altering_p (stmt)))
1411 : 6843784 : return false;
1412 : :
1413 : : /* If return value is used the call is not dead. */
1414 : 9621817 : tree lhs = gimple_call_lhs (stmt);
1415 : 9621817 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
1416 : : {
1417 : 2357315 : imm_use_iterator ui;
1418 : 2357315 : gimple *use_stmt;
1419 : 2557632 : FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
1420 : 2491473 : if (!is_gimple_debug (use_stmt))
1421 : 2357315 : return false;
1422 : : }
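: : /* E.g. (editor's note, hypothetical) in "x = f (&y); return x;" the
: :    call's value is consumed, so the call must stay even if all the
: :    memory it writes is dead.  */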
1423 : :
1424 : : /* Verify that there are no side-effects except for return value
1425 : : and memory writes tracked by modref. */
1426 : 7330661 : modref_summary *summary = get_modref_function_summary (node);
1427 : 7330661 : if (!summary || !summary->try_dse)
1428 : : return false;
1429 : :
1430 : 75675 : bool by_clobber_p = false;
1431 : :
1432 : : /* Walk all memory writes and verify that they are dead. */
1433 : 228823 : for (auto base_node : summary->stores->bases)
1434 : 232370 : for (auto ref_node : base_node->refs)
1435 : 238521 : for (auto access_node : ref_node->accesses)
1436 : : {
1437 : 78451 : tree arg = access_node.get_call_arg (stmt);
1438 : :
1439 : 78451 : if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
1440 : 74344 : return false;
1441 : :
1442 : 78450 : if (integer_zerop (arg)
1443 : 78461 : && !targetm.addr_space.zero_address_valid
1444 : 11 : (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
1445 : 11 : continue;
1446 : :
1447 : 78439 : ao_ref ref;
1448 : :
1449 : 78439 : if (!access_node.get_ao_ref (stmt, &ref))
1450 : : return false;
1451 : 78439 : ref.ref_alias_set = ref_node->ref;
1452 : 78439 : ref.base_alias_set = base_node->base;
1453 : :
1454 : 78439 : bool byte_tracking_enabled
1455 : 78439 : = setup_live_bytes_from_ref (&ref, live_bytes);
1456 : 78439 : enum dse_store_status store_status;
1457 : :
1458 : 78439 : store_status = dse_classify_store (&ref, stmt,
1459 : : byte_tracking_enabled,
1460 : : live_bytes, &by_clobber_p);
1461 : 78439 : if (store_status != DSE_STORE_DEAD)
1462 : : return false;
1463 : : }
1464 : 1331 : delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
1465 : : need_ab_cleanup);
1466 : 1331 : return true;
1467 : : }
1468 : :
1469 : : /* Attempt to eliminate dead stores in the statement referenced by GSI.
1470 : :
1471 : : A dead store is a store into a memory location which will later be
1472 : : overwritten by another store without any intervening loads. In this
1473 : : case the earlier store can be deleted.
1474 : :
1475 : : In our SSA + virtual operand world we use immediate uses of virtual
1476 : : operands to detect dead stores. If a store's virtual definition
1477 : : is used precisely once by a later store to the same location which
1478 : : post dominates the first store, then the first store is dead. */
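: : /* Editor's sketch (hypothetical GIMPLE dump excerpt): the virtual
: :    operand chain makes the kill explicit.  For
: :
: :      # .MEM_2 = VDEF <.MEM_1>
: :      *p_1 = 5;                 // .MEM_2 has exactly one use ...
: :      # .MEM_3 = VDEF <.MEM_2>
: :      *p_1 = 7;                 // ... here, a post-dominating store
: :
: :    the second store overwrites the same location, so the first is
: :    dead.  */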
1479 : :
1480 : : static void
1481 : 57302445 : dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
1482 : : {
1483 : 57302445 : gimple *stmt = gsi_stmt (*gsi);
1484 : :
1485 : : /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1486 : 57302445 : if (gimple_has_volatile_ops (stmt)
1487 : 57302445 : && (!gimple_clobber_p (stmt)
1488 : 8011092 : || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1489 : 54852562 : return;
1490 : :
1491 : 50808070 : ao_ref ref;
1492 : : /* If this is not a store we can still remove a dead call using
1493 : : the modref summary. Note we specifically allow REF to be
1494 : : initialized to a conservative may-def since we are looking for
1495 : : follow-up stores to kill all of it. */
1496 : 50808070 : if (!initialize_ao_ref_for_dse (stmt, &ref, true))
1497 : : {
1498 : 17348785 : dse_optimize_call (gsi, live_bytes);
1499 : 17348785 : return;
1500 : : }
1501 : :
1502 : : /* We know we have virtual definitions. We can handle assignments and
1503 : : some builtin calls. */
1504 : 33459285 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1505 : : {
1506 : 432739 : tree fndecl = gimple_call_fndecl (stmt);
1507 : 432739 : switch (DECL_FUNCTION_CODE (fndecl))
1508 : : {
1509 : 431135 : case BUILT_IN_MEMCPY:
1510 : 431135 : case BUILT_IN_MEMMOVE:
1511 : 431135 : case BUILT_IN_STRNCPY:
1512 : 431135 : case BUILT_IN_MEMSET:
1513 : 431135 : case BUILT_IN_MEMCPY_CHK:
1514 : 431135 : case BUILT_IN_MEMMOVE_CHK:
1515 : 431135 : case BUILT_IN_STRNCPY_CHK:
1516 : 431135 : case BUILT_IN_MEMSET_CHK:
1517 : 431135 : {
1518 : : /* Occasionally calls with an explicit length of zero
1519 : : show up in the IL. It's pointless to do analysis
1520 : : on them; they're trivially dead. */
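: : /* Hypothetical example (editor's note): a call such as
: :
: :      memcpy (dst, src, 0);
: :
: :    writes no bytes and is deleted outright below.  */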
1521 : 431135 : tree size = gimple_call_arg (stmt, 2);
1522 : 431135 : if (integer_zerop (size))
1523 : : {
1524 : 50 : delete_dead_or_redundant_call (gsi, "dead");
1525 : 50 : return;
1526 : : }
1527 : :
1528 : : /* If this is a memset call that initializes an object to zero,
1529 : : look for subsequent stores of zero to the same memory and
1530 : : remove them as redundant. */
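: : /* Editor's illustration (assumed example): after
: :
: :      memset (&s, 0, sizeof s);   // zeroes the whole object
: :      s.f = 0;                    // later zero store: redundant
: :
: :    the later store is removed as redundant rather than waiting for
: :    it to become dead.  */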
1531 : 431085 : if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1532 : 332724 : || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1533 : 431383 : && integer_zerop (gimple_call_arg (stmt, 1)))
1534 : 56194 : dse_optimize_redundant_stores (stmt);
1535 : :
1536 : 431085 : enum dse_store_status store_status;
1537 : 431085 : bool byte_tracking_enabled
1538 : 431085 : = setup_live_bytes_from_ref (&ref, live_bytes);
1539 : 431085 : store_status = dse_classify_store (&ref, stmt,
1540 : : byte_tracking_enabled,
1541 : : live_bytes);
1542 : 431085 : if (store_status == DSE_STORE_LIVE)
1543 : : return;
1544 : :
1545 : 146518 : if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1546 : : {
1547 : 140130 : maybe_trim_memstar_call (&ref, live_bytes, stmt);
1548 : 140130 : return;
1549 : : }
1550 : :
1551 : 6388 : if (store_status == DSE_STORE_DEAD)
1552 : 6388 : delete_dead_or_redundant_call (gsi, "dead");
1553 : 6388 : return;
1554 : : }
1555 : :
1556 : 1604 : case BUILT_IN_CALLOC:
1557 : : /* We already know the arguments are integer constants. */
1558 : 1604 : dse_optimize_redundant_stores (stmt);
1559 : 1604 : return;
1560 : :
1561 : : default:
1562 : : return;
1563 : : }
1564 : : }
1565 : 33026546 : else if (is_gimple_call (stmt)
1566 : 33026546 : && gimple_call_internal_p (stmt))
1567 : : {
1568 : 2483 : switch (gimple_call_internal_fn (stmt))
1569 : : {
1570 : 1220 : case IFN_LEN_STORE:
1571 : 1220 : case IFN_MASK_STORE:
1572 : 1220 : case IFN_MASK_LEN_STORE:
1573 : 1220 : {
1574 : 1220 : enum dse_store_status store_status;
1575 : 1220 : store_status = dse_classify_store (&ref, stmt, false, live_bytes);
1576 : 1220 : if (store_status == DSE_STORE_DEAD)
1577 : 0 : delete_dead_or_redundant_call (gsi, "dead");
1578 : 1220 : return;
1579 : : }
1580 : : default:;
1581 : : }
1582 : : }
1583 : :
1584 : 33025326 : bool by_clobber_p = false;
1585 : :
1586 : : /* Check if this statement stores zero to a memory location,
1587 : : and if there is a subsequent store of zero to the same
1588 : : memory location. If so, remove the subsequent store. */
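: : /* Hypothetical example (editor's note): after an empty CONSTRUCTOR
: :
: :      struct S s = {};   // zero-fills the whole of s
: :      s.a = 0;           // subsequent zero store to the same memory
: :
: :    the member store adds nothing and is removed as redundant.  */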
1589 : 33025326 : if (gimple_assign_single_p (stmt)
1590 : 33025326 : && initializer_zerop (gimple_assign_rhs1 (stmt)))
1591 : 4298291 : dse_optimize_redundant_stores (stmt);
1592 : :
1593 : : /* Self-assignments are zombies. */
1594 : 33025326 : if (is_gimple_assign (stmt)
1595 : 64681320 : && operand_equal_p (gimple_assign_rhs1 (stmt),
1596 : 31655994 : gimple_assign_lhs (stmt), 0))
1597 : : ;
1598 : : else
1599 : : {
1600 : 33023916 : bool byte_tracking_enabled
1601 : 33023916 : = setup_live_bytes_from_ref (&ref, live_bytes);
1602 : 33023916 : enum dse_store_status store_status;
1603 : 33023916 : store_status = dse_classify_store (&ref, stmt,
1604 : : byte_tracking_enabled,
1605 : : live_bytes, &by_clobber_p);
1606 : 33023916 : if (store_status == DSE_STORE_LIVE)
1607 : : return;
1608 : :
1609 : 29938127 : if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1610 : : {
1611 : 26517972 : maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
1612 : 26517972 : return;
1613 : : }
1614 : : }
1615 : :
1616 : : /* Now we know that use_stmt kills the LHS of stmt. */
1617 : :
1618 : : /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1619 : : another clobber stmt. */
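: : /* Editor's note (hypothetical GIMPLE): given
: :
: :      *this_2(D) ={v} {CLOBBER};   // killed only by ...
: :      *this_2(D) ={v} {CLOBBER};   // ... this second clobber
: :
: :    the first clobber may be deleted; a clobber killed by an ordinary
: :    store is kept as a lifetime marker.  */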
1620 : 3421565 : if (gimple_clobber_p (stmt)
1621 : 3421565 : && !by_clobber_p)
1622 : : return;
1623 : :
1624 : 2457322 : if (is_gimple_call (stmt)
1625 : 2457322 : && (gimple_has_side_effects (stmt)
1626 : 515 : || (stmt_could_throw_p (fun, stmt)
1627 : 5 : && !fun->can_delete_dead_exceptions)))
1628 : : {
1629 : : /* See if we can remove complete call. */
1630 : 37333 : if (dse_optimize_call (gsi, live_bytes))
1631 : : return;
1632 : : /* Make sure we do not remove a return slot we cannot reconstruct
1633 : : later. */
1634 : 37309 : if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
1635 : 37309 : && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
1636 : 15260 : || !poly_int_tree_p
1637 : 15260 : (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
1638 : : return;
1639 : 29894 : if (dump_file && (dump_flags & TDF_DETAILS))
1640 : : {
1641 : 1 : fprintf (dump_file, " Deleted dead store in call LHS: ");
1642 : 1 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1643 : 1 : fprintf (dump_file, "\n");
1644 : : }
1645 : 29894 : gimple_call_set_lhs (stmt, NULL_TREE);
1646 : 29894 : update_stmt (stmt);
1647 : : }
1648 : 2419989 : else if (!stmt_could_throw_p (fun, stmt)
1649 : 2419989 : || fun->can_delete_dead_exceptions)
1650 : 2371884 : delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
1651 : : need_ab_cleanup);
1652 : : }
1653 : :
1654 : : namespace {
1655 : :
1656 : : const pass_data pass_data_dse =
1657 : : {
1658 : : GIMPLE_PASS, /* type */
1659 : : "dse", /* name */
1660 : : OPTGROUP_NONE, /* optinfo_flags */
1661 : : TV_TREE_DSE, /* tv_id */
1662 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
1663 : : 0, /* properties_provided */
1664 : : 0, /* properties_destroyed */
1665 : : 0, /* todo_flags_start */
1666 : : 0, /* todo_flags_finish */
1667 : : };
1668 : :
1669 : : class pass_dse : public gimple_opt_pass
1670 : : {
1671 : : public:
1672 : 1428445 : pass_dse (gcc::context *ctxt)
1673 : 2856890 : : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
1674 : : {}
1675 : :
1676 : : /* opt_pass methods: */
1677 : 1142756 : opt_pass * clone () final override { return new pass_dse (m_ctxt); }
1678 : 285689 : void set_pass_param (unsigned n, bool param) final override
1679 : : {
1680 : 285689 : gcc_assert (n == 0);
1681 : 285689 : use_dr_analysis_p = param;
1682 : 285689 : }
1683 : 5669589 : bool gate (function *) final override { return flag_tree_dse != 0; }
1684 : : unsigned int execute (function *) final override;
1685 : :
1686 : : private:
1687 : : bool use_dr_analysis_p;
1688 : : }; // class pass_dse
1689 : :
1690 : : unsigned int
1691 : 5644957 : pass_dse::execute (function *fun)
1692 : : {
1693 : 5644957 : unsigned todo = 0;
1694 : 5644957 : bool released_def = false;
1695 : :
1696 : 5644957 : need_eh_cleanup = BITMAP_ALLOC (NULL);
1697 : 5644957 : need_ab_cleanup = BITMAP_ALLOC (NULL);
1698 : 5644957 : auto_sbitmap live_bytes (param_dse_max_object_size);
1699 : 5644957 : if (flag_expensive_optimizations && use_dr_analysis_p)
1700 : 961577 : dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;
1701 : :
1702 : 5644957 : renumber_gimple_stmt_uids (fun);
1703 : :
1704 : 5644957 : calculate_dominance_info (CDI_DOMINATORS);
1705 : :
1706 : : /* Dead store elimination is fundamentally a reverse program order walk. */
1707 : 5644957 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
1708 : 5644957 : auto_bitmap exit_bbs;
1709 : 5644957 : bitmap_set_bit (exit_bbs, EXIT_BLOCK);
1710 : 5644957 : edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
1711 : 5644957 : int n = rev_post_order_and_mark_dfs_back_seme (fun, entry,
1712 : : exit_bbs, false, rpo, NULL);
1713 : 54168500 : for (int i = n; i != 0; --i)
1714 : : {
1715 : 48523543 : basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
1716 : 48523543 : gimple_stmt_iterator gsi;
1717 : :
1718 : 97047086 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1719 : : {
1720 : 343644642 : gimple *stmt = gsi_stmt (gsi);
1721 : :
1722 : 471454331 : if (gimple_vdef (stmt))
1723 : 57302445 : dse_optimize_stmt (fun, &gsi, live_bytes);
1724 : 572684394 : else if (def_operand_p
1725 : 286342197 : def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
1726 : : {
1727 : : /* When we remove dead stores make sure to also delete trivially
1728 : : dead SSA defs. */
1729 : 64296017 : if (has_zero_uses (DEF_FROM_PTR (def_p))
1730 : 1994091 : && !gimple_has_side_effects (stmt)
1731 : 1984283 : && !is_ctrl_altering_stmt (stmt)
1732 : 66278221 : && (!stmt_could_throw_p (fun, stmt)
1733 : 88646 : || fun->can_delete_dead_exceptions))
1734 : : {
1735 : 1893677 : if (dump_file && (dump_flags & TDF_DETAILS))
1736 : : {
1737 : 11 : fprintf (dump_file, " Deleted trivially dead stmt: ");
1738 : 11 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1739 : 11 : fprintf (dump_file, "\n");
1740 : : }
1741 : 1893677 : if (gsi_remove (&gsi, true) && need_eh_cleanup)
1742 : 2 : bitmap_set_bit (need_eh_cleanup, bb->index);
1743 : 1893677 : release_defs (stmt);
1744 : 1893677 : released_def = true;
1745 : : }
1746 : : }
1747 : 343644642 : if (gsi_end_p (gsi))
1748 : 559980 : gsi = gsi_last_bb (bb);
1749 : : else
1750 : 735532837 : gsi_prev (&gsi);
1751 : : }
1752 : 48523543 : bool removed_phi = false;
1753 : 68436664 : for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
1754 : : {
1755 : 19913121 : gphi *phi = si.phi ();
1756 : 19913121 : if (has_zero_uses (gimple_phi_result (phi)))
1757 : : {
1758 : 263952 : if (dump_file && (dump_flags & TDF_DETAILS))
1759 : : {
1760 : 0 : fprintf (dump_file, " Deleted trivially dead PHI: ");
1761 : 0 : print_gimple_stmt (dump_file, phi, 0, dump_flags);
1762 : 0 : fprintf (dump_file, "\n");
1763 : : }
1764 : 263952 : remove_phi_node (&si, true);
1765 : 263952 : removed_phi = true;
1766 : 263952 : released_def = true;
1767 : : }
1768 : : else
1769 : 19649169 : gsi_next (&si);
1770 : : }
1771 : 48523543 : if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
1772 : : todo |= TODO_cleanup_cfg;
1773 : : }
1774 : 5644957 : free (rpo);
1775 : :
1776 : : /* Removal of stores may make some EH edges dead. Purge such edges from
1777 : : the CFG as needed. */
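: : /* E.g. (editor's note) deleting a possibly-throwing store can leave
: :    its landing pad unreachable; purging those edges may in turn
: :    require the CFG cleanup requested via TODO_cleanup_cfg.  */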
1778 : 5644957 : if (!bitmap_empty_p (need_eh_cleanup))
1779 : : {
1780 : 390 : gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1781 : 390 : todo |= TODO_cleanup_cfg;
1782 : : }
1783 : 5644957 : if (!bitmap_empty_p (need_ab_cleanup))
1784 : : {
1785 : 4 : gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
1786 : 4 : todo |= TODO_cleanup_cfg;
1787 : : }
1788 : :
1789 : 5644957 : BITMAP_FREE (need_eh_cleanup);
1790 : 5644957 : BITMAP_FREE (need_ab_cleanup);
1791 : :
1792 : 5644957 : if (released_def)
1793 : 613599 : free_numbers_of_iterations_estimates (fun);
1794 : :
1795 : 5644957 : if (flag_expensive_optimizations && use_dr_analysis_p)
1796 : : {
1797 : 1709453 : for (auto i = dse_stmt_to_dr_map->begin ();
1798 : 2457329 : i != dse_stmt_to_dr_map->end (); ++i)
1799 : 747876 : free_data_ref ((*i).second);
1800 : 1923154 : delete dse_stmt_to_dr_map;
1801 : 961577 : dse_stmt_to_dr_map = NULL;
1802 : : }
1803 : :
1804 : 5644957 : return todo;
1805 : 5644957 : }
1806 : :
1807 : : } // anon namespace
1808 : :
1809 : : gimple_opt_pass *
1810 : 285689 : make_pass_dse (gcc::context *ctxt)
1811 : : {
1812 : 285689 : return new pass_dse (ctxt);
1813 : : }