Line data Source code
1 : /* Dead and redundant store elimination
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify
7 : it under the terms of the GNU General Public License as published by
8 : the Free Software Foundation; either version 3, or (at your option)
9 : any later version.
10 :
11 : GCC is distributed in the hope that it will be useful,
12 : but WITHOUT ANY WARRANTY; without even the implied warranty of
13 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 : GNU General Public License for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "rtl.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "tree-pass.h"
28 : #include "ssa.h"
29 : #include "gimple-pretty-print.h"
30 : #include "fold-const.h"
31 : #include "gimple-iterator.h"
32 : #include "tree-cfg.h"
33 : #include "tree-dfa.h"
34 : #include "tree-cfgcleanup.h"
35 : #include "alias.h"
36 : #include "tree-ssa-loop.h"
37 : #include "tree-ssa-dse.h"
38 : #include "builtins.h"
39 : #include "gimple-fold.h"
40 : #include "gimplify.h"
41 : #include "tree-eh.h"
42 : #include "cfganal.h"
43 : #include "cgraph.h"
44 : #include "ipa-modref-tree.h"
45 : #include "ipa-modref.h"
46 : #include "target.h"
47 : #include "tree-ssa-loop-niter.h"
48 : #include "cfgloop.h"
49 : #include "tree-data-ref.h"
50 : #include "internal-fn.h"
51 : #include "tree-ssa.h"
52 :
53 : /* This file implements dead store elimination.
54 :
55 : A dead store is a store into a memory location which will later be
56 : overwritten by another store without any intervening loads. In this
57 : case the earlier store can be deleted or trimmed if the store
58 : was partially dead.
59 :
60 : A redundant store is a store into a memory location which stores
61 : the exact same value as a prior store to the same memory location.
62 : While this can often be handled by dead store elimination, removing
63 : the redundant store is often better than removing or trimming the
64 : dead store.
65 :
66 : In our SSA + virtual operand world we use immediate uses of virtual
67 : operands to detect these cases. If a store's virtual definition
68 : is used precisely once by a later store to the same location which
69 : post dominates the first store, then the first store is dead. If
70 : the data stored is the same, then the second store is redundant.
71 :
72 : The single use of the store's virtual definition ensures that
73 : there are no intervening aliased loads and the requirement that
74 : the second load post dominate the first ensures that if the earlier
75 : store executes, then the later stores will execute before the function
76 : exits.
77 :
78 : It may help to think of this as first moving the earlier store to
79 : the point immediately before the later store. Again, the single
80 : use of the virtual definition and the post-dominance relationship
81 : ensure that such movement would be safe. Clearly if there are
82 : back to back stores, then the second makes the first dead. If
83 : the second store stores the same value, then the second store is
84 : redundant.
85 :
86 : Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
87 : may also help in understanding this code since it discusses the
88 : relationship between dead store and redundant load elimination. In
89 : fact, they are the same transformation applied to different views of
90 : the CFG. */
91 :
92 : static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
93 :
94 : /* Bitmap of blocks that have had EH statements cleaned. We should
95 : remove their dead edges eventually. */
96 : static bitmap need_eh_cleanup;
97 : static bitmap need_ab_cleanup;
98 :
99 : /* STMT is a statement that may write into memory. Analyze it and
100 : initialize WRITE to describe how STMT affects memory. When
101 : MAY_DEF_OK is true then the function initializes WRITE to what
102 : the stmt may define.
103 :
104 : Return TRUE if the statement was analyzed, FALSE otherwise.
105 :
106 : It is always safe to return FALSE. But typically better optimization
107 : can be achieved by analyzing more statements. */
108 :
static bool
initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
{
  /* It's advantageous to handle certain mem* functions.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_STRNCPY_CHK:
	  {
	    /* For all of these builtins argument 0 is the destination
	       pointer and argument 2 the byte count.  */
	    tree size = gimple_call_arg (stmt, 2);
	    tree ptr = gimple_call_arg (stmt, 0);
	    ao_ref_init_from_ptr_and_size (write, ptr, size);
	    return true;
	  }

	/* A calloc call can never be dead, but it can make
	   subsequent stores redundant if they store 0 into
	   the same memory locations.  */
	case BUILT_IN_CALLOC:
	  {
	    tree nelem = gimple_call_arg (stmt, 0);
	    tree selem = gimple_call_arg (stmt, 1);
	    tree lhs;
	    if (TREE_CODE (nelem) == INTEGER_CST
		&& TREE_CODE (selem) == INTEGER_CST
		&& (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
	      {
		/* Total size is NELEM * SELEM; both are constants here
		   so the multiplication folds.  */
		tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
					 nelem, selem);
		ao_ref_init_from_ptr_and_size (write, lhs, size);
		return true;
	      }
	  }
	  /* Falls through to DEFAULT when the calloc arguments are not
	     both constants -- nothing useful can be recorded then.  */

	default:
	  break;
	}
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_internal_p (stmt))
    {
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_LEN_STORE:
	case IFN_MASK_STORE:
	case IFN_MASK_LEN_STORE:
	  {
	    internal_fn ifn = gimple_call_internal_fn (stmt);
	    int stored_value_index = internal_fn_stored_value_index (ifn);
	    int len_index = internal_fn_len_index (ifn);
	    if (ifn == IFN_LEN_STORE)
	      {
		tree len = gimple_call_arg (stmt, len_index);
		tree bias = gimple_call_arg (stmt, len_index + 1);
		if (tree_fits_uhwi_p (len))
		  {
		    /* The stored size is LEN - BIAS bytes.  */
		    ao_ref_init_from_ptr_and_size (write,
						   gimple_call_arg (stmt, 0),
						   int_const_binop (MINUS_EXPR,
								    len, bias));
		    return true;
		  }
	      }
	    /* We cannot initialize a must-def ao_ref (in all cases) but we
	       can provide a may-def variant.  */
	    if (may_def_ok)
	      {
		ao_ref_init_from_ptr_and_range (
		  write, gimple_call_arg (stmt, 0), true, 0, -1,
		  tree_to_poly_int64 (TYPE_SIZE (
		    TREE_TYPE (gimple_call_arg (stmt, stored_value_index)))));
		return true;
	      }
	    break;
	  }
	default:;
	}
    }
  /* Fallback: any statement with a non-SSA LHS is a plain memory store.
     A throwing store only has a must-def when it cannot throw.  */
  if (tree lhs = gimple_get_lhs (stmt))
    {
      if (TREE_CODE (lhs) != SSA_NAME
	  && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
	{
	  ao_ref_init (write, lhs);
	  return true;
	}
    }
  return false;
}
206 :
207 : /* Given REF from the alias oracle, return TRUE if it is a valid
208 : kill memory reference for dead store elimination, false otherwise.
209 :
210 : In particular, the reference must have a known base, known maximum
211 : size, start at a byte offset and have a size that is one or more
212 : bytes. */
213 :
214 : static bool
215 171089993 : valid_ao_ref_kill_for_dse (ao_ref *ref)
216 : {
217 171089993 : return (ao_ref_base (ref)
218 171089993 : && known_size_p (ref->max_size)
219 170777319 : && maybe_ne (ref->size, 0)
220 170759243 : && known_eq (ref->max_size, ref->size)
221 341342984 : && known_ge (ref->offset, 0));
222 : }
223 :
224 : /* Given REF from the alias oracle, return TRUE if it is a valid
225 : load or store memory reference for dead store elimination, false otherwise.
226 :
227 : Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
228 : is not same as size since we can handle conservatively the larger range. */
229 :
230 : static bool
231 37489846 : valid_ao_ref_for_dse (ao_ref *ref)
232 : {
233 37489846 : return (ao_ref_base (ref)
234 37489846 : && known_size_p (ref->max_size)
235 74502545 : && known_ge (ref->offset, 0));
236 : }
237 :
238 : /* Initialize OFFSET and SIZE to a range known to contain REF
239 : where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
240 : Return false if this is impossible. */
241 :
242 : static bool
243 105383329 : get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
244 : HOST_WIDE_INT *size)
245 : {
246 0 : if (!known_size_p (ref->max_size))
247 : return false;
248 105383329 : *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
249 105383329 : poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
250 : BITS_PER_UNIT);
251 105383329 : return (end - *offset).is_constant (size);
252 : }
253 :
254 : /* Initialize OFFSET and SIZE to a range known to be contained in REF
255 : where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
256 : Return false if this is impossible. */
257 :
258 : static bool
259 98397153 : get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
260 : HOST_WIDE_INT *size)
261 : {
262 98397153 : if (!known_size_p (ref->size)
263 98397153 : || !known_eq (ref->size, ref->max_size))
264 : return false;
265 98397153 : *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
266 98397153 : poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
267 : BITS_PER_UNIT);
268 : /* For bit accesses we can get -1 here, but also 0 sized kill is not
269 : useful. */
270 98397153 : if (!known_gt (end, *offset))
271 : return false;
272 98261889 : return (end - *offset).is_constant (size);
273 : }
274 :
275 : /* Compute byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
276 : inside REF. If KILL is true, then COPY represents a kill and the byte range
277 : needs to be fully contained in bit range given by COPY. If KILL is false
278 : then the byte range returned must contain the range of COPY. */
279 :
static bool
get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
		HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
{
  HOST_WIDE_INT copy_size, ref_size;
  poly_int64 copy_offset, ref_offset;
  HOST_WIDE_INT diff;

  /* First translate from bits to bytes, rounding to bigger or smaller ranges
     as needed.  Kills needs to be always rounded to smaller ranges while
     uses and stores to larger ranges.  */
  if (kill)
    {
      if (!get_byte_aligned_range_contained_in_ref (copy, &copy_offset,
						    &copy_size))
	return false;
    }
  else
    {
      if (!get_byte_aligned_range_containing_ref (copy, &copy_offset,
						  &copy_size))
	return false;
    }

  /* ordered_p guards against incomparable poly_int offsets (e.g.
     runtime-variable vector lengths).  */
  if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
      || !ordered_p (copy_offset, ref_offset))
    return false;

  /* Switch sizes from bits to bytes so we do not need to care about
     overflows.  Offset calculation needs to stay in bits until we compute
     the difference and can switch to HOST_WIDE_INT.  */
  copy_size /= BITS_PER_UNIT;
  ref_size /= BITS_PER_UNIT;

  /* If COPY starts before REF, then reset the beginning of
     COPY to match REF and decrease the size of COPY by the
     number of bytes removed from COPY.  */
  if (maybe_lt (copy_offset, ref_offset))
    {
      if (!(ref_offset - copy_offset).is_constant (&diff)
	  || copy_size < diff / BITS_PER_UNIT)
	return false;
      copy_size -= diff / BITS_PER_UNIT;
      copy_offset = ref_offset;
    }

  /* DIFF is now the (bit) distance of COPY's start past REF's start;
     fail if COPY starts at or beyond REF's end.  */
  if (!(copy_offset - ref_offset).is_constant (&diff)
      || ref_size <= diff / BITS_PER_UNIT)
    return false;

  /* If COPY extends beyond REF, chop off its size appropriately.  */
  HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;

  if (copy_size > limit)
    copy_size = limit;
  *ret_size = copy_size;
  if (!(copy_offset - ref_offset).is_constant (ret_offset))
    return false;
  /* The returned offset is a byte offset relative to REF's start.  */
  *ret_offset /= BITS_PER_UNIT;
  return true;
}
341 :
342 : /* Update LIVE_BYTES tracking REF for write to WRITE:
343 : Verify we have the same base memory address, the write
344 : has a known size and overlaps with REF. */
345 : static void
346 171089993 : clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
347 : {
348 171089993 : HOST_WIDE_INT start, size;
349 :
350 171089993 : if (valid_ao_ref_kill_for_dse (write)
351 170252739 : && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
352 269487146 : && get_byte_range (write, ref, true, &start, &size))
353 2591331 : bitmap_clear_range (live_bytes, start, size);
354 171089993 : }
355 :
356 : /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
357 : address written by STMT must match the one found in REF, which must
358 : have its base address previously initialized.
359 :
360 : This routine must be conservative. If we don't know the offset or
361 : actual size written, assume nothing was written. */
362 :
static void
clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
{
  ao_ref write;

  /* For calls, consult the IPA modref summary: its "kills" list
     describes memory the callee always overwrites, which can clear
     live bytes even when the call itself has no analyzable LHS.
     Skip interposable functions since their summary may not match
     the actually-executed body.  */
  if (gcall *call = dyn_cast <gcall *> (stmt))
    {
      bool interposed;
      modref_summary *summary = get_modref_function_summary (call, &interposed);

      if (summary && !interposed)
	for (auto kill : summary->kills)
	  if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
	    clear_live_bytes_for_ref (live_bytes, ref, &write);
    }
  /* Conservative: if we cannot describe what STMT writes, assume it
     writes nothing and leave LIVE_BYTES untouched.  */
  if (!initialize_ao_ref_for_dse (stmt, &write))
    return;

  clear_live_bytes_for_ref (live_bytes, ref, &write);
}
383 :
384 : /* REF is a memory write. Extract relevant information from it and
385 : initialize the LIVE_BYTES bitmap. If successful, return TRUE.
386 : Otherwise return FALSE. */
387 :
388 : static bool
389 31127899 : setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
390 : {
391 31127899 : HOST_WIDE_INT const_size;
392 31127899 : if (valid_ao_ref_for_dse (ref)
393 30676875 : && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
394 30676875 : - aligned_lower_bound (ref->offset,
395 30676875 : BITS_PER_UNIT)).is_constant (&const_size))
396 30676875 : && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
397 61485689 : && const_size > 1)
398 : {
399 30357573 : bitmap_clear (live_bytes);
400 30357573 : bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
401 30357573 : return true;
402 : }
403 : return false;
404 : }
405 :
406 : /* Compute the number of stored bytes that we can trim from the head and
407 : tail of REF. LIVE is the bitmap of stores to REF that are still live.
408 :
409 : Store the number of bytes trimmed from the head and tail in TRIM_HEAD
410 : and TRIM_TAIL respectively.
411 :
412 : STMT is the statement being trimmed and is used for debugging dump
413 : output only. */
414 :
static void
compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
	       gimple *stmt)
{
  *trim_head = 0;
  *trim_tail = 0;

  /* We use bitmaps biased such that ref->offset is contained in bit zero and
     the bitmap extends through ref->max_size, so we know that in the original
     bitmap bits 0 .. ref->max_size were true.  But we need to check that this
     covers the bytes of REF exactly.  */
  const unsigned int offset_align = known_alignment (ref->offset);
  const unsigned int size_align = known_alignment (ref->size);
  /* Bail out on sub-byte offsets/sizes or variable max size: the
     byte-granular bitmap does not describe such refs exactly.  */
  if ((offset_align > 0 && offset_align < BITS_PER_UNIT)
      || (size_align > 0 && size_align < BITS_PER_UNIT)
      || !known_eq (ref->size, ref->max_size))
    return;

  /* Now identify how much, if any of the tail we can chop off.  */
  HOST_WIDE_INT const_size;
  int last_live = bitmap_last_set_bit (live);
  if (ref->size.is_constant (&const_size))
    {
      int last_orig = (const_size / BITS_PER_UNIT) - 1;
      /* We can leave inconvenient amounts on the tail as
	 residual handling in mem* and str* functions is usually
	 reasonably efficient.  */
      *trim_tail = last_orig - last_live;

      /* But don't trim away out of bounds accesses, as this defeats
	 proper warnings.

	 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
	 where TYPE_SIZE_UNIT is not a constant.  */
      if (*trim_tail
	  && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
	  && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
			       last_orig) <= 0)
	*trim_tail = 0;
    }

  /* Identify how much, if any of the head we can chop off.  */
  int first_orig = 0;
  int first_live = bitmap_first_set_bit (live);
  *trim_head = first_live - first_orig;

  /* If REF is aligned, try to maintain this alignment if it reduces
     the number of (power-of-two sized aligned) writes to memory.  */
  unsigned int align_bits;
  unsigned HOST_WIDE_INT bitpos;
  if ((*trim_head || *trim_tail)
      && last_live - first_live >= 2
      && ao_ref_alignment (ref, &align_bits, &bitpos)
      && align_bits >= 32
      && bitpos == 0
      && align_bits % BITS_PER_UNIT == 0)
    {
      unsigned int align_units = align_bits / BITS_PER_UNIT;
      /* Cap at 16 bytes; wider units don't help typical stores.  */
      if (align_units > 16)
	align_units = 16;
      /* Shrink the unit until the live range spans at least one
	 aligned chunk.  */
      while ((first_live | (align_units - 1)) > (unsigned int)last_live)
	align_units >>= 1;

      if (*trim_head)
	{
	  /* Round the head trim down so the trimmed store starts on
	     the largest power-of-two boundary reachable with a single
	     aligned chunk.  */
	  unsigned int pos = first_live & (align_units - 1);
	  for (unsigned int i = 1; i <= align_units; i <<= 1)
	    {
	      unsigned int mask = ~(i - 1);
	      unsigned int bytes = align_units - (pos & mask);
	      if (wi::popcount (bytes) <= 1)
		{
		  *trim_head &= mask;
		  break;
		}
	    }
	}

      if (*trim_tail)
	{
	  /* Symmetrically, reduce the tail trim so the remaining
	     store ends on a power-of-two sized chunk.  */
	  unsigned int pos = last_live & (align_units - 1);
	  for (unsigned int i = 1; i <= align_units; i <<= 1)
	    {
	      int mask = i - 1;
	      unsigned int bytes = (pos | mask) + 1;
	      if ((last_live | mask) > (last_live + *trim_tail))
		break;
	      if (wi::popcount (bytes) <= 1)
		{
		  unsigned int extra = (last_live | mask) - last_live;
		  *trim_tail -= extra;
		  break;
		}
	    }
	}
    }

  if ((*trim_head || *trim_tail) && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
	       *trim_head, *trim_tail);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }
}
521 :
522 : /* STMT initializes an object from COMPLEX_CST where one or more of the bytes
523 : written may be dead stores. REF is a representation of the memory written.
524 : LIVE is the bitmap of stores to REF that are still live.
525 :
526 : Attempt to rewrite STMT so that only the real or the imaginary part of the
527 : object is actually stored. */
528 :
static void
maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int trim_head, trim_tail;
  compute_trims (ref, live, &trim_head, &trim_tail, stmt);

  /* The amount of data trimmed from the head or tail must be at
     least half the size of the object to ensure we're trimming
     the entire real or imaginary half.  By writing things this
     way we avoid more O(n) bitmap operations.  */
  if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_REALPART is live: rewrite the full complex store into a
	 store of just the real component.  */
      tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }
  else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_IMAGPART is live: analogous rewrite for the imaginary
	 component.  */
      tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }

  /* Other cases indicate parts of both the real and imag subobjects
     are live.  We do not try to optimize those cases.  */
}
561 :
562 : /* STMT initializes an object using a CONSTRUCTOR where one or more of the
563 : bytes written are dead stores. REF is a representation of the memory
564 : written. LIVE is the bitmap of stores to REF that are still live.
565 :
566 : Attempt to rewrite STMT so that it writes fewer memory locations.
567 :
568 : The most common case for getting here is a CONSTRUCTOR with no elements
569 : being used to zero initialize an object. We do not try to handle other
570 : cases as those would force us to fully cover the object with the
571 : CONSTRUCTOR node except for the components that are dead.
572 : Also handles integer stores of 0 which can happen with memset/memcpy optimizations. */
573 :
static void
maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt, bool was_integer_cst)
{
  tree ctor = gimple_assign_rhs1 (stmt);

  /* This is the only case we currently handle.  It actually seems to
     catch most cases of actual interest.  WAS_INTEGER_CST means the
     caller matched a zero INTEGER_CST store rather than an empty
     CONSTRUCTOR; both denote "store zeros".  */
  gcc_assert (was_integer_cst ? integer_zerop (ctor) : CONSTRUCTOR_NELTS (ctor) == 0);

  int head_trim = 0;
  int tail_trim = 0;
  compute_trims (ref, live, &head_trim, &tail_trim, stmt);

  /* Now we want to replace the constructor initializer
     with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
  if (head_trim || tail_trim)
    {
      /* We want &lhs for the MEM_REF expression.  */
      tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));

      STRIP_USELESS_TYPE_CONVERSION (lhs_addr);

      /* Only invariant addresses can be folded into the MEM_REF
	 offset form below; give up otherwise.  */
      if (! is_gimple_min_invariant (lhs_addr))
	return;

      /* The number of bytes for the new constructor.  */
      poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
      poly_int64 count = ref_bytes - head_trim - tail_trim;

      /* And the new type for the CONSTRUCTOR.  Essentially it's just
	 a char array large enough to cover the non-trimmed parts of
	 the original CONSTRUCTOR.  Note we want explicit bounds here
	 so that we know how many bytes to clear when expanding the
	 CONSTRUCTOR.  */
      tree type = build_array_type_nelts (char_type_node, count);

      /* Build a suitable alias type rather than using alias set zero
	 to avoid pessimizing.  */
      tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));

      /* Build a MEM_REF representing the whole accessed area, starting
	 at the first byte not trimmed.  */
      tree exp = fold_build2 (MEM_REF, type, lhs_addr,
			      build_int_cst (alias_type, head_trim));

      /* Now update STMT with a new RHS and LHS.  */
      gimple_assign_set_lhs (stmt, exp);
      gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
    }
}
624 :
625 : /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
626 : copied/set by DECREMENT. */
627 : static void
628 766 : decrement_count (gimple *stmt, int decrement)
629 : {
630 766 : tree *countp = gimple_call_arg_ptr (stmt, 2);
631 766 : gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
632 1532 : *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
633 766 : - decrement));
634 766 : }
635 :
/* Advance the pointer argument *WHERE of call STMT by INCREMENT bytes,
   inserting helper statements around STMT as needed.  */

static void
increment_start_addr (gimple *stmt, tree *where, int increment)
{
  /* If the call's result (the destination pointer) is used, detach it
     first: after bumping arg 0 the call would no longer return the
     original destination, so materialize the old value into LHS via a
     separate assignment and drop the call's LHS.  */
  if (tree lhs = gimple_call_lhs (stmt))
    if (where == gimple_call_arg_ptr (stmt, 0))
      {
	gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
	gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
	gimple_call_set_lhs (stmt, NULL_TREE);
	update_stmt (stmt);
      }

  /* SSA pointer: emit TEM = WHERE + INCREMENT before STMT and use TEM.  */
  if (TREE_CODE (*where) == SSA_NAME)
    {
      tree tem = make_ssa_name (TREE_TYPE (*where));
      gassign *newop
	= gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
			       build_int_cst (sizetype, increment));
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
      *where = tem;
      update_stmt (stmt);
      return;
    }

  /* Invariant address: fold the offset directly into the argument as
     &MEM_REF[WHERE + INCREMENT].  */
  *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
					      *where,
					      build_int_cst (ptr_type_node,
							     increment)));
  STRIP_USELESS_TYPE_CONVERSION (*where);
}
668 :
669 : /* STMT is builtin call that writes bytes in bitmap ORIG, some bytes are dead
670 : (ORIG & ~NEW) and need not be stored. Try to rewrite STMT to reduce
671 : the amount of data it actually writes.
672 :
673 : Right now we only support trimming from the head or the tail of the
674 : memory region. In theory we could split the mem* call, but it's
675 : likely of marginal value. */
676 :
static void
maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int head_trim, tail_trim;
  switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
    {
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
      if (head_trim)
	{
	  /* Head trimming of strncpy is only possible if we can
	     prove all bytes we would trim are non-zero (or we could
	     turn the strncpy into memset if there must be zero
	     among the head trimmed bytes).  If we don't know anything
	     about those bytes, the presence or absence of '\0' bytes
	     in there will affect whether it acts for the non-trimmed
	     bytes as memset or memcpy/strncpy.  */
	  c_strlen_data lendata = { };
	  int orig_head_trim = head_trim;
	  tree srcstr = gimple_call_arg (stmt, 1);
	  if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
	      || !tree_fits_uhwi_p (lendata.minlen))
	    head_trim = 0;
	  else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
	    {
	      /* Only the first MINLEN bytes are provably non-zero;
		 restrict the trim to those, and keep word alignment
		 if the original trim had it.  */
	      head_trim = tree_to_uhwi (lendata.minlen);
	      if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
		head_trim &= ~(UNITS_PER_WORD - 1);
	    }
	  if (orig_head_trim != head_trim
	      && dump_file
	      && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Adjusting strncpy trimming to (head = %d,"
		     " tail = %d)\n", head_trim, tail_trim);
	}
      /* Argument adjustment is shared with the memcpy path below.  */
      goto do_memcpy;

    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

    do_memcpy:
      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
	decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
	{
	  /* For __*_chk need to adjust also the last argument.  */
	  if (gimple_call_num_args (stmt) == 4)
	    {
	      tree size = gimple_call_arg (stmt, 3);
	      if (!tree_fits_uhwi_p (size))
		break;
	      if (!integer_all_onesp (size))
		{
		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
		  if (sz < (unsigned) head_trim)
		    break;
		  tree arg = wide_int_to_tree (TREE_TYPE (size),
					       sz - head_trim);
		  gimple_call_set_arg (stmt, 3, arg);
		}
	    }
	  /* Advance both destination and source, then shrink the count.  */
	  tree *dst = gimple_call_arg_ptr (stmt, 0);
	  increment_start_addr (stmt, dst, head_trim);
	  tree *src = gimple_call_arg_ptr (stmt, 1);
	  increment_start_addr (stmt, src, head_trim);
	  decrement_count (stmt, head_trim);
	}
      break;

    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
	decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
	{
	  /* For __*_chk need to adjust also the last argument.  */
	  if (gimple_call_num_args (stmt) == 4)
	    {
	      tree size = gimple_call_arg (stmt, 3);
	      if (!tree_fits_uhwi_p (size))
		break;
	      if (!integer_all_onesp (size))
		{
		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
		  if (sz < (unsigned) head_trim)
		    break;
		  tree arg = wide_int_to_tree (TREE_TYPE (size),
					       sz - head_trim);
		  gimple_call_set_arg (stmt, 3, arg);
		}
	    }
	  /* memset has no source pointer; only dst and count change.  */
	  tree *dst = gimple_call_arg_ptr (stmt, 0);
	  increment_start_addr (stmt, dst, head_trim);
	  decrement_count (stmt, head_trim);
	}
      break;

    default:
      break;
    }
}
791 :
792 : /* STMT is a memory write where one or more bytes written are dead stores.
793 : REF is a representation of the memory written. LIVE is the bitmap of
794 : stores to REF that are still live.
795 :
796 : Attempt to rewrite STMT so that it writes fewer memory locations. Right
797 : now we only support trimming at the start or end of the memory region.
798 : It's not clear how much there is to be gained by trimming from the middle
799 : of the region. */
800 :
801 : static void
802 25976463 : maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
803 : {
804 25976463 : if (is_gimple_assign (stmt)
805 25976463 : && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
806 : {
807 24686579 : switch (gimple_assign_rhs_code (stmt))
808 : {
809 474884 : case CONSTRUCTOR:
810 474884 : maybe_trim_constructor_store (ref, live, stmt, false);
811 474884 : break;
812 5520 : case COMPLEX_CST:
813 5520 : maybe_trim_complex_store (ref, live, stmt);
814 5520 : break;
815 8765210 : case INTEGER_CST:
816 8765210 : if (integer_zerop (gimple_assign_rhs1 (stmt))
817 8765210 : && type_has_mode_precision_p (TREE_TYPE (gimple_assign_lhs (stmt))))
818 2818563 : maybe_trim_constructor_store (ref, live, stmt, true);
819 : break;
820 : default:
821 : break;
822 : }
823 : }
824 25976463 : }
825 :
826 : /* Return TRUE if USE_REF reads bytes from LIVE where live is
827 : derived from REF, a write reference.
828 :
829 : While this routine may modify USE_REF, it's passed by value, not
830 : location. So callers do not see those modifications. */
831 :
832 : static bool
833 3560720 : live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
834 : {
835 : /* We have already verified that USE_REF and REF hit the same object.
836 : Now verify that there's actually an overlap between USE_REF and REF. */
837 3560720 : HOST_WIDE_INT start, size;
838 3560720 : if (get_byte_range (use_ref, ref, false, &start, &size))
839 : {
840 : /* If USE_REF covers all of REF, then it will hit one or more
841 : live bytes. This avoids useless iteration over the bitmap
842 : below. */
843 3560720 : if (start == 0 && known_eq (size * 8, ref->size))
844 : return true;
845 :
846 : /* Now check if any of the remaining bits in use_ref are set in LIVE. */
847 959563 : return bitmap_any_bit_in_range_p (live, start, (start + size - 1));
848 : }
849 : return true;
850 : }
851 :
852 : /* Callback for dse_classify_store calling for_each_index. Verify that
853 : indices are invariant in the loop with backedge PHI in basic-block DATA. */
854 :
855 : static bool
856 2651720 : check_name (tree, tree *idx, void *data)
857 : {
858 2651720 : basic_block phi_bb = (basic_block) data;
859 2651720 : if (TREE_CODE (*idx) == SSA_NAME
860 1773816 : && !SSA_NAME_IS_DEFAULT_DEF (*idx)
861 4298169 : && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
862 : phi_bb))
863 : return false;
864 : return true;
865 : }
866 :
867 : /* STMT stores the value 0 into one or more memory locations
868 : (via memset, empty constructor, calloc call, etc).
869 :
870 : See if there is a subsequent store of the value 0 to one
871 : or more of the same memory location(s). If so, the subsequent
872 : store is redundant and can be removed.
873 :
874 : The subsequent stores could be via memset, empty constructors,
875 : simple MEM stores, etc. */
876 :
static void
dse_optimize_redundant_stores (gimple *stmt)
{
  int cnt = 0;

  /* TBAA state of STMT, if it is a call it is effectively alias-set zero.
     A later store is only deleted when its alias sets are (subsets of)
     these, so no TBAA information is lost by keeping STMT alone.  */
  alias_set_type earlier_set = 0;
  alias_set_type earlier_base_set = 0;
  if (is_gimple_assign (stmt))
    {
      ao_ref lhs_ref;
      ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
      earlier_set = ao_ref_alias_set (&lhs_ref);
      earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
    }

  /* We could do something fairly complex and look through PHIs
     like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
     the effort.

     Look at all the immediate uses of the VDEF (which are obviously
     dominated by STMT).  See if one or more stores 0 into the same
     memory locations as STMT, if so remove the immediate use statements.  */
  tree defvar = gimple_vdef (stmt);
  imm_use_iterator ui;
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
    {
      /* Limit stmt walking.  */
      if (++cnt > param_dse_max_alias_queries_per_store)
	break;

      /* If USE_STMT stores 0 into one or more of the same locations
	 as STMT and STMT would kill USE_STMT, then we can just remove
	 USE_STMT.  Candidates are zero-initializing assignments and
	 memset (_chk) calls with a zero value argument.  */
      tree fndecl;
      if ((is_gimple_assign (use_stmt)
	   && gimple_vdef (use_stmt)
	   && (gimple_assign_single_p (use_stmt)
	       && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
	  || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
	      && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
	      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
	      && integer_zerop (gimple_call_arg (use_stmt, 1))))
	{
	  ao_ref write;

	  if (!initialize_ao_ref_for_dse (use_stmt, &write))
	    break;

	  if (valid_ao_ref_for_dse (&write)
	      && stmt_kills_ref_p (stmt, &write))
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      if (is_gimple_assign (use_stmt))
		{
		  ao_ref lhs_ref;
		  ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
		  /* Only delete USE_STMT if its TBAA info is covered by
		     STMT's (equal or a subset), both for the ref and the
		     base alias set.  */
		  if ((earlier_set == ao_ref_alias_set (&lhs_ref)
		       || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
					       earlier_set))
		      && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
			  || alias_set_subset_of
			       (ao_ref_base_alias_set (&lhs_ref),
				earlier_base_set)))
		    delete_dead_or_redundant_assignment (&gsi, "redundant",
							 need_eh_cleanup,
							 need_ab_cleanup);
		}
	      else if (is_gimple_call (use_stmt))
		{
		  /* Calls are effectively alias-set zero; they can only be
		     removed when STMT's sets are zero-compatible too.  */
		  if ((earlier_set == 0
		       || alias_set_subset_of (0, earlier_set))
		      && (earlier_base_set == 0
			  || alias_set_subset_of (0, earlier_base_set)))
		    delete_dead_or_redundant_call (&gsi, "redundant");
		}
	      else
		gcc_unreachable ();
	    }
	}
    }
}
961 :
962 : /* Return whether PHI contains ARG as an argument. */
963 :
964 : static bool
965 4114671 : contains_phi_arg (gphi *phi, tree arg)
966 : {
967 31004586 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
968 27133720 : if (gimple_phi_arg_def (phi, i) == arg)
969 : return true;
970 : return false;
971 : }
972 :
/* Hash map of the memory use (the RHS) in a GIMPLE assignment to its
   data reference; acts as a cache for the dr_may_alias_p queries done
   in dse_classify_store.  If NULL data-ref analysis isn't used.  */
static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;
976 :
977 : /* A helper of dse_optimize_stmt.
978 : Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
979 : according to downstream uses and defs. Sets *BY_CLOBBER_P to true
980 : if only clobber statements influenced the classification result.
981 : Returns the classification. */
982 :
/* CNT is the shared statement-walk budget and VISITED the shared set of
   PHI-result SSA name versions already processed; both are shared with
   the recursive invocations used when multiple paths are analyzed.  */
static dse_store_status
dse_classify_store (ao_ref *ref, gimple *stmt,
		    bool byte_tracking_enabled, sbitmap live_bytes,
		    bool *by_clobber_p, tree stop_at_vuse, int &cnt,
		    bitmap visited)
{
  gimple *temp;
  std::unique_ptr<data_reference, void(*)(data_reference_p)>
    dra (nullptr, free_data_ref);

  if (by_clobber_p)
    *by_clobber_p = true;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple *use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      if (gimple_code (temp) == GIMPLE_PHI)
	{
	  defvar = PHI_RESULT (temp);
	  bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
	}
      else
	defvar = gimple_vdef (temp);

      /* DEFS collects the downstream stores and PHIs that may need
	 further processing after this round of immediate-use walking.  */
      auto_vec<gimple *, 10> defs;
      gphi *first_phi_def = NULL;
      gphi *last_phi_def = NULL;

      auto_vec<tree, 10> worklist;
      worklist.quick_push (defvar);

      do
	{
	  defvar = worklist.pop ();
	  /* If we're instructed to stop walking at region boundary, do so.  */
	  if (defvar == stop_at_vuse)
	    return DSE_STORE_LIVE;

	  use_operand_p usep;
	  FOR_EACH_IMM_USE_FAST (usep, ui, defvar)
	    {
	      use_stmt = USE_STMT (usep);

	      /* Limit stmt walking.  */
	      if (++cnt > param_dse_max_alias_queries_per_store)
		{
		  fail = true;
		  break;
		}

	      /* In simple cases we can look through PHI nodes, but we
		 have to be careful with loops and with memory references
		 containing operands that are also operands of PHI nodes.
		 See gcc.c-torture/execute/20051110-*.c.  */
	      if (gphi *phi = dyn_cast <gphi *> (use_stmt))
		{
		  /* Look through single-argument PHIs.  */
		  if (gimple_phi_num_args (phi) == 1)
		    worklist.safe_push (gimple_phi_result (phi));
		  else
		    {
		      /* If we visit this PHI by following a backedge then we
			 have to make sure ref->ref only refers to SSA names
			 that are invariant with respect to the loop
			 represented by this PHI node.  We handle irreducible
			 regions by relying on backedge marking and identifying
			 the head of the (sub-)region.  */
		      edge e = gimple_phi_arg_edge
				 (phi, PHI_ARG_INDEX_FROM_USE (usep));
		      if (e->flags & EDGE_DFS_BACK)
			{
			  basic_block rgn_head
			    = nearest_common_dominator (CDI_DOMINATORS,
							gimple_bb (phi),
							e->src);
			  if (!for_each_index (ref->ref
					       ? &ref->ref : &ref->base,
					       check_name, rgn_head))
			    return DSE_STORE_LIVE;
			}
		      /* If we already visited this PHI ignore it for further
			 processing.  But note we have to check each incoming
			 edge above.  */
		      if (!bitmap_bit_p (visited,
					 SSA_NAME_VERSION (PHI_RESULT (phi))))
			{
			  defs.safe_push (phi);
			  if (!first_phi_def)
			    first_phi_def = phi;;
			  last_phi_def = phi;
			}
		    }
		}
	      /* If the statement is a use the store is not dead.  */
	      else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
		{
		  /* When data-ref analysis is available try to disambiguate
		     the read from the store using (cached) data references.  */
		  if (dse_stmt_to_dr_map
		      && ref->ref
		      && is_gimple_assign (use_stmt))
		    {
		      if (!dra)
			dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
						    false, false));
		      bool existed_p;
		      data_reference_p &drb
			= dse_stmt_to_dr_map->get_or_insert (use_stmt,
							     &existed_p);
		      if (!existed_p)
			drb = create_data_ref (NULL, NULL,
					       gimple_assign_rhs1 (use_stmt),
					       use_stmt, false, false);
		      if (!dr_may_alias_p (dra.get (), drb, NULL))
			{
			  if (gimple_vdef (use_stmt))
			    defs.safe_push (use_stmt);
			  continue;
			}
		    }

		  /* Handle common cases where we can easily build an ao_ref
		     structure for USE_STMT and in doing so we find that the
		     references hit non-live bytes and thus can be ignored.

		     TODO: We can also use modref summary to handle calls.  */
		  if (byte_tracking_enabled
		      && is_gimple_assign (use_stmt))
		    {
		      ao_ref use_ref;
		      ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
		      if (valid_ao_ref_for_dse (&use_ref)
			  && operand_equal_p (use_ref.base, ref->base,
					      OEP_ADDRESS_OF)
			  && !live_bytes_read (&use_ref, ref, live_bytes))
			{
			  /* If this is a store, remember it as we possibly
			     need to walk the defs uses.  */
			  if (gimple_vdef (use_stmt))
			    defs.safe_push (use_stmt);
			  continue;
			}
		    }

		  fail = true;
		  break;
		}
	      /* We have visited ourselves already so ignore STMT for the
		 purpose of chaining.  */
	      else if (use_stmt == stmt)
		;
	      /* If this is a store, remember it as we possibly need to walk the
		 defs uses.  */
	      else if (gimple_vdef (use_stmt))
		defs.safe_push (use_stmt);
	    }
	}
      while (!fail && !worklist.is_empty ());

      if (fail)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}

      /* If we didn't find any definition this means the store is dead
	 if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (defs.is_empty ())
	{
	  if (ref_may_alias_global_p (ref, false))
	    {
	      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (defvar));
	      /* Assume that BUILT_IN_UNREACHABLE and BUILT_IN_UNREACHABLE_TRAP
		 do not need to keep (global) memory side-effects live.
		 We do not have virtual operands on BUILT_IN_UNREACHABLE
		 but we can do poor mans reachability when the last
		 definition we want to elide is in the block that ends
		 in such a call.  */
	      if (EDGE_COUNT (def_bb->succs) == 0)
		if (gcall *last = dyn_cast <gcall *> (*gsi_last_bb (def_bb)))
		  if (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
		      || gimple_call_builtin_p (last,
						BUILT_IN_UNREACHABLE_TRAP))
		    {
		      if (by_clobber_p)
			*by_clobber_p = false;
		      return DSE_STORE_DEAD;
		    }
	      return DSE_STORE_LIVE;
	    }

	  if (by_clobber_p)
	    *by_clobber_p = false;
	  return DSE_STORE_DEAD;
	}

      /* Process defs and remove those we need not process further.  */
      for (unsigned i = 0; i < defs.length ();)
	{
	  gimple *def = defs[i];
	  gimple *use_stmt;
	  use_operand_p use_p;
	  tree vdef = (gimple_code (def) == GIMPLE_PHI
		       ? gimple_phi_result (def) : gimple_vdef (def));
	  gphi *phi_def;
	  /* If the path to check starts with a kill we do not need to
	     process it further.
	     ??? With byte tracking we need only kill the bytes currently
	     live.  */
	  if (stmt_kills_ref_p (def, ref))
	    {
	      if (by_clobber_p && !gimple_clobber_p (def))
		*by_clobber_p = false;
	      defs.unordered_remove (i);
	    }
	  /* If the path ends here we do not need to process it further.
	     This for example happens with calls to noreturn functions.  */
	  else if (has_zero_uses (vdef))
	    {
	      /* But if the store is to global memory it is definitely
		 not dead.  */
	      if (ref_may_alias_global_p (ref, false))
		return DSE_STORE_LIVE;
	      defs.unordered_remove (i);
	    }
	  /* In addition to kills we can remove defs whose only use
	     is another def in defs.  That can only ever be PHIs of which
	     we track two for simplicity reasons, the first and last in
	     {first,last}_phi_def (we fail for multiple PHIs anyways).
	     We can also ignore defs that feed only into
	     already visited PHIs.  */
	  else if (single_imm_use (vdef, &use_p, &use_stmt)
		   && (use_stmt == first_phi_def
		       || use_stmt == last_phi_def
		       || (gimple_code (use_stmt) == GIMPLE_PHI
			   && bitmap_bit_p (visited,
					    SSA_NAME_VERSION
					      (PHI_RESULT (use_stmt))))))
	    {
	      defs.unordered_remove (i);
	      if (def == first_phi_def)
		first_phi_def = NULL;
	      else if (def == last_phi_def)
		last_phi_def = NULL;
	    }
	  /* If def is a PHI and one of its arguments is another PHI node still
	     in consideration we can defer processing it.  */
	  else if ((phi_def = dyn_cast <gphi *> (def))
		   && ((last_phi_def
			&& phi_def != last_phi_def
			&& contains_phi_arg (phi_def,
					     gimple_phi_result (last_phi_def)))
		       || (first_phi_def
			   && phi_def != first_phi_def
			   && contains_phi_arg
				(phi_def, gimple_phi_result (first_phi_def)))))
	    {
	      defs.unordered_remove (i);
	      if (phi_def == first_phi_def)
		first_phi_def = NULL;
	      else if (phi_def == last_phi_def)
		last_phi_def = NULL;
	    }
	  else
	    ++i;
	}

      /* If all defs kill the ref we are done.  */
      if (defs.is_empty ())
	return DSE_STORE_DEAD;
      /* If more than one def survives we have to analyze multiple
	 paths.  We can handle this by recursing, sharing 'visited'
	 to avoid redundant work and limiting it by shared 'cnt'.
	 For now do not bother with byte-tracking in this case.  */
      while (defs.length () > 1)
	{
	  if (dse_classify_store (ref, defs.last (), false, NULL,
				  by_clobber_p, stop_at_vuse, cnt,
				  visited) != DSE_STORE_DEAD)
	    break;
	  byte_tracking_enabled = false;
	  defs.pop ();
	}
      /* If more than one def survives fail.  */
      if (defs.length () > 1)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}
      /* Exactly one def remains; continue the walk from it.  */
      temp = defs[0];

      /* Track partial kills.  */
      if (byte_tracking_enabled)
	{
	  clear_bytes_written_by (live_bytes, temp, ref);
	  if (bitmap_empty_p (live_bytes))
	    {
	      if (by_clobber_p && !gimple_clobber_p (temp))
		*by_clobber_p = false;
	      return DSE_STORE_DEAD;
	    }
	}
    }
  /* Continue walking until there are no more live bytes.  */
  while (1);
}
1303 :
1304 : dse_store_status
1305 31150057 : dse_classify_store (ao_ref *ref, gimple *stmt,
1306 : bool byte_tracking_enabled, sbitmap live_bytes,
1307 : bool *by_clobber_p, tree stop_at_vuse)
1308 : {
1309 31150057 : int cnt = 0;
1310 31150057 : auto_bitmap visited;
1311 31150057 : return dse_classify_store (ref, stmt, byte_tracking_enabled, live_bytes,
1312 31150057 : by_clobber_p, stop_at_vuse, cnt, visited);
1313 31150057 : }
1314 :
1315 :
1316 : /* Delete a dead call at GSI, which is mem* call of some kind. */
1317 : static void
1318 6661 : delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
1319 : {
1320 6661 : gimple *stmt = gsi_stmt (*gsi);
1321 6661 : if (dump_file && (dump_flags & TDF_DETAILS))
1322 : {
1323 18 : fprintf (dump_file, " Deleted %s call: ", type);
1324 18 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1325 18 : fprintf (dump_file, "\n");
1326 : }
1327 :
1328 6661 : basic_block bb = gimple_bb (stmt);
1329 6661 : tree lhs = gimple_call_lhs (stmt);
1330 6661 : if (lhs)
1331 : {
1332 1216 : tree ptr = gimple_call_arg (stmt, 0);
1333 1216 : gimple *new_stmt = gimple_build_assign (lhs, ptr);
1334 1216 : unlink_stmt_vdef (stmt);
1335 1216 : if (gsi_replace (gsi, new_stmt, true))
1336 390 : bitmap_set_bit (need_eh_cleanup, bb->index);
1337 : }
1338 : else
1339 : {
1340 : /* Then we need to fix the operand of the consuming stmt. */
1341 5445 : unlink_stmt_vdef (stmt);
1342 :
1343 : /* Remove the dead store. */
1344 5445 : if (gsi_remove (gsi, true))
1345 3 : bitmap_set_bit (need_eh_cleanup, bb->index);
1346 5445 : release_defs (stmt);
1347 : }
1348 6661 : }
1349 :
1350 : /* Delete a dead store at GSI, which is a gimple assignment. */
1351 :
1352 : void
1353 2048207 : delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
1354 : const char *type,
1355 : bitmap need_eh_cleanup,
1356 : bitmap need_ab_cleanup)
1357 : {
1358 2048207 : gimple *stmt = gsi_stmt (*gsi);
1359 2048207 : if (dump_file && (dump_flags & TDF_DETAILS))
1360 : {
1361 111 : fprintf (dump_file, " Deleted %s store: ", type);
1362 111 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1363 111 : fprintf (dump_file, "\n");
1364 : }
1365 :
1366 : /* Then we need to fix the operand of the consuming stmt. */
1367 2048207 : unlink_stmt_vdef (stmt);
1368 :
1369 : /* Remove the dead store. */
1370 2048207 : basic_block bb = gimple_bb (stmt);
1371 2048207 : if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
1372 4 : bitmap_set_bit (need_ab_cleanup, bb->index);
1373 2048207 : if (gsi_remove (gsi, true) && need_eh_cleanup)
1374 91 : bitmap_set_bit (need_eh_cleanup, bb->index);
1375 :
1376 : /* And release any SSA_NAMEs set in this statement back to the
1377 : SSA_NAME manager. */
1378 2048207 : release_defs (stmt);
1379 2048207 : }
1380 :
1381 : /* Try to prove, using modref summary, that all memory written to by a call is
1382 : dead and remove it. Assume that if return value is written to memory
1383 : it is already proved to be dead. */
1384 :
static bool
dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  /* Only direct calls to known declarations can be analyzed.  */
  gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));

  if (!stmt)
    return false;

  tree callee = gimple_call_fndecl (stmt);

  if (!callee)
    return false;

  /* Pure/const functions are optimized by normal DCE
     or handled as store above.  */
  int flags = gimple_call_flags (stmt);
  if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
      && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
    return false;

  cgraph_node *node = cgraph_node::get (callee);
  if (!node)
    return false;

  /* Keep calls whose EH side-effects must be preserved and noreturn
     calls that alter control flow.  */
  if ((stmt_could_throw_p (cfun, stmt)
       && !cfun->can_delete_dead_exceptions)
      || ((gimple_call_flags (stmt) & ECF_NORETURN)
	  && gimple_call_ctrl_altering_p (stmt)))
    return false;

  /* If return value is used the call is not dead (debug uses do not
     count).  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      imm_use_iterator ui;
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
	if (!is_gimple_debug (use_stmt))
	  return false;
    }

  /* Verify that there are no side-effects except for return value
     and memory writes tracked by modref.  */
  modref_summary *summary = get_modref_function_summary (node);
  if (!summary || !summary->try_dse)
    return false;

  bool by_clobber_p = false;

  /* Walk all memory writes and verify that they are dead.  */
  for (auto base_node : summary->stores->bases)
    for (auto ref_node : base_node->refs)
      for (auto access_node : ref_node->accesses)
	{
	  tree arg = access_node.get_call_arg (stmt);

	  if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* A store through a null pointer in an address space where
	     address zero is invalid can never be live.  */
	  if (integer_zerop (arg)
	      && !targetm.addr_space.zero_address_valid
		    (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
	    continue;

	  ao_ref ref;

	  if (!access_node.get_ao_ref (stmt, &ref))
	    return false;
	  ref.ref_alias_set = ref_node->ref;
	  ref.base_alias_set = base_node->base;

	  bool byte_tracking_enabled
	    = setup_live_bytes_from_ref (&ref, live_bytes);
	  enum dse_store_status store_status;

	  store_status = dse_classify_store (&ref, stmt,
					     byte_tracking_enabled,
					     live_bytes, &by_clobber_p);
	  if (store_status != DSE_STORE_DEAD)
	    return false;
	}
  /* Every tracked write is dead and the return value is unused, so the
     whole call can go.  */
  delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
				       need_ab_cleanup);
  return true;
}
1470 :
1471 : /* Attempt to eliminate dead stores in the statement referenced by BSI.
1472 :
1473 : A dead store is a store into a memory location which will later be
1474 : overwritten by another store without any intervening loads. In this
1475 : case the earlier store can be deleted.
1476 :
1477 : In our SSA + virtual operand world we use immediate uses of virtual
1478 : operands to detect dead stores. If a store's virtual definition
1479 : is used precisely once by a later store to the same location which
1480 : post dominates the first store, then the first store is dead. */
1481 :
static void
dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  ao_ref ref;
  /* If this is not a store we can still remove dead call using
     modref summary.  Note we specifically allow ref to be initialized
     to a conservative may-def since we are looking for followup stores
     to kill all of it.  */
  if (!initialize_ao_ref_for_dse (stmt, &ref, true))
    {
      dse_optimize_call (gsi, live_bytes);
      return;
    }

  /* We know we have virtual definitions.  We can handle assignments and
     some builtin calls.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      && !gimple_call_ctrl_altering_p (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMSET:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    /* Occasionally calls with an explicit length of zero
	       show up in the IL.  It's pointless to do analysis
	       on them, they're trivially dead.  */
	    tree size = gimple_call_arg (stmt, 2);
	    if (integer_zerop (size))
	      {
		delete_dead_or_redundant_call (gsi, "dead");
		return;
	      }

	    /* If this is a memset call that initializes an object
	       to zero, it may be redundant with an earlier memset
	       or empty CONSTRUCTOR of a larger object.  */
	    if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
		 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
		&& integer_zerop (gimple_call_arg (stmt, 1)))
	      dse_optimize_redundant_stores (stmt);

	    enum dse_store_status store_status;
	    bool byte_tracking_enabled
	      = setup_live_bytes_from_ref (&ref, live_bytes);
	    store_status = dse_classify_store (&ref, stmt,
					       byte_tracking_enabled,
					       live_bytes);
	    if (store_status == DSE_STORE_LIVE)
	      return;

	    /* Partially dead mem* calls can have their head or tail
	       trimmed instead of being removed.  */
	    if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	      {
		maybe_trim_memstar_call (&ref, live_bytes, stmt);
		return;
	      }

	    if (store_status == DSE_STORE_DEAD)
	      delete_dead_or_redundant_call (gsi, "dead");
	    return;
	  }

	case BUILT_IN_CALLOC:
	  /* We already know the arguments are integer constants.  */
	  dse_optimize_redundant_stores (stmt);
	  return;

	default:
	  return;
	}
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_internal_p (stmt))
    {
      switch (gimple_call_internal_fn (stmt))
	{
	/* For internal masked/length stores only full removal is
	   attempted, no byte tracking or trimming.  */
	case IFN_LEN_STORE:
	case IFN_MASK_STORE:
	case IFN_MASK_LEN_STORE:
	  {
	    enum dse_store_status store_status;
	    store_status = dse_classify_store (&ref, stmt, false, live_bytes);
	    if (store_status == DSE_STORE_DEAD)
	      delete_dead_or_redundant_call (gsi, "dead");
	    return;
	  }
	default:;
	}
    }

  bool by_clobber_p = false;

  /* Check if this statement stores zero to a memory location,
     and if there is a subsequent store of zero to the same
     memory location.  If so, remove the subsequent store.  */
  if (gimple_assign_single_p (stmt)
      && initializer_zerop (gimple_assign_rhs1 (stmt)))
    dse_optimize_redundant_stores (stmt);

  /* Self-assignments are zombies.  */
  if (is_gimple_assign (stmt)
      && operand_equal_p (gimple_assign_rhs1 (stmt),
			  gimple_assign_lhs (stmt), 0))
    ;
  else
    {
      bool byte_tracking_enabled
	= setup_live_bytes_from_ref (&ref, live_bytes);
      enum dse_store_status store_status;
      store_status = dse_classify_store (&ref, stmt,
					 byte_tracking_enabled,
					 live_bytes, &by_clobber_p);
      if (store_status == DSE_STORE_LIVE)
	return;

      if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	{
	  maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
	  return;
	}
    }

  /* Now we know that use_stmt kills the LHS of stmt.  */

  /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
     another clobber stmt.  */
  if (gimple_clobber_p (stmt)
      && !by_clobber_p)
    return;

  if (is_gimple_call (stmt)
      && (gimple_has_side_effects (stmt)
	  || (stmt_could_throw_p (fun, stmt)
	      && !fun->can_delete_dead_exceptions)))
    {
      /* See if we can remove complete call.  */
      if (dse_optimize_call (gsi, live_bytes))
	return;
      /* Make sure we do not remove a return slot we cannot reconstruct
	 later.  */
      if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
	  && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
	      || !poly_int_tree_p
		    (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
	return;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " Deleted dead store in call LHS: ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
      /* The call itself must stay for its side-effects; only its dead
	 stored LHS is dropped.  */
      gimple_call_set_lhs (stmt, NULL_TREE);
      update_stmt (stmt);
    }
  else if (!stmt_could_throw_p (fun, stmt)
	   || fun->can_delete_dead_exceptions)
    delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
					 need_ab_cleanup);
}
1656 :
1657 : namespace {
1658 :
/* Pass descriptor for the GIMPLE dead store elimination pass.  */
const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1671 :
class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_dse (m_ctxt); }
  /* The single pass parameter selects whether data-reference analysis
     is used to disambiguate loads (see dse_stmt_to_dr_map).  */
  void set_pass_param (unsigned n, bool param) final override
    {
      gcc_assert (n == 0);
      use_dr_analysis_p = param;
    }
  bool gate (function *) final override { return flag_tree_dse != 0; }
  unsigned int execute (function *) final override;

private:
  /* Whether execute () may build data references for uses to refine
     alias queries; only takes effect together with
     flag_expensive_optimizations.  */
  bool use_dr_analysis_p;
}; // class pass_dse
1692 :
/* Main entry point of the DSE pass.  Walks all basic blocks of FUN in
   reverse program order, invoking dse_optimize_stmt on every statement
   with a virtual definition (i.e. every store) and opportunistically
   deleting trivially dead scalar SSA definitions and PHIs exposed by
   the removals.  Returns a TODO_* mask for the pass manager.  */
unsigned int
pass_dse::execute (function *fun)
{
  unsigned todo = 0;
  bool released_def = false;

  /* Global bitmaps (declared elsewhere in this file) collecting blocks
     that need EH- resp. abnormal-edge cleanup after stmt removal.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
  /* Scratch bitmap reused by dse_optimize_stmt to track which bytes of
     a stored object are still live; sized by --param dse-max-object-size.  */
  auto_sbitmap live_bytes (param_dse_max_object_size);
  if (flag_expensive_optimizations && use_dr_analysis_p)
    dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;

  /* Give statements fresh UIDs so the analysis can compare stmt ages.  */
  renumber_gimple_stmt_uids (fun);

  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a reverse program order walk.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
  auto_bitmap exit_bbs;
  bitmap_set_bit (exit_bbs, EXIT_BLOCK);
  edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
  int n = rev_post_order_and_mark_dfs_back_seme (fun, entry,
						 exit_bbs, false, rpo, NULL);
  /* Iterate the RPO array backwards so blocks are seen in reverse
     program order, and statements within each block last-to-first.  */
  for (int i = n; i != 0; --i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
      gimple_stmt_iterator gsi;

      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (gimple_vdef (stmt))
	    dse_optimize_stmt (fun, &gsi, live_bytes);
	  else if (def_operand_p
		     def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
	    {
	      /* When we remove dead stores make sure to also delete trivially
		 dead SSA defs.  */
	      if (has_zero_uses (DEF_FROM_PTR (def_p))
		  && !gimple_has_side_effects (stmt)
		  && !is_ctrl_altering_stmt (stmt)
		  && (!stmt_could_throw_p (fun, stmt)
		      || fun->can_delete_dead_exceptions))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "  Deleted trivially dead stmt: ");
		      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		      fprintf (dump_file, "\n");
		    }
		  /* gsi_remove returns true if EH info may need updating.  */
		  if (gsi_remove (&gsi, true) && need_eh_cleanup)
		    bitmap_set_bit (need_eh_cleanup, bb->index);
		  release_defs (stmt);
		  released_def = true;
		}
	    }
	  /* dse_optimize_stmt/gsi_remove may have removed the stmt the
	     iterator pointed at; if it now walked off the sequence,
	     restart from the (new) last stmt, otherwise step backwards.  */
	  if (gsi_end_p (gsi))
	    gsi = gsi_last_bb (bb);
	  else
	    gsi_prev (&gsi);
	}
      /* Clean up PHIs whose results became unused during the walk.  */
      bool removed_phi = false;
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
	{
	  gphi *phi = si.phi ();
	  if (has_zero_uses (gimple_phi_result (phi)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "  Deleted trivially dead PHI: ");
		  print_gimple_stmt (dump_file, phi, 0, dump_flags);
		  fprintf (dump_file, "\n");
		}
	      remove_phi_node (&si, true);
	      removed_phi = true;
	      released_def = true;
	    }
	  else
	    gsi_next (&si);
	}
      /* A block stripped of all PHIs may become mergeable with its
	 predecessor; request CFG cleanup to exploit that.  */
      if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
	todo |= TODO_cleanup_cfg;
    }
  free (rpo);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      todo |= TODO_cleanup_cfg;
    }
  if (!bitmap_empty_p (need_ab_cleanup))
    {
      gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
      todo |= TODO_cleanup_cfg;
    }

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  /* Cached niter estimates may reference SSA names we released.  */
  if (released_def)
    free_numbers_of_iterations_estimates (fun);

  /* Release the data references cached for this run, if any.  */
  if (flag_expensive_optimizations && use_dr_analysis_p)
    {
      for (auto i = dse_stmt_to_dr_map->begin ();
	   i != dse_stmt_to_dr_map->end (); ++i)
	free_data_ref ((*i).second);
      delete dse_stmt_to_dr_map;
      dse_stmt_to_dr_map = NULL;
    }

  return todo;
}
1809 :
1810 : } // anon namespace
1811 :
1812 : gimple_opt_pass *
1813 288047 : make_pass_dse (gcc::context *ctxt)
1814 : {
1815 288047 : return new pass_dse (ctxt);
1816 : }
|