Branch data Line data Source code
1 : : /* Dead and redundant store elimination
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify
7 : : it under the terms of the GNU General Public License as published by
8 : : the Free Software Foundation; either version 3, or (at your option)
9 : : any later version.
10 : :
11 : : GCC is distributed in the hope that it will be useful,
12 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
13 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 : : GNU General Public License for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : #include "config.h"
21 : : #include "system.h"
22 : : #include "coretypes.h"
23 : : #include "backend.h"
24 : : #include "rtl.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "ssa.h"
29 : : #include "gimple-pretty-print.h"
30 : : #include "fold-const.h"
31 : : #include "gimple-iterator.h"
32 : : #include "tree-cfg.h"
33 : : #include "tree-dfa.h"
34 : : #include "tree-cfgcleanup.h"
35 : : #include "alias.h"
36 : : #include "tree-ssa-loop.h"
37 : : #include "tree-ssa-dse.h"
38 : : #include "builtins.h"
39 : : #include "gimple-fold.h"
40 : : #include "gimplify.h"
41 : : #include "tree-eh.h"
42 : : #include "cfganal.h"
43 : : #include "cgraph.h"
44 : : #include "ipa-modref-tree.h"
45 : : #include "ipa-modref.h"
46 : : #include "target.h"
47 : : #include "tree-ssa-loop-niter.h"
48 : : #include "cfgloop.h"
49 : : #include "tree-data-ref.h"
50 : : #include "internal-fn.h"
51 : : #include "tree-ssa.h"
52 : :
53 : : /* This file implements dead store elimination.
54 : :
55 : : A dead store is a store into a memory location which will later be
56 : : overwritten by another store without any intervening loads. In this
57 : : case the earlier store can be deleted or trimmed if the store
58 : : was partially dead.
59 : :
60 : : A redundant store is a store into a memory location which stores
61 : : the exact same value as a prior store to the same memory location.
62 : : While this can often be handled by dead store elimination, removing
63 : : the redundant store is often better than removing or trimming the
64 : : dead store.
65 : :
66 : : In our SSA + virtual operand world we use immediate uses of virtual
67 : : operands to detect these cases. If a store's virtual definition
68 : : is used precisely once by a later store to the same location which
69 : : post dominates the first store, then the first store is dead. If
70 : : the data stored is the same, then the second store is redundant.
71 : :
72 : : The single use of the store's virtual definition ensures that
73 : : there are no intervening aliased loads and the requirement that
 74 : : the second store post dominates the first ensures that if the earlier
75 : : store executes, then the later stores will execute before the function
76 : : exits.
77 : :
78 : : It may help to think of this as first moving the earlier store to
79 : : the point immediately before the later store. Again, the single
80 : : use of the virtual definition and the post-dominance relationship
81 : : ensure that such movement would be safe. Clearly if there are
 82 : : back-to-back stores, then the second makes the first dead. If
83 : : the second store stores the same value, then the second store is
84 : : redundant.
85 : :
86 : : Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
87 : : may also help in understanding this code since it discusses the
88 : : relationship between dead store and redundant load elimination. In
89 : : fact, they are the same transformation applied to different views of
90 : : the CFG. */
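An illustrative sketch of the two cases defined above, assuming plain C user code; the function and variable names are invented for this example and do not appear in GCC:

	int
	example (int *p)
	{
	  p[0] = 1;	/* Dead store: overwritten below with no intervening load,
			   so this earlier store can be deleted.  */
	  p[0] = 2;

	  p[1] = 3;
	  p[1] = 3;	/* Redundant store: stores the same value as the prior store
			   to the same location; per the description above it is this
			   later store that is better to remove.  */
	  return p[0] + p[1];
	}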
91 : :
92 : : static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
93 : :
94 : : /* Bitmap of blocks that have had EH statements cleaned. We should
95 : : remove their dead edges eventually. */
96 : : static bitmap need_eh_cleanup;
97 : : static bitmap need_ab_cleanup;
98 : :
99 : : /* STMT is a statement that may write into memory. Analyze it and
100 : : initialize WRITE to describe how STMT affects memory. When
101 : : MAY_DEF_OK is true then the function initializes WRITE to what
102 : : the stmt may define.
103 : :
104 : : Return TRUE if the statement was analyzed, FALSE otherwise.
105 : :
 106 : : It is always safe to return FALSE. But typically better optimization
107 : : can be achieved by analyzing more statements. */
108 : :
109 : : static bool
110 : 214864100 : initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
111 : : {
112 : : /* It's advantageous to handle certain mem* functions. */
113 : 214864100 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
114 : : {
115 : 4680716 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
116 : : {
117 : 945242 : case BUILT_IN_MEMCPY:
118 : 945242 : case BUILT_IN_MEMMOVE:
119 : 945242 : case BUILT_IN_MEMSET:
120 : 945242 : case BUILT_IN_MEMCPY_CHK:
121 : 945242 : case BUILT_IN_MEMMOVE_CHK:
122 : 945242 : case BUILT_IN_MEMSET_CHK:
123 : 945242 : case BUILT_IN_STRNCPY:
124 : 945242 : case BUILT_IN_STRNCPY_CHK:
125 : 945242 : {
126 : 945242 : tree size = gimple_call_arg (stmt, 2);
127 : 945242 : tree ptr = gimple_call_arg (stmt, 0);
128 : 945242 : ao_ref_init_from_ptr_and_size (write, ptr, size);
129 : 945242 : return true;
130 : : }
131 : :
132 : : /* A calloc call can never be dead, but it can make
133 : : subsequent stores redundant if they store 0 into
134 : : the same memory locations. */
135 : 2749 : case BUILT_IN_CALLOC:
136 : 2749 : {
137 : 2749 : tree nelem = gimple_call_arg (stmt, 0);
138 : 2749 : tree selem = gimple_call_arg (stmt, 1);
139 : 2749 : tree lhs;
140 : 2749 : if (TREE_CODE (nelem) == INTEGER_CST
141 : 2234 : && TREE_CODE (selem) == INTEGER_CST
142 : 4839 : && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
143 : : {
144 : 2087 : tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
145 : : nelem, selem);
146 : 2087 : ao_ref_init_from_ptr_and_size (write, lhs, size);
147 : 2087 : return true;
148 : : }
149 : : }
150 : :
151 : : default:
152 : : break;
153 : : }
154 : : }
155 : 210183384 : else if (is_gimple_call (stmt)
156 : 210183384 : && gimple_call_internal_p (stmt))
157 : : {
158 : 190415 : switch (gimple_call_internal_fn (stmt))
159 : : {
160 : 996 : case IFN_LEN_STORE:
161 : 996 : case IFN_MASK_STORE:
162 : 996 : case IFN_MASK_LEN_STORE:
163 : 996 : {
164 : 996 : internal_fn ifn = gimple_call_internal_fn (stmt);
165 : 996 : int stored_value_index = internal_fn_stored_value_index (ifn);
166 : 996 : int len_index = internal_fn_len_index (ifn);
167 : 996 : if (ifn == IFN_LEN_STORE)
168 : : {
169 : 0 : tree len = gimple_call_arg (stmt, len_index);
170 : 0 : tree bias = gimple_call_arg (stmt, len_index + 1);
171 : 0 : if (tree_fits_uhwi_p (len))
172 : : {
173 : 0 : ao_ref_init_from_ptr_and_size (write,
174 : : gimple_call_arg (stmt, 0),
175 : : int_const_binop (MINUS_EXPR,
176 : : len, bias));
177 : 0 : return true;
178 : : }
179 : : }
180 : : /* We cannot initialize a must-def ao_ref (in all cases) but we
181 : : can provide a may-def variant. */
182 : 996 : if (may_def_ok)
183 : : {
184 : 958 : ao_ref_init_from_ptr_and_size (
185 : : write, gimple_call_arg (stmt, 0),
186 : 958 : TYPE_SIZE_UNIT (
187 : : TREE_TYPE (gimple_call_arg (stmt, stored_value_index))));
188 : 958 : return true;
189 : : }
190 : : break;
191 : : }
192 : : default:;
193 : : }
194 : : }
195 : 213915813 : if (tree lhs = gimple_get_lhs (stmt))
196 : : {
197 : 200689229 : if (TREE_CODE (lhs) != SSA_NAME
198 : 200689229 : && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
199 : : {
200 : 185406782 : ao_ref_init (write, lhs);
201 : 185406782 : return true;
202 : : }
203 : : }
204 : : return false;
205 : : }
206 : :
207 : : /* Given REF from the alias oracle, return TRUE if it is a valid
208 : : kill memory reference for dead store elimination, false otherwise.
209 : :
210 : : In particular, the reference must have a known base, known maximum
211 : : size, start at a byte offset and have a size that is one or more
212 : : bytes. */
213 : :
214 : : static bool
215 : 155353418 : valid_ao_ref_kill_for_dse (ao_ref *ref)
216 : : {
217 : 155353418 : return (ao_ref_base (ref)
218 : 155353418 : && known_size_p (ref->max_size)
219 : 155114213 : && maybe_ne (ref->size, 0)
220 : 155096819 : && known_eq (ref->max_size, ref->size)
221 : 309905775 : && known_ge (ref->offset, 0));
222 : : }
223 : :
224 : : /* Given REF from the alias oracle, return TRUE if it is a valid
225 : : load or store memory reference for dead store elimination, false otherwise.
226 : :
227 : : Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
 228 : : is not the same as size, since we can conservatively handle the larger range. */
229 : :
230 : : static bool
231 : 36016355 : valid_ao_ref_for_dse (ao_ref *ref)
232 : : {
233 : 36016355 : return (ao_ref_base (ref)
234 : 36016355 : && known_size_p (ref->max_size)
235 : 71631341 : && known_ge (ref->offset, 0));
236 : : }
237 : :
238 : : /* Initialize OFFSET and SIZE to a range known to contain REF
 239 : : where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
240 : : Return false if this is impossible. */
241 : :
242 : : static bool
243 : 98831846 : get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
244 : : HOST_WIDE_INT *size)
245 : : {
246 : 0 : if (!known_size_p (ref->max_size))
247 : : return false;
248 : 98831846 : *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
249 : 98831846 : poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
250 : : BITS_PER_UNIT);
251 : 98831846 : return (end - *offset).is_constant (size);
252 : : }
253 : :
 254 : : /* Initialize OFFSET and SIZE to a range known to be contained in REF
255 : : where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
256 : : Return false if this is impossible. */
257 : :
258 : : static bool
259 : 91523513 : get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
260 : : HOST_WIDE_INT *size)
261 : : {
262 : 91523513 : if (!known_size_p (ref->size)
263 : 91523513 : || !known_eq (ref->size, ref->max_size))
264 : : return false;
265 : 91523513 : *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
266 : 91523513 : poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
267 : : BITS_PER_UNIT);
 268 : : /* For bit accesses we can get -1 here, but also a zero-sized kill is not
269 : : useful. */
270 : 91523513 : if (!known_gt (end, *offset))
271 : : return false;
272 : 91429258 : return (end - *offset).is_constant (size);
273 : : }
274 : :
 275 : : /* Compute the byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
 276 : : inside REF. If KILL is true, then COPY represents a kill and the byte range
 277 : : needs to be fully contained in the bit range given by COPY. If KILL is false
278 : : then the byte range returned must contain the range of COPY. */
279 : :
280 : : static bool
281 : 95224807 : get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
282 : : HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
283 : : {
284 : 95224807 : HOST_WIDE_INT copy_size, ref_size;
285 : 95224807 : poly_int64 copy_offset, ref_offset;
286 : 95224807 : HOST_WIDE_INT diff;
287 : :
288 : : /* First translate from bits to bytes, rounding to bigger or smaller ranges
 289 : : as needed. Kills need to always be rounded to smaller ranges, while
290 : : uses and stores to larger ranges. */
291 : 95224807 : if (kill)
292 : : {
293 : 91523513 : if (!get_byte_aligned_range_contained_in_ref (copy, ©_offset,
294 : : ©_size))
295 : : return false;
296 : : }
297 : : else
298 : : {
299 : 3701294 : if (!get_byte_aligned_range_containing_ref (copy, ©_offset,
300 : : ©_size))
301 : : return false;
302 : : }
303 : :
304 : 182288932 : if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
305 : : || !ordered_p (copy_offset, ref_offset))
306 : : return false;
307 : :
308 : : /* Switch sizes from bits to bytes so we do not need to care about
309 : : overflows. Offset calculation needs to stay in bits until we compute
310 : : the difference and can switch to HOST_WIDE_INT. */
311 : 95130552 : copy_size /= BITS_PER_UNIT;
312 : 95130552 : ref_size /= BITS_PER_UNIT;
313 : :
314 : : /* If COPY starts before REF, then reset the beginning of
315 : : COPY to match REF and decrease the size of COPY by the
316 : : number of bytes removed from COPY. */
317 : 95130552 : if (maybe_lt (copy_offset, ref_offset))
318 : : {
319 : 8818526 : if (!(ref_offset - copy_offset).is_constant (&diff)
320 : 8818526 : || copy_size < diff / BITS_PER_UNIT)
321 : : return false;
322 : 2578260 : copy_size -= diff / BITS_PER_UNIT;
323 : 2578260 : copy_offset = ref_offset;
324 : : }
325 : :
326 : 88890286 : if (!(copy_offset - ref_offset).is_constant (&diff)
327 : 88890286 : || ref_size <= diff / BITS_PER_UNIT)
328 : : return false;
329 : :
330 : : /* If COPY extends beyond REF, chop off its size appropriately. */
331 : 8066427 : HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;
332 : :
333 : 8066427 : if (copy_size > limit)
334 : 1145645 : copy_size = limit;
335 : 8066427 : *ret_size = copy_size;
336 : 8066427 : if (!(copy_offset - ref_offset).is_constant (ret_offset))
337 : : return false;
338 : 8066427 : *ret_offset /= BITS_PER_UNIT;
339 : 8066427 : return true;
340 : : }
341 : :
 342 : : /* Update LIVE_BYTES, which tracks REF, for the write WRITE:
 343 : : verify that we have the same base memory address and that the write
 344 : : has a known size and overlaps with REF. */
345 : : static void
346 : 155353418 : clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
347 : : {
348 : 155353418 : HOST_WIDE_INT start, size;
349 : :
350 : 155353418 : if (valid_ao_ref_kill_for_dse (write)
351 : 154552117 : && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
352 : 246876931 : && get_byte_range (write, ref, true, &start, &size))
353 : 4365133 : bitmap_clear_range (live_bytes, start, size);
354 : 155353418 : }
355 : :
356 : : /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
357 : : address written by STMT must match the one found in REF, which must
358 : : have its base address previously initialized.
359 : :
360 : : This routine must be conservative. If we don't know the offset or
361 : : actual size written, assume nothing was written. */
362 : :
363 : : static void
364 : 167496827 : clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
365 : : {
366 : 167496827 : ao_ref write;
367 : :
368 : 167496827 : if (gcall *call = dyn_cast <gcall *> (stmt))
369 : : {
370 : 4583223 : bool interposed;
371 : 4583223 : modref_summary *summary = get_modref_function_summary (call, &interposed);
372 : :
373 : 4583223 : if (summary && !interposed)
374 : 424169 : for (auto kill : summary->kills)
375 : 51419 : if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
376 : 51395 : clear_live_bytes_for_ref (live_bytes, ref, &write);
377 : : }
378 : 167496827 : if (!initialize_ao_ref_for_dse (stmt, &write))
379 : 12194804 : return;
380 : :
381 : 155302023 : clear_live_bytes_for_ref (live_bytes, ref, &write);
382 : : }
383 : :
384 : : /* REF is a memory write. Extract relevant information from it and
385 : : initialize the LIVE_BYTES bitmap. If successful, return TRUE.
386 : : Otherwise return FALSE. */
387 : :
388 : : static bool
389 : 29895095 : setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
390 : : {
391 : 29895095 : HOST_WIDE_INT const_size;
392 : 29895095 : if (valid_ao_ref_for_dse (ref)
393 : 29515915 : && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
394 : 29515915 : - aligned_lower_bound (ref->offset,
395 : 29515915 : BITS_PER_UNIT)).is_constant (&const_size))
396 : 29515915 : && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
397 : 59033151 : && const_size > 1)
398 : : {
399 : 29137843 : bitmap_clear (live_bytes);
400 : 29137843 : bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
401 : 29137843 : return true;
402 : : }
403 : : return false;
404 : : }
405 : :
406 : : /* Compute the number of stored bytes that we can trim from the head and
407 : : tail of REF. LIVE is the bitmap of stores to REF that are still live.
408 : :
409 : : Store the number of bytes trimmed from the head and tail in TRIM_HEAD
410 : : and TRIM_TAIL respectively.
411 : :
412 : : STMT is the statement being trimmed and is used for debugging dump
413 : : output only. */
414 : :
415 : : static void
416 : 575449 : compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
417 : : gimple *stmt)
418 : : {
419 : 575449 : *trim_head = 0;
420 : 575449 : *trim_tail = 0;
421 : :
422 : : /* We use bitmaps biased such that ref->offset is contained in bit zero and
423 : : the bitmap extends through ref->max_size, so we know that in the original
424 : : bitmap bits 0 .. ref->max_size were true. But we need to check that this
425 : : covers the bytes of REF exactly. */
426 : 575449 : const unsigned int align = known_alignment (ref->offset);
427 : 575449 : if ((align > 0 && align < BITS_PER_UNIT)
428 : 575449 : || !known_eq (ref->size, ref->max_size))
429 : 0 : return;
430 : :
431 : : /* Now identify how much, if any of the tail we can chop off. */
432 : 575449 : HOST_WIDE_INT const_size;
433 : 575449 : int last_live = bitmap_last_set_bit (live);
434 : 575449 : if (ref->size.is_constant (&const_size))
435 : : {
436 : 575449 : int last_orig = (const_size / BITS_PER_UNIT) - 1;
437 : : /* We can leave inconvenient amounts on the tail as
438 : : residual handling in mem* and str* functions is usually
439 : : reasonably efficient. */
440 : 575449 : *trim_tail = last_orig - last_live;
441 : :
442 : : /* But don't trim away out of bounds accesses, as this defeats
443 : : proper warnings.
444 : :
445 : : We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
446 : : where TYPE_SIZE_UNIT is not a constant. */
447 : 575449 : if (*trim_tail
448 : 12640 : && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
449 : 12640 : && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
450 : 588088 : && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
451 : : last_orig) <= 0)
452 : 113 : *trim_tail = 0;
453 : : }
454 : :
455 : : /* Identify how much, if any of the head we can chop off. */
456 : 575449 : int first_orig = 0;
457 : 575449 : int first_live = bitmap_first_set_bit (live);
458 : 575449 : *trim_head = first_live - first_orig;
459 : :
460 : : /* If REF is aligned, try to maintain this alignment if it reduces
461 : : the number of (power-of-two sized aligned) writes to memory. */
462 : 575449 : unsigned int align_bits;
463 : 575449 : unsigned HOST_WIDE_INT bitpos;
464 : 490430 : if ((*trim_head || *trim_tail)
465 : 92434 : && last_live - first_live >= 2
466 : 91354 : && ao_ref_alignment (ref, &align_bits, &bitpos)
467 : 75995 : && align_bits >= 32
468 : 75662 : && bitpos == 0
469 : 646820 : && align_bits % BITS_PER_UNIT == 0)
470 : : {
471 : 71371 : unsigned int align_units = align_bits / BITS_PER_UNIT;
472 : 71371 : if (align_units > 16)
473 : 494 : align_units = 16;
474 : 72495 : while ((first_live | (align_units - 1)) > (unsigned int)last_live)
475 : 1124 : align_units >>= 1;
476 : :
477 : 71371 : if (*trim_head)
478 : : {
479 : 65404 : unsigned int pos = first_live & (align_units - 1);
480 : 72131 : for (unsigned int i = 1; i <= align_units; i <<= 1)
481 : : {
482 : 72131 : unsigned int mask = ~(i - 1);
483 : 72131 : unsigned int bytes = align_units - (pos & mask);
484 : 72131 : if (wi::popcount (bytes) <= 1)
485 : : {
486 : 65404 : *trim_head &= mask;
487 : 65404 : break;
488 : : }
489 : : }
490 : : }
491 : :
492 : 71371 : if (*trim_tail)
493 : : {
494 : 7823 : unsigned int pos = last_live & (align_units - 1);
495 : 8031 : for (unsigned int i = 1; i <= align_units; i <<= 1)
496 : : {
497 : 8031 : int mask = i - 1;
498 : 8031 : unsigned int bytes = (pos | mask) + 1;
499 : 8031 : if ((last_live | mask) > (last_live + *trim_tail))
500 : : break;
501 : 8031 : if (wi::popcount (bytes) <= 1)
502 : : {
503 : 7823 : unsigned int extra = (last_live | mask) - last_live;
504 : 7823 : *trim_tail -= extra;
505 : 7823 : break;
506 : : }
507 : : }
508 : : }
509 : : }
510 : :
511 : 575449 : if ((*trim_head || *trim_tail) && dump_file && (dump_flags & TDF_DETAILS))
512 : : {
513 : 15 : fprintf (dump_file, " Trimming statement (head = %d, tail = %d): ",
514 : : *trim_head, *trim_tail);
515 : 15 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
516 : 15 : fprintf (dump_file, "\n");
517 : : }
518 : : }
519 : :
520 : : /* STMT initializes an object from COMPLEX_CST where one or more of the bytes
521 : : written may be dead stores. REF is a representation of the memory written.
522 : : LIVE is the bitmap of stores to REF that are still live.
523 : :
524 : : Attempt to rewrite STMT so that only the real or the imaginary part of the
525 : : object is actually stored. */
526 : :
527 : : static void
528 : 5611 : maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
529 : : {
530 : 5611 : int trim_head, trim_tail;
531 : 5611 : compute_trims (ref, live, &trim_head, &trim_tail, stmt);
532 : :
533 : : /* The amount of data trimmed from the head or tail must be at
534 : : least half the size of the object to ensure we're trimming
535 : : the entire real or imaginary half. By writing things this
536 : : way we avoid more O(n) bitmap operations. */
537 : 5611 : if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
538 : : {
539 : : /* TREE_REALPART is live */
540 : 2 : tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
541 : 2 : tree y = gimple_assign_lhs (stmt);
542 : 2 : y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
543 : 2 : gimple_assign_set_lhs (stmt, y);
544 : 2 : gimple_assign_set_rhs1 (stmt, x);
545 : : }
546 : 5609 : else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
547 : : {
548 : : /* TREE_IMAGPART is live */
549 : 3 : tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
550 : 3 : tree y = gimple_assign_lhs (stmt);
551 : 3 : y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
552 : 3 : gimple_assign_set_lhs (stmt, y);
553 : 3 : gimple_assign_set_rhs1 (stmt, x);
554 : : }
555 : :
556 : : /* Other cases indicate parts of both the real and imag subobjects
557 : : are live. We do not try to optimize those cases. */
558 : 5611 : }
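A hedged sketch of the COMPLEX_CST case above, in GNU C with invented names; whether the rewrite actually fires depends on the trims computed by compute_trims and on a later use keeping the other half live:

	#include <complex.h>

	double
	set_parts (_Complex double *p)
	{
	  *p = 1.0 + 2.0 * _Complex_I;	/* COMPLEX_CST store of both halves.  */
	  __imag__ *p = 3.0;		/* Overwrites the imaginary half, so that
					   half of the store above is dead.  */
	  return __real__ *p;		/* Keeps the real half live.  */
	}

Under these assumptions the first store may be trimmed to write only the real part, roughly __real__ *p = 1.0.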
559 : :
560 : : /* STMT initializes an object using a CONSTRUCTOR where one or more of the
561 : : bytes written are dead stores. REF is a representation of the memory
562 : : written. LIVE is the bitmap of stores to REF that are still live.
563 : :
564 : : Attempt to rewrite STMT so that it writes fewer memory locations.
565 : :
566 : : The most common case for getting here is a CONSTRUCTOR with no elements
567 : : being used to zero initialize an object. We do not try to handle other
568 : : cases as those would force us to fully cover the object with the
569 : : CONSTRUCTOR node except for the components that are dead. */
570 : :
571 : : static void
572 : 447504 : maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
573 : : {
574 : 447504 : tree ctor = gimple_assign_rhs1 (stmt);
575 : :
576 : : /* This is the only case we currently handle. It actually seems to
577 : : catch most cases of actual interest. */
578 : 447504 : gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);
579 : :
580 : 447504 : int head_trim = 0;
581 : 447504 : int tail_trim = 0;
582 : 447504 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
583 : :
584 : : /* Now we want to replace the constructor initializer
585 : : with memset (object + head_trim, 0, size - head_trim - tail_trim). */
586 : 447504 : if (head_trim || tail_trim)
587 : : {
588 : : /* We want &lhs for the MEM_REF expression. */
589 : 86955 : tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
590 : :
591 : 86955 : if (! is_gimple_min_invariant (lhs_addr))
592 : 15710 : return;
593 : :
594 : : /* The number of bytes for the new constructor. */
595 : 71245 : poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
596 : 71245 : poly_int64 count = ref_bytes - head_trim - tail_trim;
597 : :
598 : : /* And the new type for the CONSTRUCTOR. Essentially it's just
599 : : a char array large enough to cover the non-trimmed parts of
600 : : the original CONSTRUCTOR. Note we want explicit bounds here
601 : : so that we know how many bytes to clear when expanding the
602 : : CONSTRUCTOR. */
603 : 71245 : tree type = build_array_type_nelts (char_type_node, count);
604 : :
605 : : /* Build a suitable alias type rather than using alias set zero
606 : : to avoid pessimizing. */
607 : 71245 : tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
608 : :
609 : : /* Build a MEM_REF representing the whole accessed area, starting
610 : : at the first byte not trimmed. */
611 : 71245 : tree exp = fold_build2 (MEM_REF, type, lhs_addr,
612 : : build_int_cst (alias_type, head_trim));
613 : :
614 : : /* Now update STMT with a new RHS and LHS. */
615 : 71245 : gimple_assign_set_lhs (stmt, exp);
616 : 71245 : gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
617 : : }
618 : : }
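A hedged sketch of the empty-CONSTRUCTOR case, assuming a 4-byte int and invented names; the exact trims are subject to the alignment heuristics in compute_trims:

	struct S { int a, b, c, d; } s;

	int
	init (void)
	{
	  s = (struct S) { };	/* Empty CONSTRUCTOR: zero-writes bytes 0..15 of s.  */
	  s.a = 1;		/* Kills bytes 0..3, so head_trim can become 4.  */
	  s.d = 2;		/* Kills bytes 12..15, so tail_trim can become 4.  */
	  return s.b;		/* Keeps the middle bytes live.  */
	}

Under these assumptions the zero-initialization may be narrowed to clear only bytes 4..11, i.e. an 8-byte char-array CONSTRUCTOR stored through a MEM_REF at offset 4.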
619 : :
620 : : /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
621 : : copied/set by DECREMENT. */
622 : : static void
623 : 1050 : decrement_count (gimple *stmt, int decrement)
624 : : {
625 : 1050 : tree *countp = gimple_call_arg_ptr (stmt, 2);
626 : 1050 : gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
627 : 2100 : *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
628 : 1050 : - decrement));
629 : 1050 : }
630 : :
631 : : static void
632 : 659 : increment_start_addr (gimple *stmt, tree *where, int increment)
633 : : {
634 : 659 : if (tree lhs = gimple_call_lhs (stmt))
635 : 6 : if (where == gimple_call_arg_ptr (stmt, 0))
636 : : {
637 : 6 : gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
638 : 6 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
639 : 6 : gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
640 : 6 : gimple_call_set_lhs (stmt, NULL_TREE);
641 : 6 : update_stmt (stmt);
642 : : }
643 : :
644 : 659 : if (TREE_CODE (*where) == SSA_NAME)
645 : : {
646 : 166 : tree tem = make_ssa_name (TREE_TYPE (*where));
647 : 166 : gassign *newop
648 : 166 : = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
649 : 166 : build_int_cst (sizetype, increment));
650 : 166 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
651 : 166 : gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
652 : 166 : *where = tem;
653 : 166 : update_stmt (stmt);
654 : 166 : return;
655 : : }
656 : :
657 : 493 : *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
658 : : *where,
659 : : build_int_cst (ptr_type_node,
660 : : increment)));
661 : 493 : STRIP_USELESS_TYPE_CONVERSION (*where);
662 : : }
663 : :
 664 : : /* STMT is a builtin call that writes the bytes in bitmap ORIG; some bytes
 665 : : (ORIG & ~NEW) are dead and need not be stored. Try to rewrite STMT to reduce
666 : : the amount of data it actually writes.
667 : :
668 : : Right now we only support trimming from the head or the tail of the
669 : : memory region. In theory we could split the mem* call, but it's
670 : : likely of marginal value. */
671 : :
672 : : static void
673 : 122334 : maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
674 : : {
675 : 122334 : int head_trim, tail_trim;
676 : 122334 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
677 : : {
678 : 4008 : case BUILT_IN_STRNCPY:
679 : 4008 : case BUILT_IN_STRNCPY_CHK:
680 : 4008 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
681 : 4008 : if (head_trim)
682 : : {
683 : : /* Head trimming of strncpy is only possible if we can
684 : : prove all bytes we would trim are non-zero (or we could
685 : : turn the strncpy into memset if there must be zero
686 : : among the head trimmed bytes). If we don't know anything
687 : : about those bytes, the presence or absence of '\0' bytes
688 : : in there will affect whether it acts for the non-trimmed
689 : : bytes as memset or memcpy/strncpy. */
690 : 68 : c_strlen_data lendata = { };
691 : 68 : int orig_head_trim = head_trim;
692 : 68 : tree srcstr = gimple_call_arg (stmt, 1);
693 : 68 : if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
694 : 68 : || !tree_fits_uhwi_p (lendata.minlen))
695 : 8 : head_trim = 0;
696 : 60 : else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
697 : : {
698 : 54 : head_trim = tree_to_uhwi (lendata.minlen);
699 : 54 : if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
700 : 0 : head_trim &= ~(UNITS_PER_WORD - 1);
701 : : }
702 : 68 : if (orig_head_trim != head_trim
703 : 62 : && dump_file
704 : 76 : && (dump_flags & TDF_DETAILS))
705 : 8 : fprintf (dump_file,
706 : : " Adjusting strncpy trimming to (head = %d,"
707 : : " tail = %d)\n", head_trim, tail_trim);
708 : : }
709 : 4008 : goto do_memcpy;
710 : :
711 : 82309 : case BUILT_IN_MEMCPY:
712 : 82309 : case BUILT_IN_MEMMOVE:
713 : 82309 : case BUILT_IN_MEMCPY_CHK:
714 : 82309 : case BUILT_IN_MEMMOVE_CHK:
715 : 82309 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
716 : :
717 : 86317 : do_memcpy:
718 : : /* Tail trimming is easy, we can just reduce the count. */
719 : 86317 : if (tail_trim)
720 : 393 : decrement_count (stmt, tail_trim);
721 : :
722 : : /* Head trimming requires adjusting all the arguments. */
723 : 86317 : if (head_trim)
724 : : {
725 : : /* For __*_chk need to adjust also the last argument. */
726 : 108 : if (gimple_call_num_args (stmt) == 4)
727 : : {
728 : 49 : tree size = gimple_call_arg (stmt, 3);
729 : 49 : if (!tree_fits_uhwi_p (size))
730 : : break;
731 : 7 : if (!integer_all_onesp (size))
732 : : {
733 : 7 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
734 : 7 : if (sz < (unsigned) head_trim)
735 : : break;
736 : 7 : tree arg = wide_int_to_tree (TREE_TYPE (size),
737 : 7 : sz - head_trim);
738 : 7 : gimple_call_set_arg (stmt, 3, arg);
739 : : }
740 : : }
741 : 66 : tree *dst = gimple_call_arg_ptr (stmt, 0);
742 : 66 : increment_start_addr (stmt, dst, head_trim);
743 : 66 : tree *src = gimple_call_arg_ptr (stmt, 1);
744 : 66 : increment_start_addr (stmt, src, head_trim);
745 : 66 : decrement_count (stmt, head_trim);
746 : : }
747 : : break;
748 : :
749 : 36017 : case BUILT_IN_MEMSET:
750 : 36017 : case BUILT_IN_MEMSET_CHK:
751 : 36017 : compute_trims (ref, live, &head_trim, &tail_trim, stmt);
752 : :
753 : : /* Tail trimming is easy, we can just reduce the count. */
754 : 36017 : if (tail_trim)
755 : 64 : decrement_count (stmt, tail_trim);
756 : :
757 : : /* Head trimming requires adjusting all the arguments. */
758 : 36017 : if (head_trim)
759 : : {
760 : : /* For __*_chk need to adjust also the last argument. */
761 : 527 : if (gimple_call_num_args (stmt) == 4)
762 : : {
763 : 7 : tree size = gimple_call_arg (stmt, 3);
764 : 7 : if (!tree_fits_uhwi_p (size))
765 : : break;
766 : 7 : if (!integer_all_onesp (size))
767 : : {
768 : 7 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
769 : 7 : if (sz < (unsigned) head_trim)
770 : : break;
771 : 7 : tree arg = wide_int_to_tree (TREE_TYPE (size),
772 : 7 : sz - head_trim);
773 : 7 : gimple_call_set_arg (stmt, 3, arg);
774 : : }
775 : : }
776 : 527 : tree *dst = gimple_call_arg_ptr (stmt, 0);
777 : 527 : increment_start_addr (stmt, dst, head_trim);
778 : 527 : decrement_count (stmt, head_trim);
779 : : }
780 : : break;
781 : :
782 : : default:
783 : : break;
784 : : }
785 : 122334 : }
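A hedged sketch of mem* head trimming with invented names; `use` is a hypothetical external consumer, and whether the trim survives depends on the alignment heuristics in compute_trims:

	char dst[16], src[16];
	int use (const char *);

	int
	copy (void)
	{
	  __builtin_memcpy (dst, src, 16);	/* Writes bytes 0..15 of dst.  */
	  __builtin_memset (dst, 0, 4);		/* Kills bytes 0..3 of the copy.  */
	  return use (dst + 4);			/* Bytes 4..15 stay live.  */
	}

Under these assumptions the memcpy may be rewritten along the lines of __builtin_memcpy (dst + 4, src + 4, 12) via increment_start_addr and decrement_count above.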
786 : :
787 : : /* STMT is a memory write where one or more bytes written are dead stores.
788 : : REF is a representation of the memory written. LIVE is the bitmap of
789 : : stores to REF that are still live.
790 : :
791 : : Attempt to rewrite STMT so that it writes fewer memory locations. Right
792 : : now we only support trimming at the start or end of the memory region.
793 : : It's not clear how much there is to be gained by trimming from the middle
794 : : of the region. */
795 : :
796 : : static void
797 : 24472286 : maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
798 : : {
799 : 24472286 : if (is_gimple_assign (stmt)
800 : 24472286 : && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
801 : : {
802 : 23228514 : switch (gimple_assign_rhs_code (stmt))
803 : : {
804 : 447504 : case CONSTRUCTOR:
805 : 447504 : maybe_trim_constructor_store (ref, live, stmt);
806 : 447504 : break;
807 : 5611 : case COMPLEX_CST:
808 : 5611 : maybe_trim_complex_store (ref, live, stmt);
809 : 5611 : break;
810 : : default:
811 : : break;
812 : : }
813 : : }
814 : 24472286 : }
815 : :
 816 : : /* Return TRUE if USE_REF reads bytes from LIVE, where LIVE is
817 : : derived from REF, a write reference.
818 : :
819 : : While this routine may modify USE_REF, it's passed by value, not
820 : : location. So callers do not see those modifications. */
821 : :
822 : : static bool
823 : 3701294 : live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
824 : : {
825 : : /* We have already verified that USE_REF and REF hit the same object.
826 : : Now verify that there's actually an overlap between USE_REF and REF. */
827 : 3701294 : HOST_WIDE_INT start, size;
828 : 3701294 : if (get_byte_range (use_ref, ref, false, &start, &size))
829 : : {
830 : : /* If USE_REF covers all of REF, then it will hit one or more
831 : : live bytes. This avoids useless iteration over the bitmap
832 : : below. */
833 : 3701294 : if (start == 0 && known_eq (size * 8, ref->size))
834 : : return true;
835 : :
836 : : /* Now check if any of the remaining bits in use_ref are set in LIVE. */
837 : 1059843 : return bitmap_bit_in_range_p (live, start, (start + size - 1));
838 : : }
839 : : return true;
840 : : }
841 : :
842 : : /* Callback for dse_classify_store calling for_each_index. Verify that
843 : : indices are invariant in the loop with backedge PHI in basic-block DATA. */
844 : :
845 : : static bool
846 : 2368292 : check_name (tree, tree *idx, void *data)
847 : : {
848 : 2368292 : basic_block phi_bb = (basic_block) data;
849 : 2368292 : if (TREE_CODE (*idx) == SSA_NAME
850 : 1596405 : && !SSA_NAME_IS_DEFAULT_DEF (*idx)
851 : 3854194 : && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
852 : : phi_bb))
853 : : return false;
854 : : return true;
855 : : }
856 : :
857 : : /* STMT stores the value 0 into one or more memory locations
858 : : (via memset, empty constructor, calloc call, etc).
859 : :
860 : : See if there is a subsequent store of the value 0 to one
861 : : or more of the same memory location(s). If so, the subsequent
862 : : store is redundant and can be removed.
863 : :
864 : : The subsequent stores could be via memset, empty constructors,
865 : : simple MEM stores, etc. */
866 : :
867 : : static void
868 : 3795845 : dse_optimize_redundant_stores (gimple *stmt)
869 : : {
870 : 3795845 : int cnt = 0;
871 : :
872 : : /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
873 : 3795845 : alias_set_type earlier_set = 0;
874 : 3795845 : alias_set_type earlier_base_set = 0;
875 : 3795845 : if (is_gimple_assign (stmt))
876 : : {
877 : 3740897 : ao_ref lhs_ref;
878 : 3740897 : ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
879 : 3740897 : earlier_set = ao_ref_alias_set (&lhs_ref);
880 : 3740897 : earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
881 : : }
882 : :
883 : : /* We could do something fairly complex and look through PHIs
884 : : like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
885 : : the effort.
886 : :
887 : : Look at all the immediate uses of the VDEF (which are obviously
888 : : dominated by STMT). See if one or more stores 0 into the same
 889 : : memory locations as STMT; if so, remove the immediate use statements.
890 : 3795845 : tree defvar = gimple_vdef (stmt);
891 : 3795845 : imm_use_iterator ui;
892 : 3795845 : gimple *use_stmt;
893 : 8511688 : FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
894 : : {
895 : : /* Limit stmt walking. */
896 : 4735320 : if (++cnt > param_dse_max_alias_queries_per_store)
897 : : break;
898 : :
899 : : /* If USE_STMT stores 0 into one or more of the same locations
900 : : as STMT and STMT would kill USE_STMT, then we can just remove
901 : : USE_STMT. */
902 : 4735320 : tree fndecl;
903 : 4735320 : if ((is_gimple_assign (use_stmt)
904 : 3268470 : && gimple_vdef (use_stmt)
905 : 2652140 : && (gimple_assign_single_p (use_stmt)
906 : 2652140 : && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
907 : 6788523 : || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
908 : 151937 : && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
909 : 151937 : && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
910 : 131336 : || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
911 : 20692 : && integer_zerop (gimple_call_arg (use_stmt, 1))))
912 : : {
913 : 1233234 : ao_ref write;
914 : :
915 : 1233234 : if (!initialize_ao_ref_for_dse (use_stmt, &write))
916 : : break;
917 : :
918 : 1213757 : if (valid_ao_ref_for_dse (&write)
919 : 1213757 : && stmt_kills_ref_p (stmt, &write))
920 : : {
921 : 5281 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
922 : 5281 : if (is_gimple_assign (use_stmt))
923 : : {
924 : 5238 : ao_ref lhs_ref;
925 : 5238 : ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
926 : 5238 : if ((earlier_set == ao_ref_alias_set (&lhs_ref)
927 : 441 : || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
928 : : earlier_set))
929 : 5671 : && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
930 : 723 : || alias_set_subset_of
931 : 723 : (ao_ref_base_alias_set (&lhs_ref),
932 : : earlier_base_set)))
933 : 5170 : delete_dead_or_redundant_assignment (&gsi, "redundant",
934 : : need_eh_cleanup,
935 : : need_ab_cleanup);
936 : : }
937 : 43 : else if (is_gimple_call (use_stmt))
938 : : {
939 : 43 : if ((earlier_set == 0
940 : 1 : || alias_set_subset_of (0, earlier_set))
941 : 43 : && (earlier_base_set == 0
942 : 0 : || alias_set_subset_of (0, earlier_base_set)))
943 : 42 : delete_dead_or_redundant_call (&gsi, "redundant");
944 : : }
945 : : else
946 : 0 : gcc_unreachable ();
947 : : }
948 : : }
949 : 3795845 : }
950 : 3795845 : }
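A hedged example of the redundancy handled here, with invented names: a later store of zero into memory that a dominating calloc (or memset) has already zeroed is a removal candidate:

	#include <stdlib.h>

	int *
	make_table (void)
	{
	  int *p = calloc (4, sizeof (int));	/* All allocated bytes are zero.  */
	  if (p)
	    p[0] = 0;	/* Stores 0 into memory calloc already zeroed:
			   a redundant store.  */
	  return p;
	}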
951 : :
952 : : /* Return whether PHI contains ARG as an argument. */
953 : :
954 : : static bool
955 : 3143807 : contains_phi_arg (gphi *phi, tree arg)
956 : : {
957 : 23188804 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
958 : 20228750 : if (gimple_phi_arg_def (phi, i) == arg)
959 : : return true;
960 : : return false;
961 : : }
962 : :
963 : : /* Hash map of the memory use in a GIMPLE assignment to its
 964 : : data reference. If NULL, data-ref analysis isn't used. */
965 : : static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;
966 : :
967 : : /* A helper of dse_optimize_stmt.
968 : : Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
969 : : according to downstream uses and defs. Sets *BY_CLOBBER_P to true
970 : : if only clobber statements influenced the classification result.
971 : : Returns the classification. */
972 : :
973 : : static dse_store_status
974 : 36850355 : dse_classify_store (ao_ref *ref, gimple *stmt,
975 : : bool byte_tracking_enabled, sbitmap live_bytes,
976 : : bool *by_clobber_p, tree stop_at_vuse, int &cnt,
977 : : bitmap visited)
978 : : {
979 : 36850355 : gimple *temp;
980 : 36850355 : std::unique_ptr<data_reference, void(*)(data_reference_p)>
981 : 36850355 : dra (nullptr, free_data_ref);
982 : :
983 : 36850355 : if (by_clobber_p)
984 : 36371189 : *by_clobber_p = true;
985 : :
986 : : /* Find the first dominated statement that clobbers (part of) the
987 : : memory stmt stores to with no intermediate statement that may use
988 : : part of the memory stmt stores. That is, find a store that may
989 : : prove stmt to be a dead store. */
990 : : temp = stmt;
991 : 454568631 : do
992 : : {
993 : 245709493 : gimple *use_stmt;
994 : 245709493 : imm_use_iterator ui;
995 : 245709493 : bool fail = false;
996 : 245709493 : tree defvar;
997 : :
998 : 245709493 : if (gimple_code (temp) == GIMPLE_PHI)
999 : : {
1000 : 15472537 : defvar = PHI_RESULT (temp);
1001 : 15472537 : bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
1002 : : }
1003 : : else
1004 : 460473912 : defvar = gimple_vdef (temp);
1005 : :
1006 : 245709493 : auto_vec<gimple *, 10> defs;
1007 : 245709493 : gphi *first_phi_def = NULL;
1008 : 245709493 : gphi *last_phi_def = NULL;
1009 : :
1010 : 245709493 : auto_vec<tree, 10> worklist;
1011 : 245709493 : worklist.quick_push (defvar);
1012 : :
1013 : 249106120 : do
1014 : : {
1015 : 249106120 : defvar = worklist.pop ();
1016 : : /* If we're instructed to stop walking at region boundary, do so. */
1017 : 249106120 : if (defvar == stop_at_vuse)
1018 : : return DSE_STORE_LIVE;
1019 : :
1020 : 249087983 : use_operand_p usep;
1021 : 523608863 : FOR_EACH_IMM_USE_FAST (usep, ui, defvar)
1022 : : {
1023 : 301970137 : use_stmt = USE_STMT (usep);
1024 : :
1025 : : /* Limit stmt walking. */
1026 : 301970137 : if (++cnt > param_dse_max_alias_queries_per_store)
1027 : : {
1028 : : fail = true;
1029 : : break;
1030 : : }
1031 : :
1032 : : /* In simple cases we can look through PHI nodes, but we
1033 : : have to be careful with loops and with memory references
1034 : : containing operands that are also operands of PHI nodes.
1035 : : See gcc.c-torture/execute/20051110-*.c. */
1036 : 301760987 : if (gphi *phi = dyn_cast <gphi *> (use_stmt))
1037 : : {
1038 : : /* Look through single-argument PHIs. */
1039 : 30067299 : if (gimple_phi_num_args (phi) == 1)
1040 : 4206525 : worklist.safe_push (gimple_phi_result (phi));
1041 : : else
1042 : : {
1043 : : /* If we visit this PHI by following a backedge then we
1044 : : have to make sure ref->ref only refers to SSA names
1045 : : that are invariant with respect to the loop
1046 : : represented by this PHI node. We handle irreducible
1047 : : regions by relying on backedge marking and identifying
1048 : : the head of the (sub-)region. */
1049 : 25860774 : edge e = gimple_phi_arg_edge
1050 : 25860774 : (phi, PHI_ARG_INDEX_FROM_USE (usep));
1051 : 25860774 : if (e->flags & EDGE_DFS_BACK)
1052 : : {
1053 : 2868529 : basic_block rgn_head
1054 : 2868529 : = nearest_common_dominator (CDI_DOMINATORS,
1055 : : gimple_bb (phi),
1056 : : e->src);
1057 : 2868529 : if (!for_each_index (ref->ref
1058 : : ? &ref->ref : &ref->base,
1059 : : check_name, rgn_head))
1060 : : return DSE_STORE_LIVE;
1061 : : }
1062 : : /* If we already visited this PHI ignore it for further
1063 : : processing. But note we have to check each incoming
1064 : : edge above. */
1065 : 49076448 : if (!bitmap_bit_p (visited,
1066 : 24538224 : SSA_NAME_VERSION (PHI_RESULT (phi))))
1067 : : {
1068 : 19888473 : defs.safe_push (phi);
1069 : 19888473 : if (!first_phi_def)
 1070 : 16841335 : first_phi_def = phi;
1071 : : last_phi_def = phi;
1072 : : }
1073 : : }
1074 : : }
1075 : : /* If the statement is a use the store is not dead. */
1076 : 271693688 : else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
1077 : : {
1078 : 25931068 : if (dse_stmt_to_dr_map
1079 : 5707197 : && ref->ref
1080 : 31556582 : && is_gimple_assign (use_stmt))
1081 : : {
1082 : 1184542 : if (!dra)
1083 : 1180253 : dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
1084 : : false, false));
1085 : 1184542 : bool existed_p;
1086 : 1184542 : data_reference_p &drb
1087 : 1184542 : = dse_stmt_to_dr_map->get_or_insert (use_stmt,
1088 : : &existed_p);
1089 : 1184542 : if (!existed_p)
1090 : 734991 : drb = create_data_ref (NULL, NULL,
1091 : : gimple_assign_rhs1 (use_stmt),
1092 : : use_stmt, false, false);
1093 : 1184542 : if (!dr_may_alias_p (dra.get (), drb, NULL))
1094 : : {
1095 : 15706 : if (gimple_vdef (use_stmt))
1096 : 19 : defs.safe_push (use_stmt);
1097 : 7853 : continue;
1098 : : }
1099 : : }
1100 : :
1101 : : /* Handle common cases where we can easily build an ao_ref
1102 : : structure for USE_STMT and in doing so we find that the
1103 : : references hit non-live bytes and thus can be ignored.
1104 : :
1105 : : TODO: We can also use modref summary to handle calls. */
1106 : 25923215 : if (byte_tracking_enabled
1107 : 25923215 : && is_gimple_assign (use_stmt))
1108 : : {
1109 : 4907503 : ao_ref use_ref;
1110 : 4907503 : ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
1111 : 4907503 : if (valid_ao_ref_for_dse (&use_ref)
1112 : 4890166 : && operand_equal_p (use_ref.base, ref->base,
1113 : : OEP_ADDRESS_OF)
1114 : 8608797 : && !live_bytes_read (&use_ref, ref, live_bytes))
1115 : : {
1116 : : /* If this is a store, remember it as we possibly
1117 : : need to walk the defs uses. */
1118 : 11316 : if (gimple_vdef (use_stmt))
1119 : 854 : defs.safe_push (use_stmt);
1120 : 5658 : continue;
1121 : : }
1122 : : }
1123 : :
1124 : : fail = true;
1125 : : break;
1126 : : }
1127 : : /* We have visited ourselves already so ignore STMT for the
1128 : : purpose of chaining. */
1129 : 245762620 : else if (use_stmt == stmt)
1130 : : ;
1131 : : /* If this is a store, remember it as we possibly need to walk the
1132 : : defs uses. */
1133 : 765914326 : else if (gimple_vdef (use_stmt))
1134 : 213578654 : defs.safe_push (use_stmt);
1135 : : }
1136 : : }
1137 : 469404159 : while (!fail && !worklist.is_empty ());
1138 : :
1139 : 244368806 : if (fail)
1140 : : {
1141 : : /* STMT might be partially dead and we may be able to reduce
1142 : : how many memory locations it stores into. */
1143 : 26126707 : if (byte_tracking_enabled && !gimple_clobber_p (stmt))
1144 : 23363244 : return DSE_STORE_MAYBE_PARTIAL_DEAD;
1145 : : return DSE_STORE_LIVE;
1146 : : }
1147 : :
1148 : : /* If we didn't find any definition this means the store is dead
1149 : : if it isn't a store to global reachable memory. In this case
1150 : : just pretend the stmt makes itself dead. Otherwise fail. */
1151 : 218242099 : if (defs.is_empty ())
1152 : : {
1153 : 1484513 : if (ref_may_alias_global_p (ref, false))
1154 : : {
1155 : 35793 : basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (defvar));
1156 : : /* Assume that BUILT_IN_UNREACHABLE and BUILT_IN_UNREACHABLE_TRAP
1157 : : do not need to keep (global) memory side-effects live.
1158 : : We do not have virtual operands on BUILT_IN_UNREACHABLE
 1159 : : but we can do poor man's reachability when the last
1160 : : definition we want to elide is in the block that ends
1161 : : in such a call. */
1162 : 35793 : if (EDGE_COUNT (def_bb->succs) == 0)
1163 : 46620 : if (gcall *last = dyn_cast <gcall *> (*gsi_last_bb (def_bb)))
1164 : 567 : if (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
1165 : 567 : || gimple_call_builtin_p (last,
1166 : : BUILT_IN_UNREACHABLE_TRAP))
1167 : : {
1168 : 412 : if (by_clobber_p)
1169 : 412 : *by_clobber_p = false;
1170 : 412 : return DSE_STORE_DEAD;
1171 : : }
1172 : 35381 : return DSE_STORE_LIVE;
1173 : : }
1174 : :
1175 : 1448720 : if (by_clobber_p)
1176 : 1445139 : *by_clobber_p = false;
1177 : 1448720 : return DSE_STORE_DEAD;
1178 : : }
1179 : :
1180 : : /* Process defs and remove those we need not process further. */
1181 : 445647559 : for (unsigned i = 0; i < defs.length ();)
1182 : : {
1183 : 228946795 : gimple *def = defs[i];
1184 : 228946795 : gimple *use_stmt;
1185 : 228946795 : use_operand_p use_p;
1186 : 228946795 : tree vdef = (gimple_code (def) == GIMPLE_PHI
1187 : 247058835 : ? gimple_phi_result (def) : gimple_vdef (def));
1188 : 228946795 : gphi *phi_def;
1189 : : /* If the path to check starts with a kill we do not need to
1190 : : process it further.
1191 : : ??? With byte tracking we need only kill the bytes currently
1192 : : live. */
1193 : 228946795 : if (stmt_kills_ref_p (def, ref))
1194 : : {
1195 : 2446515 : if (by_clobber_p && !gimple_clobber_p (def))
1196 : 811422 : *by_clobber_p = false;
1197 : 2446515 : defs.unordered_remove (i);
1198 : : }
1199 : : /* If the path ends here we do not need to process it further.
1200 : : This for example happens with calls to noreturn functions. */
1201 : 226500280 : else if (has_zero_uses (vdef))
1202 : : {
1203 : : /* But if the store is to global memory it is definitely
1204 : : not dead. */
1205 : 2200336 : if (ref_may_alias_global_p (ref, false))
1206 : 56822 : return DSE_STORE_LIVE;
1207 : 2143514 : defs.unordered_remove (i);
1208 : : }
1209 : : /* In addition to kills we can remove defs whose only use
1210 : : is another def in defs. That can only ever be PHIs of which
1211 : : we track two for simplicity reasons, the first and last in
1212 : : {first,last}_phi_def (we fail for multiple PHIs anyways).
1213 : : We can also ignore defs that feed only into
1214 : : already visited PHIs. */
1215 : 224299944 : else if (single_imm_use (vdef, &use_p, &use_stmt)
1216 : 224299944 : && (use_stmt == first_phi_def
1217 : 197282283 : || use_stmt == last_phi_def
1218 : 197195867 : || (gimple_code (use_stmt) == GIMPLE_PHI
1219 : 11547797 : && bitmap_bit_p (visited,
1220 : 11547797 : SSA_NAME_VERSION
1221 : : (PHI_RESULT (use_stmt))))))
1222 : : {
1223 : 3936556 : defs.unordered_remove (i);
1224 : 3936556 : if (def == first_phi_def)
1225 : : first_phi_def = NULL;
1226 : 3599039 : else if (def == last_phi_def)
1227 : 152491 : last_phi_def = NULL;
1228 : : }
1229 : : /* If def is a PHI and one of its arguments is another PHI node still
1230 : : in consideration we can defer processing it. */
1231 : 220363388 : else if ((phi_def = dyn_cast <gphi *> (def))
1232 : 17644478 : && ((last_phi_def
1233 : 17644478 : && phi_def != last_phi_def
1234 : 1660537 : && contains_phi_arg (phi_def,
1235 : : gimple_phi_result (last_phi_def)))
1236 : 17554480 : || (first_phi_def
1237 : 17554480 : && phi_def != first_phi_def
1238 : 1483270 : && contains_phi_arg
1239 : 1483270 : (phi_def, gimple_phi_result (first_phi_def)))))
1240 : : {
1241 : 183753 : defs.unordered_remove (i);
1242 : 183753 : if (phi_def == first_phi_def)
1243 : : first_phi_def = NULL;
1244 : 128123 : else if (phi_def == last_phi_def)
1245 : 152491 : last_phi_def = NULL;
1246 : : }
1247 : : else
1248 : 220179635 : ++i;
1249 : : }
1250 : :
1251 : : /* If all defs kill the ref we are done. */
1252 : 253551119 : if (defs.is_empty ())
1253 : : return DSE_STORE_DEAD;
1254 : : /* If more than one def survives we have to analyze multiple
1255 : : paths. We can handle this by recursing, sharing 'visited'
1256 : : to avoid redundant work and limiting it by shared 'cnt'.
1257 : : For now do not bother with byte-tracking in this case. */
1258 : 216137750 : while (defs.length () > 1)
1259 : : {
1260 : 6935245 : if (dse_classify_store (ref, defs.last (), false, NULL,
1261 : : by_clobber_p, stop_at_vuse, cnt,
1262 : : visited) != DSE_STORE_DEAD)
1263 : : break;
1264 : 3898336 : byte_tracking_enabled = false;
1265 : 3898336 : defs.pop ();
1266 : : }
1267 : : /* If more than one def survives fail. */
1268 : 212239414 : if (defs.length () > 1)
1269 : : {
1270 : : /* STMT might be partially dead and we may be able to reduce
1271 : : how many memory locations it stores into. */
1272 : 3036909 : if (byte_tracking_enabled && !gimple_clobber_p (stmt))
1273 : 1285441 : return DSE_STORE_MAYBE_PARTIAL_DEAD;
1274 : : return DSE_STORE_LIVE;
1275 : : }
1276 : 209202505 : temp = defs[0];
1277 : :
1278 : : /* Track partial kills. */
1279 : 209202505 : if (byte_tracking_enabled)
1280 : : {
1281 : 167496827 : clear_bytes_written_by (live_bytes, temp, ref);
1282 : 167496827 : if (bitmap_empty_p (live_bytes))
1283 : : {
1284 : 343367 : if (by_clobber_p && !gimple_clobber_p (temp))
1285 : 335027 : *by_clobber_p = false;
1286 : 343367 : return DSE_STORE_DEAD;
1287 : : }
1288 : : }
1289 : 245709493 : }
1290 : : /* Continue walking until there are no more live bytes. */
1291 : : while (1);
1292 : 36850355 : }
1293 : :
1294 : : dse_store_status
1295 : 29915110 : dse_classify_store (ao_ref *ref, gimple *stmt,
1296 : : bool byte_tracking_enabled, sbitmap live_bytes,
1297 : : bool *by_clobber_p, tree stop_at_vuse)
1298 : : {
1299 : 29915110 : int cnt = 0;
1300 : 29915110 : auto_bitmap visited;
1301 : 29915110 : return dse_classify_store (ref, stmt, byte_tracking_enabled, live_bytes,
1302 : 29915110 : by_clobber_p, stop_at_vuse, cnt, visited);
1303 : 29915110 : }
1304 : :
1305 : :
 1306 : : /* Delete a dead or redundant call at GSI, which is a mem* call of some kind. */
1307 : : static void
1308 : 6027 : delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
1309 : : {
1310 : 6027 : gimple *stmt = gsi_stmt (*gsi);
1311 : 6027 : if (dump_file && (dump_flags & TDF_DETAILS))
1312 : : {
1313 : 15 : fprintf (dump_file, " Deleted %s call: ", type);
1314 : 15 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1315 : 15 : fprintf (dump_file, "\n");
1316 : : }
1317 : :
1318 : 6027 : basic_block bb = gimple_bb (stmt);
1319 : 6027 : tree lhs = gimple_call_lhs (stmt);
1320 : 6027 : if (lhs)
1321 : : {
1322 : 1149 : tree ptr = gimple_call_arg (stmt, 0);
1323 : 1149 : gimple *new_stmt = gimple_build_assign (lhs, ptr);
1324 : 1149 : unlink_stmt_vdef (stmt);
1325 : 1149 : if (gsi_replace (gsi, new_stmt, true))
1326 : 328 : bitmap_set_bit (need_eh_cleanup, bb->index);
1327 : : }
1328 : : else
1329 : : {
1330 : : /* Then we need to fix the operand of the consuming stmt. */
1331 : 4878 : unlink_stmt_vdef (stmt);
1332 : :
1333 : : /* Remove the dead store. */
1334 : 4878 : if (gsi_remove (gsi, true))
1335 : 0 : bitmap_set_bit (need_eh_cleanup, bb->index);
1336 : 4878 : release_defs (stmt);
1337 : : }
1338 : 6027 : }
1339 : :
 1340 : : /* Delete a dead or redundant store at GSI, which is a gimple assignment. */
1341 : :
1342 : : void
1343 : 1567388 : delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
1344 : : const char *type,
1345 : : bitmap need_eh_cleanup,
1346 : : bitmap need_ab_cleanup)
1347 : : {
1348 : 1567388 : gimple *stmt = gsi_stmt (*gsi);
1349 : 1567388 : if (dump_file && (dump_flags & TDF_DETAILS))
1350 : : {
1351 : 112 : fprintf (dump_file, " Deleted %s store: ", type);
1352 : 112 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1353 : 112 : fprintf (dump_file, "\n");
1354 : : }
1355 : :
1356 : : /* Then we need to fix the operand of the consuming stmt. */
1357 : 1567388 : unlink_stmt_vdef (stmt);
1358 : :
1359 : : /* Remove the dead store. */
1360 : 1567388 : basic_block bb = gimple_bb (stmt);
1361 : 1567388 : if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
1362 : 4 : bitmap_set_bit (need_ab_cleanup, bb->index);
1363 : 1567388 : if (gsi_remove (gsi, true) && need_eh_cleanup)
1364 : 210 : bitmap_set_bit (need_eh_cleanup, bb->index);
1365 : :
1366 : : /* And release any SSA_NAMEs set in this statement back to the
1367 : : SSA_NAME manager. */
1368 : 1567388 : release_defs (stmt);
1369 : 1567388 : }
1370 : :
1371 : : /* Try to prove, using modref summary, that all memory written to by a call is
 1372 : : dead and remove it. Assume that if the return value is written to memory
1373 : : it is already proved to be dead. */
1374 : :
1375 : : static bool
1376 : 16325020 : dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
1377 : : {
1378 : 32469220 : gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1379 : :
1380 : 16145275 : if (!stmt)
1381 : : return false;
1382 : :
1383 : 16145275 : tree callee = gimple_call_fndecl (stmt);
1384 : :
1385 : 16145275 : if (!callee)
1386 : : return false;
1387 : :
1388 : : /* Pure/const functions are optimized by normal DCE
1389 : : or handled as store above. */
1390 : 15460824 : int flags = gimple_call_flags (stmt);
1391 : 15460824 : if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
1392 : 105 : && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
1393 : : return false;
1394 : :
1395 : 15460822 : cgraph_node *node = cgraph_node::get (callee);
1396 : 15460822 : if (!node)
1397 : : return false;
1398 : :
1399 : 15451407 : if ((stmt_could_throw_p (cfun, stmt)
1400 : 6725568 : && !cfun->can_delete_dead_exceptions)
1401 : 17536331 : || ((gimple_call_flags (stmt) & ECF_NORETURN)
1402 : 1932186 : && gimple_call_ctrl_altering_p (stmt)))
1403 : 6572823 : return false;
1404 : :
1405 : : /* If return value is used the call is not dead. */
1406 : 8878584 : tree lhs = gimple_call_lhs (stmt);
1407 : 8878584 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
1408 : : {
1409 : 2195814 : imm_use_iterator ui;
1410 : 2195814 : gimple *use_stmt;
1411 : 2363807 : FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
1412 : 2305734 : if (!is_gimple_debug (use_stmt))
1413 : 2195814 : return false;
1414 : : }
1415 : :
1416 : : /* Verify that there are no side-effects except for return value
1417 : : and memory writes tracked by modref. */
1418 : 6740843 : modref_summary *summary = get_modref_function_summary (node);
1419 : 6740843 : if (!summary || !summary->try_dse)
1420 : : return false;
1421 : :
1422 : 57116 : bool by_clobber_p = false;
1423 : :
1424 : : /* Walk all memory writes and verify that they are dead. */
1425 : 172706 : for (auto base_node : summary->stores->bases)
1426 : 174759 : for (auto ref_node : base_node->refs)
1427 : 177995 : for (auto access_node : ref_node->accesses)
1428 : : {
1429 : 58677 : tree arg = access_node.get_call_arg (stmt);
1430 : :
1431 : 58677 : if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
1432 : 56041 : return false;
1433 : :
1434 : 58676 : if (integer_zerop (arg)
1435 : 58687 : && !targetm.addr_space.zero_address_valid
1436 : 11 : (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
1437 : 11 : continue;
1438 : :
1439 : 58665 : ao_ref ref;
1440 : :
1441 : 58665 : if (!access_node.get_ao_ref (stmt, &ref))
1442 : : return false;
1443 : 58665 : ref.ref_alias_set = ref_node->ref;
1444 : 58665 : ref.base_alias_set = base_node->base;
1445 : :
1446 : 58665 : bool byte_tracking_enabled
1447 : 58665 : = setup_live_bytes_from_ref (&ref, live_bytes);
1448 : 58665 : enum dse_store_status store_status;
1449 : :
1450 : 58665 : store_status = dse_classify_store (&ref, stmt,
1451 : : byte_tracking_enabled,
1452 : : live_bytes, &by_clobber_p);
1453 : 58665 : if (store_status != DSE_STORE_DEAD)
1454 : : return false;
1455 : : }
1456 : 1075 : delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
1457 : : need_ab_cleanup);
1458 : 1075 : return true;
1459 : : }
1460 : :
1461 : : /* Attempt to eliminate dead stores in the statement referenced by GSI.
1462 : :
1463 : : A dead store is a store into a memory location which will later be
1464 : : overwritten by another store without any intervening loads. In this
1465 : : case the earlier store can be deleted.
1466 : :
1467 : : In our SSA + virtual operand world we use immediate uses of virtual
1468 : : operands to detect dead stores. If a store's virtual definition
1469 : : is used precisely once by a later store to the same location which
1470 : : post dominates the first store, then the first store is dead. */
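: :
: : /* Editorial illustration (a hedged sketch, not part of the GCC sources):
: :    for a fragment such as
: :
: :      *p = 1;        // dead: overwritten with no intervening load
: :      *p = 2;
: :
: :    the -vops GIMPLE dump looks roughly like
: :
: :      # .MEM_2 = VDEF <.MEM_1(D)>
: :      *p_3(D) = 1;
: :      # .MEM_4 = VDEF <.MEM_2>
: :      *p_3(D) = 2;
: :
: :    .MEM_2 has exactly one use, in a store to the same location that
: :    post-dominates the first store, so the first store is dead.  */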
1471 : :
1472 : : static void
1473 : 51732593 : dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
1474 : : {
1475 : 51732593 : gimple *stmt = gsi_stmt (*gsi);
1476 : :
1477 : : /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1478 : 51732593 : if (gimple_has_volatile_ops (stmt)
1479 : 51732593 : && (!gimple_clobber_p (stmt)
1480 : 6522131 : || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1481 : 50098290 : return;
1482 : :
1483 : 46134039 : ao_ref ref;
1484 : :   /* If this is not a store we can still remove a dead call using the
1485 : :      modref summary.  Note we specifically allow REF to be initialized
1486 : :      to a conservative may-def since we are looking for follow-up stores
1487 : :      to kill all of it.  */
1488 : 46134039 : if (!initialize_ao_ref_for_dse (stmt, &ref, true))
1489 : : {
1490 : 16294750 : dse_optimize_call (gsi, live_bytes);
1491 : 16294750 : return;
1492 : : }
1493 : :
1494 : : /* We know we have virtual definitions. We can handle assignments and
1495 : : some builtin calls. */
1496 : 29839289 : if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1497 : : {
1498 : 380086 : tree fndecl = gimple_call_fndecl (stmt);
1499 : 380086 : switch (DECL_FUNCTION_CODE (fndecl))
1500 : : {
1501 : 378703 : case BUILT_IN_MEMCPY:
1502 : 378703 : case BUILT_IN_MEMMOVE:
1503 : 378703 : case BUILT_IN_STRNCPY:
1504 : 378703 : case BUILT_IN_MEMSET:
1505 : 378703 : case BUILT_IN_MEMCPY_CHK:
1506 : 378703 : case BUILT_IN_MEMMOVE_CHK:
1507 : 378703 : case BUILT_IN_STRNCPY_CHK:
1508 : 378703 : case BUILT_IN_MEMSET_CHK:
1509 : 378703 : {
1510 : : /* Occasionally calls with an explicit length of zero
1511 : : show up in the IL. It's pointless to do analysis
1512 : : 		 on them; they're trivially dead. */
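: :
: : 	      /* Editorial illustration (hedged, not part of the GCC sources):
: : 		 e.g.  __builtin_memcpy (dst, src, 0);  copies nothing, so it
: : 		 is deleted here without any further analysis.  */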
1513 : 378703 : tree size = gimple_call_arg (stmt, 2);
1514 : 378703 : if (integer_zerop (size))
1515 : : {
1516 : 50 : delete_dead_or_redundant_call (gsi, "dead");
1517 : 50 : return;
1518 : : }
1519 : :
1520 : : /* If this is a memset call that initializes an object
1521 : : to zero, it may be redundant with an earlier memset
1522 : : or empty CONSTRUCTOR of a larger object. */
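: :
: : 	      /* Editorial illustration (hedged, not part of the GCC sources):
: :
: : 		   struct S { int a; int b; } s = {};        // zeroes all of s
: : 		   __builtin_memset (&s.a, 0, sizeof s.a);   // zeroes bytes again
: :
: : 		 The memset only rewrites bytes the empty CONSTRUCTOR already
: : 		 set to zero, so it can be removed as a redundant store.  */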
1523 : 378653 : if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1524 : 288186 : || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1525 : 378944 : && integer_zerop (gimple_call_arg (stmt, 1)))
1526 : 53565 : dse_optimize_redundant_stores (stmt);
1527 : :
1528 : 378653 : enum dse_store_status store_status;
1529 : 378653 : bool byte_tracking_enabled
1530 : 378653 : = setup_live_bytes_from_ref (&ref, live_bytes);
1531 : 378653 : store_status = dse_classify_store (&ref, stmt,
1532 : : byte_tracking_enabled,
1533 : : live_bytes);
1534 : 378653 : if (store_status == DSE_STORE_LIVE)
1535 : : return;
1536 : :
1537 : 128269 : if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1538 : : {
1539 : 122334 : maybe_trim_memstar_call (&ref, live_bytes, stmt);
1540 : 122334 : return;
1541 : : }
1542 : :
1543 : 5935 : if (store_status == DSE_STORE_DEAD)
1544 : 5935 : delete_dead_or_redundant_call (gsi, "dead");
1545 : 5935 : return;
1546 : : }
1547 : :
1548 : 1383 : case BUILT_IN_CALLOC:
1549 : : /* We already know the arguments are integer constants. */
1550 : 1383 : dse_optimize_redundant_stores (stmt);
1551 : 1383 : return;
1552 : :
1553 : : default:
1554 : : return;
1555 : : }
1556 : : }
1557 : 29459203 : else if (is_gimple_call (stmt)
1558 : 29459203 : && gimple_call_internal_p (stmt))
1559 : : {
1560 : 2159 : switch (gimple_call_internal_fn (stmt))
1561 : : {
1562 : 958 : case IFN_LEN_STORE:
1563 : 958 : case IFN_MASK_STORE:
1564 : 958 : case IFN_MASK_LEN_STORE:
1565 : 958 : {
1566 : 958 : enum dse_store_status store_status;
1567 : 958 : store_status = dse_classify_store (&ref, stmt, false, live_bytes);
1568 : 958 : if (store_status == DSE_STORE_DEAD)
1569 : 0 : delete_dead_or_redundant_call (gsi, "dead");
1570 : 958 : return;
1571 : : }
1572 : : default:;
1573 : : }
1574 : : }
1575 : :
1576 : 29458245 : bool by_clobber_p = false;
1577 : :
1578 : : /* Check if this statement stores zero to a memory location,
1579 : : and if there is a subsequent store of zero to the same
1580 : : memory location. If so, remove the subsequent store. */
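: :
: :   /* Editorial illustration (hedged, not part of the GCC sources):
: :
: :        s->count = 0;
: :        n = s->count;      // an intervening load does not matter
: :        s->count = 0;      // stores the same value again: redundant
: :
: :      The second zero store is removed; the memory already holds zero.  */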
1581 : 29458245 : if (gimple_assign_single_p (stmt)
1582 : 29458245 : && initializer_zerop (gimple_assign_rhs1 (stmt)))
1583 : 3740897 : dse_optimize_redundant_stores (stmt);
1584 : :
1585 : :   /* Self-assignments are zombies: they store back the value that is already there, so skip classification and fall through to removal.  */
1586 : 29458245 : if (is_gimple_assign (stmt)
1587 : 57600349 : && operand_equal_p (gimple_assign_rhs1 (stmt),
1588 : 28142104 : gimple_assign_lhs (stmt), 0))
1589 : : ;
1590 : : else
1591 : : {
1592 : 29457777 : bool byte_tracking_enabled
1593 : 29457777 : = setup_live_bytes_from_ref (&ref, live_bytes);
1594 : 29457777 : enum dse_store_status store_status;
1595 : 29457777 : store_status = dse_classify_store (&ref, stmt,
1596 : : byte_tracking_enabled,
1597 : : live_bytes, &by_clobber_p);
1598 : 29457777 : if (store_status == DSE_STORE_LIVE)
1599 : : return;
1600 : :
1601 : 26818941 : if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1602 : : {
1603 : 24472286 : maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
1604 : 24472286 : return;
1605 : : }
1606 : : }
1607 : :
1608 : :   /* Now we know that the LHS of STMT is dead: either it is killed by a later store or STMT is a self-assignment.  */
1609 : :
1610 : : /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1611 : : another clobber stmt. */
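: :
: :   /* Editorial illustration (hedged, not part of the GCC sources):
: :
: :        x ={v} {CLOBBER};      // end-of-life marker for x
: :        ...
: :        x ={v} {CLOBBER};      // a later clobber makes the first removable
: :
: :      A clobber that is killed only by ordinary stores is kept, so the
: :      end-of-lifetime information it carries is not lost.  */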
1612 : 2347123 : if (gimple_clobber_p (stmt)
1613 : 2347123 : && !by_clobber_p)
1614 : : return;
1615 : :
1616 : 1638619 : if (is_gimple_call (stmt)
1617 : 1638619 : && (gimple_has_side_effects (stmt)
1618 : 413 : || (stmt_could_throw_p (fun, stmt)
1619 : 5 : && !fun->can_delete_dead_exceptions)))
1620 : : {
1621 : : /* See if we can remove complete call. */
1622 : 30270 : if (dse_optimize_call (gsi, live_bytes))
1623 : : return;
1624 : : /* Make sure we do not remove a return slot we cannot reconstruct
1625 : : later. */
1626 : 30242 : if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
1627 : 30242 : && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
1628 : 14739 : || !poly_int_tree_p
1629 : 14739 : (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
1630 : : return;
1631 : 25954 : if (dump_file && (dump_flags & TDF_DETAILS))
1632 : : {
1633 : 1 : fprintf (dump_file, " Deleted dead store in call LHS: ");
1634 : 1 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1635 : 1 : fprintf (dump_file, "\n");
1636 : : }
1637 : 25954 : gimple_call_set_lhs (stmt, NULL_TREE);
1638 : 25954 : update_stmt (stmt);
1639 : : }
1640 : 1608349 : else if (!stmt_could_throw_p (fun, stmt)
1641 : 1608349 : || fun->can_delete_dead_exceptions)
1642 : 1560845 : delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
1643 : : need_ab_cleanup);
1644 : : }
1645 : :
1646 : : namespace {
1647 : :
1648 : : const pass_data pass_data_dse =
1649 : : {
1650 : : GIMPLE_PASS, /* type */
1651 : : "dse", /* name */
1652 : : OPTGROUP_NONE, /* optinfo_flags */
1653 : : TV_TREE_DSE, /* tv_id */
1654 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
1655 : : 0, /* properties_provided */
1656 : : 0, /* properties_destroyed */
1657 : : 0, /* todo_flags_start */
1658 : : 0, /* todo_flags_finish */
1659 : : };
1660 : :
1661 : : class pass_dse : public gimple_opt_pass
1662 : : {
1663 : : public:
1664 : 1404155 : pass_dse (gcc::context *ctxt)
1665 : 2808310 : : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
1666 : : {}
1667 : :
1668 : : /* opt_pass methods: */
1669 : 1123324 : opt_pass * clone () final override { return new pass_dse (m_ctxt); }
1670 : 280831 : void set_pass_param (unsigned n, bool param) final override
1671 : : {
1672 : 280831 : gcc_assert (n == 0);
1673 : 280831 : use_dr_analysis_p = param;
1674 : 280831 : }
1675 : 5280971 : bool gate (function *) final override { return flag_tree_dse != 0; }
1676 : : unsigned int execute (function *) final override;
1677 : :
1678 : : private:
1679 : : bool use_dr_analysis_p;
1680 : : }; // class pass_dse
1681 : :
1682 : : unsigned int
1683 : 5256560 : pass_dse::execute (function *fun)
1684 : : {
1685 : 5256560 : unsigned todo = 0;
1686 : 5256560 : bool released_def = false;
1687 : :
1688 : 5256560 : need_eh_cleanup = BITMAP_ALLOC (NULL);
1689 : 5256560 : need_ab_cleanup = BITMAP_ALLOC (NULL);
1690 : 5256560 : auto_sbitmap live_bytes (param_dse_max_object_size);
1691 : 5256560 : if (flag_expensive_optimizations && use_dr_analysis_p)
1692 : 923378 : dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;
1693 : :
1694 : 5256560 : renumber_gimple_stmt_uids (fun);
1695 : :
1696 : 5256560 : calculate_dominance_info (CDI_DOMINATORS);
1697 : :
1698 : : /* Dead store elimination is fundamentally a reverse program order walk. */
1699 : 5256560 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
1700 : 5256560 : auto_bitmap exit_bbs;
1701 : 5256560 : bitmap_set_bit (exit_bbs, EXIT_BLOCK);
1702 : 5256560 : edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
1703 : 5256560 : int n = rev_post_order_and_mark_dfs_back_seme (fun, entry,
1704 : : exit_bbs, false, rpo, NULL);
1705 : 49579638 : for (int i = n; i != 0; --i)
1706 : : {
1707 : 44323078 : basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
1708 : 44323078 : gimple_stmt_iterator gsi;
1709 : :
1710 : 88646156 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1711 : : {
1712 : 294163239 : gimple *stmt = gsi_stmt (gsi);
1713 : :
1714 : 411816836 : if (gimple_vdef (stmt))
1715 : 51732593 : dse_optimize_stmt (fun, &gsi, live_bytes);
1716 : 484861292 : else if (def_operand_p
1717 : 242430646 : def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
1718 : : {
1719 : : 		  /* When we remove dead stores, make sure to also delete trivially
1720 : : dead SSA defs. */
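: :
: : 		  /* Editorial illustration (hedged, not part of the GCC sources):
: : 		     if the store below was deleted as dead,
: :
: : 		       _1 = a_2 + b_3;
: : 		       *p_4(D) = _1;     // dead store, already removed
: :
: : 		     then _1 is left with zero uses and is deleted here as
: : 		     well, without waiting for a separate DCE pass.  */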
1721 : 60188354 : if (has_zero_uses (DEF_FROM_PTR (def_p))
1722 : 1876941 : && !gimple_has_side_effects (stmt)
1723 : 1867674 : && !is_ctrl_altering_stmt (stmt)
1724 : 62053257 : && (!stmt_could_throw_p (fun, stmt)
1725 : 95138 : || fun->can_delete_dead_exceptions))
1726 : : {
1727 : 1769990 : if (dump_file && (dump_flags & TDF_DETAILS))
1728 : : {
1729 : 11 : fprintf (dump_file, " Deleted trivially dead stmt: ");
1730 : 11 : print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1731 : 11 : fprintf (dump_file, "\n");
1732 : : }
1733 : 1769990 : if (gsi_remove (&gsi, true) && need_eh_cleanup)
1734 : 18 : bitmap_set_bit (need_eh_cleanup, bb->index);
1735 : 1769990 : release_defs (stmt);
1736 : 1769990 : released_def = true;
1737 : : }
1738 : : }
1739 : 294163239 : if (gsi_end_p (gsi))
1740 : 524536 : gsi = gsi_last_bb (bb);
1741 : : else
1742 : 632387288 : gsi_prev (&gsi);
1743 : : }
1744 : 44323078 : bool removed_phi = false;
1745 : 62451470 : for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
1746 : : {
1747 : 18128392 : gphi *phi = si.phi ();
1748 : 18128392 : if (has_zero_uses (gimple_phi_result (phi)))
1749 : : {
1750 : 243874 : if (dump_file && (dump_flags & TDF_DETAILS))
1751 : : {
1752 : 0 : fprintf (dump_file, " Deleted trivially dead PHI: ");
1753 : 0 : print_gimple_stmt (dump_file, phi, 0, dump_flags);
1754 : 0 : fprintf (dump_file, "\n");
1755 : : }
1756 : 243874 : remove_phi_node (&si, true);
1757 : 243874 : removed_phi = true;
1758 : 243874 : released_def = true;
1759 : : }
1760 : : else
1761 : 17884518 : gsi_next (&si);
1762 : : }
1763 : 44323078 : if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
1764 : : todo |= TODO_cleanup_cfg;
1765 : : }
1766 : 5256560 : free (rpo);
1767 : :
1768 : : /* Removal of stores may make some EH edges dead. Purge such edges from
1769 : : the CFG as needed. */
1770 : 5256560 : if (!bitmap_empty_p (need_eh_cleanup))
1771 : : {
1772 : 359 : gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1773 : 359 : todo |= TODO_cleanup_cfg;
1774 : : }
1775 : 5256560 : if (!bitmap_empty_p (need_ab_cleanup))
1776 : : {
1777 : 4 : gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
1778 : 4 : todo |= TODO_cleanup_cfg;
1779 : : }
1780 : :
1781 : 5256560 : BITMAP_FREE (need_eh_cleanup);
1782 : 5256560 : BITMAP_FREE (need_ab_cleanup);
1783 : :
1784 : 5256560 : if (released_def)
1785 : 589749 : free_numbers_of_iterations_estimates (fun);
1786 : :
1787 : 5256560 : if (flag_expensive_optimizations && use_dr_analysis_p)
1788 : : {
1789 : 1658369 : for (auto i = dse_stmt_to_dr_map->begin ();
1790 : 2393360 : i != dse_stmt_to_dr_map->end (); ++i)
1791 : 734991 : free_data_ref ((*i).second);
1792 : 1846756 : delete dse_stmt_to_dr_map;
1793 : 923378 : dse_stmt_to_dr_map = NULL;
1794 : : }
1795 : :
1796 : 5256560 : return todo;
1797 : 5256560 : }
1798 : :
1799 : : } // anon namespace
1800 : :
1801 : : gimple_opt_pass *
1802 : 280831 : make_pass_dse (gcc::context *ctxt)
1803 : : {
1804 : 280831 : return new pass_dse (ctxt);
1805 : : }