Branch data Line data Source code
1 : : /* Dead code elimination pass for the GNU compiler.
2 : : Copyright (C) 2002-2025 Free Software Foundation, Inc.
3 : : Contributed by Ben Elliston <bje@redhat.com>
4 : : and Andrew MacLeod <amacleod@redhat.com>
5 : : Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6 : :
7 : : This file is part of GCC.
8 : :
9 : : GCC is free software; you can redistribute it and/or modify it
10 : : under the terms of the GNU General Public License as published by the
11 : : Free Software Foundation; either version 3, or (at your option) any
12 : : later version.
13 : :
14 : : GCC is distributed in the hope that it will be useful, but WITHOUT
15 : : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : : for more details.
18 : :
19 : : You should have received a copy of the GNU General Public License
20 : : along with GCC; see the file COPYING3. If not see
21 : : <http://www.gnu.org/licenses/>. */
22 : :
23 : : /* Dead code elimination.
24 : :
25 : : References:
26 : :
27 : : Building an Optimizing Compiler,
28 : : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29 : :
30 : : Advanced Compiler Design and Implementation,
31 : : Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32 : :
33 : : Dead-code elimination is the removal of statements which have no
34 : : impact on the program's output. "Dead statements" have no effect
35 : : on the output, while "necessary statements" may have an effect on
36 : : it.
37 : :
38 : : The algorithm consists of three phases:
39 : : 1. Marking as necessary all statements known to be necessary,
40 : : e.g. most function calls, writing a value to memory, etc;
41 : : 2. Propagating necessary statements, e.g., the statements
42 : : giving values to operands in necessary statements; and
43 : : 3. Removing dead statements. */
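: :
: :    /* As a small illustration (GIMPLE-like pseudo code, not from any
: :       particular testcase), consider:
: :
: :          x_1 = a_2 + b_3;
: :          y_4 = x_1 * 2;     <- y_4 is never used
: :          return a_2;
: :
: :       Phase 1 marks the return statement as necessary, phase 2 marks
: :       the statements feeding its operands, and phase 3 deletes the two
: :       unused arithmetic statements.  */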
44 : :
45 : : #include "config.h"
46 : : #include "system.h"
47 : : #include "coretypes.h"
48 : : #include "backend.h"
49 : : #include "rtl.h"
50 : : #include "tree.h"
51 : : #include "gimple.h"
52 : : #include "cfghooks.h"
53 : : #include "tree-pass.h"
54 : : #include "ssa.h"
55 : : #include "gimple-pretty-print.h"
56 : : #include "fold-const.h"
57 : : #include "calls.h"
58 : : #include "cfganal.h"
59 : : #include "tree-eh.h"
60 : : #include "gimplify.h"
61 : : #include "gimple-iterator.h"
62 : : #include "tree-cfg.h"
63 : : #include "tree-ssa-loop-niter.h"
64 : : #include "tree-into-ssa.h"
65 : : #include "tree-dfa.h"
66 : : #include "cfgloop.h"
67 : : #include "tree-scalar-evolution.h"
68 : : #include "tree-ssa-propagate.h"
69 : : #include "gimple-fold.h"
70 : : #include "tree-ssa.h"
71 : : #include "ipa-modref-tree.h"
72 : : #include "ipa-modref.h"
73 : :
74 : : static struct stmt_stats
75 : : {
76 : : int total;
77 : : int total_phis;
78 : : int removed;
79 : : int removed_phis;
80 : : } stats;
81 : :
82 : : #define STMT_NECESSARY GF_PLF_1
83 : :
84 : : static vec<gimple *> worklist;
85 : :
86 : : /* Vector indicating an SSA name has already been processed and marked
87 : : as necessary. */
88 : : static sbitmap processed;
89 : :
90 : : /* Vector indicating that the last statement of a basic block has already
91 : : been marked as necessary. */
92 : : static sbitmap last_stmt_necessary;
93 : :
94 : : /* Vector indicating that BB contains statements that are live. */
95 : : static sbitmap bb_contains_live_stmts;
96 : :
97 : : /* Before we can determine whether a control branch is dead, we need to
98 : : compute which blocks are control dependent on which edges.
99 : :
100 : : We expect each block to be control dependent on very few edges so we
101 : : use a bitmap for each block recording its edges. An array holds these
102 : : bitmaps. The Ith bit in a block's bitmap is set if that block is dependent
103 : : on the Ith edge. */
104 : : static control_dependences *cd;
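: :
: :    /* For instance, in the schematic CFG fragment
: :
: :         B1:  if (p_1 != 0) goto B2; else goto B3;
: :         B2:  x_2 = 1;
: :         B3:  ...
: :
: :       B2 is control dependent on the edge B1->B2, so the bit for that
: :       edge is set in B2's bitmap.  */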
105 : :
106 : : /* Vector indicating that a basic block has already had all the edges
107 : : processed that it is control dependent on. */
108 : : static sbitmap visited_control_parents;
109 : :
110 : : /* TRUE if this pass alters the CFG (by removing control statements).
111 : : FALSE otherwise.
112 : :
113 : : If this pass alters the CFG, then it will arrange for the dominators
114 : : to be recomputed. */
115 : : static bool cfg_altered;
116 : :
117 : : /* When non-NULL holds map from basic block index into the postorder. */
118 : : static int *bb_postorder;
119 : :
120 : :
121 : : /* True if we should treat any stmt with a vdef as necessary. */
122 : :
123 : : static inline bool
124 : 254098680 : keep_all_vdefs_p ()
125 : : {
126 : 254098680 : return optimize_debug;
127 : : }
128 : :
129 : : /* 1 if CALLEE is the function __cxa_atexit.
130 : : 2 if CALLEE is the function __aeabi_atexit.
131 : : 0 otherwise. */
132 : :
133 : : static inline int
134 : 81499356 : is_cxa_atexit (const_tree callee)
135 : : {
136 : 81499356 : if (callee != NULL_TREE
137 : 81499356 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__cxa_atexit") == 0)
138 : : return 1;
139 : 81405110 : if (callee != NULL_TREE
140 : 81405110 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__aeabi_atexit") == 0)
141 : 0 : return 2;
142 : : return 0;
143 : : }
144 : :
145 : : /* True if STMT is a call to __cxa_atexit (or __aeabi_atexit)
146 : : and the function argument to that call is a const or pure
147 : : non-looping function decl. */
148 : :
149 : : static inline bool
150 : 81499356 : is_removable_cxa_atexit_call (gimple *stmt)
151 : : {
152 : 81499356 : tree callee = gimple_call_fndecl (stmt);
153 : 81499356 : int functype = is_cxa_atexit (callee);
154 : 81499356 : if (!functype)
155 : : return false;
156 : 94246 : if (gimple_call_num_args (stmt) != 3)
157 : : return false;
158 : :
159 : : /* The function argument is the 1st argument for __cxa_atexit
160 : : or the 2nd argument for __aeabi_atexit. */
161 : 188492 : tree arg = gimple_call_arg (stmt, functype == 2 ? 1 : 0);
162 : 94246 : if (TREE_CODE (arg) != ADDR_EXPR)
163 : : return false;
164 : 94246 : arg = TREE_OPERAND (arg, 0);
165 : 94246 : if (TREE_CODE (arg) != FUNCTION_DECL)
166 : : return false;
167 : 94246 : int flags = flags_from_decl_or_type (arg);
168 : :
169 : : /* If the function is noreturn then it cannot be removed. */
170 : 94246 : if (flags & ECF_NORETURN)
171 : : return false;
172 : :
173 : : /* The function needs to be const or pure and non looping. */
174 : 94088 : return (flags & (ECF_CONST|ECF_PURE))
175 : 94088 : && !(flags & ECF_LOOPING_CONST_OR_PURE);
176 : : }
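: :
: :    /* For example, a registration such as
: :
: :         __cxa_atexit (&S::~S, &s, &__dso_handle);
: :
: :       where the destructor has been proven const or pure and
: :       non-looping (e.g. an empty destructor) does nothing observable
: :       at exit time, so the call can be deleted.  */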
177 : :
178 : : /* If STMT is not already marked necessary, mark it, and add it to the
179 : : worklist if ADD_TO_WORKLIST is true. */
180 : :
181 : : static inline void
182 : 395541228 : mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
183 : : {
184 : 395541228 : gcc_assert (stmt);
185 : :
186 : 395541228 : if (gimple_plf (stmt, STMT_NECESSARY))
187 : : return;
188 : :
189 : 395541040 : if (dump_file && (dump_flags & TDF_DETAILS))
190 : : {
191 : 850 : fprintf (dump_file, "Marking useful stmt: ");
192 : 850 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
193 : 850 : fprintf (dump_file, "\n");
194 : : }
195 : :
196 : 395541040 : gimple_set_plf (stmt, STMT_NECESSARY, true);
197 : 395541040 : if (add_to_worklist)
198 : 99858842 : worklist.safe_push (stmt);
199 : 99858842 : if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
200 : 34605920 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
201 : : }
202 : :
203 : :
204 : : /* Mark the statement defining operand OP as necessary. */
205 : :
206 : : static inline void
207 : 313699658 : mark_operand_necessary (tree op)
208 : : {
209 : 313699658 : gimple *stmt;
210 : 313699658 : int ver;
211 : :
212 : 313699658 : gcc_assert (op);
213 : :
214 : 313699658 : ver = SSA_NAME_VERSION (op);
215 : 313699658 : if (bitmap_bit_p (processed, ver))
216 : : {
217 : 112652152 : stmt = SSA_NAME_DEF_STMT (op);
218 : 112652152 : gcc_assert (gimple_nop_p (stmt)
219 : : || gimple_plf (stmt, STMT_NECESSARY));
220 : 169930312 : return;
221 : : }
222 : 201047506 : bitmap_set_bit (processed, ver);
223 : :
224 : 201047506 : stmt = SSA_NAME_DEF_STMT (op);
225 : 201047506 : gcc_assert (stmt);
226 : :
227 : 201047506 : if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
228 : : return;
229 : :
230 : 143769346 : if (dump_file && (dump_flags & TDF_DETAILS))
231 : : {
232 : 262 : fprintf (dump_file, "marking necessary through ");
233 : 262 : print_generic_expr (dump_file, op);
234 : 262 : fprintf (dump_file, " stmt ");
235 : 262 : print_gimple_stmt (dump_file, stmt, 0);
236 : : }
237 : :
238 : 143769346 : gimple_set_plf (stmt, STMT_NECESSARY, true);
239 : 143769346 : if (bb_contains_live_stmts)
240 : 49316979 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
241 : 143769346 : worklist.safe_push (stmt);
242 : : }
243 : :
244 : : /* Return true if STMT is a call to an allocation function that can be
245 : : optimized out if the memory block is never used for anything other
246 : : than a NULL pointer check or a free.
247 : : If NON_NULL_CHECK is false, we can further assume that the return
248 : : value is never checked to be non-NULL.
249 : : Don't return true if it is called with a constant size (or sizes for
250 : : calloc) that is excessively large: larger than PTRDIFF_MAX, or for
251 : : calloc either argument larger than PTRDIFF_MAX, or both arguments
252 : : constant with their product larger than PTRDIFF_MAX. */
253 : :
254 : : static bool
255 : 33904396 : is_removable_allocation_p (gcall *stmt, bool non_null_check)
256 : : {
257 : 33904396 : int arg = -1;
258 : 33904396 : tree callee = gimple_call_fndecl (stmt), a1, a2;
259 : 33904396 : if (callee != NULL_TREE
260 : 33904396 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
261 : 8630534 : switch (DECL_FUNCTION_CODE (callee))
262 : : {
263 : 489838 : case BUILT_IN_MALLOC:
264 : 489838 : arg = 1;
265 : 489838 : goto do_malloc;
266 : 150 : case BUILT_IN_ALIGNED_ALLOC:
267 : 150 : arg = 2;
268 : 150 : goto do_malloc;
269 : 4737 : case BUILT_IN_CALLOC:
270 : 4737 : arg = 3;
271 : 4737 : goto do_malloc;
272 : 109147 : CASE_BUILT_IN_ALLOCA:
273 : 109147 : arg = 1;
274 : 109147 : goto do_malloc;
275 : : case BUILT_IN_STRDUP:
276 : : case BUILT_IN_STRNDUP:
277 : : arg = 0;
278 : : /* FALLTHRU */
279 : 607399 : do_malloc:
280 : 607399 : if (non_null_check)
281 : : {
282 : 97505 : if (flag_malloc_dce <= 1)
283 : : return false;
284 : : }
285 : 509894 : else if (!flag_malloc_dce)
286 : : return false;
287 : : break;
288 : :
289 : : case BUILT_IN_GOMP_ALLOC:
290 : 33904159 : arg = 2;
291 : : break;
292 : :
293 : : default:;
294 : : }
295 : :
296 : 33904159 : if (arg == -1
297 : 33904159 : && callee != NULL_TREE
298 : 31357196 : && flag_allocation_dce
299 : 31354002 : && gimple_call_from_new_or_delete (stmt)
300 : 34893101 : && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
301 : : arg = 1;
302 : :
303 : 33665356 : switch (arg)
304 : : {
305 : : case -1:
306 : : return false;
307 : : case 0:
308 : : return true;
309 : 842718 : case 1:
310 : 842718 : case 2:
311 : 842718 : if (gimple_call_num_args (stmt) < (unsigned) arg)
312 : : return false;
313 : 842712 : a1 = gimple_call_arg (stmt, arg - 1);
314 : 842712 : if (tree_fits_uhwi_p (a1)
315 : 842712 : && (tree_to_uhwi (a1)
316 : 480409 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
317 : : return false;
318 : : return true;
319 : 4737 : case 3:
320 : 4737 : if (gimple_call_num_args (stmt) < 2)
321 : : return false;
322 : 4737 : a1 = gimple_call_arg (stmt, 0);
323 : 4737 : a2 = gimple_call_arg (stmt, 1);
324 : 4737 : if (tree_fits_uhwi_p (a1)
325 : 4737 : && (tree_to_uhwi (a1)
326 : 3849 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
327 : : return false;
328 : 4725 : if (tree_fits_uhwi_p (a2)
329 : 4725 : && (tree_to_uhwi (a2)
330 : 4080 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
331 : : return false;
332 : 9426 : if (TREE_CODE (a1) == INTEGER_CST
333 : 3837 : && TREE_CODE (a2) == INTEGER_CST
334 : 11659 : && (wi::to_widest (a1) * wi::to_widest (a2)
335 : 11659 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
336 : : return false;
337 : : return true;
338 : : default:
339 : : gcc_unreachable ();
340 : : }
341 : : }
342 : :
343 : : /* Return true if STMT is a conditional
344 : : if (ptr != NULL)
345 : : where ptr was returned by a removable allocation function. */
346 : :
347 : : static bool
348 : 225256783 : checks_return_value_of_removable_allocation_p (gimple *stmt)
349 : : {
350 : 225256783 : gcall *def_stmt;
351 : 225256783 : return gimple_code (stmt) == GIMPLE_COND
352 : 28660360 : && (gimple_cond_code (stmt) == EQ_EXPR
353 : 20131913 : || gimple_cond_code (stmt) == NE_EXPR)
354 : 22026606 : && integer_zerop (gimple_cond_rhs (stmt))
355 : 12363881 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
356 : : && (def_stmt = dyn_cast <gcall *>
357 : 12361402 : (SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt))))
358 : 228427918 : && is_removable_allocation_p (def_stmt, true);
359 : : }
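: :
: :    /* A typical removable pattern (sketched, not from a particular
: :       testcase) is
: :
: :         p_1 = malloc (32);
: :         if (p_1 != 0B)
: :           free (p_1);
: :
: :       where the allocated block is only NULL-checked and freed, so
: :       neither the allocation, the check nor the free needs to be kept.  */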
360 : :
361 : :
362 : : /* Mark STMT as necessary if it obviously is. Add it to the worklist if
363 : : it can make other statements necessary.
364 : :
365 : : If AGGRESSIVE is false, control statements are conservatively marked as
366 : : necessary. */
367 : :
368 : : static void
369 : 523656848 : mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
370 : : {
371 : : /* Statements that are implicitly live. Most function calls, asm
372 : : and return statements are required. Labels and GIMPLE_BIND nodes
373 : : are kept because they are control flow, and we have no way of
374 : : knowing whether they can be removed. DCE can eliminate all the
375 : : other statements in a block, and CFG cleanup can then remove the
376 : : block and labels. */
377 : 523656848 : switch (gimple_code (stmt))
378 : : {
379 : 6597517 : case GIMPLE_PREDICT:
380 : 6597517 : case GIMPLE_LABEL:
381 : 6597517 : mark_stmt_necessary (stmt, false);
382 : 6597517 : return;
383 : :
384 : 9528405 : case GIMPLE_ASM:
385 : 9528405 : case GIMPLE_RESX:
386 : 9528405 : case GIMPLE_RETURN:
387 : 9528405 : mark_stmt_necessary (stmt, true);
388 : 9528405 : return;
389 : :
390 : 35177612 : case GIMPLE_CALL:
391 : 35177612 : {
392 : 35177612 : gcall *call = as_a <gcall *> (stmt);
393 : :
394 : : /* Never elide a noreturn call we pruned control-flow for. */
395 : 35177612 : if ((gimple_call_flags (call) & ECF_NORETURN)
396 : 35177612 : && gimple_call_ctrl_altering_p (call))
397 : : {
398 : 4703286 : mark_stmt_necessary (call, true);
399 : 4703286 : return;
400 : : }
401 : :
402 : :
403 : 30474326 : if (is_removable_allocation_p (call, false))
404 : : return;
405 : :
406 : :
407 : : /* For __cxa_atexit calls, don't mark as necessary right away. */
408 : 29875828 : if (is_removable_cxa_atexit_call (call))
409 : : return;
410 : :
411 : : /* IFN_GOACC_LOOP calls are necessary in that they are used to
412 : : represent parameters (i.e. step, bound) of a lowered OpenACC
413 : : partitioned loop. But this kind of partitioned loop might not
414 : : survive aggressive loop removal because it has a loop exit and
415 : : is assumed to be finite. Therefore, we need to explicitly mark
416 : : these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
417 : 29875422 : if (gimple_call_internal_p (call, IFN_GOACC_LOOP))
418 : : {
419 : 27109 : mark_stmt_necessary (call, true);
420 : 27109 : return;
421 : : }
422 : : break;
423 : : }
424 : :
425 : 289331983 : case GIMPLE_DEBUG:
426 : : /* Debug temps without a value are not useful. ??? If we could
427 : : easily locate the debug temp bind stmt for a use thereof,
428 : : we could refrain from marking all debug temps here, and
429 : : mark them only if they're used. */
430 : 289331983 : if (gimple_debug_nonbind_marker_p (stmt)
431 : 218845739 : || !gimple_debug_bind_p (stmt)
432 : 218111674 : || gimple_debug_bind_has_value_p (stmt)
433 : 385229947 : || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
434 : 289084681 : mark_stmt_necessary (stmt, false);
435 : : return;
436 : :
437 : 1869 : case GIMPLE_GOTO:
438 : 1869 : gcc_assert (!simple_goto_p (stmt));
439 : 1869 : mark_stmt_necessary (stmt, true);
440 : 1869 : return;
441 : :
442 : 28690631 : case GIMPLE_COND:
443 : 28690631 : gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
444 : : /* Fall through. */
445 : :
446 : 28837366 : case GIMPLE_SWITCH:
447 : 28837366 : if (! aggressive)
448 : 19124011 : mark_stmt_necessary (stmt, true);
449 : : break;
450 : :
451 : 154140255 : case GIMPLE_ASSIGN:
452 : : /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
453 : : do not prevail. That also makes control flow leading to them
454 : : not necessary in aggressive mode. */
455 : 164567220 : if (gimple_clobber_p (stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
456 : : return;
457 : : break;
458 : :
459 : : default:
460 : : break;
461 : : }
462 : :
463 : : /* If the statement has volatile operands, it needs to be preserved.
464 : : Same for statements that can alter control flow in unpredictable
465 : : ways. */
466 : 211611267 : if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
467 : : {
468 : 39397064 : mark_stmt_necessary (stmt, true);
469 : 39397064 : return;
470 : : }
471 : :
472 : : /* If a statement could throw, it can be deemed necessary unless we
473 : : are allowed to remove dead EH. Test this after checking for
474 : : new/delete operators since we always elide their EH. */
475 : 172214203 : if (!cfun->can_delete_dead_exceptions
476 : 172214203 : && stmt_could_throw_p (cfun, stmt))
477 : : {
478 : 5380588 : mark_stmt_necessary (stmt, true);
479 : 5380588 : return;
480 : : }
481 : :
482 : 207850249 : if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
483 : 207831275 : || stmt_may_clobber_global_p (stmt, false))
484 : : {
485 : 12016314 : mark_stmt_necessary (stmt, true);
486 : 12016314 : return;
487 : : }
488 : :
489 : : return;
490 : : }
491 : :
492 : :
493 : : /* Mark the last statement of BB as necessary. */
494 : :
495 : : static bool
496 : 40232488 : mark_last_stmt_necessary (basic_block bb)
497 : : {
498 : 40232488 : if (!bitmap_set_bit (last_stmt_necessary, bb->index))
499 : : return true;
500 : :
501 : 15755717 : bitmap_set_bit (bb_contains_live_stmts, bb->index);
502 : :
503 : : /* We actually mark the statement only if it is a control statement. */
504 : 15755717 : gimple *stmt = *gsi_last_bb (bb);
505 : 15755717 : if (stmt && is_ctrl_stmt (stmt))
506 : : {
507 : 9680384 : mark_stmt_necessary (stmt, true);
508 : 9680384 : return true;
509 : : }
510 : : return false;
511 : : }
512 : :
513 : :
514 : : /* Mark control dependent edges of BB as necessary. We have to do this only
515 : : once for each basic block so we set the appropriate bit after we're done.
516 : :
517 : : When IGNORE_SELF is true, ignore BB in the list of control dependences. */
518 : :
519 : : static void
520 : 29602597 : mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
521 : : {
522 : 29602597 : bitmap_iterator bi;
523 : 29602597 : unsigned edge_number;
524 : 29602597 : bool skipped = false;
525 : :
526 : 29602597 : gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
527 : :
528 : 29602597 : if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
529 : 3288530 : return;
530 : :
531 : 65223701 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
532 : : 0, edge_number, bi)
533 : : {
534 : 38909634 : basic_block cd_bb = cd->get_edge_src (edge_number);
535 : :
536 : 38909634 : if (ignore_self && cd_bb == bb)
537 : : {
538 : 64470 : skipped = true;
539 : 64470 : continue;
540 : : }
541 : :
542 : 38845164 : if (!mark_last_stmt_necessary (cd_bb))
543 : 6073077 : mark_control_dependent_edges_necessary (cd_bb, false);
544 : : }
545 : :
546 : 26314067 : if (!skipped)
547 : 26249597 : bitmap_set_bit (visited_control_parents, bb->index);
548 : : }
549 : :
550 : :
551 : : /* Find obviously necessary statements. These are things like most function
552 : : calls, and stores to file level variables.
553 : :
554 : : If AGGRESSIVE is false, control statements are conservatively marked
555 : : as necessary; otherwise they are left to be handled by control
556 : : dependence analysis.
557 : :
558 : : static void
559 : 7679546 : find_obviously_necessary_stmts (bool aggressive)
560 : : {
561 : 7679546 : basic_block bb;
562 : 7679546 : gimple_stmt_iterator gsi;
563 : 7679546 : edge e;
564 : 7679546 : gimple *phi, *stmt;
565 : 7679546 : int flags;
566 : :
567 : 84809522 : FOR_EACH_BB_FN (bb, cfun)
568 : : {
569 : : /* PHI nodes are never inherently necessary. */
570 : 111238319 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
571 : : {
572 : 34108343 : phi = gsi_stmt (gsi);
573 : 34108343 : gimple_set_plf (phi, STMT_NECESSARY, false);
574 : : }
575 : :
576 : : /* Check all statements in the block. */
577 : 677916800 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
578 : : {
579 : 523656848 : stmt = gsi_stmt (gsi);
580 : 523656848 : gimple_set_plf (stmt, STMT_NECESSARY, false);
581 : 523656848 : mark_stmt_if_obviously_necessary (stmt, aggressive);
582 : : }
583 : : }
584 : :
585 : : /* Pure and const functions are finite and thus have no infinite loops in
586 : : them. */
587 : 7679546 : flags = flags_from_decl_or_type (current_function_decl);
588 : 7679546 : if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
589 : 913077 : return;
590 : :
591 : : /* Prevent the empty possibly infinite loops from being removed. This is
592 : : needed to make the logic in remove_dead_stmt work to identify the
593 : : correct edge to keep when removing a controlling condition. */
594 : 6766469 : if (aggressive)
595 : : {
596 : 3108431 : if (mark_irreducible_loops ())
597 : 241008 : FOR_EACH_BB_FN (bb, cfun)
598 : : {
599 : 236004 : edge_iterator ei;
600 : 594136 : FOR_EACH_EDGE (e, ei, bb->succs)
601 : 358132 : if ((e->flags & EDGE_DFS_BACK)
602 : 358132 : && (e->flags & EDGE_IRREDUCIBLE_LOOP))
603 : : {
604 : 8939 : if (dump_file)
605 : 0 : fprintf (dump_file, "Marking back edge of irreducible "
606 : 0 : "loop %i->%i\n", e->src->index, e->dest->index);
607 : 8939 : mark_control_dependent_edges_necessary (e->dest, false);
608 : : }
609 : : }
610 : :
611 : 10989573 : for (auto loop : loops_list (cfun, 0))
612 : : /* For loops without an exit do not mark any condition. */
613 : 1664280 : if (loop->exits->next->e && !finite_loop_p (loop))
614 : : {
615 : 272049 : if (dump_file)
616 : 1 : fprintf (dump_file, "cannot prove finiteness of loop %i\n",
617 : : loop->num);
618 : 272049 : mark_control_dependent_edges_necessary (loop->latch, false);
619 : 3108431 : }
620 : : }
621 : : }
622 : :
623 : :
624 : : /* Return true if REF is based on an aliased base, otherwise false. */
625 : :
626 : : static bool
627 : 91772544 : ref_may_be_aliased (tree ref)
628 : : {
629 : 91772544 : if (TREE_CODE (ref) == WITH_SIZE_EXPR)
630 : 0 : ref = TREE_OPERAND (ref, 0);
631 : 168436854 : while (handled_component_p (ref))
632 : 76664310 : ref = TREE_OPERAND (ref, 0);
633 : 91772544 : if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
634 : 91772544 : && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
635 : 13143475 : ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
636 : 91772544 : return !(DECL_P (ref)
637 : 62030549 : && !may_be_aliased (ref));
638 : : }
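: :
: :    /* E.g. a.b[i_1], where 'a' is a local whose address is never taken,
: :       is based on a non-aliased decl, while *p_2 or a reference to an
: :       address-taken or external variable is considered aliased.  */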
639 : :
640 : : static bitmap visited = NULL;
641 : : static unsigned int longest_chain = 0;
642 : : static unsigned int total_chain = 0;
643 : : static unsigned int nr_walks = 0;
644 : : static bool chain_ovfl = false;
645 : :
646 : : /* Worker for the walker that marks reaching definitions of REF,
647 : : which is based on a non-aliased decl, necessary. It returns
648 : : true whenever the defining statement of the current VDEF is
649 : : a kill for REF, as no dominating may-defs are necessary for REF
650 : : anymore. DATA points to the basic-block that contains the
651 : : stmt that refers to REF. */
652 : :
653 : : static bool
654 : 37407163 : mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
655 : : {
656 : 37407163 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
657 : :
658 : : /* All stmts we visit are necessary. */
659 : 37407163 : if (! gimple_clobber_p (def_stmt))
660 : 37123291 : mark_operand_necessary (vdef);
661 : :
662 : : /* If the stmt lhs kills ref, then we can stop walking. */
663 : 37407163 : if (gimple_has_lhs (def_stmt)
664 : 27396854 : && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
665 : : /* The assignment is not necessarily carried out if it can throw
666 : : and we can catch it in the current function where we could inspect
667 : : the previous value.
668 : : ??? We only need to care about the RHS throwing. For aggregate
669 : : assignments or similar calls and non-call exceptions the LHS
670 : : might throw as well. */
671 : 35515686 : && !stmt_can_throw_internal (cfun, def_stmt))
672 : : {
673 : 23355965 : tree base, lhs = gimple_get_lhs (def_stmt);
674 : 23355965 : poly_int64 size, offset, max_size;
675 : 23355965 : bool reverse;
676 : 23355965 : ao_ref_base (ref);
677 : 23355965 : base
678 : 23355965 : = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
679 : : /* We can get MEM[symbol: sZ, index: D.8862_1] here,
680 : : so base == refd->base does not always hold. */
681 : 23355965 : if (base == ref->base)
682 : : {
683 : : /* For a must-alias check we need to be able to constrain
684 : : the accesses properly. */
685 : 21392428 : if (known_eq (size, max_size)
686 : 21392428 : && known_subrange_p (ref->offset, ref->max_size, offset, size))
687 : 8202577 : return true;
688 : : /* Or they need to be exactly the same. */
689 : 13190804 : else if (ref->ref
690 : : /* Make sure there is no induction variable involved
691 : : in the references (gcc.c-torture/execute/pr42142.c).
692 : : The simplest way is to check if the kill dominates
693 : : the use. */
694 : : /* But when both are in the same block we cannot
695 : : easily tell whether we came from a backedge
696 : : unless we decide to compute stmt UIDs
697 : : (see PR58246). */
698 : 13190804 : && (basic_block) data != gimple_bb (def_stmt)
699 : 5667241 : && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
700 : 5667241 : gimple_bb (def_stmt))
701 : 16290909 : && operand_equal_p (ref->ref, lhs, 0))
702 : : return true;
703 : : }
704 : : }
705 : :
706 : : /* Otherwise keep walking. */
707 : : return false;
708 : : }
709 : :
710 : : static void
711 : 13301382 : mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
712 : : {
713 : : /* Should have been caught before calling this function. */
714 : 13301382 : gcc_checking_assert (!keep_all_vdefs_p ());
715 : :
716 : 13301382 : unsigned int chain;
717 : 13301382 : ao_ref refd;
718 : 13301382 : gcc_assert (!chain_ovfl);
719 : 13301382 : ao_ref_init (&refd, ref);
720 : 26602764 : chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
721 : : mark_aliased_reaching_defs_necessary_1,
722 : 13301382 : gimple_bb (stmt), NULL);
723 : 13301382 : if (chain > longest_chain)
724 : 2075812 : longest_chain = chain;
725 : 13301382 : total_chain += chain;
726 : 13301382 : nr_walks++;
727 : 13301382 : }
728 : :
729 : : /* Worker for the walker that marks reaching definitions of REF, which
730 : : is not based on a non-aliased decl. For simplicity we need to end
731 : : up marking all may-defs necessary that are not based on a non-aliased
732 : : decl. The only job of this walker is to skip may-defs based on
733 : : a non-aliased decl. */
734 : :
735 : : static bool
736 : 70906471 : mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
737 : : tree vdef, void *data ATTRIBUTE_UNUSED)
738 : : {
739 : 70906471 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
740 : :
741 : : /* We have to skip already visited (and thus necessary) statements
742 : : to make the chaining work after we dropped back to simple mode. */
743 : 70906471 : if (chain_ovfl
744 : 70906471 : && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
745 : : {
746 : 2568817 : gcc_assert (gimple_nop_p (def_stmt)
747 : : || gimple_plf (def_stmt, STMT_NECESSARY));
748 : : return false;
749 : : }
750 : :
751 : : /* We want to skip stores to non-aliased variables. */
752 : 68337654 : if (!chain_ovfl
753 : 68337654 : && gimple_assign_single_p (def_stmt))
754 : : {
755 : 45557418 : tree lhs = gimple_assign_lhs (def_stmt);
756 : 45557418 : if (!ref_may_be_aliased (lhs))
757 : : return false;
758 : : }
759 : :
760 : : /* We want to skip statements that do not constitute stores but have
761 : : a virtual definition. */
762 : 56868472 : if (gcall *call = dyn_cast <gcall *> (def_stmt))
763 : : {
764 : 21905777 : tree callee = gimple_call_fndecl (call);
765 : 21905777 : if (callee != NULL_TREE
766 : 21905777 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
767 : 3663125 : switch (DECL_FUNCTION_CODE (callee))
768 : : {
769 : : case BUILT_IN_MALLOC:
770 : : case BUILT_IN_ALIGNED_ALLOC:
771 : : case BUILT_IN_CALLOC:
772 : : CASE_BUILT_IN_ALLOCA:
773 : : case BUILT_IN_STRDUP:
774 : : case BUILT_IN_STRNDUP:
775 : : case BUILT_IN_FREE:
776 : : case BUILT_IN_GOMP_ALLOC:
777 : : case BUILT_IN_GOMP_FREE:
778 : : return false;
779 : :
780 : : default:;
781 : : }
782 : :
783 : 21323737 : if (callee != NULL_TREE
784 : 20257901 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
785 : 20069853 : || DECL_IS_OPERATOR_DELETE_P (callee))
786 : 22038583 : && gimple_call_from_new_or_delete (call))
787 : : return false;
788 : 20619407 : if (is_removable_cxa_atexit_call (call))
789 : : return false;
790 : : }
791 : :
792 : 55581914 : if (! gimple_clobber_p (def_stmt))
793 : 49977537 : mark_operand_necessary (vdef);
794 : :
795 : : return false;
796 : : }
797 : :
798 : : static void
799 : 66447474 : mark_all_reaching_defs_necessary (gimple *stmt)
800 : : {
801 : : /* Should have been caught before calling this function. */
802 : 66447474 : gcc_checking_assert (!keep_all_vdefs_p ());
803 : 132894948 : walk_aliased_vdefs (NULL, gimple_vuse (stmt),
804 : : mark_all_reaching_defs_necessary_1, NULL, &visited);
805 : 66447474 : }
806 : :
807 : : /* Return true if PHI has a single argument, or all of its arguments
808 : : are identical, so that it can be removed. */
809 : : static bool
810 : 18463733 : degenerate_phi_p (gimple *phi)
811 : : {
812 : 18463733 : unsigned int i;
813 : 18463733 : tree op = gimple_phi_arg_def (phi, 0);
814 : 19430060 : for (i = 1; i < gimple_phi_num_args (phi); i++)
815 : 16738379 : if (gimple_phi_arg_def (phi, i) != op)
816 : : return false;
817 : : return true;
818 : : }
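: :
: :    /* E.g. x_3 = PHI <a_1(2), a_1(3)> is degenerate, while
: :       x_3 = PHI <a_1(2), b_2(3)> is not.  */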
819 : :
820 : : /* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
821 : : and delete operators. */
822 : :
823 : : static bool
824 : 38835 : valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
825 : : {
826 : 38835 : tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
827 : 38835 : tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
828 : 38835 : return valid_new_delete_pair_p (new_asm, delete_asm);
829 : : }
830 : :
831 : : /* Propagate necessity using the operands of necessary statements.
832 : : Process the uses on each statement in the worklist, and add all
833 : : feeding statements which contribute to the calculation of this
834 : : value to the worklist.
835 : :
836 : : In conservative mode, AGGRESSIVE is false. */
837 : :
838 : : static void
839 : 7679546 : propagate_necessity (bool aggressive)
840 : : {
841 : 7679546 : gimple *stmt;
842 : :
843 : 7679546 : if (dump_file && (dump_flags & TDF_DETAILS))
844 : 207 : fprintf (dump_file, "\nProcessing worklist:\n");
845 : :
846 : 251307734 : while (worklist.length () > 0)
847 : : {
848 : : /* Take STMT from worklist. */
849 : 243628188 : stmt = worklist.pop ();
850 : :
851 : 243628188 : if (dump_file && (dump_flags & TDF_DETAILS))
852 : : {
853 : 1093 : fprintf (dump_file, "processing: ");
854 : 1093 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
855 : 1093 : fprintf (dump_file, "\n");
856 : : }
857 : :
858 : 243628188 : if (aggressive)
859 : : {
860 : : /* Mark the last statement of the basic blocks on which the block
861 : : containing STMT is control dependent, but only if we haven't
862 : : already done so. */
863 : 83922899 : basic_block bb = gimple_bb (stmt);
864 : 83922899 : if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
865 : 83922899 : && !bitmap_bit_p (visited_control_parents, bb->index))
866 : 17348170 : mark_control_dependent_edges_necessary (bb, false);
867 : : }
868 : :
869 : 243628188 : if (gimple_code (stmt) == GIMPLE_PHI
870 : : /* We do not process virtual PHI nodes nor do we track their
871 : : necessity. */
872 : 280071542 : && !virtual_operand_p (gimple_phi_result (stmt)))
873 : : {
874 : : /* PHI nodes are somewhat special in that each PHI alternative has
875 : : data and control dependencies. All the statements feeding the
876 : : PHI node's arguments are always necessary. In aggressive mode,
877 : : we also consider the control dependent edges leading to the
878 : : predecessor block associated with each PHI alternative as
879 : : necessary. */
880 : 18221677 : gphi *phi = as_a <gphi *> (stmt);
881 : 18221677 : size_t k;
882 : :
883 : 59535119 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
884 : : {
885 : 41313442 : tree arg = PHI_ARG_DEF (stmt, k);
886 : 41313442 : if (TREE_CODE (arg) == SSA_NAME)
887 : 30579922 : mark_operand_necessary (arg);
888 : : }
889 : :
890 : : /* For PHI operands it matters from where the control flow arrives
891 : : to the BB. Consider the following example:
892 : :
893 : : a=exp1;
894 : : b=exp2;
895 : : if (test)
896 : : ;
897 : : else
898 : : ;
899 : : c=PHI(a,b)
900 : :
901 : : We need to mark control dependence of the empty basic blocks, since they
902 : : contain the computation of PHI operands.
903 : :
904 : : Doing so is too restrictive when the predecessor block is inside
905 : : a loop. Consider:
906 : :
907 : : if (b)
908 : : {
909 : : int i;
910 : : for (i = 0; i<1000; ++i)
911 : : ;
912 : : j = 0;
913 : : }
914 : : return j;
915 : :
916 : : There is a PHI for J in the BB containing the return statement.
917 : : In this case the control dependence of the predecessor block (which
918 : : is within the empty loop) also contains the block determining the
919 : : number of loop iterations, and that would prevent the empty loop
920 : : from being removed.
921 : :
922 : : This scenario can be avoided by splitting critical edges.
923 : : To avoid running the critical edge splitting pass we identify what
924 : : the control dependence would look like if the edge were split.
925 : :
926 : : Consider the modified CFG created from current CFG by splitting
927 : : edge B->C. In the postdominance tree of the modified CFG, C' is
928 : : always a child of C. There are two possibilities for what the
929 : : children of C' can look like:
930 : :
931 : : 1) C' is a leaf
932 : :
933 : : In this case the only basic block C' is control dependent on is B.
934 : :
935 : : 2) C' has a single child that is B
936 : :
937 : : In this case the control dependence of C' is the same as the control
938 : : dependence of B in the original CFG except for block B itself
939 : : (since C' postdominates B in the modified CFG).
940 : :
941 : : Now how do we decide which case happens? There are two basic options:
942 : :
943 : : a) C postdominates B. Then C immediately postdominates B and
944 : : case 2 happens iff there is no other way from B to C except
945 : : the edge B->C.
946 : :
947 : : There is another way from B to C iff there is a successor of B
948 : : that is not postdominated by B. Testing this condition is somewhat
949 : : expensive, because we need to iterate over all successors of B.
950 : : We are safe to assume that this does not happen: we will mark B
951 : : as needed when processing the other path from B to C that is
952 : : control dependent on B, and marking control dependencies of B
953 : : itself is harmless because they will be processed anyway after
954 : : processing the control statement in B.
955 : :
956 : : b) C does not postdominate B. Case 1 always happens since there is
957 : : a path from C to exit that does not go through B and thus also C'. */
958 : :
959 : 30763224 : if (aggressive && !degenerate_phi_p (stmt))
960 : : {
961 : 18868191 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
962 : : {
963 : 13188061 : basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
964 : :
965 : 13188061 : if (gimple_bb (stmt)
966 : 13188061 : != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
967 : : {
968 : 1387324 : if (!mark_last_stmt_necessary (arg_bb))
969 : 2256 : mark_control_dependent_edges_necessary (arg_bb, false);
970 : : }
971 : 11800737 : else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
972 : 11800737 : && !bitmap_bit_p (visited_control_parents,
973 : : arg_bb->index))
974 : 5898106 : mark_control_dependent_edges_necessary (arg_bb, true);
975 : : }
976 : : }
977 : : }
978 : : else
979 : : {
980 : : /* Propagate through the operands. Examine all the USE, VUSE and
981 : : VDEF operands in this statement. Mark all the statements
982 : : which feed this statement's uses as necessary. */
983 : 225406511 : ssa_op_iter iter;
984 : 225406511 : tree use;
985 : :
986 : : /* If this is a call to free which is directly fed by an
987 : : allocation function do not mark that necessary through
988 : : processing the argument. */
989 : 225406511 : bool is_delete_operator
990 : 225406511 : = (is_gimple_call (stmt)
991 : 35149199 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
992 : 226355120 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
993 : 224656790 : if (is_delete_operator
994 : 224656790 : || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
995 : 224383304 : || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
996 : : {
997 : 1023207 : tree ptr = gimple_call_arg (stmt, 0);
998 : 1023207 : gcall *def_stmt;
999 : : /* If the pointer we free is defined by an allocation
1000 : : function do not add the call to the worklist. */
1001 : 1023207 : if (TREE_CODE (ptr) == SSA_NAME
1002 : 1022697 : && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
1003 : 1195186 : && is_removable_allocation_p (def_stmt, false))
1004 : : {
1005 : 152599 : if (is_delete_operator
1006 : 152599 : && !valid_new_delete_pair_p (def_stmt, stmt))
1007 : 124 : mark_operand_necessary (gimple_call_arg (stmt, 0));
1008 : :
1009 : : /* Delete operators can have alignment and/or size
1010 : : as further arguments. When these are SSA_NAMEs, they
1011 : : must be marked as necessary. Similarly for GOMP_free. */
1012 : 152599 : if (gimple_call_num_args (stmt) >= 2)
1013 : 63473 : for (unsigned i = 1; i < gimple_call_num_args (stmt);
1014 : : i++)
1015 : : {
1016 : 31745 : tree arg = gimple_call_arg (stmt, i);
1017 : 31745 : if (TREE_CODE (arg) == SSA_NAME)
1018 : 6755 : mark_operand_necessary (arg);
1019 : : }
1020 : :
1021 : 97845303 : continue;
1022 : 152599 : }
1023 : : }
1024 : :
1025 : 225253912 : if (checks_return_value_of_removable_allocation_p (stmt))
1026 : 95516 : continue;
1027 : :
1028 : 421170425 : FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
1029 : 196012029 : mark_operand_necessary (use);
1030 : :
1031 : 225158396 : use = gimple_vuse (stmt);
1032 : 194950717 : if (!use)
1033 : 91825206 : continue;
1034 : :
1035 : : /* No need to search for vdefs if we intrinsicly keep them all. */
1036 : 133333190 : if (keep_all_vdefs_p ())
1037 : 69971 : continue;
1038 : :
1039 : : /* If we dropped to simple mode make all immediately
1040 : : reachable definitions necessary. */
1041 : 133263219 : if (chain_ovfl)
1042 : : {
1043 : 3787229 : mark_all_reaching_defs_necessary (stmt);
1044 : 3787229 : continue;
1045 : : }
1046 : :
1047 : : /* For statements that may load from memory (have a VUSE) we
1048 : : have to mark all reaching (may-)definitions as necessary.
1049 : : We partition this task into two cases:
1050 : : 1) explicit loads based on decls that are not aliased
1051 : : 2) implicit loads (like calls) and explicit loads not
1052 : : based on decls that are not aliased (like indirect
1053 : : references or loads from globals)
1054 : : For 1) we mark all reaching may-defs as necessary, stopping
1055 : : at dominating kills. For 2) we want to mark all dominating
1056 : : references necessary, except non-aliased ones, which we handle
1057 : : in 1). By keeping a global visited bitmap for references
1058 : : we walk for 2) we avoid quadratic behavior for those. */
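: :
: :    /* E.g. a load tmp_1 = s.f from a local 's' whose address is never
: :       taken falls into case 1 and the walk stops at a dominating store
: :       that covers s.f, while a load tmp_2 = *p_3 or a call falls into
: :       case 2.  */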
1059 : :
1060 : 129475990 : if (gcall *call = dyn_cast <gcall *> (stmt))
1061 : : {
1062 : 31911490 : tree callee = gimple_call_fndecl (call);
1063 : 31911490 : unsigned i;
1064 : :
1065 : 32818859 : if (callee != NULL_TREE
1066 : 30565387 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
1067 : 30360919 : || DECL_IS_OPERATOR_DELETE_P (callee))
1068 : 32834848 : && gimple_call_from_new_or_delete (call))
1069 : 907369 : continue;
1070 : :
1071 : 31004121 : if (is_removable_cxa_atexit_call (call))
1072 : 0 : continue;
1073 : :
1074 : : bool all_refs = false;
1075 : : /* Calls implicitly load from memory; in addition their
1076 : : arguments may explicitly perform memory loads. */
1077 : 95046938 : for (i = 0; i < gimple_call_num_args (call); ++i)
1078 : : {
1079 : 64042817 : tree arg = gimple_call_arg (call, i);
1080 : 124148748 : if (TREE_CODE (arg) == SSA_NAME
1081 : 64042817 : || is_gimple_min_invariant (arg))
1082 : 60105931 : continue;
1083 : 3936886 : if (TREE_CODE (arg) == WITH_SIZE_EXPR)
1084 : 660 : arg = TREE_OPERAND (arg, 0);
1085 : 3936886 : if (!ref_may_be_aliased (arg))
1086 : 3573116 : mark_aliased_reaching_defs_necessary (call, arg);
1087 : : else
1088 : : all_refs = true;
1089 : : }
1090 : :
1091 : 31004121 : if (!all_refs && ipa_modref_callee_reads_no_memory_p (call))
1092 : 1007413 : continue;
1093 : 29996708 : mark_all_reaching_defs_necessary (call);
1094 : : }
1095 : 97564500 : else if (gimple_assign_single_p (stmt))
1096 : : {
1097 : 89905060 : tree rhs;
1098 : : /* If this is a load mark things necessary. */
1099 : 89905060 : rhs = gimple_assign_rhs1 (stmt);
1100 : 89905060 : if (TREE_CODE (rhs) != SSA_NAME
1101 : 73009842 : && !is_gimple_min_invariant (rhs)
1102 : 141246979 : && TREE_CODE (rhs) != CONSTRUCTOR)
1103 : : {
1104 : 41591005 : if (!ref_may_be_aliased (rhs))
1105 : 9087965 : mark_aliased_reaching_defs_necessary (stmt, rhs);
1106 : : else
1107 : 32503040 : mark_all_reaching_defs_necessary (stmt);
1108 : : }
1109 : : }
1110 : 7659440 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1111 : : {
1112 : 7515033 : tree rhs = gimple_return_retval (return_stmt);
1113 : : /* A return statement may perform a load. */
1114 : 7515033 : if (rhs
1115 : 4171619 : && TREE_CODE (rhs) != SSA_NAME
1116 : 1350349 : && !is_gimple_min_invariant (rhs)
1117 : 8165885 : && TREE_CODE (rhs) != CONSTRUCTOR)
1118 : : {
1119 : 650852 : if (!ref_may_be_aliased (rhs))
1120 : 634762 : mark_aliased_reaching_defs_necessary (stmt, rhs);
1121 : : else
1122 : 16090 : mark_all_reaching_defs_necessary (stmt);
1123 : : }
1124 : : }
1125 : 144407 : else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
1126 : : {
1127 : 143223 : unsigned i;
1128 : 143223 : mark_all_reaching_defs_necessary (stmt);
1129 : : /* Inputs may perform loads. */
1130 : 254989 : for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1131 : : {
1132 : 111766 : tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1133 : 111766 : if (TREE_CODE (op) != SSA_NAME
1134 : 73424 : && !is_gimple_min_invariant (op)
1135 : 36383 : && TREE_CODE (op) != CONSTRUCTOR
1136 : 148149 : && !ref_may_be_aliased (op))
1137 : 5539 : mark_aliased_reaching_defs_necessary (stmt, op);
1138 : : }
1139 : : }
1140 : 1184 : else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
1141 : : {
1142 : : /* The beginning of a transaction is a memory barrier. */
1143 : : /* ??? If we were really cool, we'd only be a barrier
1144 : : for the memories touched within the transaction. */
1145 : 1184 : mark_all_reaching_defs_necessary (stmt);
1146 : : }
1147 : : else
1148 : 0 : gcc_unreachable ();
1149 : :
1150 : : /* If we over-used our alias oracle budget drop to simple
1151 : : mode. The cost metric allows quadratic behavior
1152 : : (number of uses times number of may-defs queries) up to
1153 : : a constant maximal number of queries and after that falls back to
1154 : : super-linear complexity. */
1155 : 127561208 : if (/* Constant but quadratic for small functions. */
1156 : 127561208 : total_chain > 128 * 128
1157 : : /* Linear in the number of may-defs. */
1158 : 1086249 : && total_chain > 32 * longest_chain
1159 : : /* Linear in the number of uses. */
1160 : 4745 : && total_chain > nr_walks * 32)
1161 : : {
1162 : 4584 : chain_ovfl = true;
1163 : 4584 : if (visited)
1164 : 4584 : bitmap_clear (visited);
1165 : : }
1166 : : }
1167 : : }
1168 : 7679546 : }
1169 : :
1170 : : /* Remove dead PHI nodes from block BB. */
1171 : :
1172 : : static bool
1173 : 77130033 : remove_dead_phis (basic_block bb)
1174 : : {
1175 : 77130033 : bool something_changed = false;
1176 : 77130033 : gphi *phi;
1177 : 77130033 : gphi_iterator gsi;
1178 : :
1179 : 111238376 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
1180 : : {
1181 : 34108343 : stats.total_phis++;
1182 : 34108343 : phi = gsi.phi ();
1183 : :
1184 : : /* We do not track necessity of virtual PHI nodes. Instead do
1185 : : very simple dead PHI removal here. */
1186 : 68216686 : if (virtual_operand_p (gimple_phi_result (phi)))
1187 : : {
1188 : : /* Virtual PHI nodes with one or identical arguments
1189 : : can be removed. */
1190 : 15588053 : if (!loops_state_satisfies_p (LOOP_CLOSED_SSA)
1191 : 15588053 : && degenerate_phi_p (phi))
1192 : : {
1193 : 1821656 : tree vdef = gimple_phi_result (phi);
1194 : 1821656 : tree vuse = gimple_phi_arg_def (phi, 0);
1195 : :
1196 : 1821656 : use_operand_p use_p;
1197 : 1821656 : imm_use_iterator iter;
1198 : 1821656 : gimple *use_stmt;
1199 : 4709005 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1200 : 8760191 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1201 : 4758077 : SET_USE (use_p, vuse);
1202 : 1821656 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
1203 : 1821656 : && TREE_CODE (vuse) == SSA_NAME)
1204 : 466 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1205 : : }
1206 : : else
1207 : 13766397 : gimple_set_plf (phi, STMT_NECESSARY, true);
1208 : : }
1209 : :
1210 : 34108343 : if (!gimple_plf (phi, STMT_NECESSARY))
1211 : : {
1212 : 2120269 : something_changed = true;
1213 : 2120269 : if (dump_file && (dump_flags & TDF_DETAILS))
1214 : : {
1215 : 15 : fprintf (dump_file, "Deleting : ");
1216 : 15 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1217 : 15 : fprintf (dump_file, "\n");
1218 : : }
1219 : :
1220 : 2120269 : remove_phi_node (&gsi, true);
1221 : 2120269 : stats.removed_phis++;
1222 : 2120269 : continue;
1223 : : }
1224 : :
1225 : 31988074 : gsi_next (&gsi);
1226 : : }
1227 : 77130033 : return something_changed;
1228 : : }
1229 : :
1230 : :
1231 : : /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1232 : : containing I so that we don't have to look it up. */
1233 : :
1234 : : static void
1235 : 6075957 : remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
1236 : : vec<edge> &to_remove_edges)
1237 : : {
1238 : 6075957 : gimple *stmt = gsi_stmt (*i);
1239 : :
1240 : 6075957 : if (dump_file && (dump_flags & TDF_DETAILS))
1241 : : {
1242 : 108 : fprintf (dump_file, "Deleting : ");
1243 : 108 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1244 : 108 : fprintf (dump_file, "\n");
1245 : : }
1246 : :
1247 : 6075957 : stats.removed++;
1248 : :
1249 : : /* If we have determined that a conditional branch statement contributes
1250 : : nothing to the program, then we not only remove it, but we need to update
1251 : : the CFG. We can chose any of edges out of BB as long as we are sure to not
1252 : : close infinite loops. This is done by always choosing the edge closer to
1253 : : exit in inverted_rev_post_order_compute order. */
1254 : 6075957 : if (is_ctrl_stmt (stmt))
1255 : : {
1256 : 33159 : edge_iterator ei;
1257 : 33159 : edge e = NULL, e2;
1258 : :
1259 : : /* See if there is only one non-abnormal edge. */
1260 : 33159 : if (single_succ_p (bb))
1261 : 3 : e = single_succ_edge (bb);
1262 : : /* Otherwise choose one that is closer to a bb with a live statement in it.
1263 : : To be able to choose one, we compute an inverted post order starting from
1264 : : all BBs with live statements. */
1265 : 3 : if (!e)
1266 : : {
1267 : 33156 : if (!bb_postorder)
1268 : : {
1269 : 18434 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
1270 : 18434 : int n = inverted_rev_post_order_compute (cfun, rpo,
1271 : : &bb_contains_live_stmts);
1272 : 18434 : bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1273 : 688671 : for (int i = 0; i < n; ++i)
1274 : 670237 : bb_postorder[rpo[i]] = i;
1275 : 18434 : free (rpo);
1276 : : }
1277 : 99474 : FOR_EACH_EDGE (e2, ei, bb->succs)
1278 : 66318 : if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1279 : 33162 : || bb_postorder [e->dest->index]
1280 : 33162 : >= bb_postorder [e2->dest->index])
1281 : 51982 : e = e2;
1282 : : }
1283 : 33156 : gcc_assert (e);
1284 : 33159 : e->probability = profile_probability::always ();
1285 : :
1286 : : /* The edge is no longer associated with a conditional, so it does
1287 : : not have TRUE/FALSE flags.
1288 : : We are also safe to drop EH/ABNORMAL flags and turn them into
1289 : : normal control flow, because we know that all the destinations (including
1290 : : those odd edges) are equivalent for program execution. */
1291 : 33159 : e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1292 : :
1293 : : /* The lone outgoing edge from BB will be a fallthru edge. */
1294 : 33159 : e->flags |= EDGE_FALLTHRU;
1295 : :
1296 : : /* Remove the remaining outgoing edges. */
1297 : 99480 : FOR_EACH_EDGE (e2, ei, bb->succs)
1298 : 66321 : if (e != e2)
1299 : : {
1300 : : /* If we made a BB unconditionally exit a loop or removed
1301 : : an entry into an irreducible region, then this transform
1302 : : alters the set of BBs in the loop. Schedule a fixup. */
1303 : 33162 : if (loop_exit_edge_p (bb->loop_father, e)
1304 : 33162 : || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1305 : 18527 : loops_state_set (LOOPS_NEED_FIXUP);
1306 : 33162 : to_remove_edges.safe_push (e2);
1307 : : }
1308 : : }
1309 : :
1310 : : /* If this is a store into a variable that is being optimized away,
1311 : : add a debug bind stmt if possible. */
1312 : 6075957 : if (MAY_HAVE_DEBUG_BIND_STMTS
1313 : 5511535 : && gimple_assign_single_p (stmt)
1314 : 6284523 : && is_gimple_val (gimple_assign_rhs1 (stmt)))
1315 : : {
1316 : 100673 : tree lhs = gimple_assign_lhs (stmt);
1317 : 88621 : if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1318 : 12117 : && !DECL_IGNORED_P (lhs)
1319 : 3196 : && is_gimple_reg_type (TREE_TYPE (lhs))
1320 : 57 : && !is_global_var (lhs)
1321 : 100730 : && !DECL_HAS_VALUE_EXPR_P (lhs))
1322 : : {
1323 : 57 : tree rhs = gimple_assign_rhs1 (stmt);
1324 : 57 : gdebug *note
1325 : 57 : = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1326 : 57 : gsi_insert_after (i, note, GSI_SAME_STMT);
1327 : : }
1328 : : }
1329 : :
1330 : 6075957 : unlink_stmt_vdef (stmt);
1331 : 6075957 : gsi_remove (i, true);
1332 : 6075957 : release_defs (stmt);
1333 : 6075957 : }
1334 : :
1335 : : /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1336 : : uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1337 : :
1338 : : static tree
1339 : 40 : find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1340 : : {
1341 : 40 : if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1342 : 0 : *walk_subtrees = 0;
1343 : 40 : if (*tp == (tree) data)
1344 : 20 : return *tp;
1345 : : return NULL_TREE;
1346 : : }
1347 : :
1348 : : /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1349 : : but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1350 : : into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1351 : : uses. */
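: :
: :    /* For instance (schematically, with int operands)
: :
: :         _5 = .ADD_OVERFLOW (a_1, b_2);
: :         x_3 = REALPART_EXPR <_5>;     <- IMAGPART_EXPR <_5> unused
: :
: :       is turned into the equivalent of
: :
: :         x_3 = (int) ((unsigned int) a_1 + (unsigned int) b_2);  */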
1352 : :
1353 : : static void
1354 : 282887 : maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1355 : : enum tree_code subcode)
1356 : : {
1357 : 282887 : gimple *stmt = gsi_stmt (*gsi);
1358 : 282887 : tree lhs = gimple_call_lhs (stmt);
1359 : :
1360 : 282887 : if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1361 : 282743 : return;
1362 : :
1363 : 282887 : imm_use_iterator imm_iter;
1364 : 282887 : use_operand_p use_p;
1365 : 282887 : bool has_debug_uses = false;
1366 : 282887 : bool has_realpart_uses = false;
1367 : 282887 : bool has_other_uses = false;
1368 : 291156 : FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1369 : : {
1370 : 291012 : gimple *use_stmt = USE_STMT (use_p);
1371 : 291012 : if (is_gimple_debug (use_stmt))
1372 : : has_debug_uses = true;
1373 : 287115 : else if (is_gimple_assign (use_stmt)
1374 : 287009 : && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1375 : 291487 : && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1376 : : has_realpart_uses = true;
1377 : : else
1378 : : {
1379 : : has_other_uses = true;
1380 : : break;
1381 : : }
1382 : : }
1383 : :
1384 : 282887 : if (!has_realpart_uses || has_other_uses)
1385 : : return;
1386 : :
1387 : 144 : tree arg0 = gimple_call_arg (stmt, 0);
1388 : 144 : tree arg1 = gimple_call_arg (stmt, 1);
1389 : 144 : location_t loc = gimple_location (stmt);
1390 : 144 : tree type = TREE_TYPE (TREE_TYPE (lhs));
1391 : 144 : tree utype = unsigned_type_for (type);
1392 : 144 : tree result = fold_build2_loc (loc, subcode, utype,
1393 : : fold_convert_loc (loc, utype, arg0),
1394 : : fold_convert_loc (loc, utype, arg1));
1395 : 144 : result = fold_convert_loc (loc, type, result);
1396 : :
1397 : 144 : if (has_debug_uses)
1398 : : {
1399 : 20 : gimple *use_stmt;
1400 : 60 : FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1401 : : {
1402 : 40 : if (!gimple_debug_bind_p (use_stmt))
1403 : 20 : continue;
1404 : 20 : tree v = gimple_debug_bind_get_value (use_stmt);
1405 : 20 : if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1406 : : {
1407 : 20 : gimple_debug_bind_reset_value (use_stmt);
1408 : 20 : update_stmt (use_stmt);
1409 : : }
1410 : 20 : }
1411 : : }
1412 : :
1413 : 144 : if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1414 : 0 : result = drop_tree_overflow (result);
1415 : 144 : tree overflow = build_zero_cst (type);
1416 : 144 : tree ctype = build_complex_type (type);
1417 : 144 : if (TREE_CODE (result) == INTEGER_CST)
1418 : 0 : result = build_complex (ctype, result, overflow);
1419 : : else
1420 : 144 : result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1421 : : ctype, result, overflow);
1422 : :
1423 : 144 : if (dump_file && (dump_flags & TDF_DETAILS))
1424 : : {
1425 : 0 : fprintf (dump_file, "Transforming call: ");
1426 : 0 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1427 : 0 : fprintf (dump_file, "because the overflow result is never used into: ");
1428 : 0 : print_generic_stmt (dump_file, result, TDF_SLIM);
1429 : 0 : fprintf (dump_file, "\n");
1430 : : }
1431 : :
1432 : 144 : gimplify_and_update_call_from_tree (gsi, result);
1433 : : }
1434 : :
1435 : : /* Returns whether the control parents of BB are preserved. */
1436 : :
1437 : : static bool
1438 : 612653 : control_parents_preserved_p (basic_block bb)
1439 : : {
1440 : : /* If we marked the control parents from BB they are preserved. */
1441 : 612653 : if (bitmap_bit_p (visited_control_parents, bb->index))
1442 : : return true;
1443 : :
1444 : : /* But they can also end up being marked from elsewhere. */
1445 : 5733 : bitmap_iterator bi;
1446 : 5733 : unsigned edge_number;
1447 : 9057 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
1448 : : 0, edge_number, bi)
1449 : : {
1450 : 5820 : basic_block cd_bb = cd->get_edge_src (edge_number);
1451 : 5820 : if (cd_bb != bb
1452 : 5820 : && !bitmap_bit_p (last_stmt_necessary, cd_bb->index))
1453 : : return false;
1454 : : }
1455 : : /* And cache the result. */
1456 : 3237 : bitmap_set_bit (visited_control_parents, bb->index);
1457 : 3237 : return true;
1458 : : }
1459 : :
1460 : : /* Eliminate unnecessary statements. Any instruction not marked as necessary
1461 : : contributes nothing to the program, and can be deleted. */
1462 : :
1463 : : static bool
1464 : 7679546 : eliminate_unnecessary_stmts (bool aggressive)
1465 : : {
1466 : 7679546 : bool something_changed = false;
1467 : 7679546 : basic_block bb;
1468 : 7679546 : gimple_stmt_iterator gsi, psi;
1469 : 7679546 : gimple *stmt;
1470 : 7679546 : auto_vec<edge> to_remove_edges;
1471 : :
1472 : 7679546 : if (dump_file && (dump_flags & TDF_DETAILS))
1473 : 207 : fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1474 : :
1475 : 7679546 : bool had_setjmp = cfun->calls_setjmp;
1476 : 7679546 : clear_special_calls ();
1477 : :
1478 : : /* Walking basic blocks and statements in reverse order avoids
1479 : : releasing SSA names before any other DEFs that refer to them are
1480 : : released. This helps avoid loss of debug information, as we get
1481 : : a chance to propagate all RHSs of removed SSAs into debug uses,
1482 : : rather than only the latest ones. E.g., consider:
1483 : :
1484 : : x_3 = y_1 + z_2;
1485 : : a_5 = x_3 - b_4;
1486 : : # DEBUG a => a_5
1487 : :
1488 : : If we were to release x_3 before a_5, when we reached a_5 and
1489 : : tried to substitute it into the debug stmt, we'd see x_3 there,
1490 : : but x_3's DEF, type, etc would have already been disconnected.
1491 : : By going backwards, the debug stmt first changes to:
1492 : :
1493 : : # DEBUG a => x_3 - b_4
1494 : :
1495 : : and then to:
1496 : :
1497 : : # DEBUG a => y_1 + z_2 - b_4
1498 : :
1499 : : as desired. */
1500 : 7679546 : gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1501 : 7679546 : auto_vec<basic_block> h;
1502 : 7679546 : h = get_all_dominated_blocks (CDI_DOMINATORS,
1503 : 15359092 : single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1504 : :
1505 : 84809579 : while (h.length ())
1506 : : {
1507 : 77130033 : bb = h.pop ();
1508 : :
1509 : : /* Remove dead statements. */
1510 : 77130033 : auto_bitmap debug_seen;
1511 : 677916914 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1512 : : {
1513 : 523656848 : stmt = gsi_stmt (gsi);
1514 : :
1515 : 523656848 : psi = gsi;
1516 : 523656848 : gsi_prev (&psi);
1517 : :
1518 : 523656848 : stats.total++;
1519 : :
1520 : : /* We can mark a call to free as not necessary if the
1521 : : defining statement of its argument is not necessary
1522 : : (and thus is getting removed). */
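: : /* A hedged illustration (not from a testcase): given
: :      p_1 = __builtin_malloc (16);   <-- not marked STMT_NECESSARY
: :      __builtin_free (p_1);
: :    clearing STMT_NECESSARY on the free lets the pair be removed
: :    together.  */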
1523 : 523656848 : if (gimple_plf (stmt, STMT_NECESSARY)
1524 : 523656848 : && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1525 : 520815223 : || (is_gimple_call (stmt)
1526 : 34875713 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
1527 : 948609 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
1528 : : {
1529 : 1023207 : tree ptr = gimple_call_arg (stmt, 0);
1530 : 1023207 : if (TREE_CODE (ptr) == SSA_NAME)
1531 : : {
1532 : 1022697 : gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1533 : 1022697 : if (!gimple_nop_p (def_stmt)
1534 : 1022697 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1535 : 5445 : gimple_set_plf (stmt, STMT_NECESSARY, false);
1536 : : }
1537 : : }
1538 : : /* A conditional checking that the return value of an allocation is
1539 : : non-NULL can be turned into a constant if the allocation itself
1540 : : is unnecessary. */
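: : /* For instance (an illustrative sketch, not from the sources):
: :      p_1 = operator new (8);   <-- allocation not marked necessary
: :      if (p_1 != 0B)
: :    The SSA LHS of the condition is replaced by a nonzero constant of
: :    the RHS type, so the check folds to a known result and the
: :    allocation can be removed as well.  */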
1541 : 523656848 : if (gimple_plf (stmt, STMT_NECESSARY)
1542 : 521083264 : && gimple_code (stmt) == GIMPLE_COND
1543 : 552314337 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
1544 : : {
1545 : 28178453 : gimple *def_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
1546 : 28178453 : if (!gimple_nop_p (def_stmt)
1547 : 28178453 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1548 : : {
1549 : 2871 : gcc_checking_assert
1550 : : (checks_return_value_of_removable_allocation_p (stmt));
1551 : 2871 : gimple_cond_set_lhs (as_a <gcond *>(stmt),
1552 : : build_one_cst
1553 : 2871 : (TREE_TYPE (gimple_cond_rhs (stmt))));
1554 : 2871 : update_stmt (stmt);
1555 : : }
1556 : : }
1557 : :
1558 : : /* If GSI is not necessary then remove it. */
1559 : 523656848 : if (!gimple_plf (stmt, STMT_NECESSARY))
1560 : : {
1561 : : /* Keep those clobbers that we can keep live. */
1562 : 2573584 : if (gimple_clobber_p (stmt))
1563 : : {
1564 : 1256508 : ssa_op_iter iter;
1565 : 1256508 : use_operand_p use_p;
1566 : 1256508 : bool dead = false;
1567 : 2510972 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1568 : : {
1569 : 1256508 : tree name = USE_FROM_PTR (use_p);
1570 : 1256508 : if (!SSA_NAME_IS_DEFAULT_DEF (name)
1571 : 1256508 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1572 : : {
1573 : : dead = true;
1574 : : break;
1575 : : }
1576 : : }
1577 : 2508476 : if (!dead
1578 : : /* When doing CD-DCE we have to ensure all controls
1579 : : of the stmt are still live. */
1580 : 1256508 : && (!aggressive || control_parents_preserved_p (bb)))
1581 : : {
1582 : 1251968 : bitmap_clear (debug_seen);
1583 : 1251968 : continue;
1584 : : }
1585 : : }
1586 : 1321616 : if (!is_gimple_debug (stmt))
1587 : 1074314 : something_changed = true;
1588 : 1321616 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1589 : 1321616 : continue;
1590 : 1321616 : }
1591 : 521083264 : else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1592 : : {
1593 : 35143754 : tree name = gimple_call_lhs (call_stmt);
1594 : :
1595 : 35143754 : notice_special_calls (call_stmt);
1596 : :
1597 : : /* When LHS of var = call (); is dead, simplify it into
1598 : : call (); saving one operand. */
1599 : 35143754 : if (name
1600 : 13771092 : && TREE_CODE (name) == SSA_NAME
1601 : 11535133 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1602 : : /* Avoid doing so for allocation calls which we
1603 : : did not mark as necessary; it would confuse the
1604 : : special logic we apply to malloc/free pair removal. */
1605 : 35230710 : && !is_removable_allocation_p (call_stmt, false))
1606 : : {
1607 : 85711 : something_changed = true;
1608 : 85711 : if (dump_file && (dump_flags & TDF_DETAILS))
1609 : : {
1610 : 0 : fprintf (dump_file, "Deleting LHS of call: ");
1611 : 0 : print_gimple_stmt (dump_file, call_stmt, 0, TDF_SLIM);
1612 : 0 : fprintf (dump_file, "\n");
1613 : : }
1614 : :
1615 : 85711 : gimple_call_set_lhs (call_stmt, NULL_TREE);
1616 : 85711 : maybe_clean_or_replace_eh_stmt (call_stmt, call_stmt);
1617 : 85711 : update_stmt (call_stmt);
1618 : 85711 : release_ssa_name (name);
1619 : :
1620 : : /* GOMP_SIMD_LANE (unless it has a third argument) or ASAN_POISON
1621 : : without lhs is not needed. */
1622 : 85711 : if (gimple_call_internal_p (call_stmt))
1623 : 8174 : switch (gimple_call_internal_fn (call_stmt))
1624 : : {
1625 : 760 : case IFN_GOMP_SIMD_LANE:
1626 : 760 : if (gimple_call_num_args (call_stmt) >= 3
1627 : 798 : && !integer_nonzerop
1628 : 38 : (gimple_call_arg (call_stmt, 2)))
1629 : : break;
1630 : : /* FALLTHRU */
1631 : 861 : case IFN_ASAN_POISON:
1632 : 861 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1633 : 861 : break;
1634 : : default:
1635 : : break;
1636 : : }
1637 : : }
1638 : 35058043 : else if (gimple_call_internal_p (call_stmt))
1639 : 810888 : switch (gimple_call_internal_fn (call_stmt))
1640 : : {
1641 : 86564 : case IFN_ADD_OVERFLOW:
1642 : 86564 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1643 : 86564 : break;
1644 : 95182 : case IFN_SUB_OVERFLOW:
1645 : 95182 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1646 : 95182 : break;
1647 : 96767 : case IFN_MUL_OVERFLOW:
1648 : 96767 : maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1649 : 96767 : break;
1650 : 24160 : case IFN_UADDC:
1651 : 24160 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1652 : 2396 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1653 : : break;
1654 : 13765 : case IFN_USUBC:
1655 : 13765 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1656 : 1978 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1657 : : break;
1658 : : default:
1659 : : break;
1660 : : }
1661 : : }
1662 : 485939510 : else if (gimple_debug_bind_p (stmt))
1663 : : {
1664 : : /* We are only keeping the last debug-bind of a
1665 : : non-DEBUG_EXPR_DECL variable in a series of
1666 : : debug-bind stmts. */
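: : /* E.g. for consecutive binds of the same user variable (illustrative)
: :      # DEBUG x => a_1
: :      # DEBUG x => b_2
: :    only the last bind is kept.  */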
1667 : 217864372 : tree var = gimple_debug_bind_get_var (stmt);
1668 : 217864372 : if (TREE_CODE (var) != DEBUG_EXPR_DECL
1669 : 217864372 : && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1670 : 4753480 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1671 : 217864372 : continue;
1672 : 217864372 : }
1673 : 303218892 : bitmap_clear (debug_seen);
1674 : : }
1675 : :
1676 : : /* Remove dead PHI nodes. */
1677 : 77130033 : something_changed |= remove_dead_phis (bb);
1678 : 77130033 : }
1679 : :
1680 : : /* First remove queued edges. */
1681 : 7679546 : if (!to_remove_edges.is_empty ())
1682 : : {
1683 : : /* Remove edges. We've delayed this to not get bogus debug stmts
1684 : : during PHI node removal. */
1685 : 51596 : for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1686 : 33162 : remove_edge (to_remove_edges[i]);
1687 : 18434 : cfg_altered = true;
1688 : : }
1689 : : /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1690 : : ??? We'd like to assert that setjmp calls do not pop out of nothing
1691 : : but we currently lack a per-stmt way of noting whether a call was
1692 : : recognized as returns-twice (or rather receives-control). */
1693 : 7679546 : if (!cfun->calls_setjmp && had_setjmp)
1694 : : {
1695 : : /* Make sure we only remove the edges, not dominated blocks. Using
1696 : : gimple_purge_dead_abnormal_call_edges would do that and we
1697 : : cannot free dominators yet. */
1698 : 3686 : FOR_EACH_BB_FN (bb, cfun)
1699 : 9470 : if (gcall *stmt = safe_dyn_cast <gcall *> (*gsi_last_bb (bb)))
1700 : 2082 : if (!stmt_can_make_abnormal_goto (stmt))
1701 : : {
1702 : 737 : edge_iterator ei;
1703 : 737 : edge e;
1704 : 996 : for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1705 : : {
1706 : 259 : if (e->flags & EDGE_ABNORMAL)
1707 : : {
1708 : 112 : if (e->flags & EDGE_FALLTHRU)
1709 : 0 : e->flags &= ~EDGE_ABNORMAL;
1710 : : else
1711 : 112 : remove_edge (e);
1712 : 112 : cfg_altered = true;
1713 : : }
1714 : : else
1715 : 147 : ei_next (&ei);
1716 : : }
1717 : : }
1718 : : }
1719 : :
1720 : : /* Now remove the unreachable blocks. */
1721 : 7679546 : if (cfg_altered)
1722 : : {
1723 : 18475 : basic_block prev_bb;
1724 : :
1725 : 18475 : find_unreachable_blocks ();
1726 : :
1727 : : /* Delete all unreachable basic blocks in reverse dominator order. */
1728 : 18475 : for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1729 : 650890 : bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1730 : : {
1731 : 632415 : prev_bb = bb->prev_bb;
1732 : :
1733 : 632415 : if ((bb_contains_live_stmts
1734 : 632369 : && !bitmap_bit_p (bb_contains_live_stmts, bb->index))
1735 : 1099942 : || !(bb->flags & BB_REACHABLE))
1736 : : {
1737 : : /* Since we don't track liveness of virtual PHI nodes, it is
1738 : : possible that we rendered some PHI nodes unreachable while
1739 : : they are still in use. Mark them for renaming. */
1740 : 184790 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1741 : 19901 : gsi_next (&gsi))
1742 : 59701 : if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1743 : : {
1744 : 19899 : bool found = false;
1745 : 19899 : imm_use_iterator iter;
1746 : :
1747 : 20083 : FOR_EACH_IMM_USE_STMT (stmt, iter,
1748 : : gimple_phi_result (gsi.phi ()))
1749 : : {
1750 : 19475 : if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1751 : 161 : continue;
1752 : 19314 : if (gimple_code (stmt) == GIMPLE_PHI
1753 : 19314 : || gimple_plf (stmt, STMT_NECESSARY))
1754 : : {
1755 : : found = true;
1756 : : break;
1757 : : }
1758 : 19899 : }
1759 : 19899 : if (found)
1760 : 19291 : mark_virtual_phi_result_for_renaming (gsi.phi ());
1761 : : }
1762 : :
1763 : 164889 : if (!(bb->flags & BB_REACHABLE))
1764 : : {
1765 : : /* Speed up the removal of blocks that don't
1766 : : dominate others. Walking backwards, this should
1767 : : be the common case. ??? Do we need to recompute
1768 : : dominators because of cfg_altered? */
1769 : 36143 : if (!first_dom_son (CDI_DOMINATORS, bb))
1770 : 35654 : delete_basic_block (bb);
1771 : : else
1772 : : {
1773 : 489 : h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1774 : :
1775 : 2137 : while (h.length ())
1776 : : {
1777 : 1648 : bb = h.pop ();
1778 : 1648 : prev_bb = bb->prev_bb;
1779 : : /* Rearrangements to the CFG may have failed
1780 : : to update the dominators tree, so that
1781 : : formerly-dominated blocks are now
1782 : : otherwise reachable. */
1783 : 1648 : if (!!(bb->flags & BB_REACHABLE))
1784 : 0 : continue;
1785 : 1648 : delete_basic_block (bb);
1786 : : }
1787 : :
1788 : 489 : h.release ();
1789 : : }
1790 : : }
1791 : : }
1792 : : }
1793 : : }
1794 : :
1795 : 7679546 : if (bb_postorder)
1796 : 18434 : free (bb_postorder);
1797 : 7679546 : bb_postorder = NULL;
1798 : :
1799 : 7679546 : return something_changed;
1800 : 7679546 : }
1801 : :
1802 : :
1803 : : /* Print out removed statement statistics. */
1804 : :
1805 : : static void
1806 : 213 : print_stats (void)
1807 : : {
1808 : 213 : float percg;
1809 : :
1810 : 213 : percg = ((float) stats.removed / (float) stats.total) * 100;
1811 : 213 : fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1812 : : stats.removed, stats.total, (int) percg);
1813 : :
1814 : 213 : if (stats.total_phis == 0)
1815 : : percg = 0;
1816 : : else
1817 : 38 : percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1818 : :
1819 : 213 : fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1820 : : stats.removed_phis, stats.total_phis, (int) percg);
1821 : 213 : }
1822 : :
1823 : : /* Initialization for this pass. Set up the used data structures. */
1824 : :
1825 : : static void
1826 : 7679546 : tree_dce_init (bool aggressive)
1827 : : {
1828 : 7679546 : memset ((void *) &stats, 0, sizeof (stats));
1829 : :
1830 : 7679546 : if (aggressive)
1831 : : {
1832 : 3289936 : last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1833 : 3289936 : bitmap_clear (last_stmt_necessary);
1834 : 3289936 : bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1835 : 3289936 : bitmap_clear (bb_contains_live_stmts);
1836 : : }
1837 : :
1838 : 15359092 : processed = sbitmap_alloc (num_ssa_names + 1);
1839 : 7679546 : bitmap_clear (processed);
1840 : :
1841 : 7679546 : worklist.create (64);
1842 : 7679546 : cfg_altered = false;
1843 : 7679546 : }
1844 : :
1845 : : /* Cleanup after this pass. */
1846 : :
1847 : : static void
1848 : 7679546 : tree_dce_done (bool aggressive)
1849 : : {
1850 : 7679546 : if (aggressive)
1851 : : {
1852 : 3289936 : delete cd;
1853 : 3289936 : sbitmap_free (visited_control_parents);
1854 : 3289936 : sbitmap_free (last_stmt_necessary);
1855 : 3289936 : sbitmap_free (bb_contains_live_stmts);
1856 : 3289936 : bb_contains_live_stmts = NULL;
1857 : : }
1858 : :
1859 : 7679546 : sbitmap_free (processed);
1860 : :
1861 : 7679546 : worklist.release ();
1862 : 7679546 : }
1863 : :
1864 : : /* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1865 : :
1866 : : static int
1867 : 27029945 : sort_phi_args (const void *a_, const void *b_)
1868 : : {
1869 : 27029945 : auto *a = (const std::pair<edge, hashval_t> *) a_;
1870 : 27029945 : auto *b = (const std::pair<edge, hashval_t> *) b_;
1871 : 27029945 : hashval_t ha = a->second;
1872 : 27029945 : hashval_t hb = b->second;
1873 : 27029945 : if (ha < hb)
1874 : : return -1;
1875 : 16823057 : else if (ha > hb)
1876 : : return 1;
1877 : 8190671 : else if (a->first->dest_idx < b->first->dest_idx)
1878 : : return -1;
1879 : 4295412 : else if (a->first->dest_idx > b->first->dest_idx)
1880 : : return 1;
1881 : : else
1882 : 0 : return 0;
1883 : : }
1884 : :
1885 : : /* Look for non-virtual PHIs and make a forwarder block when all PHIs
1886 : : have the same argument on a set of edges. This is done so that control
1887 : : dependences are not considered for the individual edges carrying the
1888 : : same value but only for the common set. */
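: : /* A hedged illustration: given
: :      # x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>
: :    a forwarder block F is made for E1 and E2, turning the PHI into
: :      # x_1 = PHI <a_2(F), b_3(E3)>
: :    so only the control dependences of F's single outgoing edge matter
: :    for the value a_2.  */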
1889 : :
1890 : : static unsigned
1891 : 3289936 : make_forwarders_with_degenerate_phis (function *fn)
1892 : : {
1893 : 3289936 : unsigned todo = 0;
1894 : :
1895 : 3289936 : basic_block bb;
1896 : 31303372 : FOR_EACH_BB_FN (bb, fn)
1897 : : {
1898 : : /* Only PHIs with three or more arguments have opportunities. */
1899 : 28013436 : if (EDGE_COUNT (bb->preds) < 3)
1900 : 27732038 : continue;
1901 : : /* Do not touch loop headers or blocks with abnormal predecessors.
1902 : : ??? This is to avoid creating valid loops here, see PR103458.
1903 : : We might want to improve things to either explicitly add those
1904 : : loops or at least consider blocks with no backedges. */
1905 : 1157659 : if (bb->loop_father->header == bb
1906 : 1155833 : || bb_has_abnormal_pred (bb))
1907 : 1826 : continue;
1908 : :
1909 : : /* Take one PHI node as template to look for identical
1910 : : arguments. Build a vector of candidates forming sets
1911 : : of argument edges with equal values. Note that optimality
1912 : : depends on the particular choice of the template PHI
1913 : : since equal arguments are unordered, leaving other PHIs
1914 : : with more than one set of equal arguments within this
1915 : : argument range unsorted. We'd have to break ties by
1916 : : looking at other PHI nodes. */
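: : /* Hypothetical example of the limitation: with the template
: :      x_1 = PHI <a(E1), a(E2), a(E3), a(E4)>
: :      y_2 = PHI <c(E1), d(E2), c(E3), d(E4)>
: :    the edge sets {E1,E3} and {E2,E4}, on which both PHIs agree, are
: :    missed; choosing y_2 as the template would have exposed them.  */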
1917 : 1154007 : gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
1918 : 1154007 : if (gsi_end_p (gsi))
1919 : 699748 : continue;
1920 : 454259 : gphi *phi = gsi.phi ();
1921 : 454259 : auto_vec<std::pair<edge, hashval_t>, 8> args;
1922 : 454259 : bool need_resort = false;
1923 : 2672407 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
1924 : : {
1925 : 2218148 : edge e = gimple_phi_arg_edge (phi, i);
1926 : : /* Skip abnormal edges since we cannot redirect them. */
1927 : 2218148 : if (e->flags & EDGE_ABNORMAL)
1928 : 2218148 : continue;
1929 : : /* Skip loop exit edges when we are in loop-closed SSA form
1930 : : since the forwarder we'd create does not have a PHI node. */
1931 : 2218148 : if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
1932 : 2218148 : && loop_exit_edge_p (e->src->loop_father, e))
1933 : 16144 : continue;
1934 : :
1935 : 2202004 : tree arg = gimple_phi_arg_def (phi, i);
1936 : 2202004 : if (!CONSTANT_CLASS_P (arg) && TREE_CODE (arg) != SSA_NAME)
1937 : 2202004 : need_resort = true;
1938 : 2202004 : args.safe_push (std::make_pair (e, iterative_hash_expr (arg, 0)));
1939 : : }
1940 : 454259 : if (args.length () < 2)
1941 : 3669 : continue;
1942 : 450590 : args.qsort (sort_phi_args);
1943 : : /* The above sorting can be different between -g and -g0, as e.g. decls
1944 : : can have different uids (-g could have bigger gaps in between them).
1945 : : So, only use that to determine which args are equal, then change
1946 : : second from hash value to smallest dest_idx of the edges which have
1947 : : equal argument and sort again. If all the phi arguments are
1948 : : constants or SSA_NAME, there is no need for the second sort, the hash
1949 : : values are stable in that case. */
1950 : 450590 : hashval_t hash = args[0].second;
1951 : 450590 : args[0].second = args[0].first->dest_idx;
1952 : 450590 : bool any_equal = false;
1953 : 2199695 : for (unsigned i = 1; i < args.length (); ++i)
1954 : 1749105 : if (hash == args[i].second
1955 : 2441480 : && operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[i - 1].first),
1956 : 692375 : PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
1957 : : {
1958 : 691928 : args[i].second = args[i - 1].second;
1959 : 691928 : any_equal = true;
1960 : : }
1961 : : else
1962 : : {
1963 : 1057177 : hash = args[i].second;
1964 : 1057177 : args[i].second = args[i].first->dest_idx;
1965 : : }
1966 : 450590 : if (!any_equal)
1967 : 169192 : continue;
1968 : 281398 : if (need_resort)
1969 : 12353 : args.qsort (sort_phi_args);
1970 : :
1971 : : /* From the candidates vector now verify true candidates for
1972 : : forwarders and create them. */
1973 : 281398 : gphi *vphi = get_virtual_phi (bb);
1974 : 281398 : unsigned start = 0;
1975 : 2255922 : while (start < args.length () - 1)
1976 : : {
1977 : 710941 : unsigned i;
1978 : 2150537 : for (i = start + 1; i < args.length (); ++i)
1979 : 1994782 : if (args[start].second != args[i].second)
1980 : : break;
1981 : : /* args[start]..args[i-1] are equal. */
1982 : 710941 : if (start != i - 1)
1983 : : {
1984 : : /* Check all PHI nodes for argument equality. */
1985 : 416787 : bool equal = true;
1986 : 416787 : gphi_iterator gsi2 = gsi;
1987 : 416787 : gsi_next (&gsi2);
1988 : 885763 : for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
1989 : : {
1990 : 623706 : gphi *phi2 = gsi2.phi ();
1991 : 1247412 : if (virtual_operand_p (gimple_phi_result (phi2)))
1992 : 153353 : continue;
1993 : 470353 : tree start_arg
1994 : 470353 : = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
1995 : 2188824 : for (unsigned j = start + 1; j < i; ++j)
1996 : : {
1997 : 3808584 : if (!operand_equal_p (start_arg,
1998 : 1904292 : PHI_ARG_DEF_FROM_EDGE
1999 : : (phi2, args[j].first)))
2000 : : {
2001 : : /* Another PHI might have a shorter set of
2002 : : equivalent args. Go for that. */
2003 : 185821 : i = j;
2004 : 185821 : if (j == start + 1)
2005 : : equal = false;
2006 : : break;
2007 : : }
2008 : : }
2009 : : if (!equal)
2010 : : break;
2011 : : }
2012 : 416787 : if (equal)
2013 : : {
2014 : : /* If we are asked to forward all edges the block
2015 : : has all degenerate PHIs. Do nothing in that case. */
2016 : 262057 : if (start == 0
2017 : 131742 : && i == args.length ()
2018 : 267284 : && args.length () == gimple_phi_num_args (phi))
2019 : : break;
2020 : : /* Instead of using make_forwarder_block we are
2021 : : rolling our own variant knowing that the forwarder
2022 : : does not need PHI nodes apart from possibly
2023 : : a virtual one. */
2024 : 256980 : auto_vec<tree, 8> vphi_args;
2025 : 256980 : if (vphi)
2026 : : {
2027 : 161360 : vphi_args.reserve_exact (i - start);
2028 : 638818 : for (unsigned j = start; j < i; ++j)
2029 : 477458 : vphi_args.quick_push
2030 : 477458 : (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
2031 : : }
2032 : 256980 : free_dominance_info (fn, CDI_DOMINATORS);
2033 : 256980 : basic_block forwarder = split_edge (args[start].first);
2034 : 256980 : profile_count count = profile_count::zero ();
2035 : 256980 : bool irr = false;
2036 : 774723 : for (unsigned j = start + 1; j < i; ++j)
2037 : : {
2038 : 517743 : edge e = args[j].first;
2039 : 517743 : if (e->flags & EDGE_IRREDUCIBLE_LOOP)
2040 : 1052 : irr = true;
2041 : 517743 : redirect_edge_and_branch_force (e, forwarder);
2042 : 517743 : redirect_edge_var_map_clear (e);
2043 : 517743 : count += e->count ();
2044 : : }
2045 : 256980 : forwarder->count = count;
2046 : 256980 : if (irr)
2047 : : {
2048 : 806 : forwarder->flags |= BB_IRREDUCIBLE_LOOP;
2049 : 806 : single_succ_edge (forwarder)->flags
2050 : 806 : |= EDGE_IRREDUCIBLE_LOOP;
2051 : : }
2052 : :
2053 : 256980 : if (vphi)
2054 : : {
2055 : 161360 : tree def = copy_ssa_name (vphi_args[0]);
2056 : 161360 : gphi *vphi_copy = create_phi_node (def, forwarder);
2057 : 638818 : for (unsigned j = start; j < i; ++j)
2058 : 954916 : add_phi_arg (vphi_copy, vphi_args[j - start],
2059 : 477458 : args[j].first, UNKNOWN_LOCATION);
2060 : 161360 : SET_PHI_ARG_DEF
2061 : : (vphi, single_succ_edge (forwarder)->dest_idx, def);
2062 : : }
2063 : 256980 : todo |= TODO_cleanup_cfg;
2064 : 256980 : }
2065 : : }
2066 : : /* Continue searching for more opportunities. */
2067 : : start = i;
2068 : : }
2069 : 454259 : }
2070 : 3289936 : return todo;
2071 : : }
2072 : :
2073 : : /* Main routine to eliminate dead code.
2074 : :
2075 : : AGGRESSIVE controls the aggressiveness of the algorithm.
2076 : : In conservative mode, we ignore control dependence and simply declare
2077 : : all but the most trivially dead branches necessary. This mode is fast.
2078 : : In aggressive mode, control dependences are taken into account, which
2079 : : results in more dead code elimination, but at the cost of some time. */
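: : /* Illustrative sketch of the difference: for
: :      if (x_1 > 0)
: :        goto <bb 3>;
: :      else
: :        goto <bb 4>;
: :    where nothing in either arm is otherwise necessary, conservative
: :    mode keeps the branch, while aggressive (CD-)DCE can delete it
: :    because no necessary statement is control dependent on it.  */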
2080 : :
2081 : : static unsigned int
2082 : 7679546 : perform_tree_ssa_dce (bool aggressive)
2083 : : {
2084 : 7679546 : bool something_changed = 0;
2085 : 7679546 : unsigned todo = 0;
2086 : :
2087 : : /* Preheaders are needed for SCEV to work.
2088 : : Simple latches and recorded exits improve the chances that the loop
2089 : : will be proved finite in testcases such as loop-15.c and loop-24.c. */
2090 : 7679546 : bool in_loop_pipeline = scev_initialized_p ();
2091 : 7679546 : if (aggressive && ! in_loop_pipeline)
2092 : : {
2093 : 3078158 : loop_optimizer_init (LOOPS_NORMAL
2094 : : | LOOPS_HAVE_RECORDED_EXITS);
2095 : 3078158 : scev_initialize ();
2096 : : }
2097 : :
2098 : 7679546 : if (aggressive)
2099 : 3289936 : todo |= make_forwarders_with_degenerate_phis (cfun);
2100 : :
2101 : 7679546 : calculate_dominance_info (CDI_DOMINATORS);
2102 : :
2103 : 7679546 : tree_dce_init (aggressive);
2104 : :
2105 : 7679546 : if (aggressive)
2106 : : {
2107 : : /* Compute control dependence. */
2108 : 3289936 : calculate_dominance_info (CDI_POST_DOMINATORS);
2109 : 3289936 : cd = new control_dependences ();
2110 : :
2111 : 6579872 : visited_control_parents =
2112 : 3289936 : sbitmap_alloc (last_basic_block_for_fn (cfun));
2113 : 3289936 : bitmap_clear (visited_control_parents);
2114 : :
2115 : 3289936 : mark_dfs_back_edges ();
2116 : : }
2117 : :
2118 : 7679546 : find_obviously_necessary_stmts (aggressive);
2119 : :
2120 : 7679546 : if (aggressive && ! in_loop_pipeline)
2121 : : {
2122 : 3078158 : scev_finalize ();
2123 : 3078158 : loop_optimizer_finalize ();
2124 : : }
2125 : :
2126 : 7679546 : longest_chain = 0;
2127 : 7679546 : total_chain = 0;
2128 : 7679546 : nr_walks = 0;
2129 : 7679546 : chain_ovfl = false;
2130 : 7679546 : visited = BITMAP_ALLOC (NULL);
2131 : 7679546 : propagate_necessity (aggressive);
2132 : 7679546 : BITMAP_FREE (visited);
2133 : :
2134 : 7679546 : something_changed |= eliminate_unnecessary_stmts (aggressive);
2135 : 7679546 : something_changed |= cfg_altered;
2136 : :
2137 : : /* We do not update postdominators, so free them unconditionally. */
2138 : 7679546 : free_dominance_info (CDI_POST_DOMINATORS);
2139 : :
2140 : : /* If we removed paths in the CFG, then we need to update
2141 : : dominators as well. I haven't investigated the possibility
2142 : : of incrementally updating dominators. */
2143 : 7679546 : if (cfg_altered)
2144 : 18475 : free_dominance_info (CDI_DOMINATORS);
2145 : :
2146 : 7679546 : statistics_counter_event (cfun, "Statements deleted", stats.removed);
2147 : 7679546 : statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
2148 : :
2149 : : /* Debugging dumps. */
2150 : 7679546 : if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
2151 : 213 : print_stats ();
2152 : :
2153 : 7679546 : tree_dce_done (aggressive);
2154 : :
2155 : 7679546 : if (something_changed)
2156 : : {
2157 : 782123 : free_numbers_of_iterations_estimates (cfun);
2158 : 782123 : if (in_loop_pipeline)
2159 : 60010 : scev_reset ();
2160 : : todo |= TODO_update_ssa | TODO_cleanup_cfg;
2161 : : }
2162 : 7679546 : return todo;
2163 : : }
2164 : :
2165 : : namespace {
2166 : :
2167 : : const pass_data pass_data_dce =
2168 : : {
2169 : : GIMPLE_PASS, /* type */
2170 : : "dce", /* name */
2171 : : OPTGROUP_NONE, /* optinfo_flags */
2172 : : TV_TREE_DCE, /* tv_id */
2173 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2174 : : 0, /* properties_provided */
2175 : : 0, /* properties_destroyed */
2176 : : 0, /* todo_flags_start */
2177 : : 0, /* todo_flags_finish */
2178 : : };
2179 : :
2180 : : class pass_dce_base : public gimple_opt_pass
2181 : : {
2182 : : public:
2183 : : /* opt_pass methods: */
2184 : 7682562 : bool gate (function *) final override { return flag_tree_dce != 0; }
2185 : 2808310 : void set_pass_param (unsigned n, bool param) final override
2186 : : {
2187 : 2808310 : gcc_assert (n == 0 || n == 1);
2188 : 2808310 : if (n == 0)
2189 : 1684986 : update_address_taken_p = param;
2190 : 1123324 : else if (n == 1)
2191 : 1123324 : remove_unused_locals_p = param;
2192 : 2808310 : }
2193 : :
2194 : : protected:
2195 : 3089141 : pass_dce_base (const pass_data &data, gcc::context *ctxt)
2196 : 6178282 : : gimple_opt_pass (data, ctxt)
2197 : : {}
2198 : 7679546 : unsigned int execute_dce (function *, bool aggressive)
2199 : : {
2200 : 7679546 : return (perform_tree_ssa_dce (aggressive)
2201 : 7679546 : | (remove_unused_locals_p ? TODO_remove_unused_locals : 0)
2202 : 7679546 : | (update_address_taken_p ? TODO_update_address_taken : 0));
2203 : : }
2204 : :
2205 : : private:
2206 : : bool update_address_taken_p = false;
2207 : : bool remove_unused_locals_p = false;
2208 : : }; // class pass_dce_base
2209 : :
2210 : :
2211 : : class pass_dce : public pass_dce_base
2212 : : {
2213 : : public:
2214 : 2246648 : pass_dce (gcc::context *ctxt)
2215 : 4493296 : : pass_dce_base (pass_data_dce, ctxt)
2216 : : {}
2217 : :
2218 : : /* opt_pass methods: */
2219 : 1965817 : opt_pass * clone () final override { return new pass_dce (m_ctxt); }
2220 : 4201918 : unsigned int execute (function *func) final override
2221 : : {
2222 : 4201918 : return execute_dce (func, /*aggressive=*/false);
2223 : : }
2224 : :
2225 : : }; // class pass_dce
2226 : :
2227 : : } // anon namespace
2228 : :
2229 : : gimple_opt_pass *
2230 : 280831 : make_pass_dce (gcc::context *ctxt)
2231 : : {
2232 : 280831 : return new pass_dce (ctxt);
2233 : : }
2234 : :
2235 : : namespace {
2236 : :
2237 : : const pass_data pass_data_cd_dce =
2238 : : {
2239 : : GIMPLE_PASS, /* type */
2240 : : "cddce", /* name */
2241 : : OPTGROUP_NONE, /* optinfo_flags */
2242 : : TV_TREE_CD_DCE, /* tv_id */
2243 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2244 : : 0, /* properties_provided */
2245 : : 0, /* properties_destroyed */
2246 : : 0, /* todo_flags_start */
2247 : : 0, /* todo_flags_finish */
2248 : : };
2249 : :
2250 : : class pass_cd_dce : public pass_dce_base
2251 : : {
2252 : : public:
2253 : 842493 : pass_cd_dce (gcc::context *ctxt)
2254 : 1684986 : : pass_dce_base (pass_data_cd_dce, ctxt)
2255 : : {}
2256 : :
2257 : : /* opt_pass methods: */
2258 : 561662 : opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
2259 : 3477628 : unsigned int execute (function *func) final override
2260 : : {
2261 : 3477628 : return execute_dce (func, /*aggressive=*/optimize >= 2);
2262 : : }
2263 : :
2264 : : }; // class pass_cd_dce
2265 : :
2266 : : } // anon namespace
2267 : :
2268 : : gimple_opt_pass *
2269 : 280831 : make_pass_cd_dce (gcc::context *ctxt)
2270 : : {
2271 : 280831 : return new pass_cd_dce (ctxt);
2272 : : }
2273 : :
2274 : :
2275 : : /* A cheap DCE interface. WORKLIST is a bitmap of SSA name versions whose
2276 : : defining statements are possibly dead; it is consumed by this function.
2277 : : The function has linear complexity in the number of dead stmts, with a
2278 : : constant factor proportional to the average number of SSA use operands. */
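: : /* A minimal usage sketch (hypothetical caller; LHS is some dead
: :    SSA name):
: :      auto_bitmap dead;
: :      bitmap_set_bit (dead, SSA_NAME_VERSION (lhs));
: :      simple_dce_from_worklist (dead, NULL);
: :    Passing NULL for NEED_EH_CLEANUP simply skips the EH cleanup
: :    notification.  */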
2279 : :
2280 : : void
2281 : 35778960 : simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
2282 : : {
2283 : 35778960 : int phiremoved = 0;
2284 : 35778960 : int stmtremoved = 0;
2285 : 69731078 : while (! bitmap_empty_p (worklist))
2286 : : {
2287 : : /* Pop item. */
2288 : 33952118 : unsigned i = bitmap_clear_first_set_bit (worklist);
2289 : :
2290 : 33952118 : tree def = ssa_name (i);
2291 : : /* Removed by somebody else or still in use.
2292 : : Note that a use by the defining PHI node itself is not counted as still in use. */
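: : /* E.g. (illustrative) a PHI whose result only feeds itself,
: :      # x_1 = PHI <x_1(latch), 0(entry)>
: :    is still removable even though x_1 has a remaining use.  */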
2293 : 33952118 : if (!def)
2294 : 15351556 : continue;
2295 : 33813946 : if (!has_zero_uses (def))
2296 : : {
2297 : 15173129 : gimple *def_stmt = SSA_NAME_DEF_STMT (def);
2298 : :
2299 : 15173129 : if (gimple_code (def_stmt) != GIMPLE_PHI)
2300 : 15169198 : continue;
2301 : :
2302 : 2828837 : gimple *use_stmt;
2303 : 2828837 : imm_use_iterator use_iter;
2304 : 2828837 : bool canremove = true;
2305 : :
2306 : 2919475 : FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
2307 : : {
2308 : : /* Ignore debug statements. */
2309 : 2915544 : if (is_gimple_debug (use_stmt))
2310 : 83154 : continue;
2311 : 2832390 : if (use_stmt != def_stmt)
2312 : : {
2313 : : canremove = false;
2314 : : break;
2315 : : }
2316 : 2828837 : }
2317 : 2828837 : if (!canremove)
2318 : 2824906 : continue;
2319 : : }
2320 : :
2321 : 18644748 : gimple *t = SSA_NAME_DEF_STMT (def);
2322 : 18644748 : if (gimple_has_side_effects (t))
2323 : 35722 : continue;
2324 : :
2325 : : /* The defining statement needs to be defining only this name.
2326 : : ASM is the only statement that can define more than one
2327 : : name. */
2328 : 18609026 : if (is_a<gasm *>(t)
2329 : 18609026 : && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
2330 : 15 : continue;
2331 : :
2332 : : /* Don't remove statements that are needed for non-call
2333 : : eh to work. */
2334 : 18609011 : if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
2335 : 8449 : continue;
2336 : :
2337 : : /* Tell the caller that we removed a statement that might
2338 : : throw so that it can clean up the cfg for that block. */
2339 : 18600562 : if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
2340 : 279 : bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);
2341 : :
2342 : : /* Add uses to the worklist. */
2343 : 18600562 : ssa_op_iter iter;
2344 : 18600562 : use_operand_p use_p;
2345 : 52683497 : FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
2346 : : {
2347 : 15482373 : tree use = USE_FROM_PTR (use_p);
2348 : 15482373 : if (TREE_CODE (use) == SSA_NAME
2349 : 15482373 : && ! SSA_NAME_IS_DEFAULT_DEF (use))
2350 : 13807794 : bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
2351 : : }
2352 : :
2353 : : /* Remove stmt. */
2354 : 18600562 : if (dump_file && (dump_flags & TDF_DETAILS))
2355 : : {
2356 : 273 : fprintf (dump_file, "Removing dead stmt:");
2357 : 273 : print_gimple_stmt (dump_file, t, 0);
2358 : : }
2359 : 18600562 : gimple_stmt_iterator gsi = gsi_for_stmt (t);
2360 : 18600562 : if (gimple_code (t) == GIMPLE_PHI)
2361 : : {
2362 : 2579087 : remove_phi_node (&gsi, true);
2363 : 2579087 : phiremoved++;
2364 : : }
2365 : : else
2366 : : {
2367 : 16021475 : unlink_stmt_vdef (t);
2368 : 16021475 : gsi_remove (&gsi, true);
2369 : 16021475 : release_defs (t);
2370 : 16021475 : stmtremoved++;
2371 : : }
2372 : : }
2373 : 35778960 : statistics_counter_event (cfun, "PHIs removed",
2374 : : phiremoved);
2375 : 35778960 : statistics_counter_event (cfun, "Statements removed",
2376 : : stmtremoved);
2377 : 35778960 : }
|