Line data Source code
1 : /* Dead code elimination pass for the GNU compiler.
2 : Copyright (C) 2002-2026 Free Software Foundation, Inc.
3 : Contributed by Ben Elliston <bje@redhat.com>
4 : and Andrew MacLeod <amacleod@redhat.com>
5 : Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6 :
7 : This file is part of GCC.
8 :
9 : GCC is free software; you can redistribute it and/or modify it
10 : under the terms of the GNU General Public License as published by the
11 : Free Software Foundation; either version 3, or (at your option) any
12 : later version.
13 :
14 : GCC is distributed in the hope that it will be useful, but WITHOUT
15 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : for more details.
18 :
19 : You should have received a copy of the GNU General Public License
20 : along with GCC; see the file COPYING3. If not see
21 : <http://www.gnu.org/licenses/>. */
22 :
23 : /* Dead code elimination.
24 :
25 : References:
26 :
27 : Building an Optimizing Compiler,
28 : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29 :
30 : Advanced Compiler Design and Implementation,
31 : Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32 :
33 : Dead-code elimination is the removal of statements which have no
34 : impact on the program's output. "Dead statements" have no impact
35 : on the program's output, while "necessary statements" may have
36 : impact on the output.
37 :
38 : The algorithm consists of three phases:
39 : 1. Marking as necessary all statements known to be necessary,
40 : e.g. most function calls, writing a value to memory, etc;
41 : 2. Propagating necessary statements, e.g., the statements
42 : giving values to operands in necessary statements; and
43 : 3. Removing dead statements. */
44 :
45 : #include "config.h"
46 : #include "system.h"
47 : #include "coretypes.h"
48 : #include "backend.h"
49 : #include "rtl.h"
50 : #include "tree.h"
51 : #include "gimple.h"
52 : #include "cfghooks.h"
53 : #include "tree-pass.h"
54 : #include "ssa.h"
55 : #include "gimple-pretty-print.h"
56 : #include "fold-const.h"
57 : #include "calls.h"
58 : #include "cfganal.h"
59 : #include "tree-eh.h"
60 : #include "gimplify.h"
61 : #include "gimple-iterator.h"
62 : #include "tree-cfg.h"
63 : #include "tree-ssa-loop-niter.h"
64 : #include "tree-into-ssa.h"
65 : #include "tree-dfa.h"
66 : #include "cfgloop.h"
67 : #include "tree-scalar-evolution.h"
68 : #include "tree-ssa-propagate.h"
69 : #include "gimple-fold.h"
70 : #include "tree-ssa.h"
71 : #include "ipa-modref-tree.h"
72 : #include "ipa-modref.h"
73 :
74 : static struct stmt_stats
75 : {
76 : int total;
77 : int total_phis;
78 : int removed;
79 : int removed_phis;
80 : } stats;
81 :
82 : #define STMT_NECESSARY GF_PLF_1
83 :
84 : static vec<gimple *> worklist;
85 :
86 : /* Vector indicating an SSA name has already been processed and marked
87 : as necessary. */
88 : static sbitmap processed;
89 :
90 : /* Vector indicating that the last statement of a basic block has already
91 : been marked as necessary. */
92 : static sbitmap last_stmt_necessary;
93 :
94 : /* Vector indicating that BB contains statements that are live. */
95 : static sbitmap bb_contains_live_stmts;
96 :
97 : /* Before we can determine whether a control branch is dead, we need to
98 : compute which blocks are control dependent on which edges.
99 :
100 : We expect each block to be control dependent on very few edges so we
101 : use a bitmap for each block recording its edges. An array holds the
102 : bitmap. The Ith bit in the bitmap is set if that block is dependent
103 : on the Ith edge. */
104 : static control_dependences *cd;
105 :
106 : /* Vector indicating that a basic block has already had all the edges
107 : processed that it is control dependent on. */
108 : static sbitmap visited_control_parents;
109 :
110 : /* TRUE if this pass alters the CFG (by removing control statements).
111 : FALSE otherwise.
112 :
113 : If this pass alters the CFG, then it will arrange for the dominators
114 : to be recomputed. */
115 : static bool cfg_altered;
116 :
117 : /* When non-NULL holds map from basic block index into the postorder. */
118 : static int *bb_postorder;
119 :
120 :
121 : /* True if we should treat any stmt with a vdef as necessary. */
122 :
123 : static inline bool
124 267052481 : keep_all_vdefs_p ()
125 : {
126 267052481 : return optimize_debug;
127 : }
128 :
129 : /* 1 if CALLEE is the function __cxa_atexit.
130 : 2 if CALLEE is the function __aeabi_atexit.
131 : 0 otherwise. */
132 :
133 : static inline int
134 84992892 : is_cxa_atexit (const_tree callee)
135 : {
136 84992892 : if (callee != NULL_TREE
137 84992892 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__cxa_atexit") == 0)
138 : return 1;
139 84879328 : if (callee != NULL_TREE
140 84879328 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__aeabi_atexit") == 0)
141 0 : return 2;
142 : return 0;
143 : }
144 :
145 : /* True if STMT is a call to __cxa_atexit (or __aeabi_atexit)
146 : and the function argument to that call is a const or pure
147 : non-looping function decl. */
148 :
149 : static inline bool
150 84992892 : is_removable_cxa_atexit_call (gimple *stmt)
151 : {
152 84992892 : tree callee = gimple_call_fndecl (stmt);
153 84992892 : int functype = is_cxa_atexit (callee);
154 84992892 : if (!functype)
155 : return false;
156 113564 : if (gimple_call_num_args (stmt) != 3)
157 : return false;
158 :
159 : /* The function argument is the 1st argument for __cxa_atexit
160 : or the 2nd argument for __eabi_atexit. */
161 227128 : tree arg = gimple_call_arg (stmt, functype == 2 ? 1 : 0);
162 113564 : if (TREE_CODE (arg) != ADDR_EXPR)
163 : return false;
164 113564 : arg = TREE_OPERAND (arg, 0);
165 113564 : if (TREE_CODE (arg) != FUNCTION_DECL)
166 : return false;
167 113564 : int flags = flags_from_decl_or_type (arg);
168 :
169 : /* If the function is noreturn then it cannot be removed. */
170 113564 : if (flags & ECF_NORETURN)
171 : return false;
172 :
173 : /* The function needs to be const or pure and non looping. */
174 113406 : return (flags & (ECF_CONST|ECF_PURE))
175 113406 : && !(flags & ECF_LOOPING_CONST_OR_PURE);
176 : }
177 :
178 : /* If STMT is not already marked necessary, mark it, and add it to the
179 : worklist if ADD_TO_WORKLIST is true. */
180 :
181 : static inline void
182 463223931 : mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
183 : {
184 463223931 : gcc_assert (stmt);
185 :
186 463223931 : if (gimple_plf (stmt, STMT_NECESSARY))
187 : return;
188 :
189 463223741 : if (dump_file && (dump_flags & TDF_DETAILS))
190 : {
191 856 : fprintf (dump_file, "Marking useful stmt: ");
192 856 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
193 856 : fprintf (dump_file, "\n");
194 : }
195 :
196 463223741 : gimple_set_plf (stmt, STMT_NECESSARY, true);
197 463223741 : if (add_to_worklist)
198 106209266 : worklist.safe_push (stmt);
199 106209266 : if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
200 37087388 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
201 : }
202 :
203 :
/* Mark the statement defining operand OP as necessary.  Adds the defining
   statement to the worklist unless it was already known necessary (or is
   a GIMPLE_NOP, i.e. a default definition with no real defining stmt).  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  /* The PROCESSED bitmap caches SSA names whose definition was already
     handled; in that case the defining statement must already carry the
     STMT_NECESSARY flag (or be a nop).  */
  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  /* Nothing to propagate for statements already marked or for default
     definitions.  */
  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Newly discovered necessary statement: flag it, note its block as
     live, and queue it so its own operands get processed.  */
  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
243 :
/* Return true if STMT is a call to allocation function that can be
   optimized out if the memory block is never used for anything else
   than NULL pointer check or free.
   If NON_NULL_CHECK is false, we can further assume that return value
   is never checked to be non-NULL.
   Don't return true if it is called with constant size (or sizes for calloc)
   and the size is excessively large (larger than PTRDIFF_MAX, for calloc
   either argument larger than PTRDIFF_MAX or both constant and their product
   larger than PTRDIFF_MAX).  */

static bool
is_removable_allocation_p (gcall *stmt, bool non_null_check)
{
  /* ARG encodes which size argument(s) must be validated below:
     -1 = not a removable allocation, 0 = no size check needed,
     1/2 = 1-based index of the single size argument,
     3 = calloc-style pair of arguments.  */
  int arg = -1;
  tree callee = gimple_call_fndecl (stmt), a1, a2;
  if (callee != NULL_TREE
      && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MALLOC:
	arg = 1;
	goto do_malloc;
      case BUILT_IN_ALIGNED_ALLOC:
	arg = 2;
	goto do_malloc;
      case BUILT_IN_CALLOC:
	arg = 3;
	goto do_malloc;
      CASE_BUILT_IN_ALLOCA:
	arg = 1;
	goto do_malloc;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	arg = 0;
	/* FALLTHRU */
      do_malloc:
	/* -fmalloc-dce gates DCE of malloc-family calls; level 2 is
	   required when the result is compared against NULL.  */
	if (non_null_check)
	  {
	    if (flag_malloc_dce <= 1)
	      return false;
	  }
	else if (!flag_malloc_dce)
	  return false;
	break;

      case BUILT_IN_GOMP_ALLOC:
	arg = 2;
	break;

      default:;
      }

  /* Also treat replaceable operator new as a removable allocation when
     -fallocation-dce is in effect.  */
  if (arg == -1
      && callee != NULL_TREE
      && flag_allocation_dce
      && gimple_call_from_new_or_delete (stmt)
      && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
    arg = 1;

  /* Now validate that any constant size argument does not exceed
     PTRDIFF_MAX, in which case the call must be preserved.  */
  switch (arg)
    {
    case -1:
      return false;
    case 0:
      return true;
    case 1:
    case 2:
      if (gimple_call_num_args (stmt) < (unsigned) arg)
	return false;
      a1 = gimple_call_arg (stmt, arg - 1);
      if (tree_fits_uhwi_p (a1)
	  && (tree_to_uhwi (a1)
	      > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
	return false;
      return true;
    case 3:
      /* calloc: check each argument and, when both are constant, their
	 product as well.  */
      if (gimple_call_num_args (stmt) < 2)
	return false;
      a1 = gimple_call_arg (stmt, 0);
      a2 = gimple_call_arg (stmt, 1);
      if (tree_fits_uhwi_p (a1)
	  && (tree_to_uhwi (a1)
	      > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
	return false;
      if (tree_fits_uhwi_p (a2)
	  && (tree_to_uhwi (a2)
	      > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
	return false;
      if (TREE_CODE (a1) == INTEGER_CST
	  && TREE_CODE (a2) == INTEGER_CST
	  && (wi::to_widest (a1) * wi::to_widest (a2)
	      > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
	return false;
      return true;
    default:
      gcc_unreachable ();
    }
}
342 :
343 : /* Return true if STMT is a conditional
344 : if (ptr != NULL)
345 : where ptr was returned by a removable allocation function. */
346 :
347 : static bool
348 238766990 : checks_return_value_of_removable_allocation_p (gimple *stmt)
349 : {
350 238766990 : gcall *def_stmt;
351 238766990 : return gimple_code (stmt) == GIMPLE_COND
352 31359917 : && (gimple_cond_code (stmt) == EQ_EXPR
353 21768769 : || gimple_cond_code (stmt) == NE_EXPR)
354 23902179 : && integer_zerop (gimple_cond_rhs (stmt))
355 13025794 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
356 : && (def_stmt = dyn_cast <gcall *>
357 13022849 : (SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt))))
358 242065765 : && is_removable_allocation_p (def_stmt, true);
359 : }
360 :
361 :
362 : /* Mark STMT as necessary if it obviously is. Add it to the worklist if
363 : it can make other statements necessary.
364 :
365 : If AGGRESSIVE is false, control statements are conservatively marked as
366 : necessary. */
367 :
368 : static void
369 598430134 : mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
370 : {
371 : /* Statements that are implicitly live. Most function calls, asm
372 : and return statements are required. Labels and GIMPLE_BIND nodes
373 : are kept because they are control flow, and we have no way of
374 : knowing whether they can be removed. DCE can eliminate all the
375 : other statements in a block, and CFG can then remove the block
376 : and labels. */
377 598430134 : switch (gimple_code (stmt))
378 : {
379 6558547 : case GIMPLE_PREDICT:
380 6558547 : case GIMPLE_LABEL:
381 6558547 : mark_stmt_necessary (stmt, false);
382 6558547 : return;
383 :
384 9998128 : case GIMPLE_ASM:
385 9998128 : case GIMPLE_RESX:
386 9998128 : case GIMPLE_RETURN:
387 9998128 : mark_stmt_necessary (stmt, true);
388 9998128 : return;
389 :
390 37139303 : case GIMPLE_CALL:
391 37139303 : {
392 37139303 : gcall *call = as_a <gcall *> (stmt);
393 :
394 : /* Never elide a noreturn call we pruned control-flow for.
395 : Same for statements that can alter control flow in unpredictable
396 : ways. */
397 37139303 : if (gimple_call_ctrl_altering_p (call))
398 : {
399 5181457 : mark_stmt_necessary (call, true);
400 5181457 : return;
401 : }
402 :
403 31957846 : if (is_removable_allocation_p (call, false))
404 : return;
405 :
406 :
407 : /* For __cxa_atexit calls, don't mark as necessary right away. */
408 31135163 : if (is_removable_cxa_atexit_call (call))
409 : return;
410 :
411 : /* IFN_GOACC_LOOP calls are necessary in that they are used to
412 : represent parameter (i.e. step, bound) of a lowered OpenACC
413 : partitioned loop. But this kind of partitioned loop might not
414 : survive from aggressive loop removal for it has loop exit and
415 : is assumed to be finite. Therefore, we need to explicitly mark
416 : these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
417 31134756 : if (gimple_call_internal_p (call, IFN_GOACC_LOOP))
418 : {
419 27717 : mark_stmt_necessary (call, true);
420 27717 : return;
421 : }
422 : break;
423 : }
424 :
425 350803183 : case GIMPLE_DEBUG:
426 : /* Debug temps without a value are not useful. ??? If we could
427 : easily locate the debug temp bind stmt for a use thereof,
428 : would could refrain from marking all debug temps here, and
429 : mark them only if they're used. */
430 350803183 : if (gimple_debug_nonbind_marker_p (stmt)
431 272018986 : || !gimple_debug_bind_p (stmt)
432 271032465 : || gimple_debug_bind_has_value_p (stmt)
433 473199859 : || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
434 350455928 : mark_stmt_necessary (stmt, false);
435 : return;
436 :
437 1928 : case GIMPLE_GOTO:
438 1928 : gcc_assert (!simple_goto_p (stmt));
439 1928 : mark_stmt_necessary (stmt, true);
440 1928 : return;
441 :
442 31398675 : case GIMPLE_COND:
443 31398675 : gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
444 : /* Fall through. */
445 :
446 31561555 : case GIMPLE_SWITCH:
447 31561555 : if (! aggressive)
448 20959880 : mark_stmt_necessary (stmt, true);
449 : break;
450 :
451 162329501 : case GIMPLE_ASSIGN:
452 : /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
453 : do not prevail. That also makes control flow leading to them
454 : not necessary in aggressive mode. */
455 172281595 : if (gimple_clobber_p (stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
456 : return;
457 : break;
458 :
459 : default:
460 : break;
461 : }
462 :
463 : /* If the statement has volatile operands, it needs to be preserved.
464 : Same for statements that can alter control flow in unpredictable
465 : ways. */
466 224170655 : if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
467 : {
468 40145339 : mark_stmt_necessary (stmt, true);
469 40145339 : return;
470 : }
471 :
472 : /* If a statement could throw, it can be deemed necessary unless we
473 : are allowed to remove dead EH. Test this after checking for
474 : new/delete operators since we always elide their EH. */
475 184025316 : if (!cfun->can_delete_dead_exceptions
476 184025316 : && stmt_could_throw_p (cfun, stmt))
477 : {
478 5809805 : mark_stmt_necessary (stmt, true);
479 5809805 : return;
480 : }
481 :
482 222008502 : if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
483 221989155 : || stmt_may_clobber_global_p (stmt, false))
484 : {
485 13525235 : mark_stmt_necessary (stmt, true);
486 13525235 : return;
487 : }
488 :
489 : return;
490 : }
491 :
492 :
493 : /* Mark the last statement of BB as necessary. */
494 :
495 : static bool
496 42760884 : mark_last_stmt_necessary (basic_block bb)
497 : {
498 42760884 : if (!bitmap_set_bit (last_stmt_necessary, bb->index))
499 : return true;
500 :
501 16789647 : bitmap_set_bit (bb_contains_live_stmts, bb->index);
502 :
503 : /* We actually mark the statement only if it is a control statement. */
504 16789647 : gimple *stmt = *gsi_last_bb (bb);
505 16789647 : if (stmt && is_ctrl_stmt (stmt))
506 : {
507 10559967 : mark_stmt_necessary (stmt, true);
508 10559967 : return true;
509 : }
510 : return false;
511 : }
512 :
513 :
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* The entry block is control dependent on nothing.  */
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* Walk every edge BB is control dependent on and make the source
     block's controlling statement necessary.  */
  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      /* If CD_BB's last statement could not be marked (it is not a
	 control statement), recurse on CD_BB's own dependences.  */
      if (!mark_last_stmt_necessary (cd_bb))
	mark_control_dependent_edges_necessary (cd_bb, false);
    }

  /* Only record BB as fully processed when no dependence was skipped;
     otherwise a later call without IGNORE_SELF must redo the walk.  */
  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
549 :
550 :
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  /* First pass: clear the STMT_NECESSARY flag everywhere and seed the
     worklist with statements that are trivially necessary.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  This is
     needed to make the logic in remove_dead_stmt work to identify the
     correct edge to keep when removing a controlling condition.  */
  if (aggressive)
    {
      /* Back edges of irreducible regions must be preserved explicitly,
	 since the loop structure does not describe them.  */
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible "
			     "loop %i->%i\n", e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      /* Keep the controlling conditions of loops we cannot prove finite,
	 so possibly-infinite loops are not removed.  */
      for (auto loop : loops_list (cfun, 0))
	/* For loops without an exit do not mark any condition.  */
	if (loop->exits->next->e && !finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
    }
}
622 :
623 :
624 : /* Return true if REF is based on an aliased base, otherwise false. */
625 :
626 : static bool
627 96020492 : ref_may_be_aliased (tree ref)
628 : {
629 96020492 : if (TREE_CODE (ref) == WITH_SIZE_EXPR)
630 0 : ref = TREE_OPERAND (ref, 0);
631 172571302 : while (handled_component_p (ref))
632 76550810 : ref = TREE_OPERAND (ref, 0);
633 96020492 : if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
634 96020492 : && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
635 15305110 : ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
636 96020492 : return !(DECL_P (ref)
637 63816602 : && !may_be_aliased (ref));
638 : }
639 :
/* State shared by the mark_*_reaching_defs_necessary walkers below.  */

/* Bitmap of VDEF SSA versions already visited; passed by reference to
   walk_aliased_vdefs so walks can share it.  */
static bitmap visited = NULL;
/* Longest virtual-operand walk chain seen so far.  */
static unsigned int longest_chain = 0;
/* Sum of the lengths of all walk chains performed.  */
static unsigned int total_chain = 0;
/* Number of alias walks performed.  */
static unsigned int nr_walks = 0;
/* True once the walks became too expensive and we dropped back to the
   simple mode that marks all reaching definitions.  */
static bool chain_ovfl = false;
645 :
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  Clobbers are the exception: they
     are removed lazily and need not be kept alive here.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == refd->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (known_eq (size, max_size)
	      && known_subrange_p (ref->offset, ref->max_size, offset, size))
	    return true;
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
709 :
710 : static void
711 13348120 : mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
712 : {
713 : /* Should have been caught before calling this function. */
714 13348120 : gcc_checking_assert (!keep_all_vdefs_p ());
715 :
716 13348120 : unsigned int chain;
717 13348120 : ao_ref refd;
718 13348120 : gcc_assert (!chain_ovfl);
719 13348120 : ao_ref_init (&refd, ref);
720 26696240 : chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
721 : mark_aliased_reaching_defs_necessary_1,
722 13348120 : gimple_bb (stmt), NULL);
723 13348120 : if (chain > longest_chain)
724 2196022 : longest_chain = chain;
725 13348120 : total_chain += chain;
726 13348120 : nr_walks++;
727 13348120 : }
728 :
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statments that do not constitute stores but have
     a virtual definition.  Allocation/free calls and removable
     __cxa_atexit registrations are handled specially by the pass and
     must not be marked necessary here.  */
  if (gcall *call = dyn_cast <gcall *> (def_stmt))
    {
      tree callee = gimple_call_fndecl (call);
      if (callee != NULL_TREE
	  && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  CASE_BUILT_IN_ALLOCA:
	  case BUILT_IN_STRDUP:
	  case BUILT_IN_STRNDUP:
	  case BUILT_IN_FREE:
	  case BUILT_IN_GOMP_ALLOC:
	  case BUILT_IN_GOMP_FREE:
	    return false;

	  default:;
	  }

      /* Likewise skip replaceable operator new/delete calls.  */
      if (callee != NULL_TREE
	  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
	      || DECL_IS_OPERATOR_DELETE_P (callee))
	  && gimple_call_from_new_or_delete (call))
	return false;
      if (is_removable_cxa_atexit_call (call))
	return false;
    }

  /* Everything else is necessary; clobbers stay removable.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* Always keep walking; this walker never reports a kill.  */
  return false;
}
797 :
/* Mark all may-definitions reaching STMT's virtual use as necessary,
   except those that mark_all_reaching_defs_necessary_1 can prove
   irrelevant (stores to non-aliased decls, removable allocations).  */

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
806 :
807 : /* Return true for PHI nodes with one or identical arguments
808 : can be removed. */
809 : static bool
810 19390223 : degenerate_phi_p (gimple *phi)
811 : {
812 19390223 : unsigned int i;
813 19390223 : tree op = gimple_phi_arg_def (phi, 0);
814 20358104 : for (i = 1; i < gimple_phi_num_args (phi); i++)
815 17617941 : if (gimple_phi_arg_def (phi, i) != op)
816 : return false;
817 : return true;
818 : }
819 :
820 : /* Return that NEW_CALL and DELETE_CALL are a valid pair of new
821 : and delete operators. */
822 :
823 : static bool
824 51935 : valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
825 : {
826 51935 : tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
827 51935 : tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
828 51935 : return valid_new_delete_pair_p (new_asm, delete_asm);
829 : }
830 :
831 : /* Propagate necessity using the operands of necessary statements.
832 : Process the uses on each statement in the worklist, and add all
833 : feeding statements which contribute to the calculation of this
834 : value to the worklist.
835 :
836 : In conservative mode, EL is NULL. */
837 :
838 : static void
839 8070794 : propagate_necessity (bool aggressive)
840 : {
841 8070794 : gimple *stmt;
842 :
843 8070794 : if (dump_file && (dump_flags & TDF_DETAILS))
844 209 : fprintf (dump_file, "\nProcessing worklist:\n");
845 :
846 266334437 : while (worklist.length () > 0)
847 : {
848 : /* Take STMT from worklist. */
849 258263643 : stmt = worklist.pop ();
850 :
851 258263643 : if (dump_file && (dump_flags & TDF_DETAILS))
852 : {
853 1107 : fprintf (dump_file, "processing: ");
854 1107 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
855 1107 : fprintf (dump_file, "\n");
856 : }
857 :
858 258263643 : if (aggressive)
859 : {
860 : /* Mark the last statement of the basic blocks on which the block
861 : containing STMT is control dependent, but only if we haven't
862 : already done so. */
863 89158591 : basic_block bb = gimple_bb (stmt);
864 89158591 : if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
865 89158591 : && !bitmap_bit_p (visited_control_parents, bb->index))
866 18864892 : mark_control_dependent_edges_necessary (bb, false);
867 : }
868 :
869 258263643 : if (gimple_code (stmt) == GIMPLE_PHI
870 : /* We do not process virtual PHI nodes nor do we track their
871 : necessity. */
872 296904125 : && !virtual_operand_p (gimple_phi_result (stmt)))
873 : {
874 : /* PHI nodes are somewhat special in that each PHI alternative has
875 : data and control dependencies. All the statements feeding the
876 : PHI node's arguments are always necessary. In aggressive mode,
877 : we also consider the control dependent edges leading to the
878 : predecessor block associated with each PHI alternative as
879 : necessary. */
880 19320241 : gphi *phi = as_a <gphi *> (stmt);
881 19320241 : size_t k;
882 :
883 64777389 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
884 : {
885 45457148 : tree arg = PHI_ARG_DEF (stmt, k);
886 45457148 : if (TREE_CODE (arg) == SSA_NAME)
887 33692740 : mark_operand_necessary (arg);
888 : }
889 :
890 : /* For PHI operands it matters from where the control flow arrives
891 : to the BB. Consider the following example:
892 :
893 : a=exp1;
894 : b=exp2;
895 : if (test)
896 : ;
897 : else
898 : ;
899 : c=PHI(a,b)
900 :
901 : We need to mark control dependence of the empty basic blocks, since they
 902          :       contain computation of PHI operands.
903 :
 904          :       Doing so is too restrictive in the case the predecessor block is in
905 : the loop. Consider:
906 :
907 : if (b)
908 : {
909 : int i;
910 : for (i = 0; i<1000; ++i)
911 : ;
912 : j = 0;
913 : }
914 : return j;
915 :
916 : There is PHI for J in the BB containing return statement.
 917          :       In this case the control dependence of predecessor block (that is
918 : within the empty loop) also contains the block determining number
919 : of iterations of the block that would prevent removing of empty
920 : loop in this case.
921 :
922 : This scenario can be avoided by splitting critical edges.
923 : To save the critical edge splitting pass we identify how the control
924 : dependence would look like if the edge was split.
925 :
926 : Consider the modified CFG created from current CFG by splitting
927 : edge B->C. In the postdominance tree of modified CFG, C' is
 928          :       always child of C.  There are two cases how children of C' can look
929 : like:
930 :
931 : 1) C' is leaf
932 :
933 : In this case the only basic block C' is control dependent on is B.
934 :
935 : 2) C' has single child that is B
936 :
937 : In this case control dependence of C' is same as control
938 : dependence of B in original CFG except for block B itself.
939 : (since C' postdominate B in modified CFG)
940 :
941 : Now how to decide what case happens? There are two basic options:
942 :
943 : a) C postdominate B. Then C immediately postdominate B and
944 : case 2 happens iff there is no other way from B to C except
945 : the edge B->C.
946 :
 947          :       There is another way from B to C iff there is a successor of B that
 948          :       is not postdominated by B.  Testing this condition is somewhat
 949          :       expensive, because we need to iterate over all successors of B.
950 : We are safe to assume that this does not happen: we will mark B
951 : as needed when processing the other path from B to C that is
 952          :       control dependent on B and marking control dependencies of B
953 : itself is harmless because they will be processed anyway after
954 : processing control statement in B.
955 :
956 : b) C does not postdominate B. Always case 1 happens since there is
957 : path from C to exit that does not go through B and thus also C'. */
958 :
959 32601331 : if (aggressive && !degenerate_phi_p (stmt))
960 : {
961 20205118 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
962 : {
963 14165967 : basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
964 :
965 14165967 : if (gimple_bb (stmt)
966 14165967 : != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
967 : {
968 1507100 : if (!mark_last_stmt_necessary (arg_bb))
969 2486 : mark_control_dependent_edges_necessary (arg_bb, false);
970 : }
971 12658867 : else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
972 12658867 : && !bitmap_bit_p (visited_control_parents,
973 : arg_bb->index))
974 6374428 : mark_control_dependent_edges_necessary (arg_bb, true);
975 : }
976 : }
977 : }
978 : else
979 : {
980 : /* Propagate through the operands. Examine all the USE, VUSE and
981 : VDEF operands in this statement. Mark all the statements
982 : which feed this statement's uses as necessary. */
983 238943402 : ssa_op_iter iter;
984 238943402 : tree use;
985 :
986 : /* If this is a call to free which is directly fed by an
987 : allocation function do not mark that necessary through
988 : processing the argument. */
989 238943402 : bool is_delete_operator
990 238943402 : = (is_gimple_call (stmt)
991 37105508 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
992 240224431 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
993 238022766 : if (is_delete_operator
994 238022766 : || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
995 237713324 : || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
996 : {
997 1230078 : tree ptr = gimple_call_arg (stmt, 0);
998 1230078 : gcall *def_stmt;
999 : /* If the pointer we free is defined by an allocation
1000 : function do not add the call to the worklist. */
1001 1230078 : if (TREE_CODE (ptr) == SSA_NAME
1002 1229175 : && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
1003 1431537 : && is_removable_allocation_p (def_stmt, false))
1004 : {
1005 179522 : if (is_delete_operator
1006 179522 : && !valid_new_delete_pair_p (def_stmt, stmt))
1007 125 : mark_operand_necessary (gimple_call_arg (stmt, 0));
1008 :
1009 : /* Delete operators can have alignment and (or) size
1010 : as next arguments. When being a SSA_NAME, they
1011 : must be marked as necessary. Similarly GOMP_free. */
1012 179522 : if (gimple_call_num_args (stmt) >= 2)
1013 93520 : for (unsigned i = 1; i < gimple_call_num_args (stmt);
1014 : i++)
1015 : {
1016 46771 : tree arg = gimple_call_arg (stmt, i);
1017 46771 : if (TREE_CODE (arg) == SSA_NAME)
1018 8910 : mark_operand_necessary (arg);
1019 : }
1020 :
1021 104905951 : continue;
1022 179522 : }
1023 : }
1024 :
1025 238763880 : if (checks_return_value_of_removable_allocation_p (stmt))
1026 101969 : continue;
1027 :
1028 448770859 : FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
1029 210108948 : mark_operand_necessary (use);
1030 :
1031 238661911 : use = gimple_vuse (stmt);
1032 205673303 : if (!use)
1033 98472198 : continue;
1034 :
1035 : /* No need to search for vdefs if we intrinsicly keep them all. */
1036 140189713 : if (keep_all_vdefs_p ())
1037 70254 : continue;
1038 :
1039 : /* If we dropped to simple mode make all immediately
1040 : reachable definitions necessary. */
1041 140119459 : if (chain_ovfl)
1042 : {
1043 3645592 : mark_all_reaching_defs_necessary (stmt);
1044 3645592 : continue;
1045 : }
1046 :
1047 : /* For statements that may load from memory (have a VUSE) we
1048 : have to mark all reaching (may-)definitions as necessary.
1049 : We partition this task into two cases:
1050 : 1) explicit loads based on decls that are not aliased
1051 : 2) implicit loads (like calls) and explicit loads not
1052 : based on decls that are not aliased (like indirect
1053 : references or loads from globals)
1054 : For 1) we mark all reaching may-defs as necessary, stopping
1055 : at dominating kills. For 2) we want to mark all dominating
1056 : references necessary, but non-aliased ones which we handle
1057 : in 1). By keeping a global visited bitmap for references
1058 : we walk for 2) we avoid quadratic behavior for those. */
1059 :
1060 136473867 : if (gcall *call = dyn_cast <gcall *> (stmt))
1061 : {
1062 33697535 : tree callee = gimple_call_fndecl (call);
1063 33697535 : unsigned i;
1064 :
1065 34925660 : if (callee != NULL_TREE
1066 32180313 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
1067 31815414 : || DECL_IS_OPERATOR_DELETE_P (callee))
1068 34938840 : && gimple_call_from_new_or_delete (call))
1069 1228125 : continue;
1070 :
1071 32469410 : if (is_removable_cxa_atexit_call (call))
1072 0 : continue;
1073 :
1074 : bool all_refs = false;
1075 : /* Calls implicitly load from memory, their arguments
1076 : in addition may explicitly perform memory loads. */
1077 99422086 : for (i = 0; i < gimple_call_num_args (call); ++i)
1078 : {
1079 66952676 : tree arg = gimple_call_arg (call, i);
1080 129854482 : if (TREE_CODE (arg) == SSA_NAME
1081 66952676 : || is_gimple_min_invariant (arg))
1082 62901806 : continue;
1083 4050870 : if (TREE_CODE (arg) == WITH_SIZE_EXPR)
1084 664 : arg = TREE_OPERAND (arg, 0);
1085 4050870 : if (!ref_may_be_aliased (arg))
1086 3647225 : mark_aliased_reaching_defs_necessary (call, arg);
1087 : else
1088 : all_refs = true;
1089 : }
1090 :
1091 32469410 : if (!all_refs && ipa_modref_callee_reads_no_memory_p (call))
1092 1208291 : continue;
1093 31261119 : mark_all_reaching_defs_necessary (call);
1094 : }
1095 102776332 : else if (gimple_assign_single_p (stmt))
1096 : {
1097 94727988 : tree rhs;
1098 : /* If this is a load mark things necessary. */
1099 94727988 : rhs = gimple_assign_rhs1 (stmt);
1100 94727988 : if (TREE_CODE (rhs) != SSA_NAME
1101 76924545 : && !is_gimple_min_invariant (rhs)
1102 148179678 : && TREE_CODE (rhs) != CONSTRUCTOR)
1103 : {
1104 43689509 : if (!ref_may_be_aliased (rhs))
1105 9050971 : mark_aliased_reaching_defs_necessary (stmt, rhs);
1106 : else
1107 34638538 : mark_all_reaching_defs_necessary (stmt);
1108 : }
1109 : }
1110 8048344 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1111 : {
1112 7893009 : tree rhs = gimple_return_retval (return_stmt);
1113 : /* A return statement may perform a load. */
1114 7893009 : if (rhs
1115 4368092 : && TREE_CODE (rhs) != SSA_NAME
1116 1403603 : && !is_gimple_min_invariant (rhs)
1117 8558397 : && TREE_CODE (rhs) != CONSTRUCTOR)
1118 : {
1119 665388 : if (!ref_may_be_aliased (rhs))
1120 644315 : mark_aliased_reaching_defs_necessary (stmt, rhs);
1121 : else
1122 21073 : mark_all_reaching_defs_necessary (stmt);
1123 : }
1124 : }
1125 155335 : else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
1126 : {
1127 154151 : unsigned i;
1128 154151 : mark_all_reaching_defs_necessary (stmt);
1129 : /* Inputs may perform loads. */
1130 274539 : for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1131 : {
1132 120388 : tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1133 120388 : if (TREE_CODE (op) != SSA_NAME
1134 80521 : && !is_gimple_min_invariant (op)
1135 41371 : && TREE_CODE (op) != CONSTRUCTOR
1136 161759 : && !ref_may_be_aliased (op))
1137 5609 : mark_aliased_reaching_defs_necessary (stmt, op);
1138 : }
1139 : }
1140 1184 : else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
1141 : {
1142 : /* The beginning of a transaction is a memory barrier. */
1143 : /* ??? If we were really cool, we'd only be a barrier
1144 : for the memories touched within the transaction. */
1145 1184 : mark_all_reaching_defs_necessary (stmt);
1146 : }
1147 : else
1148 0 : gcc_unreachable ();
1149 :
1150 : /* If we over-used our alias oracle budget drop to simple
1151 : mode. The cost metric allows quadratic behavior
1152 : (number of uses times number of may-defs queries) up to
1153 : a constant maximal number of queries and after that falls back to
1154 : super-linear complexity. */
1155 134037451 : if (/* Constant but quadratic for small functions. */
1156 134037451 : total_chain > 128 * 128
1157 : /* Linear in the number of may-defs. */
1158 1170485 : && total_chain > 32 * longest_chain
1159 : /* Linear in the number of uses. */
1160 4285 : && total_chain > nr_walks * 32)
1161 : {
1162 4136 : chain_ovfl = true;
1163 4136 : if (visited)
1164 4136 : bitmap_clear (visited);
1165 : }
1166 : }
1167 : }
1168 8070794 : }
1169 :
1170 : /* Remove dead PHI nodes from block BB. */
1171 :
static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  /* Walk all PHIs in BB; GSI is only advanced at the bottom because
     remove_phi_node already moves the iterator to the next PHI.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  Not when in loop-closed SSA form, which
	     requires the PHI at the loop exit.  */
	  if (!loops_state_satisfies_p (LOOP_CLOSED_SSA)
	      && degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      /* Replace every use of the PHI result with the (single or
		 identical) incoming virtual operand; the now-useless PHI
		 itself is removed below since it stays !STMT_NECESSARY.  */
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple *use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      /* Keep the abnormal-PHI marker consistent on the
		 replacement name.  */
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    /* All other virtual PHIs are kept.  */
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      /* Delete any PHI not marked necessary by the propagation phase.  */
      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}
1229 :
1230 :
1231 : /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1232 : containing I so that we don't have to look it up. */
1233 :
static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
		  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can chose any of edges out of BB as long as we are sure to not
     close infinite loops.  This is done by always choosing the edge closer to
     exit in inverted_rev_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
	e = single_succ_edge (bb);
      /* Otherwise chose one that is closer to bb with live statement in it.
	 To be able to chose one, we compute inverted post order starting from
	 all BBs with live statements.  */
      if (!e)
	{
	  /* The postorder numbering is computed lazily, once per DCE run,
	     and shared via the global bb_postorder array.  */
	  if (!bb_postorder)
	    {
	      int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      int n = inverted_rev_post_order_compute (cfun, rpo,
						       &bb_contains_live_stmts);
	      bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
	      for (int i = 0; i < n; ++i)
		bb_postorder[rpo[i]] = i;
	      free (rpo);
	    }
	  /* Prefer an edge to EXIT, otherwise the successor with the
	     smallest (closest-to-exit) postorder number.  */
	  FOR_EACH_EDGE (e2, ei, bb->succs)
	    if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		|| bb_postorder [e->dest->index]
		   >= bb_postorder [e2->dest->index])
	      e = e2;
	}
      gcc_assert (e);
      /* The surviving edge is now taken unconditionally.  */
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.
	 We are also safe to drop EH/ABNORMAL flags and turn them into
	 normal control flow, because we know that all the destinations (including
	 those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  Actual removal is deferred
	 to the caller via TO_REMOVE_EDGES so debug stmt generation during
	 PHI removal is not confused.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
	if (e != e2)
	  {
	    /* If we made a BB unconditionally exit a loop or removed
	       an entry into an irreducible region, then this transform
	       alters the set of BBs in the loop.  Schedule a fixup.  */
	    if (loop_exit_edge_p (bb->loop_father, e)
		|| (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
	      loops_state_set (LOOPS_NEED_FIXUP);
	    to_remove_edges.safe_push (e2);
	  }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      /* Only bind user-visible, non-global scalar decls without a
	 value-expr; everything else cannot be usefully tracked.  */
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gdebug *note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  /* Detach the virtual definition, remove the statement and release
     the SSA names it defined.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
1334 :
1335 : /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1336 : uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1337 :
1338 : static tree
1339 26 : find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1340 : {
1341 26 : if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1342 0 : *walk_subtrees = 0;
1343 26 : if (*tp == (tree) data)
1344 13 : return *tp;
1345 : return NULL_TREE;
1346 : }
1347 :
1348 : /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1349 : but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1350 : into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1351 : uses. */
1352 :
static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
			       enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  /* Nothing to do when the complex result is not an SSA name.  */
  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  /* Classify all immediate uses of the complex LHS: debug binds,
     REALPART_EXPR extractions, and anything else (which includes
     IMAGPART_EXPR, i.e. the overflow flag).  */
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
	has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
	has_realpart_uses = true;
      else
	{
	  has_other_uses = true;
	  break;
	}
    }

  /* The transform only applies when the value is used and the
     overflow flag is never inspected.  */
  if (!has_realpart_uses || has_other_uses)
    return;

  /* Build the plain unsigned arithmetic replacing the overflow call;
     unsigned arithmetic has well-defined wrapping semantics.  */
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = unsigned_type_for (type);
  tree result = fold_build2_loc (loc, subcode, utype,
				 fold_convert_loc (loc, utype, arg0),
				 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      /* Reset debug binds that reference LHS other than through a
	 REALPART_EXPR, since the imaginary part will no longer exist.  */
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;
	  tree v = gimple_debug_bind_get_value (use_stmt);
	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	}
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  /* Wrap the result as COMPLEX_EXPR (value, 0) so existing
     REALPART_EXPR users keep working; the overflow part is dead.  */
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
			 ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimplify_and_update_call_from_tree (gsi, result);
}
1434 :
1435 : /* Returns whether the control parents of BB are preserved. */
1436 :
1437 : static bool
1438 361908 : control_parents_preserved_p (basic_block bb)
1439 : {
1440 : /* If we marked the control parents from BB they are preserved. */
1441 361908 : if (bitmap_bit_p (visited_control_parents, bb->index))
1442 : return true;
1443 :
1444 : /* But they can also end up being marked from elsewhere. */
1445 7003 : bitmap_iterator bi;
1446 7003 : unsigned edge_number;
1447 11061 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
1448 : 0, edge_number, bi)
1449 : {
1450 7114 : basic_block cd_bb = cd->get_edge_src (edge_number);
1451 7114 : if (cd_bb != bb
1452 7114 : && !bitmap_bit_p (last_stmt_necessary, cd_bb->index))
1453 : return false;
1454 : }
1455 : /* And cache the result. */
1456 3947 : bitmap_set_bit (visited_control_parents, bb->index);
1457 3947 : return true;
1458 : }
1459 :
1460 : /* If basic block is empty, we can remove conditionals that controls
1461 : its execution. However in some cases the empty BB can stay live
1462 : (such as when it was a header of empty loop). In this case we
1463 : need to update its count. Since regions of dead BBs are acyclic
1464 : we simply propagate counts forward from live BBs to dead ones. */
1465 :
static void
propagate_counts ()
{
  basic_block bb;
  auto_vec<basic_block, 16> queue;
  /* For every dead BB, CNT records how many of its predecessors are
     dead and not yet processed; a BB is ready once that drops to 0.  */
  hash_map <basic_block, int> cnt;

  /* Seed the worklist with dead blocks whose predecessors are all
     live (or the entry block), i.e. blocks with n == 0.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (!bitmap_bit_p (bb_contains_live_stmts, bb->index))
      {
	int n = 0;
	for (edge e : bb->preds)
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (bb_contains_live_stmts, e->src->index))
	    n++;
	if (!n)
	  queue.safe_push (bb);
	cnt.put (bb, n);
      }
  /* Process dead blocks in topological order of the (acyclic) dead
     region, summing incoming edge counts.  */
  while (!queue.is_empty ())
    {
      basic_block bb = queue.pop ();
      profile_count sum = profile_count::zero ();

      for (edge e : bb->preds)
	{
	  sum += e->count ();
	  /* All predecessors must already have final counts.  */
	  gcc_checking_assert (!cnt.get (e->src));
	}
      /* If we have partial profile and some counts of incoming edges are
	 unknown, it is probably better to keep the existing count.
	 We could also propagate bi-directionally.  */
      if (sum.initialized_p () && !(sum == bb->count))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Updating count of empty bb %i from ",
		       bb->index);
	      bb->count.dump (dump_file);
	      fprintf (dump_file, " to ");
	      sum.dump (dump_file);
	      fprintf (dump_file, "\n");
	    }
	  bb->count = sum;
	}
      /* Mark BB processed and release any successors whose last dead
	 predecessor this was.  */
      cnt.remove (bb);
      for (edge e : bb->succs)
	if (int *n = cnt.get (e->dest))
	  {
	    (*n)--;
	    if (!*n)
	      queue.safe_push (e->dest);
	  }
    }
  /* Do not check that all blocks have been processed, since for
     empty infinite loops this is not the case.  */
}
1523 :
1524 : /* Eliminate unnecessary statements. Any instruction not marked as necessary
1525 : contributes nothing to the program, and can be deleted. */
1526 :
1527 : static bool
1528 8070794 : eliminate_unnecessary_stmts (bool aggressive)
1529 : {
1530 8070794 : bool something_changed = false;
1531 8070794 : basic_block bb;
1532 8070794 : gimple_stmt_iterator gsi, psi;
1533 8070794 : gimple *stmt;
1534 8070794 : auto_vec<edge> to_remove_edges;
1535 :
1536 8070794 : if (dump_file && (dump_flags & TDF_DETAILS))
1537 209 : fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1538 :
1539 8070794 : bool had_setjmp = cfun->calls_setjmp;
1540 8070794 : clear_special_calls ();
1541 :
1542 : /* Walking basic blocks and statements in reverse order avoids
1543 : releasing SSA names before any other DEFs that refer to them are
1544 : released. This helps avoid loss of debug information, as we get
1545 : a chance to propagate all RHSs of removed SSAs into debug uses,
1546 : rather than only the latest ones. E.g., consider:
1547 :
1548 : x_3 = y_1 + z_2;
1549 : a_5 = x_3 - b_4;
1550 : # DEBUG a => a_5
1551 :
1552 : If we were to release x_3 before a_5, when we reached a_5 and
1553 : tried to substitute it into the debug stmt, we'd see x_3 there,
1554 : but x_3's DEF, type, etc would have already been disconnected.
1555 : By going backwards, the debug stmt first changes to:
1556 :
1557 : # DEBUG a => x_3 - b_4
1558 :
1559 : and then to:
1560 :
1561 : # DEBUG a => y_1 + z_2 - b_4
1562 :
1563 : as desired. */
1564 8070794 : gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1565 8070794 : auto_vec<basic_block> h;
1566 8070794 : h = get_all_dominated_blocks (CDI_DOMINATORS,
1567 16141588 : single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1568 :
1569 89857925 : while (h.length ())
1570 : {
1571 81787131 : bb = h.pop ();
1572 :
1573 : /* Remove dead statements. */
1574 81787131 : auto_bitmap debug_seen;
1575 81787131 : hash_set<int_hash <location_t, 0>> locs_seen;
1576 762004396 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1577 : {
1578 598430134 : stmt = gsi_stmt (gsi);
1579 :
1580 598430134 : psi = gsi;
1581 598430134 : gsi_prev (&psi);
1582 :
1583 598430134 : stats.total++;
1584 :
1585 : /* We can mark a call to free as not necessary if the
1586 : defining statement of its argument is not necessary
1587 : (and thus is getting removed). */
1588 598430134 : if (gimple_plf (stmt, STMT_NECESSARY)
1589 598430134 : && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1590 595648435 : || (is_gimple_call (stmt)
1591 36796066 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
1592 1281029 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
1593 : {
1594 1230078 : tree ptr = gimple_call_arg (stmt, 0);
1595 1230078 : if (TREE_CODE (ptr) == SSA_NAME)
1596 : {
1597 1229175 : gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1598 1229175 : if (!gimple_nop_p (def_stmt)
1599 1229175 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1600 6266 : gimple_set_plf (stmt, STMT_NECESSARY, false);
1601 : }
1602 : }
1603 : /* Conditional checking that return value of allocation is non-NULL
1604 : can be turned to constant if the allocation itself
1605 : is unnecesary. */
1606 598430134 : if (gimple_plf (stmt, STMT_NECESSARY)
1607 595951611 : && gimple_code (stmt) == GIMPLE_COND
1608 629786941 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
1609 : {
1610 31352728 : gimple *def_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
1611 31352728 : if (!gimple_nop_p (def_stmt)
1612 31352728 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1613 : {
1614 3110 : gcc_checking_assert
1615 : (checks_return_value_of_removable_allocation_p (stmt));
1616 3110 : gimple_cond_set_lhs (as_a <gcond *>(stmt),
1617 : build_one_cst
1618 3110 : (TREE_TYPE (gimple_cond_rhs (stmt))));
1619 3110 : update_stmt (stmt);
1620 : }
1621 : }
1622 :
1623 : /* If GSI is not necessary then remove it. */
1624 598430134 : if (!gimple_plf (stmt, STMT_NECESSARY))
1625 : {
1626 : /* Keep clobbers that we can keep live live. */
1627 2478523 : if (gimple_clobber_p (stmt))
1628 : {
1629 865429 : ssa_op_iter iter;
1630 865429 : use_operand_p use_p;
1631 865429 : bool dead = false;
1632 1728333 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1633 : {
1634 865429 : tree name = USE_FROM_PTR (use_p);
1635 865429 : if (!SSA_NAME_IS_DEFAULT_DEF (name)
1636 865429 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1637 : {
1638 : dead = true;
1639 : break;
1640 : }
1641 : }
1642 1725277 : if (!dead
1643 : /* When doing CD-DCE we have to ensure all controls
1644 : of the stmt are still live. */
1645 865429 : && (!aggressive || control_parents_preserved_p (bb)))
1646 : {
1647 859848 : bitmap_clear (debug_seen);
1648 859848 : continue;
1649 : }
1650 : }
1651 1618675 : if (!is_gimple_debug (stmt))
1652 1271420 : something_changed = true;
1653 1618675 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1654 1618675 : continue;
1655 1618675 : }
1656 595951611 : else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1657 : {
1658 37099242 : tree name = gimple_call_lhs (call_stmt);
1659 :
1660 37099242 : notice_special_calls (call_stmt);
1661 :
1662 : /* When LHS of var = call (); is dead, simplify it into
1663 : call (); saving one operand. */
1664 37099242 : if (name
1665 14423516 : && TREE_CODE (name) == SSA_NAME
1666 12118210 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1667 : /* Avoid doing so for allocation calls which we
1668 : did not mark as necessary, it will confuse the
1669 : special logic we apply to malloc/free pair removal. */
1670 37165038 : && !is_removable_allocation_p (call_stmt, false))
1671 : {
1672 65705 : something_changed = true;
1673 65705 : if (dump_file && (dump_flags & TDF_DETAILS))
1674 : {
1675 1 : fprintf (dump_file, "Deleting LHS of call: ");
1676 1 : print_gimple_stmt (dump_file, call_stmt, 0, TDF_SLIM);
1677 1 : fprintf (dump_file, "\n");
1678 : }
1679 :
1680 65705 : gimple_call_set_lhs (call_stmt, NULL_TREE);
1681 65705 : maybe_clean_or_replace_eh_stmt (call_stmt, call_stmt);
1682 65705 : update_stmt (call_stmt);
1683 65705 : release_ssa_name (name);
1684 :
1685 : /* __builtin_stack_save without lhs is not needed. */
1686 65705 : if (gimple_call_builtin_p (call_stmt, BUILT_IN_STACK_SAVE))
1687 37 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1688 : /* GOMP_SIMD_LANE (unless three argument) or ASAN_POISON
1689 : without lhs is not needed. */
1690 65668 : else if (gimple_call_internal_p (call_stmt))
1691 7526 : switch (gimple_call_internal_fn (call_stmt))
1692 : {
1693 762 : case IFN_GOMP_SIMD_LANE:
1694 762 : if (gimple_call_num_args (call_stmt) >= 3
1695 800 : && !integer_nonzerop
1696 38 : (gimple_call_arg (call_stmt, 2)))
1697 : break;
1698 : /* FALLTHRU */
1699 1006 : case IFN_ASAN_POISON:
1700 1006 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1701 1006 : break;
1702 : default:
1703 : break;
1704 : }
1705 : }
1706 37033537 : else if (gimple_call_internal_p (call_stmt))
1707 962946 : switch (gimple_call_internal_fn (call_stmt))
1708 : {
1709 87544 : case IFN_ADD_OVERFLOW:
1710 87544 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1711 87544 : break;
1712 95424 : case IFN_SUB_OVERFLOW:
1713 95424 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1714 95424 : break;
1715 96973 : case IFN_MUL_OVERFLOW:
1716 96973 : maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1717 96973 : break;
1718 24872 : case IFN_UADDC:
1719 24872 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1720 2473 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1721 : break;
1722 13927 : case IFN_USUBC:
1723 13927 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1724 2002 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1725 : break;
1726 : default:
1727 : break;
1728 : }
1729 : }
1730 558852369 : else if (gimple_debug_bind_p (stmt))
1731 : {
1732 : /* We are only keeping the last debug-bind of a
1733 : non-DEBUG_EXPR_DECL variable in a series of
1734 : debug-bind stmts. */
1735 270685210 : tree var = gimple_debug_bind_get_var (stmt);
1736 270685210 : if (TREE_CODE (var) != DEBUG_EXPR_DECL
1737 270685210 : && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1738 6601206 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1739 270685210 : continue;
1740 270685210 : }
1741 288167159 : else if (gimple_debug_begin_stmt_p (stmt))
1742 : {
1743 : /* We are only keeping the last debug-begin in a series of
1744 : debug-begin stmts. */
1745 27470867 : if (locs_seen.add (gimple_location (stmt)))
1746 20670 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1747 27470867 : continue;
1748 : }
1749 297795534 : locs_seen.empty ();
1750 297795534 : bitmap_clear (debug_seen);
1751 : }
1752 :
1753 : /* Remove dead PHI nodes. */
1754 81787131 : something_changed |= remove_dead_phis (bb);
1755 81787131 : }
1756 :
1757 : /* First remove queued edges. */
1758 8070794 : if (!to_remove_edges.is_empty ())
1759 : {
1760 : /* Remove edges. We've delayed this to not get bogus debug stmts
1761 : during PHI node removal. */
1762 61855 : for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1763 41916 : remove_edge (to_remove_edges[i]);
1764 19939 : cfg_altered = true;
1765 : }
1766 : /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1767 : ??? We'd like to assert that setjmp calls do not pop out of nothing
1768 : but we currently lack a per-stmt way of noting whether a call was
1769 : recognized as returns-twice (or rather receives-control). */
1770 8070794 : if (!cfun->calls_setjmp && had_setjmp)
1771 : {
1772 : /* Make sure we only remove the edges, not dominated blocks. Using
1773 : gimple_purge_dead_abnormal_call_edges would do that and we
1774 : cannot free dominators yet. */
1775 3757 : FOR_EACH_BB_FN (bb, cfun)
1776 9678 : if (gcall *stmt = safe_dyn_cast <gcall *> (*gsi_last_bb (bb)))
1777 2080 : if (!stmt_can_make_abnormal_goto (stmt))
1778 : {
1779 736 : edge_iterator ei;
1780 736 : edge e;
1781 994 : for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1782 : {
1783 258 : if (e->flags & EDGE_ABNORMAL)
1784 : {
1785 112 : if (e->flags & EDGE_FALLTHRU)
1786 0 : e->flags &= ~EDGE_ABNORMAL;
1787 : else
1788 112 : remove_edge (e);
1789 112 : cfg_altered = true;
1790 : }
1791 : else
1792 146 : ei_next (&ei);
1793 : }
1794 : }
1795 : }
1796 :
1797 : /* Now remove the unreachable blocks. */
1798 8070794 : if (cfg_altered)
1799 : {
1800 19980 : basic_block prev_bb;
1801 :
1802 19980 : find_unreachable_blocks ();
1803 :
1804 : /* Delete all unreachable basic blocks in reverse dominator order. */
1805 19980 : for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1806 752419 : bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1807 : {
1808 732439 : prev_bb = bb->prev_bb;
1809 :
1810 732439 : if ((bb_contains_live_stmts
1811 732393 : && !bitmap_bit_p (bb_contains_live_stmts, bb->index))
1812 1263472 : || !(bb->flags & BB_REACHABLE))
1813 : {
1814 : /* Since we don't track liveness of virtual PHI nodes, it is
1815 : possible that we rendered some PHI nodes unreachable while
1816 : they are still in use. Mark them for renaming. */
1817 220340 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1818 18933 : gsi_next (&gsi))
1819 56797 : if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1820 : {
1821 18931 : bool found = false;
1822 18931 : imm_use_iterator iter;
1823 :
1824 38043 : FOR_EACH_IMM_USE_STMT (stmt, iter,
1825 : gimple_phi_result (gsi.phi ()))
1826 : {
1827 18324 : if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1828 155 : continue;
1829 18169 : if (gimple_code (stmt) == GIMPLE_PHI
1830 18169 : || gimple_plf (stmt, STMT_NECESSARY))
1831 : {
1832 : found = true;
1833 : break;
1834 : }
1835 18931 : }
1836 18931 : if (found)
1837 18143 : mark_virtual_phi_result_for_renaming (gsi.phi ());
1838 : }
1839 :
1840 201407 : if (!(bb->flags & BB_REACHABLE))
1841 : {
1842 : /* Speed up the removal of blocks that don't
1843 : dominate others. Walking backwards, this should
1844 : be the common case. ??? Do we need to recompute
1845 : dominators because of cfg_altered? */
1846 45991 : if (!first_dom_son (CDI_DOMINATORS, bb))
1847 45480 : delete_basic_block (bb);
1848 : else
1849 : {
1850 511 : h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1851 :
1852 2207 : while (h.length ())
1853 : {
1854 1696 : bb = h.pop ();
1855 1696 : prev_bb = bb->prev_bb;
1856 : /* Rearrangements to the CFG may have failed
1857 : to update the dominators tree, so that
1858 : formerly-dominated blocks are now
1859 : otherwise reachable. */
1860 1696 : if (!!(bb->flags & BB_REACHABLE))
1861 0 : continue;
1862 1696 : delete_basic_block (bb);
1863 : }
1864 :
1865 511 : h.release ();
1866 : }
1867 : }
1868 : }
1869 : }
1870 19980 : if (bb_contains_live_stmts)
1871 19972 : propagate_counts ();
1872 : }
1873 :
1874 8070794 : if (bb_postorder)
1875 19939 : free (bb_postorder);
1876 8070794 : bb_postorder = NULL;
1877 :
1878 8070794 : return something_changed;
1879 8070794 : }
1880 :
1881 :
1882 : /* Print out removed statement statistics. */
1883 :
1884 : /* Print the statement/PHI removal statistics to the dump file.
1885 : Assumes DUMP_FILE is non-NULL (checked by the caller). */
1886 : static void
1887 215 : print_stats (void)
1888 : {
1889 215 : float percg;
1890 :
1891 : /* Guard against a zero statement total: 0.0f / 0.0f is NaN and
1892 : converting NaN to int below is undefined behavior. This mirrors
1893 : the existing guard for the PHI statistics. */
1894 : if (stats.total == 0)
1895 : percg = 0;
1896 : else
1897 215 : percg = ((float) stats.removed / (float) stats.total) * 100;
1898 215 : fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1899 : stats.removed, stats.total, (int) percg);
1900 :
1901 215 : if (stats.total_phis == 0)
1902 : percg = 0;
1903 : else
1904 42 : percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1905 :
1906 215 : fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1907 : stats.removed_phis, stats.total_phis, (int) percg);
1908 : }
1901 :
1902 : /* Initialization for this pass. Set up the used data structures. */
1903 :
1904 : static void
1905 8070794 : tree_dce_init (bool aggressive)
1906 : {
1907 8070794 : memset ((void *) &stats, 0, sizeof (stats)); /* Reset removal statistics.  */
1908 :
1909 8070794 : if (aggressive) /* The per-BB bitmaps are only allocated (and freed) in aggressive mode.  */
1910 : {
1911 3495300 : last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1912 3495300 : bitmap_clear (last_stmt_necessary);
1913 3495300 : bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1914 3495300 : bitmap_clear (bb_contains_live_stmts);
1915 : }
1916 :
1917 16141588 : processed = sbitmap_alloc (num_ssa_names + 1); /* One bit per SSA name version.  */
1918 8070794 : bitmap_clear (processed);
1919 :
1920 8070794 : worklist.create (64); /* Start with room for 64 worklist entries.  */
1921 8070794 : cfg_altered = false;
1922 8070794 : }
1923 :
1924 : /* Cleanup after this pass. */
1925 :
1926 : static void
1927 8070794 : tree_dce_done (bool aggressive)
1928 : {
1929 8070794 : if (aggressive) /* Release the aggressive-mode-only structures.  */
1930 : {
1931 3495300 : delete cd; /* Control dependences allocated in perform_tree_ssa_dce.  */
1932 3495300 : sbitmap_free (visited_control_parents);
1933 3495300 : sbitmap_free (last_stmt_necessary);
1934 3495300 : sbitmap_free (bb_contains_live_stmts);
1935 3495300 : bb_contains_live_stmts = NULL; /* Cleared because the pass tests this pointer for non-NULL.  */
1936 : }
1937 :
1938 8070794 : sbitmap_free (processed);
1939 :
1940 8070794 : worklist.release ();
1941 8070794 : }
1942 :
1943 :
1944 : /* Main routine to eliminate dead code.
1945 :
1946 : AGGRESSIVE controls the aggressiveness of the algorithm.
1947 : In conservative mode, we ignore control dependence and simply declare
1948 : all but the most trivially dead branches necessary. This mode is fast.
1949 : In aggressive mode, control dependences are taken into account, which
1950 : results in more dead code elimination, but at the cost of some time. */
1951 :
1952 : static unsigned int
1953 8070794 : perform_tree_ssa_dce (bool aggressive)
1954 : {
1955 8070794 : bool something_changed = 0;
1956 8070794 : unsigned todo = 0;
1957 :
1958 : /* Preheaders are needed for SCEV to work.
1959 : Simple lateches and recorded exits improve chances that loop will
1960 : proved to be finite in testcases such as in loop-15.c and loop-24.c */
1961 8070794 : bool in_loop_pipeline = scev_initialized_p ();
1962 8070794 : if (aggressive && ! in_loop_pipeline)
1963 : {
1964 3270372 : loop_optimizer_init (LOOPS_NORMAL
1965 : | LOOPS_HAVE_RECORDED_EXITS);
1966 3270372 : scev_initialize ();
1967 : }
1968 :
1969 8070794 : if (aggressive && make_forwarders_with_degenerate_phis (cfun)) /* New forwarder blocks require a later CFG cleanup.  */
1970 : todo |= TODO_cleanup_cfg;
1971 :
1972 8070794 : calculate_dominance_info (CDI_DOMINATORS);
1973 :
1974 8070794 : tree_dce_init (aggressive);
1975 :
1976 8070794 : if (aggressive)
1977 : {
1978 : /* Compute control dependence. */
1979 3495300 : calculate_dominance_info (CDI_POST_DOMINATORS);
1980 3495300 : cd = new control_dependences (); /* Freed again in tree_dce_done.  */
1981 :
1982 6990600 : visited_control_parents =
1983 3495300 : sbitmap_alloc (last_basic_block_for_fn (cfun));
1984 3495300 : bitmap_clear (visited_control_parents);
1985 :
1986 3495300 : mark_dfs_back_edges ();
1987 : }
1988 :
1989 8070794 : find_obviously_necessary_stmts (aggressive);
1990 :
1991 8070794 : if (aggressive && ! in_loop_pipeline) /* Tear down only what was set up above.  */
1992 : {
1993 3270372 : scev_finalize ();
1994 3270372 : loop_optimizer_finalize ();
1995 : }
1996 :
1997 8070794 : longest_chain = 0; /* Reset the per-invocation chain-walk counters used while propagating necessity.  */
1998 8070794 : total_chain = 0;
1999 8070794 : nr_walks = 0;
2000 8070794 : chain_ovfl = false;
2001 8070794 : visited = BITMAP_ALLOC (NULL);
2002 8070794 : propagate_necessity (aggressive);
2003 8070794 : BITMAP_FREE (visited);
2004 :
2005 8070794 : something_changed |= eliminate_unnecessary_stmts (aggressive);
2006 8070794 : something_changed |= cfg_altered; /* CFG edits count as a change even without stmt removal.  */
2007 :
2008 : /* We do not update postdominators, so free them unconditionally. */
2009 8070794 : free_dominance_info (CDI_POST_DOMINATORS);
2010 :
2011 : /* If we removed paths in the CFG, then we need to update
2012 : dominators as well. I haven't investigated the possibility
2013 : of incrementally updating dominators. */
2014 8070794 : if (cfg_altered)
2015 19980 : free_dominance_info (CDI_DOMINATORS);
2016 :
2017 8070794 : statistics_counter_event (cfun, "Statements deleted", stats.removed);
2018 8070794 : statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
2019 :
2020 : /* Debugging dumps. */
2021 8070794 : if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
2022 215 : print_stats ();
2023 :
2024 8070794 : tree_dce_done (aggressive);
2025 :
2026 8070794 : if (something_changed)
2027 : {
2028 822268 : free_numbers_of_iterations_estimates (cfun);
2029 822268 : if (in_loop_pipeline)
2030 65350 : scev_reset ();
2031 : todo |= TODO_update_ssa | TODO_cleanup_cfg;
2032 : }
2033 8070794 : return todo;
2034 : }
2035 :
2036 : namespace {
2037 :
2038 : const pass_data pass_data_dce = /* Descriptor for the conservative "dce" pass.  */
2039 : {
2040 : GIMPLE_PASS, /* type */
2041 : "dce", /* name */
2042 : OPTGROUP_NONE, /* optinfo_flags */
2043 : TV_TREE_DCE, /* tv_id */
2044 : ( PROP_cfg | PROP_ssa ), /* properties_required */
2045 : 0, /* properties_provided */
2046 : 0, /* properties_destroyed */
2047 : 0, /* todo_flags_start */
2048 : 0, /* todo_flags_finish */
2049 : };
2050 :
2051 : class pass_dce_base : public gimple_opt_pass /* Machinery shared by the dce and cddce pass objects.  */
2052 : {
2053 : public:
2054 : /* opt_pass methods: */
2055 8074141 : bool gate (function *) final override { return flag_tree_dce != 0; } /* Run only when -ftree-dce is enabled.  */
2056 2857220 : void set_pass_param (unsigned n, bool param) final override /* Param 0: update address-taken; param 1: remove unused locals.  */
2057 : {
2058 2857220 : gcc_assert (n == 0 || n == 1);
2059 2857220 : if (n == 0)
2060 1714332 : update_address_taken_p = param;
2061 1142888 : else if (n == 1)
2062 1142888 : remove_unused_locals_p = param;
2063 2857220 : }
2064 :
2065 : protected:
2066 3142942 : pass_dce_base (const pass_data &data, gcc::context *ctxt)
2067 6285884 : : gimple_opt_pass (data, ctxt)
2068 : {}
2069 8070794 : unsigned int execute_dce (function *, bool aggressive) /* Run DCE and OR in the configured extra TODO flags.  */
2070 : {
2071 8070794 : return (perform_tree_ssa_dce (aggressive)
2072 8070794 : | (remove_unused_locals_p ? TODO_remove_unused_locals : 0)
2073 8070794 : | (update_address_taken_p ? TODO_update_address_taken : 0));
2074 : }
2075 :
2076 : private:
2077 : bool update_address_taken_p = false; /* Adds TODO_update_address_taken when set.  */
2078 : bool remove_unused_locals_p = false; /* Adds TODO_remove_unused_locals when set.  */
2079 : }; // class pass_dce_base
2080 :
2081 :
2082 : class pass_dce : public pass_dce_base /* Conservative DCE: control dependences are ignored.  */
2083 : {
2084 : public:
2085 2285776 : pass_dce (gcc::context *ctxt)
2086 4571552 : : pass_dce_base (pass_data_dce, ctxt)
2087 : {}
2088 :
2089 : /* opt_pass methods: */
2090 2000054 : opt_pass * clone () final override { return new pass_dce (m_ctxt); }
2091 4376697 : unsigned int execute (function *func) final override
2092 : {
2093 4376697 : return execute_dce (func, /*aggressive=*/false);
2094 : }
2095 :
2096 : }; // class pass_dce
2097 :
2098 : } // anon namespace
2099 :
2100 : gimple_opt_pass *
2101 285722 : make_pass_dce (gcc::context *ctxt)
2102 : {
2103 285722 : return new pass_dce (ctxt); /* Allocate a fresh pass_dce instance for CTXT.  */
2104 : }
2105 :
2106 : namespace {
2107 :
2108 : const pass_data pass_data_cd_dce = /* Descriptor for the control-dependence "cddce" pass.  */
2109 : {
2110 : GIMPLE_PASS, /* type */
2111 : "cddce", /* name */
2112 : OPTGROUP_NONE, /* optinfo_flags */
2113 : TV_TREE_CD_DCE, /* tv_id */
2114 : ( PROP_cfg | PROP_ssa ), /* properties_required */
2115 : 0, /* properties_provided */
2116 : 0, /* properties_destroyed */
2117 : 0, /* todo_flags_start */
2118 : 0, /* todo_flags_finish */
2119 : };
2120 :
2121 : class pass_cd_dce : public pass_dce_base /* Control-dependence based DCE.  */
2122 : {
2123 : public:
2124 857166 : pass_cd_dce (gcc::context *ctxt)
2125 1714332 : : pass_dce_base (pass_data_cd_dce, ctxt)
2126 : {}
2127 :
2128 : /* opt_pass methods: */
2129 571444 : opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
2130 3694097 : unsigned int execute (function *func) final override
2131 : {
2132 3694097 : return execute_dce (func, /*aggressive=*/optimize >= 2); /* Aggressive algorithm only at -O2 and above.  */
2133 : }
2134 :
2135 : }; // class pass_cd_dce
2136 :
2137 : } // anon namespace
2138 :
2139 : gimple_opt_pass *
2140 285722 : make_pass_cd_dce (gcc::context *ctxt)
2141 : {
2142 285722 : return new pass_cd_dce (ctxt); /* Allocate a fresh pass_cd_dce instance for CTXT.  */
2143 : }
2144 :
2145 :
2146 : /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
2147 : is consumed by this function. The function has linear complexity in
2148 : the number of dead stmts with a constant factor like the average SSA
2149 : use operands number. */
2150 :
2151 : void
2152 37694905 : simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
2153 : {
2154 37694905 : int phiremoved = 0;
2155 37694905 : int stmtremoved = 0;
2156 71913813 : while (! bitmap_empty_p (worklist))
2157 : {
2158 : /* Pop item. */
2159 34218908 : unsigned i = bitmap_clear_first_set_bit (worklist);
2160 :
2161 34218908 : tree def = ssa_name (i);
2162 : /* Removed by somebody else or still in use.
2163 : Note use in itself for a phi node is not counted as still in use. */
2164 34218908 : if (!def)
2165 14181647 : continue;
2166 34049835 : if (!has_zero_uses (def))
2167 : {
2168 13974182 : gimple *def_stmt = SSA_NAME_DEF_STMT (def);
2169 :
2170 13974182 : if (gimple_code (def_stmt) != GIMPLE_PHI) /* A non-PHI with remaining uses is still live.  */
2171 13970350 : continue;
2172 :
2173 2661867 : imm_use_iterator use_iter;
2174 2661867 : use_operand_p use_p;
2175 2661867 : bool canremove = true;
2176 :
2177 5412706 : FOR_EACH_IMM_USE_FAST (use_p, use_iter, def) /* Removable if the only non-debug use is the PHI itself.  */
2178 : {
2179 2747007 : gimple *use_stmt = USE_STMT (use_p);
2180 : /* Ignore debug statements. */
2181 2747007 : if (is_gimple_debug (use_stmt))
2182 83007 : continue;
2183 2664000 : if (use_stmt != def_stmt)
2184 : {
2185 : canremove = false;
2186 : break;
2187 : }
2188 2661867 : }
2189 2661867 : if (!canremove)
2190 2658035 : continue;
2191 : }
2192 :
2193 20079485 : gimple *t = SSA_NAME_DEF_STMT (def);
2194 20079485 : if (gimple_has_side_effects (t))
2195 : {
2196 39653 : if (gcall *call = dyn_cast <gcall *> (t)) /* Keep the call for its side effects; just drop the dead lhs.  */
2197 : {
2198 35099 : gimple_call_set_lhs (call, NULL_TREE);
2199 35099 : update_stmt (call);
2200 35099 : release_ssa_name (def);
2201 : }
2202 39653 : continue;
2203 39653 : }
2204 :
2205 : /* The defining statement needs to be defining only this name.
2206 : ASM is the only statement that can define more than one
2207 : name. */
2208 20039832 : if (is_a<gasm *>(t)
2209 20039832 : && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
2210 15 : continue;
2211 :
2212 : /* Don't remove statements that are needed for non-call
2213 : eh to work. */
2214 20039817 : if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
2215 2556 : continue;
2216 :
2217 : /* Tell the caller that we removed a statement that might
2218 : throw so it could cleanup the cfg for that block. */
2219 20037261 : if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
2220 137 : bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);
2221 :
2222 : /* Add uses to the worklist. */
2223 20037261 : ssa_op_iter iter;
2224 20037261 : use_operand_p use_p;
2225 56667144 : FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
2226 : {
2227 16592622 : tree use = USE_FROM_PTR (use_p);
2228 16592622 : if (TREE_CODE (use) == SSA_NAME
2229 16592622 : && ! SSA_NAME_IS_DEFAULT_DEF (use)) /* Default defs have no removable defining stmt.  */
2230 14925677 : bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
2231 : }
2232 :
2233 : /* Remove stmt. */
2234 20037261 : if (dump_file && (dump_flags & TDF_DETAILS))
2235 : {
2236 289 : fprintf (dump_file, "Removing dead stmt:");
2237 289 : print_gimple_stmt (dump_file, t, 0);
2238 : }
2239 20037261 : gimple_stmt_iterator gsi = gsi_for_stmt (t);
2240 20037261 : if (gimple_code (t) == GIMPLE_PHI)
2241 : {
2242 2531183 : remove_phi_node (&gsi, true);
2243 2531183 : phiremoved++;
2244 : }
2245 : else
2246 : {
2247 17506078 : unlink_stmt_vdef (t);
2248 17506078 : gsi_remove (&gsi, true);
2249 17506078 : release_defs (t);
2250 17506078 : stmtremoved++;
2251 : }
2252 : }
2253 37694905 : statistics_counter_event (cfun, "PHIs removed",
2254 : phiremoved);
2255 37694905 : statistics_counter_event (cfun, "Statements removed",
2256 : stmtremoved);
2257 37694905 : }
|