Branch data Line data Source code
1 : : /* Dead code elimination pass for the GNU compiler.
2 : : Copyright (C) 2002-2025 Free Software Foundation, Inc.
3 : : Contributed by Ben Elliston <bje@redhat.com>
4 : : and Andrew MacLeod <amacleod@redhat.com>
5 : : Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6 : :
7 : : This file is part of GCC.
8 : :
9 : : GCC is free software; you can redistribute it and/or modify it
10 : : under the terms of the GNU General Public License as published by the
11 : : Free Software Foundation; either version 3, or (at your option) any
12 : : later version.
13 : :
14 : : GCC is distributed in the hope that it will be useful, but WITHOUT
15 : : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : : for more details.
18 : :
19 : : You should have received a copy of the GNU General Public License
20 : : along with GCC; see the file COPYING3. If not see
21 : : <http://www.gnu.org/licenses/>. */
22 : :
23 : : /* Dead code elimination.
24 : :
25 : : References:
26 : :
27 : : Building an Optimizing Compiler,
28 : : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29 : :
30 : : Advanced Compiler Design and Implementation,
31 : : Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32 : :
33 : : Dead-code elimination is the removal of statements which have no
34 : : impact on the program's output. "Dead statements" have no impact
35 : : on the program's output, while "necessary statements" may have
36 : : impact on the output.
37 : :
38 : : The algorithm consists of three phases:
39 : : 1. Marking as necessary all statements known to be necessary,
40 : : e.g. most function calls, writing a value to memory, etc;
41 : : 2. Propagating necessary statements, e.g., the statements
42 : : giving values to operands in necessary statements; and
43 : : 3. Removing dead statements. */
44 : :
45 : : #include "config.h"
46 : : #include "system.h"
47 : : #include "coretypes.h"
48 : : #include "backend.h"
49 : : #include "rtl.h"
50 : : #include "tree.h"
51 : : #include "gimple.h"
52 : : #include "cfghooks.h"
53 : : #include "tree-pass.h"
54 : : #include "ssa.h"
55 : : #include "gimple-pretty-print.h"
56 : : #include "fold-const.h"
57 : : #include "calls.h"
58 : : #include "cfganal.h"
59 : : #include "tree-eh.h"
60 : : #include "gimplify.h"
61 : : #include "gimple-iterator.h"
62 : : #include "tree-cfg.h"
63 : : #include "tree-ssa-loop-niter.h"
64 : : #include "tree-into-ssa.h"
65 : : #include "tree-dfa.h"
66 : : #include "cfgloop.h"
67 : : #include "tree-scalar-evolution.h"
68 : : #include "tree-ssa-propagate.h"
69 : : #include "gimple-fold.h"
70 : : #include "tree-ssa.h"
71 : : #include "ipa-modref-tree.h"
72 : : #include "ipa-modref.h"
73 : :
74 : : static struct stmt_stats
75 : : {
76 : : int total;
77 : : int total_phis;
78 : : int removed;
79 : : int removed_phis;
80 : : } stats;
81 : :
82 : : #define STMT_NECESSARY GF_PLF_1
83 : :
84 : : static vec<gimple *> worklist;
85 : :
86 : : /* Vector indicating an SSA name has already been processed and marked
87 : : as necessary. */
88 : : static sbitmap processed;
89 : :
90 : : /* Vector indicating that the last statement of a basic block has already
91 : : been marked as necessary. */
92 : : static sbitmap last_stmt_necessary;
93 : :
94 : : /* Vector indicating that BB contains statements that are live. */
95 : : static sbitmap bb_contains_live_stmts;
96 : :
97 : : /* Before we can determine whether a control branch is dead, we need to
98 : : compute which blocks are control dependent on which edges.
99 : :
100 : : We expect each block to be control dependent on very few edges so we
101 : : use a bitmap for each block recording its edges. An array holds the
102 : : bitmap. The Ith bit in the bitmap is set if that block is dependent
103 : : on the Ith edge. */
104 : : static control_dependences *cd;
105 : :
106 : : /* Vector indicating that a basic block has already had all the edges
107 : : processed that it is control dependent on. */
108 : : static sbitmap visited_control_parents;
109 : :
110 : : /* TRUE if this pass alters the CFG (by removing control statements).
111 : : FALSE otherwise.
112 : :
113 : : If this pass alters the CFG, then it will arrange for the dominators
114 : : to be recomputed. */
115 : : static bool cfg_altered;
116 : :
117 : : /* When non-NULL holds map from basic block index into the postorder. */
118 : : static int *bb_postorder;
119 : :
120 : :
121 : : /* True if we should treat any stmt with a vdef as necessary. */
122 : :
123 : : static inline bool
124 : 271094387 : keep_all_vdefs_p ()
125 : : {
126 : 271094387 : return optimize_debug;
127 : : }
128 : :
129 : : /* 1 if CALLEE is the function __cxa_atexit.
130 : : 2 if CALLEE is the function __aeabi_atexit.
131 : : 0 otherwise. */
132 : :
133 : : static inline int
134 : 85124767 : is_cxa_atexit (const_tree callee)
135 : : {
136 : 85124767 : if (callee != NULL_TREE
137 : 85124767 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__cxa_atexit") == 0)
138 : : return 1;
139 : 85012959 : if (callee != NULL_TREE
140 : 85012959 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__aeabi_atexit") == 0)
141 : 0 : return 2;
142 : : return 0;
143 : : }
144 : :
145 : : /* True if STMT is a call to __cxa_atexit (or __aeabi_atexit)
146 : : and the function argument to that call is a const or pure
147 : : non-looping function decl. */
148 : :
149 : : static inline bool
150 : 85124767 : is_removable_cxa_atexit_call (gimple *stmt)
151 : : {
152 : 85124767 : tree callee = gimple_call_fndecl (stmt);
153 : 85124767 : int functype = is_cxa_atexit (callee);
154 : 85124767 : if (!functype)
155 : : return false;
156 : 111808 : if (gimple_call_num_args (stmt) != 3)
157 : : return false;
158 : :
159 : : /* The function argument is the 1st argument for __cxa_atexit
160 : : or the 2nd argument for __eabi_atexit. */
161 : 223616 : tree arg = gimple_call_arg (stmt, functype == 2 ? 1 : 0);
162 : 111808 : if (TREE_CODE (arg) != ADDR_EXPR)
163 : : return false;
164 : 111808 : arg = TREE_OPERAND (arg, 0);
165 : 111808 : if (TREE_CODE (arg) != FUNCTION_DECL)
166 : : return false;
167 : 111808 : int flags = flags_from_decl_or_type (arg);
168 : :
169 : : /* If the function is noreturn then it cannot be removed. */
170 : 111808 : if (flags & ECF_NORETURN)
171 : : return false;
172 : :
173 : : /* The function needs to be const or pure and non looping. */
174 : 111650 : return (flags & (ECF_CONST|ECF_PURE))
175 : 111650 : && !(flags & ECF_LOOPING_CONST_OR_PURE);
176 : : }
177 : :
178 : : /* If STMT is not already marked necessary, mark it, and add it to the
179 : : worklist if ADD_TO_WORKLIST is true. */
180 : :
181 : : static inline void
182 : 460510326 : mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
183 : : {
184 : 460510326 : gcc_assert (stmt);
185 : :
186 : 460510326 : if (gimple_plf (stmt, STMT_NECESSARY))
187 : : return;
188 : :
189 : 460510138 : if (dump_file && (dump_flags & TDF_DETAILS))
190 : : {
191 : 854 : fprintf (dump_file, "Marking useful stmt: ");
192 : 854 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
193 : 854 : fprintf (dump_file, "\n");
194 : : }
195 : :
196 : 460510138 : gimple_set_plf (stmt, STMT_NECESSARY, true);
197 : 460510138 : if (add_to_worklist)
198 : 107425016 : worklist.safe_push (stmt);
199 : 107425016 : if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
200 : 37539499 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
201 : : }
202 : :
203 : :
204 : : /* Mark the statement defining operand OP as necessary. */
205 : :
206 : : static inline void
207 : 333754128 : mark_operand_necessary (tree op)
208 : : {
209 : 333754128 : gimple *stmt;
210 : 333754128 : int ver;
211 : :
212 : 333754128 : gcc_assert (op);
213 : :
214 : 333754128 : ver = SSA_NAME_VERSION (op);
215 : 333754128 : if (bitmap_bit_p (processed, ver))
216 : : {
217 : 120510197 : stmt = SSA_NAME_DEF_STMT (op);
218 : 120510197 : gcc_assert (gimple_nop_p (stmt)
219 : : || gimple_plf (stmt, STMT_NECESSARY));
220 : 180183222 : return;
221 : : }
222 : 213243931 : bitmap_set_bit (processed, ver);
223 : :
224 : 213243931 : stmt = SSA_NAME_DEF_STMT (op);
225 : 213243931 : gcc_assert (stmt);
226 : :
227 : 213243931 : if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
228 : : return;
229 : :
230 : 153570906 : if (dump_file && (dump_flags & TDF_DETAILS))
231 : : {
232 : 266 : fprintf (dump_file, "marking necessary through ");
233 : 266 : print_generic_expr (dump_file, op);
234 : 266 : fprintf (dump_file, " stmt ");
235 : 266 : print_gimple_stmt (dump_file, stmt, 0);
236 : : }
237 : :
238 : 153570906 : gimple_set_plf (stmt, STMT_NECESSARY, true);
239 : 153570906 : if (bb_contains_live_stmts)
240 : 52583757 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
241 : 153570906 : worklist.safe_push (stmt);
242 : : }
243 : :
244 : : /* Return true if STMT is a call to allocation function that can be
245 : : optimized out if the memory block is never used for anything else
246 : : than NULL pointer check or free.
247 : : If NON_NULL_CHECK is false, we can further assume that return value
248 : : is never checked to be non-NULL.
249 : : Don't return true if it is called with constant size (or sizes for calloc)
250 : : and the size is excessively large (larger than PTRDIFF_MAX, for calloc
251 : : either argument larger than PTRDIFF_MAX or both constant and their product
252 : : larger than PTRDIFF_MAX). */
253 : :
254 : : static bool
255 : 35391419 : is_removable_allocation_p (gcall *stmt, bool non_null_check)
256 : : {
257 : 35391419 : int arg = -1;
258 : 35391419 : tree callee = gimple_call_fndecl (stmt), a1, a2;
259 : 35391419 : if (callee != NULL_TREE
260 : 35391419 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
261 : 8975737 : switch (DECL_FUNCTION_CODE (callee))
262 : : {
263 : 510199 : case BUILT_IN_MALLOC:
264 : 510199 : arg = 1;
265 : 510199 : goto do_malloc;
266 : 150 : case BUILT_IN_ALIGNED_ALLOC:
267 : 150 : arg = 2;
268 : 150 : goto do_malloc;
269 : 5389 : case BUILT_IN_CALLOC:
270 : 5389 : arg = 3;
271 : 5389 : goto do_malloc;
272 : 111503 : CASE_BUILT_IN_ALLOCA:
273 : 111503 : arg = 1;
274 : 111503 : goto do_malloc;
275 : : case BUILT_IN_STRDUP:
276 : : case BUILT_IN_STRNDUP:
277 : : arg = 0;
278 : : /* FALLTHRU */
279 : 631438 : do_malloc:
280 : 631438 : if (non_null_check)
281 : : {
282 : 100202 : if (flag_malloc_dce <= 1)
283 : : return false;
284 : : }
285 : 531236 : else if (!flag_malloc_dce)
286 : : return false;
287 : : break;
288 : :
289 : : case BUILT_IN_GOMP_ALLOC:
290 : 35391182 : arg = 2;
291 : : break;
292 : :
293 : : default:;
294 : : }
295 : :
296 : 35391182 : if (arg == -1
297 : 35391182 : && callee != NULL_TREE
298 : 32788765 : && flag_allocation_dce
299 : 32785341 : && gimple_call_from_new_or_delete (stmt)
300 : 36476530 : && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
301 : : arg = 1;
302 : :
303 : 35115902 : switch (arg)
304 : : {
305 : : case -1:
306 : : return false;
307 : : case 0:
308 : : return true;
309 : 901954 : case 1:
310 : 901954 : case 2:
311 : 901954 : if (gimple_call_num_args (stmt) < (unsigned) arg)
312 : : return false;
313 : 901948 : a1 = gimple_call_arg (stmt, arg - 1);
314 : 901948 : if (tree_fits_uhwi_p (a1)
315 : 901948 : && (tree_to_uhwi (a1)
316 : 515338 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
317 : : return false;
318 : : return true;
319 : 5389 : case 3:
320 : 5389 : if (gimple_call_num_args (stmt) < 2)
321 : : return false;
322 : 5389 : a1 = gimple_call_arg (stmt, 0);
323 : 5389 : a2 = gimple_call_arg (stmt, 1);
324 : 5389 : if (tree_fits_uhwi_p (a1)
325 : 5389 : && (tree_to_uhwi (a1)
326 : 4421 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
327 : : return false;
328 : 5367 : if (tree_fits_uhwi_p (a2)
329 : 5367 : && (tree_to_uhwi (a2)
330 : 4562 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
331 : : return false;
332 : 10690 : if (TREE_CODE (a1) == INTEGER_CST
333 : 4389 : && TREE_CODE (a2) == INTEGER_CST
334 : 13139 : && (wi::to_widest (a1) * wi::to_widest (a2)
335 : 13139 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
336 : : return false;
337 : : return true;
338 : : default:
339 : : gcc_unreachable ();
340 : : }
341 : : }
342 : :
343 : : /* Return true if STMT is a conditional
344 : : if (ptr != NULL)
345 : : where ptr was returned by a removable allocation function. */
346 : :
347 : : static bool
348 : 240969873 : checks_return_value_of_removable_allocation_p (gimple *stmt)
349 : : {
350 : 240969873 : gcall *def_stmt;
351 : 240969873 : return gimple_code (stmt) == GIMPLE_COND
352 : 30945295 : && (gimple_cond_code (stmt) == EQ_EXPR
353 : 21746419 : || gimple_cond_code (stmt) == NE_EXPR)
354 : 23659193 : && integer_zerop (gimple_cond_rhs (stmt))
355 : 13115174 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
356 : : && (def_stmt = dyn_cast <gcall *>
357 : 13112301 : (SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt))))
358 : 244303490 : && is_removable_allocation_p (def_stmt, true);
359 : : }
360 : :
361 : :
362 : : /* Mark STMT as necessary if it obviously is. Add it to the worklist if
363 : : it can make other statements necessary.
364 : :
365 : : If AGGRESSIVE is false, control statements are conservatively marked as
366 : : necessary. */
367 : :
368 : : static void
369 : 597084587 : mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
370 : : {
371 : : /* Statements that are implicitly live. Most function calls, asm
372 : : and return statements are required. Labels and GIMPLE_BIND nodes
373 : : are kept because they are control flow, and we have no way of
374 : : knowing whether they can be removed. DCE can eliminate all the
375 : : other statements in a block, and CFG can then remove the block
376 : : and labels. */
377 : 597084587 : switch (gimple_code (stmt))
378 : : {
379 : 7126705 : case GIMPLE_PREDICT:
380 : 7126705 : case GIMPLE_LABEL:
381 : 7126705 : mark_stmt_necessary (stmt, false);
382 : 7126705 : return;
383 : :
384 : 9996131 : case GIMPLE_ASM:
385 : 9996131 : case GIMPLE_RESX:
386 : 9996131 : case GIMPLE_RETURN:
387 : 9996131 : mark_stmt_necessary (stmt, true);
388 : 9996131 : return;
389 : :
390 : 36937195 : case GIMPLE_CALL:
391 : 36937195 : {
392 : 36937195 : gcall *call = as_a <gcall *> (stmt);
393 : :
394 : : /* Never elide a noreturn call we pruned control-flow for.
395 : : Same for statements that can alter control flow in unpredictable
396 : : ways. */
397 : 36937195 : if (gimple_call_ctrl_altering_p (call))
398 : : {
399 : 5155254 : mark_stmt_necessary (call, true);
400 : 5155254 : return;
401 : : }
402 : :
403 : 31781941 : if (is_removable_allocation_p (call, false))
404 : : return;
405 : :
406 : :
407 : : /* For __cxa_atexit calls, don't mark as necessary right away. */
408 : 31140629 : if (is_removable_cxa_atexit_call (call))
409 : : return;
410 : :
411 : : /* IFN_GOACC_LOOP calls are necessary in that they are used to
412 : : represent parameter (i.e. step, bound) of a lowered OpenACC
413 : : partitioned loop. But this kind of partitioned loop might not
414 : : survive from aggressive loop removal for it has loop exit and
415 : : is assumed to be finite. Therefore, we need to explicitly mark
416 : : these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
417 : 31140223 : if (gimple_call_internal_p (call, IFN_GOACC_LOOP))
418 : : {
419 : 27125 : mark_stmt_necessary (call, true);
420 : 27125 : return;
421 : : }
422 : : break;
423 : : }
424 : :
425 : 346259039 : case GIMPLE_DEBUG:
426 : : /* Debug temps without a value are not useful. ??? If we could
427 : : easily locate the debug temp bind stmt for a use thereof,
428 : : would could refrain from marking all debug temps here, and
429 : : mark them only if they're used. */
430 : 346259039 : if (gimple_debug_nonbind_marker_p (stmt)
431 : 263641405 : || !gimple_debug_bind_p (stmt)
432 : 262703336 : || gimple_debug_bind_has_value_p (stmt)
433 : 462116984 : || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
434 : 345958417 : mark_stmt_necessary (stmt, false);
435 : : return;
436 : :
437 : 1869 : case GIMPLE_GOTO:
438 : 1869 : gcc_assert (!simple_goto_p (stmt));
439 : 1869 : mark_stmt_necessary (stmt, true);
440 : 1869 : return;
441 : :
442 : 30984653 : case GIMPLE_COND:
443 : 30984653 : gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
444 : : /* Fall through. */
445 : :
446 : 31162151 : case GIMPLE_SWITCH:
447 : 31162151 : if (! aggressive)
448 : 20694457 : mark_stmt_necessary (stmt, true);
449 : : break;
450 : :
451 : 165558736 : case GIMPLE_ASSIGN:
452 : : /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
453 : : do not prevail. That also makes control flow leading to them
454 : : not necessary in aggressive mode. */
455 : 177839151 : if (gimple_clobber_p (stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
456 : : return;
457 : : break;
458 : :
459 : : default:
460 : : break;
461 : : }
462 : :
463 : : /* If the statement has volatile operands, it needs to be preserved.
464 : : Same for statements that can alter control flow in unpredictable
465 : : ways. */
466 : 226545199 : if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
467 : : {
468 : 42224494 : mark_stmt_necessary (stmt, true);
469 : 42224494 : return;
470 : : }
471 : :
472 : : /* If a statement could throw, it can be deemed necessary unless we
473 : : are allowed to remove dead EH. Test this after checking for
474 : : new/delete operators since we always elide their EH. */
475 : 184320705 : if (!cfun->can_delete_dead_exceptions
476 : 184320705 : && stmt_could_throw_p (cfun, stmt))
477 : : {
478 : 5964160 : mark_stmt_necessary (stmt, true);
479 : 5964160 : return;
480 : : }
481 : :
482 : 222352425 : if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
483 : 222333385 : || stmt_may_clobber_global_p (stmt, false))
484 : : {
485 : 12936155 : mark_stmt_necessary (stmt, true);
486 : 12936155 : return;
487 : : }
488 : :
489 : : return;
490 : : }
491 : :
492 : :
493 : : /* Mark the last statement of BB as necessary. */
494 : :
495 : : static bool
496 : 43166434 : mark_last_stmt_necessary (basic_block bb)
497 : : {
498 : 43166434 : if (!bitmap_set_bit (last_stmt_necessary, bb->index))
499 : : return true;
500 : :
501 : 16854778 : bitmap_set_bit (bb_contains_live_stmts, bb->index);
502 : :
503 : : /* We actually mark the statement only if it is a control statement. */
504 : 16854778 : gimple *stmt = *gsi_last_bb (bb);
505 : 16854778 : if (stmt && is_ctrl_stmt (stmt))
506 : : {
507 : 10425559 : mark_stmt_necessary (stmt, true);
508 : 10425559 : return true;
509 : : }
510 : : return false;
511 : : }
512 : :
513 : :
514 : : /* Mark control dependent edges of BB as necessary. We have to do this only
515 : : once for each basic block so we set the appropriate bit after we're done.
516 : :
517 : : When IGNORE_SELF is true, ignore BB in the list of control dependences. */
518 : :
519 : : static void
520 : 31810256 : mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
521 : : {
522 : 31810256 : bitmap_iterator bi;
523 : 31810256 : unsigned edge_number;
524 : 31810256 : bool skipped = false;
525 : :
526 : 31810256 : gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
527 : :
528 : 31810256 : if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
529 : 3508573 : return;
530 : :
531 : 69976236 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
532 : : 0, edge_number, bi)
533 : : {
534 : 41674553 : basic_block cd_bb = cd->get_edge_src (edge_number);
535 : :
536 : 41674553 : if (ignore_self && cd_bb == bb)
537 : : {
538 : 67292 : skipped = true;
539 : 67292 : continue;
540 : : }
541 : :
542 : 41607261 : if (!mark_last_stmt_necessary (cd_bb))
543 : 6426905 : mark_control_dependent_edges_necessary (cd_bb, false);
544 : : }
545 : :
546 : 28301683 : if (!skipped)
547 : 28234391 : bitmap_set_bit (visited_control_parents, bb->index);
548 : : }
549 : :
550 : :
551 : : /* Find obviously necessary statements. These are things like most function
552 : : calls, and stores to file level variables.
553 : :
554 : : If EL is NULL, control statements are conservatively marked as
555 : : necessary. Otherwise it contains the list of edges used by control
556 : : dependence analysis. */
557 : :
558 : : static void
559 : 7994927 : find_obviously_necessary_stmts (bool aggressive)
560 : : {
561 : 7994927 : basic_block bb;
562 : 7994927 : gimple_stmt_iterator gsi;
563 : 7994927 : edge e;
564 : 7994927 : gimple *phi, *stmt;
565 : 7994927 : int flags;
566 : :
567 : 90946801 : FOR_EACH_BB_FN (bb, cfun)
568 : : {
569 : : /* PHI nodes are never inherently necessary. */
570 : 120060772 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
571 : : {
572 : 37108898 : phi = gsi_stmt (gsi);
573 : 37108898 : gimple_set_plf (phi, STMT_NECESSARY, false);
574 : : }
575 : :
576 : : /* Check all statements in the block. */
577 : 762988335 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
578 : : {
579 : 597084587 : stmt = gsi_stmt (gsi);
580 : 597084587 : gimple_set_plf (stmt, STMT_NECESSARY, false);
581 : 597084587 : mark_stmt_if_obviously_necessary (stmt, aggressive);
582 : : }
583 : : }
584 : :
585 : : /* Pure and const functions are finite and thus have no infinite loops in
586 : : them. */
587 : 7994927 : flags = flags_from_decl_or_type (current_function_decl);
588 : 7994927 : if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
589 : 914952 : return;
590 : :
591 : : /* Prevent the empty possibly infinite loops from being removed. This is
592 : : needed to make the logic in remove_dead_stmt work to identify the
593 : : correct edge to keep when removing a controlling condition. */
594 : 7079975 : if (aggressive)
595 : : {
596 : 3327464 : if (mark_irreducible_loops ())
597 : 284825 : FOR_EACH_BB_FN (bb, cfun)
598 : : {
599 : 279317 : edge_iterator ei;
600 : 703187 : FOR_EACH_EDGE (e, ei, bb->succs)
601 : 423870 : if ((e->flags & EDGE_DFS_BACK)
602 : 27336 : && (e->flags & EDGE_IRREDUCIBLE_LOOP))
603 : : {
604 : 10521 : if (dump_file)
605 : 0 : fprintf (dump_file, "Marking back edge of irreducible "
606 : 0 : "loop %i->%i\n", e->src->index, e->dest->index);
607 : 10521 : mark_control_dependent_edges_necessary (e->dest, false);
608 : : }
609 : : }
610 : :
611 : 11756227 : for (auto loop : loops_list (cfun, 0))
612 : : /* For loops without an exit do not mark any condition. */
613 : 1773835 : if (loop->exits->next->e && !finite_loop_p (loop))
614 : : {
615 : 279034 : if (dump_file)
616 : 1 : fprintf (dump_file, "cannot prove finiteness of loop %i\n",
617 : : loop->num);
618 : 279034 : mark_control_dependent_edges_necessary (loop->latch, false);
619 : 3327464 : }
620 : : }
621 : : }
622 : :
623 : :
624 : : /* Return true if REF is based on an aliased base, otherwise false. */
625 : :
626 : : static bool
627 : 99946780 : ref_may_be_aliased (tree ref)
628 : : {
629 : 99946780 : if (TREE_CODE (ref) == WITH_SIZE_EXPR)
630 : 0 : ref = TREE_OPERAND (ref, 0);
631 : 177235762 : while (handled_component_p (ref))
632 : 77288982 : ref = TREE_OPERAND (ref, 0);
633 : 99946780 : if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
634 : 99946780 : && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
635 : 17165539 : ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
636 : 99946780 : return !(DECL_P (ref)
637 : 67316712 : && !may_be_aliased (ref));
638 : : }
639 : :
640 : : static bitmap visited = NULL;
641 : : static unsigned int longest_chain = 0;
642 : : static unsigned int total_chain = 0;
643 : : static unsigned int nr_walks = 0;
644 : : static bool chain_ovfl = false;
645 : :
646 : : /* Worker for the walker that marks reaching definitions of REF,
647 : : which is based on a non-aliased decl, necessary. It returns
648 : : true whenever the defining statement of the current VDEF is
649 : : a kill for REF, as no dominating may-defs are necessary for REF
650 : : anymore. DATA points to the basic-block that contains the
651 : : stmt that refers to REF. */
652 : :
653 : : static bool
654 : 38529460 : mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
655 : : {
656 : 38529460 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
657 : :
658 : : /* All stmts we visit are necessary. */
659 : 38529460 : if (! gimple_clobber_p (def_stmt))
660 : 38160936 : mark_operand_necessary (vdef);
661 : :
662 : : /* If the stmt lhs kills ref, then we can stop walking. */
663 : 38529460 : if (gimple_has_lhs (def_stmt)
664 : 28407890 : && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
665 : : /* The assignment is not necessarily carried out if it can throw
666 : : and we can catch it in the current function where we could inspect
667 : : the previous value.
668 : : ??? We only need to care about the RHS throwing. For aggregate
669 : : assignments or similar calls and non-call exceptions the LHS
670 : : might throw as well. */
671 : 36786894 : && !stmt_can_throw_internal (cfun, def_stmt))
672 : : {
673 : 24529445 : tree base, lhs = gimple_get_lhs (def_stmt);
674 : 24529445 : poly_int64 size, offset, max_size;
675 : 24529445 : bool reverse;
676 : 24529445 : ao_ref_base (ref);
677 : 24529445 : base
678 : 24529445 : = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
679 : : /* We can get MEM[symbol: sZ, index: D.8862_1] here,
680 : : so base == refd->base does not always hold. */
681 : 24529445 : if (base == ref->base)
682 : : {
683 : : /* For a must-alias check we need to be able to constrain
684 : : the accesses properly. */
685 : 22535646 : if (known_eq (size, max_size)
686 : 22535646 : && known_subrange_p (ref->offset, ref->max_size, offset, size))
687 : 9093910 : return true;
688 : : /* Or they need to be exactly the same. */
689 : 13442693 : else if (ref->ref
690 : : /* Make sure there is no induction variable involved
691 : : in the references (gcc.c-torture/execute/pr42142.c).
692 : : The simplest way is to check if the kill dominates
693 : : the use. */
694 : : /* But when both are in the same block we cannot
695 : : easily tell whether we came from a backedge
696 : : unless we decide to compute stmt UIDs
697 : : (see PR58246). */
698 : 13442693 : && (basic_block) data != gimple_bb (def_stmt)
699 : 5946013 : && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
700 : 5946013 : gimple_bb (def_stmt))
701 : 16721757 : && operand_equal_p (ref->ref, lhs, 0))
702 : : return true;
703 : : }
704 : : }
705 : :
706 : : /* Otherwise keep walking. */
707 : : return false;
708 : : }
709 : :
710 : : static void
711 : 13946267 : mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
712 : : {
713 : : /* Should have been caught before calling this function. */
714 : 13946267 : gcc_checking_assert (!keep_all_vdefs_p ());
715 : :
716 : 13946267 : unsigned int chain;
717 : 13946267 : ao_ref refd;
718 : 13946267 : gcc_assert (!chain_ovfl);
719 : 13946267 : ao_ref_init (&refd, ref);
720 : 27892534 : chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
721 : : mark_aliased_reaching_defs_necessary_1,
722 : 13946267 : gimple_bb (stmt), NULL);
723 : 13946267 : if (chain > longest_chain)
724 : 2304059 : longest_chain = chain;
725 : 13946267 : total_chain += chain;
726 : 13946267 : nr_walks++;
727 : 13946267 : }
728 : :
729 : : /* Worker for the walker that marks reaching definitions of REF, which
730 : : is not based on a non-aliased decl. For simplicity we need to end
731 : : up marking all may-defs necessary that are not based on a non-aliased
732 : : decl. The only job of this walker is to skip may-defs based on
733 : : a non-aliased decl. */
734 : :
735 : : static bool
736 : 76593182 : mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
737 : : tree vdef, void *data ATTRIBUTE_UNUSED)
738 : : {
739 : 76593182 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
740 : :
741 : : /* We have to skip already visited (and thus necessary) statements
742 : : to make the chaining work after we dropped back to simple mode. */
743 : 76593182 : if (chain_ovfl
744 : 76593182 : && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
745 : : {
746 : 2621174 : gcc_assert (gimple_nop_p (def_stmt)
747 : : || gimple_plf (def_stmt, STMT_NECESSARY));
748 : : return false;
749 : : }
750 : :
751 : : /* We want to skip stores to non-aliased variables. */
752 : 73972008 : if (!chain_ovfl
753 : 73972008 : && gimple_assign_single_p (def_stmt))
754 : : {
755 : 50306283 : tree lhs = gimple_assign_lhs (def_stmt);
756 : 50306283 : if (!ref_may_be_aliased (lhs))
757 : : return false;
758 : : }
759 : :
760 : : /* We want to skip statments that do not constitute stores but have
761 : : a virtual definition. */
762 : 60948958 : if (gcall *call = dyn_cast <gcall *> (def_stmt))
763 : : {
764 : 22832505 : tree callee = gimple_call_fndecl (call);
765 : 22832505 : if (callee != NULL_TREE
766 : 22832505 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
767 : 3918858 : switch (DECL_FUNCTION_CODE (callee))
768 : : {
769 : : case BUILT_IN_MALLOC:
770 : : case BUILT_IN_ALIGNED_ALLOC:
771 : : case BUILT_IN_CALLOC:
772 : : CASE_BUILT_IN_ALLOCA:
773 : : case BUILT_IN_STRDUP:
774 : : case BUILT_IN_STRNDUP:
775 : : case BUILT_IN_FREE:
776 : : case BUILT_IN_GOMP_ALLOC:
777 : : case BUILT_IN_GOMP_FREE:
778 : : return false;
779 : :
780 : : default:;
781 : : }
782 : :
783 : 22224421 : if (callee != NULL_TREE
784 : 21108435 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
785 : 20894750 : || DECL_IS_OPERATOR_DELETE_P (callee))
786 : 22998130 : && gimple_call_from_new_or_delete (call))
787 : : return false;
788 : 21461996 : if (is_removable_cxa_atexit_call (call))
789 : : return false;
790 : : }
791 : :
792 : 59578261 : if (! gimple_clobber_p (def_stmt))
793 : 53201130 : mark_operand_necessary (vdef);
794 : :
795 : : return false;
796 : : }
797 : :
798 : : static void
799 : 70534976 : mark_all_reaching_defs_necessary (gimple *stmt)
800 : : {
801 : : /* Should have been caught before calling this function. */
802 : 70534976 : gcc_checking_assert (!keep_all_vdefs_p ());
803 : 141069952 : walk_aliased_vdefs (NULL, gimple_vuse (stmt),
804 : : mark_all_reaching_defs_necessary_1, NULL, &visited);
805 : 70534976 : }
806 : :
807 : : /* Return true for PHI nodes with one or identical arguments
808 : : can be removed. */
809 : : static bool
810 : 20039037 : degenerate_phi_p (gimple *phi)
811 : : {
812 : 20039037 : unsigned int i;
813 : 20039037 : tree op = gimple_phi_arg_def (phi, 0);
814 : 21091800 : for (i = 1; i < gimple_phi_num_args (phi); i++)
815 : 18203402 : if (gimple_phi_arg_def (phi, i) != op)
816 : : return false;
817 : : return true;
818 : : }
819 : :
820 : : /* Return that NEW_CALL and DELETE_CALL are a valid pair of new
821 : : and delete operators. */
822 : :
823 : : static bool
824 : 49190 : valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
825 : : {
826 : 49190 : tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
827 : 49190 : tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
828 : 49190 : return valid_new_delete_pair_p (new_asm, delete_asm);
829 : : }
830 : :
831 : : /* Propagate necessity using the operands of necessary statements.
832 : : Process the uses on each statement in the worklist, and add all
833 : : feeding statements which contribute to the calculation of this
834 : : value to the worklist.
835 : :
836 : : In conservative mode, EL is NULL. */
837 : :
838 : : static void
839 : 7994927 : propagate_necessity (bool aggressive)
840 : : {
841 : 7994927 : gimple *stmt;
842 : :
843 : 7994927 : if (dump_file && (dump_flags & TDF_DETAILS))
844 : 208 : fprintf (dump_file, "\nProcessing worklist:\n");
845 : :
846 : 268990849 : while (worklist.length () > 0)
847 : : {
848 : : /* Take STMT from worklist. */
849 : 260995922 : stmt = worklist.pop ();
850 : :
851 : 260995922 : if (dump_file && (dump_flags & TDF_DETAILS))
852 : : {
853 : 1101 : fprintf (dump_file, "processing: ");
854 : 1101 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
855 : 1101 : fprintf (dump_file, "\n");
856 : : }
857 : :
858 : 260995922 : if (aggressive)
859 : : {
860 : : /* Mark the last statement of the basic blocks on which the block
861 : : containing STMT is control dependent, but only if we haven't
862 : : already done so. */
863 : 90123256 : basic_block bb = gimple_bb (stmt);
864 : 90123256 : if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
865 : 90123256 : && !bitmap_bit_p (visited_control_parents, bb->index))
866 : 18686015 : mark_control_dependent_edges_necessary (bb, false);
867 : : }
868 : :
869 : 260995922 : if (gimple_code (stmt) == GIMPLE_PHI
870 : : /* We do not process virtual PHI nodes nor do we track their
871 : : necessity. */
872 : 300718958 : && !virtual_operand_p (gimple_phi_result (stmt)))
873 : : {
874 : : /* PHI nodes are somewhat special in that each PHI alternative has
875 : : data and control dependencies. All the statements feeding the
876 : : PHI node's arguments are always necessary. In aggressive mode,
877 : : we also consider the control dependent edges leading to the
878 : : predecessor block associated with each PHI alternative as
879 : : necessary. */
880 : 19861518 : gphi *phi = as_a <gphi *> (stmt);
881 : 19861518 : size_t k;
882 : :
883 : 64766371 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
884 : : {
885 : 44904853 : tree arg = PHI_ARG_DEF (stmt, k);
886 : 44904853 : if (TREE_CODE (arg) == SSA_NAME)
887 : 33346400 : mark_operand_necessary (arg);
888 : : }
889 : :
890 : : /* For PHI operands it matters from where the control flow arrives
891 : : to the BB. Consider the following example:
892 : :
893 : : a=exp1;
894 : : b=exp2;
895 : : if (test)
896 : : ;
897 : : else
898 : : ;
899 : : c=PHI(a,b)
900 : :
901 : : We need to mark control dependence of the empty basic blocks, since they
902 : : contains computation of PHI operands.
903 : :
904 : : Doing so is too restrictive in the case the predecestor block is in
905 : : the loop. Consider:
906 : :
907 : : if (b)
908 : : {
909 : : int i;
910 : : for (i = 0; i<1000; ++i)
911 : : ;
912 : : j = 0;
913 : : }
914 : : return j;
915 : :
916 : : There is PHI for J in the BB containing return statement.
917 : : In this case the control dependence of predecestor block (that is
918 : : within the empty loop) also contains the block determining number
919 : : of iterations of the block that would prevent removing of empty
920 : : loop in this case.
921 : :
922 : : This scenario can be avoided by splitting critical edges.
923 : : To save the critical edge splitting pass we identify how the control
924 : : dependence would look like if the edge was split.
925 : :
926 : : Consider the modified CFG created from current CFG by splitting
927 : : edge B->C. In the postdominance tree of modified CFG, C' is
928 : : always child of C. There are two cases how chlids of C' can look
929 : : like:
930 : :
931 : : 1) C' is leaf
932 : :
933 : : In this case the only basic block C' is control dependent on is B.
934 : :
935 : : 2) C' has single child that is B
936 : :
937 : : In this case control dependence of C' is same as control
938 : : dependence of B in original CFG except for block B itself.
939 : : (since C' postdominate B in modified CFG)
940 : :
941 : : Now how to decide what case happens? There are two basic options:
942 : :
943 : : a) C postdominate B. Then C immediately postdominate B and
944 : : case 2 happens iff there is no other way from B to C except
945 : : the edge B->C.
946 : :
947 : : There is other way from B to C iff there is succesor of B that
948 : : is not postdominated by B. Testing this condition is somewhat
949 : : expensive, because we need to iterate all succesors of B.
950 : : We are safe to assume that this does not happen: we will mark B
951 : : as needed when processing the other path from B to C that is
952 : : conrol dependent on B and marking control dependencies of B
953 : : itself is harmless because they will be processed anyway after
954 : : processing control statement in B.
955 : :
956 : : b) C does not postdominate B. Always case 1 happens since there is
957 : : path from C to exit that does not go through B and thus also C'. */
958 : :
959 : 33541255 : if (aggressive && !degenerate_phi_p (stmt))
960 : : {
961 : 20502130 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
962 : : {
963 : 14320349 : basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
964 : :
965 : 14320349 : if (gimple_bb (stmt)
966 : 14320349 : != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
967 : : {
968 : 1559173 : if (!mark_last_stmt_necessary (arg_bb))
969 : 2314 : mark_control_dependent_edges_necessary (arg_bb, false);
970 : : }
971 : 12761176 : else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
972 : 12761176 : && !bitmap_bit_p (visited_control_parents,
973 : : arg_bb->index))
974 : 6405467 : mark_control_dependent_edges_necessary (arg_bb, true);
975 : : }
976 : : }
977 : : }
978 : : else
979 : : {
980 : : /* Propagate through the operands. Examine all the USE, VUSE and
981 : : VDEF operands in this statement. Mark all the statements
982 : : which feed this statement's uses as necessary. */
983 : 241134404 : ssa_op_iter iter;
984 : 241134404 : tree use;
985 : :
986 : : /* If this is a call to free which is directly fed by an
987 : : allocation function do not mark that necessary through
988 : : processing the argument. */
989 : 241134404 : bool is_delete_operator
990 : 241134404 : = (is_gimple_call (stmt)
991 : 36908157 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
992 : 242168859 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
993 : 240324747 : if (is_delete_operator
994 : 240324747 : || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
995 : 240039218 : || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
996 : : {
997 : 1095186 : tree ptr = gimple_call_arg (stmt, 0);
998 : 1095186 : gcall *def_stmt;
999 : : /* If the pointer we free is defined by an allocation
1000 : : function do not add the call to the worklist. */
1001 : 1095186 : if (TREE_CODE (ptr) == SSA_NAME
1002 : 1094676 : && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
1003 : 1283383 : && is_removable_allocation_p (def_stmt, false))
1004 : : {
1005 : 167477 : if (is_delete_operator
1006 : 167477 : && !valid_new_delete_pair_p (def_stmt, stmt))
1007 : 134 : mark_operand_necessary (gimple_call_arg (stmt, 0));
1008 : :
1009 : : /* Delete operators can have alignment and (or) size
1010 : : as next arguments. When being a SSA_NAME, they
1011 : : must be marked as necessary. Similarly GOMP_free. */
1012 : 167477 : if (gimple_call_num_args (stmt) >= 2)
1013 : 83879 : for (unsigned i = 1; i < gimple_call_num_args (stmt);
1014 : : i++)
1015 : : {
1016 : 41948 : tree arg = gimple_call_arg (stmt, i);
1017 : 41948 : if (TREE_CODE (arg) == SSA_NAME)
1018 : 7768 : mark_operand_necessary (arg);
1019 : : }
1020 : :
1021 : 104290351 : continue;
1022 : 167477 : }
1023 : : }
1024 : :
1025 : 240966927 : if (checks_return_value_of_removable_allocation_p (stmt))
1026 : 98277 : continue;
1027 : :
1028 : 449906410 : FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
1029 : 209037760 : mark_operand_necessary (use);
1030 : :
1031 : 240868650 : use = gimple_vuse (stmt);
1032 : 208174021 : if (!use)
1033 : 98251386 : continue;
1034 : :
1035 : : /* No need to search for vdefs if we intrinsicly keep them all. */
1036 : 142617264 : if (keep_all_vdefs_p ())
1037 : 70141 : continue;
1038 : :
1039 : : /* If we dropped to simple mode make all immediately
1040 : : reachable definitions necessary. */
1041 : 142547123 : if (chain_ovfl)
1042 : : {
1043 : 3660132 : mark_all_reaching_defs_necessary (stmt);
1044 : 3660132 : continue;
1045 : : }
1046 : :
1047 : : /* For statements that may load from memory (have a VUSE) we
1048 : : have to mark all reaching (may-)definitions as necessary.
1049 : : We partition this task into two cases:
1050 : : 1) explicit loads based on decls that are not aliased
1051 : : 2) implicit loads (like calls) and explicit loads not
1052 : : based on decls that are not aliased (like indirect
1053 : : references or loads from globals)
1054 : : For 1) we mark all reaching may-defs as necessary, stopping
1055 : : at dominating kills. For 2) we want to mark all dominating
1056 : : references necessary, but non-aliased ones which we handle
1057 : : in 1). By keeping a global visited bitmap for references
1058 : : we walk for 2) we avoid quadratic behavior for those. */
1059 : :
1060 : 138886991 : if (gcall *call = dyn_cast <gcall *> (stmt))
1061 : : {
1062 : 33506615 : tree callee = gimple_call_fndecl (call);
1063 : 33506615 : unsigned i;
1064 : :
1065 : 34491088 : if (callee != NULL_TREE
1066 : 32104756 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
1067 : 31873920 : || DECL_IS_OPERATOR_DELETE_P (callee))
1068 : 34508709 : && gimple_call_from_new_or_delete (call))
1069 : 984473 : continue;
1070 : :
1071 : 32522142 : if (is_removable_cxa_atexit_call (call))
1072 : 0 : continue;
1073 : :
1074 : : bool all_refs = false;
1075 : : /* Calls implicitly load from memory, their arguments
1076 : : in addition may explicitly perform memory loads. */
1077 : 99870917 : for (i = 0; i < gimple_call_num_args (call); ++i)
1078 : : {
1079 : 67348775 : tree arg = gimple_call_arg (call, i);
1080 : 130341081 : if (TREE_CODE (arg) == SSA_NAME
1081 : 67348775 : || is_gimple_min_invariant (arg))
1082 : 62992306 : continue;
1083 : 4356469 : if (TREE_CODE (arg) == WITH_SIZE_EXPR)
1084 : 660 : arg = TREE_OPERAND (arg, 0);
1085 : 4356469 : if (!ref_may_be_aliased (arg))
1086 : 3959997 : mark_aliased_reaching_defs_necessary (call, arg);
1087 : : else
1088 : : all_refs = true;
1089 : : }
1090 : :
1091 : 32522142 : if (!all_refs && ipa_modref_callee_reads_no_memory_p (call))
1092 : 1058465 : continue;
1093 : 31463677 : mark_all_reaching_defs_necessary (call);
1094 : : }
1095 : 105380376 : else if (gimple_assign_single_p (stmt))
1096 : : {
1097 : 97426608 : tree rhs;
1098 : : /* If this is a load mark things necessary. */
1099 : 97426608 : rhs = gimple_assign_rhs1 (stmt);
1100 : 97426608 : if (TREE_CODE (rhs) != SSA_NAME
1101 : 79544766 : && !is_gimple_min_invariant (rhs)
1102 : 153594532 : && TREE_CODE (rhs) != CONSTRUCTOR)
1103 : : {
1104 : 44535670 : if (!ref_may_be_aliased (rhs))
1105 : 9289976 : mark_aliased_reaching_defs_necessary (stmt, rhs);
1106 : : else
1107 : 35245694 : mark_all_reaching_defs_necessary (stmt);
1108 : : }
1109 : : }
1110 : 7953768 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1111 : : {
1112 : 7809319 : tree rhs = gimple_return_retval (return_stmt);
1113 : : /* A return statement may perform a load. */
1114 : 7809319 : if (rhs
1115 : 4333232 : && TREE_CODE (rhs) != SSA_NAME
1116 : 1433317 : && !is_gimple_min_invariant (rhs)
1117 : 8521071 : && TREE_CODE (rhs) != CONSTRUCTOR)
1118 : : {
1119 : 711752 : if (!ref_may_be_aliased (rhs))
1120 : 690728 : mark_aliased_reaching_defs_necessary (stmt, rhs);
1121 : : else
1122 : 21024 : mark_all_reaching_defs_necessary (stmt);
1123 : : }
1124 : : }
1125 : 144449 : else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
1126 : : {
1127 : 143265 : unsigned i;
1128 : 143265 : mark_all_reaching_defs_necessary (stmt);
1129 : : /* Inputs may perform loads. */
1130 : 255437 : for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1131 : : {
1132 : 112172 : tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1133 : 112172 : if (TREE_CODE (op) != SSA_NAME
1134 : 73774 : && !is_gimple_min_invariant (op)
1135 : 36606 : && TREE_CODE (op) != CONSTRUCTOR
1136 : 148778 : && !ref_may_be_aliased (op))
1137 : 5566 : mark_aliased_reaching_defs_necessary (stmt, op);
1138 : : }
1139 : : }
1140 : 1184 : else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
1141 : : {
1142 : : /* The beginning of a transaction is a memory barrier. */
1143 : : /* ??? If we were really cool, we'd only be a barrier
1144 : : for the memories touched within the transaction. */
1145 : 1184 : mark_all_reaching_defs_necessary (stmt);
1146 : : }
1147 : : else
1148 : 0 : gcc_unreachable ();
1149 : :
1150 : : /* If we over-used our alias oracle budget drop to simple
1151 : : mode. The cost metric allows quadratic behavior
1152 : : (number of uses times number of may-defs queries) up to
1153 : : a constant maximal number of queries and after that falls back to
1154 : : super-linear complexity. */
1155 : 136844053 : if (/* Constant but quadratic for small functions. */
1156 : 136844053 : total_chain > 128 * 128
1157 : : /* Linear in the number of may-defs. */
1158 : 1144988 : && total_chain > 32 * longest_chain
1159 : : /* Linear in the number of uses. */
1160 : 4503 : && total_chain > nr_walks * 32)
1161 : : {
1162 : 4342 : chain_ovfl = true;
1163 : 4342 : if (visited)
1164 : 4342 : bitmap_clear (visited);
1165 : : }
1166 : : }
1167 : : }
1168 : 7994927 : }
1169 : :
1170 : : /* Remove dead PHI nodes from block BB. */
1171 : :
1172 : : static bool
1173 : 82951939 : remove_dead_phis (basic_block bb)
1174 : : {
1175 : 82951939 : bool something_changed = false;
1176 : 82951939 : gphi *phi;
1177 : 82951939 : gphi_iterator gsi;
1178 : :
1179 : 120060837 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
1180 : : {
1181 : 37108898 : stats.total_phis++;
1182 : 37108898 : phi = gsi.phi ();
1183 : :
1184 : : /* We do not track necessity of virtual PHI nodes. Instead do
1185 : : very simple dead PHI removal here. */
1186 : 74217796 : if (virtual_operand_p (gimple_phi_result (phi)))
1187 : : {
1188 : : /* Virtual PHI nodes with one or identical arguments
1189 : : can be removed. */
1190 : 16924382 : if (!loops_state_satisfies_p (LOOP_CLOSED_SSA)
1191 : 16924382 : && degenerate_phi_p (phi))
1192 : : {
1193 : 1969891 : tree vdef = gimple_phi_result (phi);
1194 : 1969891 : tree vuse = gimple_phi_arg_def (phi, 0);
1195 : :
1196 : 1969891 : use_operand_p use_p;
1197 : 1969891 : imm_use_iterator iter;
1198 : 1969891 : gimple *use_stmt;
1199 : 5066646 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1200 : 9396653 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1201 : 5119840 : SET_USE (use_p, vuse);
1202 : 1969891 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
1203 : 1969891 : && TREE_CODE (vuse) == SSA_NAME)
1204 : 466 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1205 : : }
1206 : : else
1207 : 14954491 : gimple_set_plf (phi, STMT_NECESSARY, true);
1208 : : }
1209 : :
1210 : 37108898 : if (!gimple_plf (phi, STMT_NECESSARY))
1211 : : {
1212 : 2292889 : something_changed = true;
1213 : 2292889 : if (dump_file && (dump_flags & TDF_DETAILS))
1214 : : {
1215 : 20 : fprintf (dump_file, "Deleting : ");
1216 : 20 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1217 : 20 : fprintf (dump_file, "\n");
1218 : : }
1219 : :
1220 : 2292889 : remove_phi_node (&gsi, true);
1221 : 2292889 : stats.removed_phis++;
1222 : 2292889 : continue;
1223 : : }
1224 : :
1225 : 34816009 : gsi_next (&gsi);
1226 : : }
1227 : 82951939 : return something_changed;
1228 : : }
1229 : :
1230 : :
1231 : : /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1232 : : containing I so that we don't have to look it up. */
1233 : :
1234 : : static void
1235 : 7435518 : remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
1236 : : vec<edge> &to_remove_edges)
1237 : : {
1238 : 7435518 : gimple *stmt = gsi_stmt (*i);
1239 : :
1240 : 7435518 : if (dump_file && (dump_flags & TDF_DETAILS))
1241 : : {
1242 : 112 : fprintf (dump_file, "Deleting : ");
1243 : 112 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1244 : 112 : fprintf (dump_file, "\n");
1245 : : }
1246 : :
1247 : 7435518 : stats.removed++;
1248 : :
1249 : : /* If we have determined that a conditional branch statement contributes
1250 : : nothing to the program, then we not only remove it, but we need to update
1251 : : the CFG. We can chose any of edges out of BB as long as we are sure to not
1252 : : close infinite loops. This is done by always choosing the edge closer to
1253 : : exit in inverted_rev_post_order_compute order. */
1254 : 7435518 : if (is_ctrl_stmt (stmt))
1255 : : {
1256 : 42323 : edge_iterator ei;
1257 : 42323 : edge e = NULL, e2;
1258 : :
1259 : : /* See if there is only one non-abnormal edge. */
1260 : 42323 : if (single_succ_p (bb))
1261 : 3 : e = single_succ_edge (bb);
1262 : : /* Otherwise chose one that is closer to bb with live statement in it.
1263 : : To be able to chose one, we compute inverted post order starting from
1264 : : all BBs with live statements. */
1265 : 3 : if (!e)
1266 : : {
1267 : 42320 : if (!bb_postorder)
1268 : : {
1269 : 20385 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
1270 : 20385 : int n = inverted_rev_post_order_compute (cfun, rpo,
1271 : : &bb_contains_live_stmts);
1272 : 20385 : bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1273 : 836716 : for (int i = 0; i < n; ++i)
1274 : 816331 : bb_postorder[rpo[i]] = i;
1275 : 20385 : free (rpo);
1276 : : }
1277 : 126970 : FOR_EACH_EDGE (e2, ei, bb->succs)
1278 : 84650 : if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1279 : 42330 : || bb_postorder [e->dest->index]
1280 : 42330 : >= bb_postorder [e2->dest->index])
1281 : 67086 : e = e2;
1282 : : }
1283 : 42320 : gcc_assert (e);
1284 : 42323 : e->probability = profile_probability::always ();
1285 : :
1286 : : /* The edge is no longer associated with a conditional, so it does
1287 : : not have TRUE/FALSE flags.
1288 : : We are also safe to drop EH/ABNORMAL flags and turn them into
1289 : : normal control flow, because we know that all the destinations (including
1290 : : those odd edges) are equivalent for program execution. */
1291 : 42323 : e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1292 : :
1293 : : /* The lone outgoing edge from BB will be a fallthru edge. */
1294 : 42323 : e->flags |= EDGE_FALLTHRU;
1295 : :
1296 : : /* Remove the remaining outgoing edges. */
1297 : 126976 : FOR_EACH_EDGE (e2, ei, bb->succs)
1298 : 84653 : if (e != e2)
1299 : : {
1300 : : /* If we made a BB unconditionally exit a loop or removed
1301 : : an entry into an irreducible region, then this transform
1302 : : alters the set of BBs in the loop. Schedule a fixup. */
1303 : 42330 : if (loop_exit_edge_p (bb->loop_father, e)
1304 : 42330 : || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1305 : 26207 : loops_state_set (LOOPS_NEED_FIXUP);
1306 : 42330 : to_remove_edges.safe_push (e2);
1307 : : }
1308 : : }
1309 : :
1310 : : /* If this is a store into a variable that is being optimized away,
1311 : : add a debug bind stmt if possible. */
1312 : 7435518 : if (MAY_HAVE_DEBUG_BIND_STMTS
1313 : 6846233 : && gimple_assign_single_p (stmt)
1314 : 7733796 : && is_gimple_val (gimple_assign_rhs1 (stmt)))
1315 : : {
1316 : 163708 : tree lhs = gimple_assign_lhs (stmt);
1317 : 139822 : if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1318 : 23931 : && !DECL_IGNORED_P (lhs)
1319 : 5891 : && is_gimple_reg_type (TREE_TYPE (lhs))
1320 : 58 : && !is_global_var (lhs)
1321 : 163766 : && !DECL_HAS_VALUE_EXPR_P (lhs))
1322 : : {
1323 : 58 : tree rhs = gimple_assign_rhs1 (stmt);
1324 : 58 : gdebug *note
1325 : 58 : = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1326 : 58 : gsi_insert_after (i, note, GSI_SAME_STMT);
1327 : : }
1328 : : }
1329 : :
1330 : 7435518 : unlink_stmt_vdef (stmt);
1331 : 7435518 : gsi_remove (i, true);
1332 : 7435518 : release_defs (stmt);
1333 : 7435518 : }
1334 : :
1335 : : /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1336 : : uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1337 : :
1338 : : static tree
1339 : 26 : find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1340 : : {
1341 : 26 : if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1342 : 0 : *walk_subtrees = 0;
1343 : 26 : if (*tp == (tree) data)
1344 : 13 : return *tp;
1345 : : return NULL_TREE;
1346 : : }
1347 : :
1348 : : /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1349 : : but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1350 : : into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1351 : : uses. */
1352 : :
1353 : : static void
1354 : 288334 : maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1355 : : enum tree_code subcode)
1356 : : {
1357 : 288334 : gimple *stmt = gsi_stmt (*gsi);
1358 : 288334 : tree lhs = gimple_call_lhs (stmt);
1359 : :
1360 : 288334 : if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1361 : 288182 : return;
1362 : :
1363 : 288334 : imm_use_iterator imm_iter;
1364 : 288334 : use_operand_p use_p;
1365 : 288334 : bool has_debug_uses = false;
1366 : 288334 : bool has_realpart_uses = false;
1367 : 288334 : bool has_other_uses = false;
1368 : 297170 : FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1369 : : {
1370 : 297018 : gimple *use_stmt = USE_STMT (use_p);
1371 : 297018 : if (is_gimple_debug (use_stmt))
1372 : : has_debug_uses = true;
1373 : 292604 : else if (is_gimple_assign (use_stmt)
1374 : 291570 : && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1375 : 297026 : && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1376 : : has_realpart_uses = true;
1377 : : else
1378 : : {
1379 : : has_other_uses = true;
1380 : : break;
1381 : : }
1382 : : }
1383 : :
1384 : 288334 : if (!has_realpart_uses || has_other_uses)
1385 : : return;
1386 : :
1387 : 152 : tree arg0 = gimple_call_arg (stmt, 0);
1388 : 152 : tree arg1 = gimple_call_arg (stmt, 1);
1389 : 152 : location_t loc = gimple_location (stmt);
1390 : 152 : tree type = TREE_TYPE (TREE_TYPE (lhs));
1391 : 152 : tree utype = unsigned_type_for (type);
1392 : 152 : tree result = fold_build2_loc (loc, subcode, utype,
1393 : : fold_convert_loc (loc, utype, arg0),
1394 : : fold_convert_loc (loc, utype, arg1));
1395 : 152 : result = fold_convert_loc (loc, type, result);
1396 : :
1397 : 152 : if (has_debug_uses)
1398 : : {
1399 : 13 : gimple *use_stmt;
1400 : 39 : FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1401 : : {
1402 : 26 : if (!gimple_debug_bind_p (use_stmt))
1403 : 13 : continue;
1404 : 13 : tree v = gimple_debug_bind_get_value (use_stmt);
1405 : 13 : if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1406 : : {
1407 : 13 : gimple_debug_bind_reset_value (use_stmt);
1408 : 13 : update_stmt (use_stmt);
1409 : : }
1410 : 13 : }
1411 : : }
1412 : :
1413 : 152 : if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1414 : 0 : result = drop_tree_overflow (result);
1415 : 152 : tree overflow = build_zero_cst (type);
1416 : 152 : tree ctype = build_complex_type (type);
1417 : 152 : if (TREE_CODE (result) == INTEGER_CST)
1418 : 0 : result = build_complex (ctype, result, overflow);
1419 : : else
1420 : 152 : result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1421 : : ctype, result, overflow);
1422 : :
1423 : 152 : if (dump_file && (dump_flags & TDF_DETAILS))
1424 : : {
1425 : 0 : fprintf (dump_file, "Transforming call: ");
1426 : 0 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1427 : 0 : fprintf (dump_file, "because the overflow result is never used into: ");
1428 : 0 : print_generic_stmt (dump_file, result, TDF_SLIM);
1429 : 0 : fprintf (dump_file, "\n");
1430 : : }
1431 : :
1432 : 152 : gimplify_and_update_call_from_tree (gsi, result);
1433 : : }
1434 : :
1435 : : /* Returns whether the control parents of BB are preserved. */
1436 : :
1437 : : static bool
1438 : 668458 : control_parents_preserved_p (basic_block bb)
1439 : : {
1440 : : /* If we marked the control parents from BB they are preserved. */
1441 : 668458 : if (bitmap_bit_p (visited_control_parents, bb->index))
1442 : : return true;
1443 : :
1444 : : /* But they can also end up being marked from elsewhere. */
1445 : 6465 : bitmap_iterator bi;
1446 : 6465 : unsigned edge_number;
1447 : 10044 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
1448 : : 0, edge_number, bi)
1449 : : {
1450 : 6564 : basic_block cd_bb = cd->get_edge_src (edge_number);
1451 : 6564 : if (cd_bb != bb
1452 : 6564 : && !bitmap_bit_p (last_stmt_necessary, cd_bb->index))
1453 : : return false;
1454 : : }
1455 : : /* And cache the result. */
1456 : 3480 : bitmap_set_bit (visited_control_parents, bb->index);
1457 : 3480 : return true;
1458 : : }
1459 : :
1460 : : /* Eliminate unnecessary statements. Any instruction not marked as necessary
1461 : : contributes nothing to the program, and can be deleted. */
1462 : :
1463 : : static bool
1464 : 7994927 : eliminate_unnecessary_stmts (bool aggressive)
1465 : : {
1466 : 7994927 : bool something_changed = false;
1467 : 7994927 : basic_block bb;
1468 : 7994927 : gimple_stmt_iterator gsi, psi;
1469 : 7994927 : gimple *stmt;
1470 : 7994927 : auto_vec<edge> to_remove_edges;
1471 : :
1472 : 7994927 : if (dump_file && (dump_flags & TDF_DETAILS))
1473 : 208 : fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1474 : :
1475 : 7994927 : bool had_setjmp = cfun->calls_setjmp;
1476 : 7994927 : clear_special_calls ();
1477 : :
1478 : : /* Walking basic blocks and statements in reverse order avoids
1479 : : releasing SSA names before any other DEFs that refer to them are
1480 : : released. This helps avoid loss of debug information, as we get
1481 : : a chance to propagate all RHSs of removed SSAs into debug uses,
1482 : : rather than only the latest ones. E.g., consider:
1483 : :
1484 : : x_3 = y_1 + z_2;
1485 : : a_5 = x_3 - b_4;
1486 : : # DEBUG a => a_5
1487 : :
1488 : : If we were to release x_3 before a_5, when we reached a_5 and
1489 : : tried to substitute it into the debug stmt, we'd see x_3 there,
1490 : : but x_3's DEF, type, etc would have already been disconnected.
1491 : : By going backwards, the debug stmt first changes to:
1492 : :
1493 : : # DEBUG a => x_3 - b_4
1494 : :
1495 : : and then to:
1496 : :
1497 : : # DEBUG a => y_1 + z_2 - b_4
1498 : :
1499 : : as desired. */
1500 : 7994927 : gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1501 : 7994927 : auto_vec<basic_block> h;
1502 : 7994927 : h = get_all_dominated_blocks (CDI_DOMINATORS,
1503 : 15989854 : single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1504 : :
1505 : 90946866 : while (h.length ())
1506 : : {
1507 : 82951939 : bb = h.pop ();
1508 : :
1509 : : /* Remove dead statements. */
1510 : 82951939 : auto_bitmap debug_seen;
1511 : 82951939 : hash_set<int_hash <location_t, 0>> locs_seen;
1512 : 762988465 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1513 : : {
1514 : 597084587 : stmt = gsi_stmt (gsi);
1515 : :
1516 : 597084587 : psi = gsi;
1517 : 597084587 : gsi_prev (&psi);
1518 : :
1519 : 597084587 : stats.total++;
1520 : :
1521 : : /* We can mark a call to free as not necessary if the
1522 : : defining statement of its argument is not necessary
1523 : : (and thus is getting removed). */
1524 : 597084587 : if (gimple_plf (stmt, STMT_NECESSARY)
1525 : 597084587 : && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1526 : 593933997 : || (is_gimple_call (stmt)
1527 : 36622628 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
1528 : 1034455 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
1529 : : {
1530 : 1095186 : tree ptr = gimple_call_arg (stmt, 0);
1531 : 1095186 : if (TREE_CODE (ptr) == SSA_NAME)
1532 : : {
1533 : 1094676 : gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1534 : 1094676 : if (!gimple_nop_p (def_stmt)
1535 : 1094676 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1536 : 5908 : gimple_set_plf (stmt, STMT_NECESSARY, false);
1537 : : }
1538 : : }
1539 : : /* Conditional checking that return value of allocation is non-NULL
1540 : : can be turned to constant if the allocation itself
1541 : : is unnecesary. */
1542 : 597084587 : if (gimple_plf (stmt, STMT_NECESSARY)
1543 : 594213618 : && gimple_code (stmt) == GIMPLE_COND
1544 : 628026936 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
1545 : : {
1546 : 30938366 : gimple *def_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
1547 : 30938366 : if (!gimple_nop_p (def_stmt)
1548 : 30938366 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1549 : : {
1550 : 2946 : gcc_checking_assert
1551 : : (checks_return_value_of_removable_allocation_p (stmt));
1552 : 2946 : gimple_cond_set_lhs (as_a <gcond *>(stmt),
1553 : : build_one_cst
1554 : 2946 : (TREE_TYPE (gimple_cond_rhs (stmt))));
1555 : 2946 : update_stmt (stmt);
1556 : : }
1557 : : }
1558 : :
1559 : : /* If GSI is not necessary then remove it. */
1560 : 597084587 : if (!gimple_plf (stmt, STMT_NECESSARY))
1561 : : {
1562 : : /* Keep clobbers that we can keep live live. */
1563 : 2870969 : if (gimple_clobber_p (stmt))
1564 : : {
1565 : 1331547 : ssa_op_iter iter;
1566 : 1331547 : use_operand_p use_p;
1567 : 1331547 : bool dead = false;
1568 : 2660897 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1569 : : {
1570 : 1331547 : tree name = USE_FROM_PTR (use_p);
1571 : 1331547 : if (!SSA_NAME_IS_DEFAULT_DEF (name)
1572 : 1331547 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1573 : : {
1574 : : dead = true;
1575 : : break;
1576 : : }
1577 : : }
1578 : 2657912 : if (!dead
1579 : : /* When doing CD-DCE we have to ensure all controls
1580 : : of the stmt are still live. */
1581 : 1331547 : && (!aggressive || control_parents_preserved_p (bb)))
1582 : : {
1583 : 1326365 : bitmap_clear (debug_seen);
1584 : 1326365 : continue;
1585 : : }
1586 : : }
1587 : 1544604 : if (!is_gimple_debug (stmt))
1588 : 1243982 : something_changed = true;
1589 : 1544604 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1590 : 1544604 : continue;
1591 : 1544604 : }
1592 : 594213618 : else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1593 : : {
1594 : 36902249 : tree name = gimple_call_lhs (call_stmt);
1595 : :
1596 : 36902249 : notice_special_calls (call_stmt);
1597 : :
1598 : : /* When LHS of var = call (); is dead, simplify it into
1599 : : call (); saving one operand. */
1600 : 36902249 : if (name
1601 : 14457541 : && TREE_CODE (name) == SSA_NAME
1602 : 11964790 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1603 : : /* Avoid doing so for allocation calls which we
1604 : : did not mark as necessary, it will confuse the
1605 : : special logic we apply to malloc/free pair removal. */
1606 : 36989913 : && !is_removable_allocation_p (call_stmt, false))
1607 : : {
1608 : 86419 : something_changed = true;
1609 : 86419 : if (dump_file && (dump_flags & TDF_DETAILS))
1610 : : {
1611 : 0 : fprintf (dump_file, "Deleting LHS of call: ");
1612 : 0 : print_gimple_stmt (dump_file, call_stmt, 0, TDF_SLIM);
1613 : 0 : fprintf (dump_file, "\n");
1614 : : }
1615 : :
1616 : 86419 : gimple_call_set_lhs (call_stmt, NULL_TREE);
1617 : 86419 : maybe_clean_or_replace_eh_stmt (call_stmt, call_stmt);
1618 : 86419 : update_stmt (call_stmt);
1619 : 86419 : release_ssa_name (name);
1620 : :
1621 : : /* GOMP_SIMD_LANE (unless three argument) or ASAN_POISON
1622 : : without lhs is not needed. */
1623 : 86419 : if (gimple_call_internal_p (call_stmt))
1624 : 8173 : switch (gimple_call_internal_fn (call_stmt))
1625 : : {
1626 : 760 : case IFN_GOMP_SIMD_LANE:
1627 : 760 : if (gimple_call_num_args (call_stmt) >= 3
1628 : 798 : && !integer_nonzerop
1629 : 38 : (gimple_call_arg (call_stmt, 2)))
1630 : : break;
1631 : : /* FALLTHRU */
1632 : 862 : case IFN_ASAN_POISON:
1633 : 862 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1634 : 862 : break;
1635 : : default:
1636 : : break;
1637 : : }
1638 : : }
1639 : 36815830 : else if (gimple_call_internal_p (call_stmt))
1640 : 823888 : switch (gimple_call_internal_fn (call_stmt))
1641 : : {
1642 : 89355 : case IFN_ADD_OVERFLOW:
1643 : 89355 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1644 : 89355 : break;
1645 : 95344 : case IFN_SUB_OVERFLOW:
1646 : 95344 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1647 : 95344 : break;
1648 : 99185 : case IFN_MUL_OVERFLOW:
1649 : 99185 : maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1650 : 99185 : break;
1651 : 24800 : case IFN_UADDC:
1652 : 24800 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1653 : 2458 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1654 : : break;
1655 : 13879 : case IFN_USUBC:
1656 : 13879 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1657 : 1992 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1658 : : break;
1659 : : default:
1660 : : break;
1661 : : }
1662 : : }
1663 : 557311369 : else if (gimple_debug_bind_p (stmt))
1664 : : {
1665 : : /* We are only keeping the last debug-bind of a
1666 : : non-DEBUG_EXPR_DECL variable in a series of
1667 : : debug-bind stmts. */
1668 : 262402714 : tree var = gimple_debug_bind_get_var (stmt);
1669 : 262402714 : if (TREE_CODE (var) != DEBUG_EXPR_DECL
1670 : 262402714 : && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1671 : 5854424 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1672 : 262402714 : continue;
1673 : 262402714 : }
1674 : 294908655 : else if (gimple_debug_begin_stmt_p (stmt))
1675 : : {
1676 : : /* We are only keeping the last debug-begin in a series of
1677 : : debug-begin stmts. */
1678 : 28353446 : if (locs_seen.add (gimple_location (stmt)))
1679 : 35628 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1680 : 28353446 : continue;
1681 : : }
1682 : 303457458 : locs_seen.empty ();
1683 : 303457458 : bitmap_clear (debug_seen);
1684 : : }
1685 : :
1686 : : /* Remove dead PHI nodes. */
1687 : 82951939 : something_changed |= remove_dead_phis (bb);
1688 : 82951939 : }
1689 : :
1690 : : /* First remove queued edges. */
1691 : 7994927 : if (!to_remove_edges.is_empty ())
1692 : : {
1693 : : /* Remove edges. We've delayed this to not get bogus debug stmts
1694 : : during PHI node removal. */
1695 : 62715 : for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1696 : 42330 : remove_edge (to_remove_edges[i]);
1697 : 20385 : cfg_altered = true;
1698 : : }
1699 : : /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1700 : : ??? We'd like to assert that setjmp calls do not pop out of nothing
1701 : : but we currently lack a per-stmt way of noting whether a call was
1702 : : recognized as returns-twice (or rather receives-control). */
1703 : 7994927 : if (!cfun->calls_setjmp && had_setjmp)
1704 : : {
1705 : : /* Make sure we only remove the edges, not dominated blocks. Using
1706 : : gimple_purge_dead_abnormal_call_edges would do that and we
1707 : : cannot free dominators yet. */
1708 : 3750 : FOR_EACH_BB_FN (bb, cfun)
1709 : 9662 : if (gcall *stmt = safe_dyn_cast <gcall *> (*gsi_last_bb (bb)))
1710 : 2082 : if (!stmt_can_make_abnormal_goto (stmt))
1711 : : {
1712 : 737 : edge_iterator ei;
1713 : 737 : edge e;
1714 : 996 : for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1715 : : {
1716 : 259 : if (e->flags & EDGE_ABNORMAL)
1717 : : {
1718 : 112 : if (e->flags & EDGE_FALLTHRU)
1719 : 0 : e->flags &= ~EDGE_ABNORMAL;
1720 : : else
1721 : 112 : remove_edge (e);
1722 : 112 : cfg_altered = true;
1723 : : }
1724 : : else
1725 : 147 : ei_next (&ei);
1726 : : }
1727 : : }
1728 : : }
1729 : :
1730 : : /* Now remove the unreachable blocks. */
1731 : 7994927 : if (cfg_altered)
1732 : : {
1733 : 20426 : basic_block prev_bb;
1734 : :
1735 : 20426 : find_unreachable_blocks ();
1736 : :
1737 : : /* Delete all unreachable basic blocks in reverse dominator order. */
1738 : 20426 : for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1739 : 794987 : bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1740 : : {
1741 : 774561 : prev_bb = bb->prev_bb;
1742 : :
1743 : 774561 : if ((bb_contains_live_stmts
1744 : 774515 : && !bitmap_bit_p (bb_contains_live_stmts, bb->index))
1745 : 1335006 : || !(bb->flags & BB_REACHABLE))
1746 : : {
1747 : : /* Since we don't track liveness of virtual PHI nodes, it is
1748 : : possible that we rendered some PHI nodes unreachable while
1749 : : they are still in use. Mark them for renaming. */
1750 : 238486 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1751 : 24369 : gsi_next (&gsi))
1752 : 73105 : if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1753 : : {
1754 : 24367 : bool found = false;
1755 : 24367 : imm_use_iterator iter;
1756 : :
1757 : 24561 : FOR_EACH_IMM_USE_STMT (stmt, iter,
1758 : : gimple_phi_result (gsi.phi ()))
1759 : : {
1760 : 23793 : if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1761 : 163 : continue;
1762 : 23630 : if (gimple_code (stmt) == GIMPLE_PHI
1763 : 23630 : || gimple_plf (stmt, STMT_NECESSARY))
1764 : : {
1765 : : found = true;
1766 : : break;
1767 : : }
1768 : 24367 : }
1769 : 24367 : if (found)
1770 : 23599 : mark_virtual_phi_result_for_renaming (gsi.phi ());
1771 : : }
1772 : :
1773 : 214117 : if (!(bb->flags & BB_REACHABLE))
1774 : : {
1775 : : /* Speed up the removal of blocks that don't
1776 : : dominate others. Walking backwards, this should
1777 : : be the common case. ??? Do we need to recompute
1778 : : dominators because of cfg_altered? */
1779 : 45724 : if (!first_dom_son (CDI_DOMINATORS, bb))
1780 : 45189 : delete_basic_block (bb);
1781 : : else
1782 : : {
1783 : 535 : h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1784 : :
1785 : 2275 : while (h.length ())
1786 : : {
1787 : 1740 : bb = h.pop ();
1788 : 1740 : prev_bb = bb->prev_bb;
1789 : : /* Rearrangements to the CFG may have failed
1790 : : to update the dominators tree, so that
1791 : : formerly-dominated blocks are now
1792 : : otherwise reachable. */
1793 : 1740 : if (!!(bb->flags & BB_REACHABLE))
1794 : 0 : continue;
1795 : 1740 : delete_basic_block (bb);
1796 : : }
1797 : :
1798 : 535 : h.release ();
1799 : : }
1800 : : }
1801 : : }
1802 : : }
1803 : : }
1804 : :
1805 : 7994927 : if (bb_postorder)
1806 : 20385 : free (bb_postorder);
1807 : 7994927 : bb_postorder = NULL;
1808 : :
1809 : 7994927 : return something_changed;
1810 : 7994927 : }
1811 : :
1812 : :
1813 : : /* Print out removed statement statistics. */
1814 : :
1815 : : static void
1816 : 214 : print_stats (void)
1817 : : {
1818 : 214 : float percg;
1819 : :
1820 : 214 : percg = ((float) stats.removed / (float) stats.total) * 100;
1821 : 214 : fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1822 : : stats.removed, stats.total, (int) percg);
1823 : :
1824 : 214 : if (stats.total_phis == 0)
1825 : : percg = 0;
1826 : : else
1827 : 42 : percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1828 : :
1829 : 214 : fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1830 : : stats.removed_phis, stats.total_phis, (int) percg);
1831 : 214 : }
1832 : :
1833 : : /* Initialization for this pass. Set up the used data structures. */
1834 : :
1835 : : static void
1836 : 7994927 : tree_dce_init (bool aggressive)
1837 : : {
1838 : 7994927 : memset ((void *) &stats, 0, sizeof (stats));
1839 : :
1840 : 7994927 : if (aggressive)
1841 : : {
1842 : 3510021 : last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1843 : 3510021 : bitmap_clear (last_stmt_necessary);
1844 : 3510021 : bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1845 : 3510021 : bitmap_clear (bb_contains_live_stmts);
1846 : : }
1847 : :
1848 : 15989854 : processed = sbitmap_alloc (num_ssa_names + 1);
1849 : 7994927 : bitmap_clear (processed);
1850 : :
1851 : 7994927 : worklist.create (64);
1852 : 7994927 : cfg_altered = false;
1853 : 7994927 : }
1854 : :
1855 : : /* Cleanup after this pass. */
1856 : :
1857 : : static void
1858 : 7994927 : tree_dce_done (bool aggressive)
1859 : : {
1860 : 7994927 : if (aggressive)
1861 : : {
1862 : 3510021 : delete cd;
1863 : 3510021 : sbitmap_free (visited_control_parents);
1864 : 3510021 : sbitmap_free (last_stmt_necessary);
1865 : 3510021 : sbitmap_free (bb_contains_live_stmts);
1866 : 3510021 : bb_contains_live_stmts = NULL;
1867 : : }
1868 : :
1869 : 7994927 : sbitmap_free (processed);
1870 : :
1871 : 7994927 : worklist.release ();
1872 : 7994927 : }
1873 : :
1874 : : /* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1875 : :
1876 : : static int
1877 : 28134943 : sort_phi_args (const void *a_, const void *b_)
1878 : : {
1879 : 28134943 : auto *a = (const std::pair<edge, hashval_t> *) a_;
1880 : 28134943 : auto *b = (const std::pair<edge, hashval_t> *) b_;
1881 : 28134943 : hashval_t ha = a->second;
1882 : 28134943 : hashval_t hb = b->second;
1883 : 28134943 : if (ha < hb)
1884 : : return -1;
1885 : 17423635 : else if (ha > hb)
1886 : : return 1;
1887 : 8388160 : else if (a->first->dest_idx < b->first->dest_idx)
1888 : : return -1;
1889 : 4397191 : else if (a->first->dest_idx > b->first->dest_idx)
1890 : : return 1;
1891 : : else
1892 : 0 : return 0;
1893 : : }
1894 : :
1895 : : /* Look for a non-virtual PHIs and make a forwarder block when all PHIs
1896 : : have the same argument on a set of edges. This is to not consider
1897 : : control dependences of individual edges for same values but only for
1898 : : the common set. */
1899 : :
1900 : : static unsigned
1901 : 3510021 : make_forwarders_with_degenerate_phis (function *fn)
1902 : : {
1903 : 3510021 : unsigned todo = 0;
1904 : :
1905 : 3510021 : basic_block bb;
1906 : 33633596 : FOR_EACH_BB_FN (bb, fn)
1907 : : {
1908 : : /* Only PHIs with three or more arguments have opportunities. */
1909 : 30123575 : if (EDGE_COUNT (bb->preds) < 3)
1910 : 29814024 : continue;
1911 : : /* Do not touch loop headers or blocks with abnormal predecessors.
1912 : : ??? This is to avoid creating valid loops here, see PR103458.
1913 : : We might want to improve things to either explicitely add those
1914 : : loops or at least consider blocks with no backedges. */
1915 : 1271039 : if (bb->loop_father->header == bb
1916 : 1269189 : || bb_has_abnormal_pred (bb))
1917 : 1850 : continue;
1918 : :
1919 : : /* Take one PHI node as template to look for identical
1920 : : arguments. Build a vector of candidates forming sets
1921 : : of argument edges with equal values. Note optimality
1922 : : depends on the particular choice of the template PHI
1923 : : since equal arguments are unordered leaving other PHIs
1924 : : with more than one set of equal arguments within this
1925 : : argument range unsorted. We'd have to break ties by
1926 : : looking at other PHI nodes. */
1927 : 1267339 : gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
1928 : 1267339 : if (gsi_end_p (gsi))
1929 : 760599 : continue;
1930 : 506740 : gphi *phi = gsi.phi ();
1931 : 506740 : auto_vec<std::pair<edge, hashval_t>, 8> args;
1932 : 506740 : bool need_resort = false;
1933 : 2920189 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
1934 : : {
1935 : 2413449 : edge e = gimple_phi_arg_edge (phi, i);
1936 : : /* Skip abnormal edges since we cannot redirect them. */
1937 : 2413449 : if (e->flags & EDGE_ABNORMAL)
1938 : 2413449 : continue;
1939 : : /* Skip loop exit edges when we are in loop-closed SSA form
1940 : : since the forwarder we'd create does not have a PHI node. */
1941 : 2413449 : if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
1942 : 2413449 : && loop_exit_edge_p (e->src->loop_father, e))
1943 : 17851 : continue;
1944 : :
1945 : 2395598 : tree arg = gimple_phi_arg_def (phi, i);
1946 : 2395598 : if (!CONSTANT_CLASS_P (arg) && TREE_CODE (arg) != SSA_NAME)
1947 : 2395598 : need_resort = true;
1948 : 2395598 : args.safe_push (std::make_pair (e, iterative_hash_expr (arg, 0)));
1949 : : }
1950 : 506740 : if (args.length () < 2)
1951 : 4205 : continue;
1952 : 502535 : args.qsort (sort_phi_args);
1953 : : /* The above sorting can be different between -g and -g0, as e.g. decls
1954 : : can have different uids (-g could have bigger gaps in between them).
1955 : : So, only use that to determine which args are equal, then change
1956 : : second from hash value to smallest dest_idx of the edges which have
1957 : : equal argument and sort again. If all the phi arguments are
1958 : : constants or SSA_NAME, there is no need for the second sort, the hash
1959 : : values are stable in that case. */
1960 : 502535 : hashval_t hash = args[0].second;
1961 : 502535 : args[0].second = args[0].first->dest_idx;
1962 : 502535 : bool any_equal = false;
1963 : 2392827 : for (unsigned i = 1; i < args.length (); ++i)
1964 : 1890292 : if (hash == args[i].second
1965 : 2622521 : && operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[i - 1].first),
1966 : 732229 : PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
1967 : : {
1968 : 731753 : args[i].second = args[i - 1].second;
1969 : 731753 : any_equal = true;
1970 : : }
1971 : : else
1972 : : {
1973 : 1158539 : hash = args[i].second;
1974 : 1158539 : args[i].second = args[i].first->dest_idx;
1975 : : }
1976 : 502535 : if (!any_equal)
1977 : 192984 : continue;
1978 : 309551 : if (need_resort)
1979 : 15087 : args.qsort (sort_phi_args);
1980 : :
1981 : : /* From the candidates vector now verify true candidates for
1982 : : forwarders and create them. */
1983 : 309551 : gphi *vphi = get_virtual_phi (bb);
1984 : 309551 : unsigned start = 0;
1985 : 2467759 : while (start < args.length () - 1)
1986 : : {
1987 : 774956 : unsigned i;
1988 : 2247008 : for (i = start + 1; i < args.length (); ++i)
1989 : 2072913 : if (args[start].second != args[i].second)
1990 : : break;
1991 : : /* args[start]..args[i-1] are equal. */
1992 : 774956 : if (start != i - 1)
1993 : : {
1994 : : /* Check all PHI nodes for argument equality. */
1995 : 450986 : bool equal = true;
1996 : 450986 : gphi_iterator gsi2 = gsi;
1997 : 450986 : gsi_next (&gsi2);
1998 : 961821 : for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
1999 : : {
2000 : 676577 : gphi *phi2 = gsi2.phi ();
2001 : 1353154 : if (virtual_operand_p (gimple_phi_result (phi2)))
2002 : 171321 : continue;
2003 : 505256 : tree start_arg
2004 : 505256 : = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
2005 : 2252989 : for (unsigned j = start + 1; j < i; ++j)
2006 : : {
2007 : 3891824 : if (!operand_equal_p (start_arg,
2008 : 1945912 : PHI_ARG_DEF_FROM_EDGE
2009 : : (phi2, args[j].first)))
2010 : : {
2011 : : /* Another PHI might have a shorter set of
2012 : : equivalent args. Go for that. */
2013 : 198179 : i = j;
2014 : 198179 : if (j == start + 1)
2015 : : equal = false;
2016 : : break;
2017 : : }
2018 : : }
2019 : : if (!equal)
2020 : : break;
2021 : : }
2022 : 450986 : if (equal)
2023 : : {
2024 : : /* If we are asked to forward all edges the block
2025 : : has all degenerate PHIs. Do nothing in that case. */
2026 : 285244 : if (start == 0
2027 : 141181 : && i == args.length ()
2028 : 290780 : && args.length () == gimple_phi_num_args (phi))
2029 : : break;
2030 : : /* Instead of using make_forwarder_block we are
2031 : : rolling our own variant knowing that the forwarder
2032 : : does not need PHI nodes apart from eventually
2033 : : a virtual one. */
2034 : 279841 : auto_vec<tree, 8> vphi_args;
2035 : 279841 : if (vphi)
2036 : : {
2037 : 178146 : vphi_args.reserve_exact (i - start);
2038 : 694368 : for (unsigned j = start; j < i; ++j)
2039 : 516222 : vphi_args.quick_push
2040 : 516222 : (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
2041 : : }
2042 : 279841 : free_dominance_info (fn, CDI_DOMINATORS);
2043 : 279841 : basic_block forwarder = split_edge (args[start].first);
2044 : 279841 : profile_count count = profile_count::zero ();
2045 : 279841 : bool irr = false;
2046 : 824970 : for (unsigned j = start + 1; j < i; ++j)
2047 : : {
2048 : 545129 : edge e = args[j].first;
2049 : 545129 : if (e->flags & EDGE_IRREDUCIBLE_LOOP)
2050 : 1308 : irr = true;
2051 : 545129 : redirect_edge_and_branch_force (e, forwarder);
2052 : 545129 : redirect_edge_var_map_clear (e);
2053 : 545129 : count += e->count ();
2054 : : }
2055 : 279841 : forwarder->count = count;
2056 : 279841 : if (irr)
2057 : : {
2058 : 1073 : forwarder->flags |= BB_IRREDUCIBLE_LOOP;
2059 : 1073 : single_succ_edge (forwarder)->flags
2060 : 1073 : |= EDGE_IRREDUCIBLE_LOOP;
2061 : : }
2062 : :
2063 : 279841 : if (vphi)
2064 : : {
2065 : 178146 : tree def = copy_ssa_name (vphi_args[0]);
2066 : 178146 : gphi *vphi_copy = create_phi_node (def, forwarder);
2067 : 694368 : for (unsigned j = start; j < i; ++j)
2068 : 1032444 : add_phi_arg (vphi_copy, vphi_args[j - start],
2069 : 516222 : args[j].first, UNKNOWN_LOCATION);
2070 : 178146 : SET_PHI_ARG_DEF
2071 : : (vphi, single_succ_edge (forwarder)->dest_idx, def);
2072 : : }
2073 : 279841 : todo |= TODO_cleanup_cfg;
2074 : 279841 : }
2075 : : }
2076 : : /* Continue searching for more opportunities. */
2077 : : start = i;
2078 : : }
2079 : 506740 : }
2080 : 3510021 : return todo;
2081 : : }
2082 : :
2083 : : /* Main routine to eliminate dead code.
2084 : :
2085 : : AGGRESSIVE controls the aggressiveness of the algorithm.
2086 : : In conservative mode, we ignore control dependence and simply declare
2087 : : all but the most trivially dead branches necessary. This mode is fast.
2088 : : In aggressive mode, control dependences are taken into account, which
2089 : : results in more dead code elimination, but at the cost of some time. */
2090 : :
2091 : : static unsigned int
2092 : 7994927 : perform_tree_ssa_dce (bool aggressive)
2093 : : {
2094 : 7994927 : bool something_changed = 0;
2095 : 7994927 : unsigned todo = 0;
2096 : :
2097 : : /* Preheaders are needed for SCEV to work.
2098 : : Simple lateches and recorded exits improve chances that loop will
2099 : : proved to be finite in testcases such as in loop-15.c and loop-24.c */
2100 : 7994927 : bool in_loop_pipeline = scev_initialized_p ();
2101 : 7994927 : if (aggressive && ! in_loop_pipeline)
2102 : : {
2103 : 3287855 : loop_optimizer_init (LOOPS_NORMAL
2104 : : | LOOPS_HAVE_RECORDED_EXITS);
2105 : 3287855 : scev_initialize ();
2106 : : }
2107 : :
2108 : 7994927 : if (aggressive)
2109 : 3510021 : todo |= make_forwarders_with_degenerate_phis (cfun);
2110 : :
2111 : 7994927 : calculate_dominance_info (CDI_DOMINATORS);
2112 : :
2113 : 7994927 : tree_dce_init (aggressive);
2114 : :
2115 : 7994927 : if (aggressive)
2116 : : {
2117 : : /* Compute control dependence. */
2118 : 3510021 : calculate_dominance_info (CDI_POST_DOMINATORS);
2119 : 3510021 : cd = new control_dependences ();
2120 : :
2121 : 7020042 : visited_control_parents =
2122 : 3510021 : sbitmap_alloc (last_basic_block_for_fn (cfun));
2123 : 3510021 : bitmap_clear (visited_control_parents);
2124 : :
2125 : 3510021 : mark_dfs_back_edges ();
2126 : : }
2127 : :
2128 : 7994927 : find_obviously_necessary_stmts (aggressive);
2129 : :
2130 : 7994927 : if (aggressive && ! in_loop_pipeline)
2131 : : {
2132 : 3287855 : scev_finalize ();
2133 : 3287855 : loop_optimizer_finalize ();
2134 : : }
2135 : :
2136 : 7994927 : longest_chain = 0;
2137 : 7994927 : total_chain = 0;
2138 : 7994927 : nr_walks = 0;
2139 : 7994927 : chain_ovfl = false;
2140 : 7994927 : visited = BITMAP_ALLOC (NULL);
2141 : 7994927 : propagate_necessity (aggressive);
2142 : 7994927 : BITMAP_FREE (visited);
2143 : :
2144 : 7994927 : something_changed |= eliminate_unnecessary_stmts (aggressive);
2145 : 7994927 : something_changed |= cfg_altered;
2146 : :
2147 : : /* We do not update postdominators, so free them unconditionally. */
2148 : 7994927 : free_dominance_info (CDI_POST_DOMINATORS);
2149 : :
2150 : : /* If we removed paths in the CFG, then we need to update
2151 : : dominators as well. I haven't investigated the possibility
2152 : : of incrementally updating dominators. */
2153 : 7994927 : if (cfg_altered)
2154 : 20426 : free_dominance_info (CDI_DOMINATORS);
2155 : :
2156 : 7994927 : statistics_counter_event (cfun, "Statements deleted", stats.removed);
2157 : 7994927 : statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
2158 : :
2159 : : /* Debugging dumps. */
2160 : 7994927 : if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
2161 : 214 : print_stats ();
2162 : :
2163 : 7994927 : tree_dce_done (aggressive);
2164 : :
2165 : 7994927 : if (something_changed)
2166 : : {
2167 : 839834 : free_numbers_of_iterations_estimates (cfun);
2168 : 839834 : if (in_loop_pipeline)
2169 : 62197 : scev_reset ();
2170 : : todo |= TODO_update_ssa | TODO_cleanup_cfg;
2171 : : }
2172 : 7994927 : return todo;
2173 : : }
2174 : :
2175 : : namespace {
2176 : :
2177 : : const pass_data pass_data_dce =
2178 : : {
2179 : : GIMPLE_PASS, /* type */
2180 : : "dce", /* name */
2181 : : OPTGROUP_NONE, /* optinfo_flags */
2182 : : TV_TREE_DCE, /* tv_id */
2183 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2184 : : 0, /* properties_provided */
2185 : : 0, /* properties_destroyed */
2186 : : 0, /* todo_flags_start */
2187 : : 0, /* todo_flags_finish */
2188 : : };
2189 : :
2190 : : class pass_dce_base : public gimple_opt_pass
2191 : : {
2192 : : public:
2193 : : /* opt_pass methods: */
2194 : 7998155 : bool gate (function *) final override { return flag_tree_dce != 0; }
2195 : 2850810 : void set_pass_param (unsigned n, bool param) final override
2196 : : {
2197 : 2850810 : gcc_assert (n == 0 || n == 1);
2198 : 2850810 : if (n == 0)
2199 : 1710486 : update_address_taken_p = param;
2200 : 1140324 : else if (n == 1)
2201 : 1140324 : remove_unused_locals_p = param;
2202 : 2850810 : }
2203 : :
2204 : : protected:
2205 : 3135891 : pass_dce_base (const pass_data &data, gcc::context *ctxt)
2206 : 6271782 : : gimple_opt_pass (data, ctxt)
2207 : : {}
2208 : 7994927 : unsigned int execute_dce (function *, bool aggressive)
2209 : : {
2210 : 7994927 : return (perform_tree_ssa_dce (aggressive)
2211 : 7994927 : | (remove_unused_locals_p ? TODO_remove_unused_locals : 0)
2212 : 7994927 : | (update_address_taken_p ? TODO_update_address_taken : 0));
2213 : : }
2214 : :
2215 : : private:
2216 : : bool update_address_taken_p = false;
2217 : : bool remove_unused_locals_p = false;
2218 : : }; // class pass_dce_base
2219 : :
2220 : :
2221 : : class pass_dce : public pass_dce_base
2222 : : {
2223 : : public:
2224 : 2280648 : pass_dce (gcc::context *ctxt)
2225 : 4561296 : : pass_dce_base (pass_data_dce, ctxt)
2226 : : {}
2227 : :
2228 : : /* opt_pass methods: */
2229 : 1995567 : opt_pass * clone () final override { return new pass_dce (m_ctxt); }
2230 : 4295290 : unsigned int execute (function *func) final override
2231 : : {
2232 : 4295290 : return execute_dce (func, /*aggressive=*/false);
2233 : : }
2234 : :
2235 : : }; // class pass_dce
2236 : :
2237 : : } // anon namespace
2238 : :
2239 : : gimple_opt_pass *
2240 : 285081 : make_pass_dce (gcc::context *ctxt)
2241 : : {
2242 : 285081 : return new pass_dce (ctxt);
2243 : : }
2244 : :
2245 : : namespace {
2246 : :
2247 : : const pass_data pass_data_cd_dce =
2248 : : {
2249 : : GIMPLE_PASS, /* type */
2250 : : "cddce", /* name */
2251 : : OPTGROUP_NONE, /* optinfo_flags */
2252 : : TV_TREE_CD_DCE, /* tv_id */
2253 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2254 : : 0, /* properties_provided */
2255 : : 0, /* properties_destroyed */
2256 : : 0, /* todo_flags_start */
2257 : : 0, /* todo_flags_finish */
2258 : : };
2259 : :
2260 : : class pass_cd_dce : public pass_dce_base
2261 : : {
2262 : : public:
2263 : 855243 : pass_cd_dce (gcc::context *ctxt)
2264 : 1710486 : : pass_dce_base (pass_data_cd_dce, ctxt)
2265 : : {}
2266 : :
2267 : : /* opt_pass methods: */
2268 : 570162 : opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
2269 : 3699637 : unsigned int execute (function *func) final override
2270 : : {
2271 : 3699637 : return execute_dce (func, /*aggressive=*/optimize >= 2);
2272 : : }
2273 : :
2274 : : }; // class pass_cd_dce
2275 : :
2276 : : } // anon namespace
2277 : :
2278 : : gimple_opt_pass *
2279 : 285081 : make_pass_cd_dce (gcc::context *ctxt)
2280 : : {
2281 : 285081 : return new pass_cd_dce (ctxt);
2282 : : }
2283 : :
2284 : :
2285 : : /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
2286 : : is consumed by this function. The function has linear complexity in
2287 : : the number of dead stmts with a constant factor like the average SSA
2288 : : use operands number. */
2289 : :
2290 : : void
2291 : 37583220 : simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
2292 : : {
2293 : 37583220 : int phiremoved = 0;
2294 : 37583220 : int stmtremoved = 0;
2295 : 72516035 : while (! bitmap_empty_p (worklist))
2296 : : {
2297 : : /* Pop item. */
2298 : 34932815 : unsigned i = bitmap_clear_first_set_bit (worklist);
2299 : :
2300 : 34932815 : tree def = ssa_name (i);
2301 : : /* Removed by somebody else or still in use.
2302 : : Note use in itself for a phi node is not counted as still in use. */
2303 : 34932815 : if (!def)
2304 : 14842576 : continue;
2305 : 34769090 : if (!has_zero_uses (def))
2306 : : {
2307 : 14644577 : gimple *def_stmt = SSA_NAME_DEF_STMT (def);
2308 : :
2309 : 14644577 : if (gimple_code (def_stmt) != GIMPLE_PHI)
2310 : 14640218 : continue;
2311 : :
2312 : 2823056 : gimple *use_stmt;
2313 : 2823056 : imm_use_iterator use_iter;
2314 : 2823056 : bool canremove = true;
2315 : :
2316 : 2926859 : FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
2317 : : {
2318 : : /* Ignore debug statements. */
2319 : 2922500 : if (is_gimple_debug (use_stmt))
2320 : 95483 : continue;
2321 : 2827017 : if (use_stmt != def_stmt)
2322 : : {
2323 : : canremove = false;
2324 : : break;
2325 : : }
2326 : 2823056 : }
2327 : 2823056 : if (!canremove)
2328 : 2818697 : continue;
2329 : : }
2330 : :
2331 : 20128872 : gimple *t = SSA_NAME_DEF_STMT (def);
2332 : 20128872 : if (gimple_has_side_effects (t))
2333 : 36153 : continue;
2334 : :
2335 : : /* The defining statement needs to be defining only this name.
2336 : : ASM is the only statement that can define more than one
2337 : : name. */
2338 : 20092719 : if (is_a<gasm *>(t)
2339 : 20092719 : && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
2340 : 15 : continue;
2341 : :
2342 : : /* Don't remove statements that are needed for non-call
2343 : : eh to work. */
2344 : 20092704 : if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
2345 : 2465 : continue;
2346 : :
2347 : : /* Tell the caller that we removed a statement that might
2348 : : throw so it could cleanup the cfg for that block. */
2349 : 20090239 : if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
2350 : 206 : bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);
2351 : :
2352 : : /* Add uses to the worklist. */
2353 : 20090239 : ssa_op_iter iter;
2354 : 20090239 : use_operand_p use_p;
2355 : 56956829 : FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
2356 : : {
2357 : 16776351 : tree use = USE_FROM_PTR (use_p);
2358 : 16776351 : if (TREE_CODE (use) == SSA_NAME
2359 : 16776351 : && ! SSA_NAME_IS_DEFAULT_DEF (use))
2360 : 14919784 : bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
2361 : : }
2362 : :
2363 : : /* Remove stmt. */
2364 : 20090239 : if (dump_file && (dump_flags & TDF_DETAILS))
2365 : : {
2366 : 287 : fprintf (dump_file, "Removing dead stmt:");
2367 : 287 : print_gimple_stmt (dump_file, t, 0);
2368 : : }
2369 : 20090239 : gimple_stmt_iterator gsi = gsi_for_stmt (t);
2370 : 20090239 : if (gimple_code (t) == GIMPLE_PHI)
2371 : : {
2372 : 2885771 : remove_phi_node (&gsi, true);
2373 : 2885771 : phiremoved++;
2374 : : }
2375 : : else
2376 : : {
2377 : 17204468 : unlink_stmt_vdef (t);
2378 : 17204468 : gsi_remove (&gsi, true);
2379 : 17204468 : release_defs (t);
2380 : 17204468 : stmtremoved++;
2381 : : }
2382 : : }
2383 : 37583220 : statistics_counter_event (cfun, "PHIs removed",
2384 : : phiremoved);
2385 : 37583220 : statistics_counter_event (cfun, "Statements removed",
2386 : : stmtremoved);
2387 : 37583220 : }
|