Line data Source code
1 : /* Dead code elimination pass for the GNU compiler.
2 : Copyright (C) 2002-2026 Free Software Foundation, Inc.
3 : Contributed by Ben Elliston <bje@redhat.com>
4 : and Andrew MacLeod <amacleod@redhat.com>
5 : Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6 :
7 : This file is part of GCC.
8 :
9 : GCC is free software; you can redistribute it and/or modify it
10 : under the terms of the GNU General Public License as published by the
11 : Free Software Foundation; either version 3, or (at your option) any
12 : later version.
13 :
14 : GCC is distributed in the hope that it will be useful, but WITHOUT
15 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : for more details.
18 :
19 : You should have received a copy of the GNU General Public License
20 : along with GCC; see the file COPYING3. If not see
21 : <http://www.gnu.org/licenses/>. */
22 :
23 : /* Dead code elimination.
24 :
25 : References:
26 :
27 : Building an Optimizing Compiler,
28 : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29 :
30 : Advanced Compiler Design and Implementation,
31 : Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32 :
33 : Dead-code elimination is the removal of statements which have no
34 : impact on the program's output. "Dead statements" have no impact
35 : on the program's output, while "necessary statements" may have
36 : impact on the output.
37 :
38 : The algorithm consists of three phases:
39 : 1. Marking as necessary all statements known to be necessary,
40 : e.g. most function calls, writing a value to memory, etc;
41 : 2. Propagating necessary statements, e.g., the statements
42 : giving values to operands in necessary statements; and
43 : 3. Removing dead statements. */
44 :
45 : #include "config.h"
46 : #include "system.h"
47 : #include "coretypes.h"
48 : #include "backend.h"
49 : #include "rtl.h"
50 : #include "tree.h"
51 : #include "gimple.h"
52 : #include "cfghooks.h"
53 : #include "tree-pass.h"
54 : #include "ssa.h"
55 : #include "gimple-pretty-print.h"
56 : #include "fold-const.h"
57 : #include "calls.h"
58 : #include "cfganal.h"
59 : #include "tree-eh.h"
60 : #include "gimplify.h"
61 : #include "gimple-iterator.h"
62 : #include "tree-cfg.h"
63 : #include "tree-ssa-loop-niter.h"
64 : #include "tree-into-ssa.h"
65 : #include "tree-dfa.h"
66 : #include "cfgloop.h"
67 : #include "tree-scalar-evolution.h"
68 : #include "tree-ssa-propagate.h"
69 : #include "gimple-fold.h"
70 : #include "tree-ssa.h"
71 : #include "ipa-modref-tree.h"
72 : #include "ipa-modref.h"
73 : #include "memmodel.h"
74 :
75 : static struct stmt_stats
76 : {
77 : int total;
78 : int total_phis;
79 : int removed;
80 : int removed_phis;
81 : } stats;
82 :
83 : #define STMT_NECESSARY GF_PLF_1
84 :
85 : static vec<gimple *> worklist;
86 :
87 : /* Vector indicating an SSA name has already been processed and marked
88 : as necessary. */
89 : static sbitmap processed;
90 :
91 : /* Vector indicating that the last statement of a basic block has already
92 : been marked as necessary. */
93 : static sbitmap last_stmt_necessary;
94 :
95 : /* Vector indicating that BB contains statements that are live. */
96 : static sbitmap bb_contains_live_stmts;
97 :
98 : /* Before we can determine whether a control branch is dead, we need to
99 : compute which blocks are control dependent on which edges.
100 :
101 : We expect each block to be control dependent on very few edges so we
102 : use a bitmap for each block recording its edges. An array holds the
103 : bitmap. The Ith bit in the bitmap is set if that block is dependent
104 : on the Ith edge. */
105 : static control_dependences *cd;
106 :
107 : /* Vector indicating that a basic block has already had all the edges
108 : processed that it is control dependent on. */
109 : static sbitmap visited_control_parents;
110 :
111 : /* TRUE if this pass alters the CFG (by removing control statements).
112 : FALSE otherwise.
113 :
114 : If this pass alters the CFG, then it will arrange for the dominators
115 : to be recomputed. */
116 : static bool cfg_altered;
117 :
118 : /* When non-NULL holds map from basic block index into the postorder. */
119 : static int *bb_postorder;
120 :
121 :
122 : /* True if we should treat any stmt with a vdef as necessary. */
123 :
124 : static inline bool
125 266049482 : keep_all_vdefs_p ()
126 : {
127 266049482 : return optimize_debug;
128 : }
129 :
130 : /* 1 if CALLEE is the function __cxa_atexit.
131 : 2 if CALLEE is the function __aeabi_atexit.
132 : 0 otherwise. */
133 :
134 : static inline int
135 84898748 : is_cxa_atexit (const_tree callee)
136 : {
137 84898748 : if (callee != NULL_TREE
138 84898748 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__cxa_atexit") == 0)
139 : return 1;
140 84785096 : if (callee != NULL_TREE
141 84785096 : && strcmp (IDENTIFIER_POINTER (DECL_NAME (callee)), "__aeabi_atexit") == 0)
142 0 : return 2;
143 : return 0;
144 : }
145 :
146 : /* True if STMT is a call to __cxa_atexit (or __aeabi_atexit)
147 : and the function argument to that call is a const or pure
148 : non-looping function decl. */
149 :
150 : static inline bool
151 84898748 : is_removable_cxa_atexit_call (gimple *stmt)
152 : {
153 84898748 : tree callee = gimple_call_fndecl (stmt);
154 84898748 : int functype = is_cxa_atexit (callee);
155 84898748 : if (!functype)
156 : return false;
157 113652 : if (gimple_call_num_args (stmt) != 3)
158 : return false;
159 :
160 : /* The function argument is the 1st argument for __cxa_atexit
161 : or the 2nd argument for __eabi_atexit. */
162 227304 : tree arg = gimple_call_arg (stmt, functype == 2 ? 1 : 0);
163 113652 : if (TREE_CODE (arg) != ADDR_EXPR)
164 : return false;
165 113652 : arg = TREE_OPERAND (arg, 0);
166 113652 : if (TREE_CODE (arg) != FUNCTION_DECL)
167 : return false;
168 113652 : int flags = flags_from_decl_or_type (arg);
169 :
170 : /* If the function is noreturn then it cannot be removed. */
171 113652 : if (flags & ECF_NORETURN)
172 : return false;
173 :
174 : /* The function needs to be const or pure and non looping. */
175 113494 : return (flags & (ECF_CONST|ECF_PURE))
176 113494 : && !(flags & ECF_LOOPING_CONST_OR_PURE);
177 : }
178 :
179 : /* If STMT is not already marked necessary, mark it, and add it to the
180 : worklist if ADD_TO_WORKLIST is true. */
181 :
182 : static inline void
183 458679545 : mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
184 : {
185 458679545 : gcc_assert (stmt);
186 :
187 458679545 : if (gimple_plf (stmt, STMT_NECESSARY))
188 : return;
189 :
190 458679355 : if (dump_file && (dump_flags & TDF_DETAILS))
191 : {
192 852 : fprintf (dump_file, "Marking useful stmt: ");
193 852 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
194 852 : fprintf (dump_file, "\n");
195 : }
196 :
197 458679355 : gimple_set_plf (stmt, STMT_NECESSARY, true);
198 458679355 : if (add_to_worklist)
199 105628355 : worklist.safe_push (stmt);
200 105628355 : if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
201 36890138 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
202 : }
203 :
204 :
205 : /* Mark the statement defining operand OP as necessary. */
206 :
207 : static inline void
208 333343596 : mark_operand_necessary (tree op)
209 : {
210 333343596 : gimple *stmt;
211 333343596 : int ver;
212 :
213 333343596 : gcc_assert (op);
214 :
215 333343596 : ver = SSA_NAME_VERSION (op);
216 333343596 : if (bitmap_bit_p (processed, ver))
217 : {
218 122698312 : stmt = SSA_NAME_DEF_STMT (op);
219 122698312 : gcc_assert (gimple_nop_p (stmt)
220 : || gimple_plf (stmt, STMT_NECESSARY));
221 182778265 : return;
222 : }
223 210645284 : bitmap_set_bit (processed, ver);
224 :
225 210645284 : stmt = SSA_NAME_DEF_STMT (op);
226 210645284 : gcc_assert (stmt);
227 :
228 210645284 : if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
229 : return;
230 :
231 150565331 : if (dump_file && (dump_flags & TDF_DETAILS))
232 : {
233 266 : fprintf (dump_file, "marking necessary through ");
234 266 : print_generic_expr (dump_file, op);
235 266 : fprintf (dump_file, " stmt ");
236 266 : print_gimple_stmt (dump_file, stmt, 0);
237 : }
238 :
239 150565331 : gimple_set_plf (stmt, STMT_NECESSARY, true);
240 150565331 : if (bb_contains_live_stmts)
241 51585633 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
242 150565331 : worklist.safe_push (stmt);
243 : }
244 :
245 : /* Return true if STMT is a call to allocation function that can be
246 : optimized out if the memory block is never used for anything else
247 : than NULL pointer check or free.
248 : If NON_NULL_CHECK is false, we can further assume that return value
249 : is never checked to be non-NULL.
250 : Don't return true if it is called with constant size (or sizes for calloc)
251 : and the size is excessively large (larger than PTRDIFF_MAX, for calloc
252 : either argument larger than PTRDIFF_MAX or both constant and their product
253 : larger than PTRDIFF_MAX). */
254 :
255 : static bool
256 35510211 : is_removable_allocation_p (gcall *stmt, bool non_null_check)
257 : {
258 35510211 : int arg = -1;
259 35510211 : tree callee = gimple_call_fndecl (stmt), a1, a2;
260 35510211 : if (callee != NULL_TREE
261 35510211 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
262 9037960 : switch (DECL_FUNCTION_CODE (callee))
263 : {
264 561637 : case BUILT_IN_MALLOC:
265 561637 : arg = 1;
266 561637 : goto do_malloc;
267 150 : case BUILT_IN_ALIGNED_ALLOC:
268 150 : arg = 2;
269 150 : goto do_malloc;
270 5338 : case BUILT_IN_CALLOC:
271 5338 : arg = 3;
272 5338 : goto do_malloc;
273 122729 : CASE_BUILT_IN_ALLOCA:
274 122729 : arg = 1;
275 122729 : goto do_malloc;
276 : case BUILT_IN_STRDUP:
277 : case BUILT_IN_STRNDUP:
278 : arg = 0;
279 : /* FALLTHRU */
280 694051 : do_malloc:
281 694051 : if (non_null_check)
282 : {
283 105000 : if (flag_malloc_dce <= 1)
284 : return false;
285 : }
286 589051 : else if (!flag_malloc_dce)
287 : return false;
288 : break;
289 :
290 : case BUILT_IN_GOMP_ALLOC:
291 35509974 : arg = 2;
292 : break;
293 :
294 : default:;
295 : }
296 :
297 35509974 : if (arg == -1
298 35509974 : && callee != NULL_TREE
299 32738365 : && flag_allocation_dce
300 32734949 : && gimple_call_from_new_or_delete (stmt)
301 36819374 : && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
302 : arg = 1;
303 :
304 35086827 : switch (arg)
305 : {
306 : case -1:
307 : return false;
308 : case 0:
309 : return true;
310 1112957 : case 1:
311 1112957 : case 2:
312 1112957 : if (gimple_call_num_args (stmt) < (unsigned) arg)
313 : return false;
314 1112951 : a1 = gimple_call_arg (stmt, arg - 1);
315 1112951 : if (tree_fits_uhwi_p (a1)
316 1112951 : && (tree_to_uhwi (a1)
317 569624 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
318 : return false;
319 : return true;
320 5338 : case 3:
321 5338 : if (gimple_call_num_args (stmt) < 2)
322 : return false;
323 5338 : a1 = gimple_call_arg (stmt, 0);
324 5338 : a2 = gimple_call_arg (stmt, 1);
325 5338 : if (tree_fits_uhwi_p (a1)
326 5338 : && (tree_to_uhwi (a1)
327 4329 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
328 : return false;
329 5316 : if (tree_fits_uhwi_p (a2)
330 5316 : && (tree_to_uhwi (a2)
331 4635 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
332 : return false;
333 10588 : if (TREE_CODE (a1) == INTEGER_CST
334 4297 : && TREE_CODE (a2) == INTEGER_CST
335 13152 : && (wi::to_widest (a1) * wi::to_widest (a2)
336 13152 : > tree_to_uhwi (TYPE_MAX_VALUE (ptrdiff_type_node))))
337 : return false;
338 : return true;
339 : default:
340 : gcc_unreachable ();
341 : }
342 : }
343 :
344 : /* Return true if STMT is a conditional
345 : if (ptr != NULL)
346 : where ptr was returned by a removable allocation function. */
347 :
348 : static bool
349 237187574 : checks_return_value_of_removable_allocation_p (gimple *stmt)
350 : {
351 237187574 : gcall *def_stmt;
352 237187574 : return gimple_code (stmt) == GIMPLE_COND
353 30984692 : && (gimple_cond_code (stmt) == EQ_EXPR
354 21558440 : || gimple_cond_code (stmt) == NE_EXPR)
355 23632842 : && integer_zerop (gimple_cond_rhs (stmt))
356 12981421 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
357 : && (def_stmt = dyn_cast <gcall *>
358 12978476 : (SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt))))
359 240502500 : && is_removable_allocation_p (def_stmt, true);
360 : }
361 :
362 :
363 : /* Mark STMT as necessary if it obviously is. Add it to the worklist if
364 : it can make other statements necessary.
365 :
366 : If AGGRESSIVE is false, control statements are conservatively marked as
367 : necessary. */
368 :
369 : static void
370 592856853 : mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
371 : {
372 : /* Statements that are implicitly live. Most function calls, asm
373 : and return statements are required. Labels and GIMPLE_BIND nodes
374 : are kept because they are control flow, and we have no way of
375 : knowing whether they can be removed. DCE can eliminate all the
376 : other statements in a block, and CFG can then remove the block
377 : and labels. */
378 592856853 : switch (gimple_code (stmt))
379 : {
380 6435437 : case GIMPLE_PREDICT:
381 6435437 : case GIMPLE_LABEL:
382 6435437 : mark_stmt_necessary (stmt, false);
383 6435437 : return;
384 :
385 9985789 : case GIMPLE_ASM:
386 9985789 : case GIMPLE_RESX:
387 9985789 : case GIMPLE_RETURN:
388 9985789 : mark_stmt_necessary (stmt, true);
389 9985789 : return;
390 :
391 37071409 : case GIMPLE_CALL:
392 37071409 : {
393 37071409 : gcall *call = as_a <gcall *> (stmt);
394 :
395 : /* Never elide a noreturn call we pruned control-flow for.
396 : Same for statements that can alter control flow in unpredictable
397 : ways. */
398 37071409 : if (gimple_call_ctrl_altering_p (call))
399 : {
400 5142839 : mark_stmt_necessary (call, true);
401 5142839 : return;
402 : }
403 :
404 31928570 : if (is_removable_allocation_p (call, false))
405 : return;
406 :
407 :
408 : /* For __cxa_atexit calls, don't mark as necessary right away. */
409 31092269 : if (is_removable_cxa_atexit_call (call))
410 : return;
411 :
412 : /* A relaxed atomic load with no LHS has no observable effect:
413 : the value is discarded and relaxed ordering provides no
414 : inter-thread synchronisation guarantee. Don't mark it
415 : necessary so DCE can remove it. */
416 31091862 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
417 6328691 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
418 : {
419 419605 : case BUILT_IN_ATOMIC_LOAD_1:
420 419605 : case BUILT_IN_ATOMIC_LOAD_2:
421 419605 : case BUILT_IN_ATOMIC_LOAD_4:
422 419605 : case BUILT_IN_ATOMIC_LOAD_8:
423 419605 : case BUILT_IN_ATOMIC_LOAD_16:
424 419605 : {
425 419605 : tree model_arg = gimple_call_arg (call, 1);
426 419605 : if (TREE_CODE (model_arg) == INTEGER_CST
427 419605 : && is_mm_relaxed (memmodel_from_int (tree_to_uhwi (model_arg))))
428 : return;
429 : break;
430 : }
431 : default:
432 : break;
433 : }
434 :
435 : /* IFN_GOACC_LOOP calls are necessary in that they are used to
436 : represent parameter (i.e. step, bound) of a lowered OpenACC
437 : partitioned loop. But this kind of partitioned loop might not
438 : survive from aggressive loop removal for it has loop exit and
439 : is assumed to be finite. Therefore, we need to explicitly mark
440 : these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
441 31000095 : if (gimple_call_internal_p (call, IFN_GOACC_LOOP))
442 : {
443 27733 : mark_stmt_necessary (call, true);
444 27733 : return;
445 : }
446 : break;
447 : }
448 :
449 346993131 : case GIMPLE_DEBUG:
450 : /* Debug temps without a value are not useful. ??? If we could
451 : easily locate the debug temp bind stmt for a use thereof,
452 : would could refrain from marking all debug temps here, and
453 : mark them only if they're used. */
454 346993131 : if (gimple_debug_nonbind_marker_p (stmt)
455 268954435 : || !gimple_debug_bind_p (stmt)
456 267958756 : || gimple_debug_bind_has_value_p (stmt)
457 468406757 : || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
458 346615563 : mark_stmt_necessary (stmt, false);
459 : return;
460 :
461 1934 : case GIMPLE_GOTO:
462 1934 : gcc_assert (!simple_goto_p (stmt));
463 1934 : mark_stmt_necessary (stmt, true);
464 1934 : return;
465 :
466 31019173 : case GIMPLE_COND:
467 31019173 : gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
468 : /* Fall through. */
469 :
470 31166825 : case GIMPLE_SWITCH:
471 31166825 : if (! aggressive)
472 20686198 : mark_stmt_necessary (stmt, true);
473 : break;
474 :
475 161164259 : case GIMPLE_ASSIGN:
476 : /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
477 : do not prevail. That also makes control flow leading to them
478 : not necessary in aggressive mode. */
479 171110502 : if (gimple_clobber_p (stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
480 : return;
481 : break;
482 :
483 : default:
484 : break;
485 : }
486 :
487 : /* If the statement has volatile operands, it needs to be preserved.
488 : Same for statements that can alter control flow in unpredictable
489 : ways. */
490 222491993 : if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
491 : {
492 40006196 : mark_stmt_necessary (stmt, true);
493 40006196 : return;
494 : }
495 :
496 : /* If a statement could throw, it can be deemed necessary unless we
497 : are allowed to remove dead EH. Test this after checking for
498 : new/delete operators since we always elide their EH. */
499 182485797 : if (!cfun->can_delete_dead_exceptions
500 182485797 : && stmt_could_throw_p (cfun, stmt))
501 : {
502 5808347 : mark_stmt_necessary (stmt, true);
503 5808347 : return;
504 : }
505 :
506 220425150 : if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
507 220406141 : || stmt_may_clobber_global_p (stmt, false))
508 : {
509 13526338 : mark_stmt_necessary (stmt, true);
510 13526338 : return;
511 : }
512 :
513 : return;
514 : }
515 :
516 :
517 : /* Mark the last statement of BB as necessary. */
518 :
519 : static bool
520 42271908 : mark_last_stmt_necessary (basic_block bb)
521 : {
522 42271908 : if (!bitmap_set_bit (last_stmt_necessary, bb->index))
523 : return true;
524 :
525 16652056 : bitmap_set_bit (bb_contains_live_stmts, bb->index);
526 :
527 : /* We actually mark the statement only if it is a control statement. */
528 16652056 : gimple *stmt = *gsi_last_bb (bb);
529 16652056 : if (stmt && is_ctrl_stmt (stmt))
530 : {
531 10443171 : mark_stmt_necessary (stmt, true);
532 10443171 : return true;
533 : }
534 : return false;
535 : }
536 :
537 :
538 : /* Mark control dependent edges of BB as necessary. We have to do this only
539 : once for each basic block so we set the appropriate bit after we're done.
540 :
541 : When IGNORE_SELF is true, ignore BB in the list of control dependences. */
542 :
543 : static void
544 31457099 : mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
545 : {
546 31457099 : bitmap_iterator bi;
547 31457099 : unsigned edge_number;
548 31457099 : bool skipped = false;
549 :
550 31457099 : gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
551 :
552 31457099 : if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
553 3488059 : return;
554 :
555 68902497 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
556 : 0, edge_number, bi)
557 : {
558 40933457 : basic_block cd_bb = cd->get_edge_src (edge_number);
559 :
560 40933457 : if (ignore_self && cd_bb == bb)
561 : {
562 70402 : skipped = true;
563 70402 : continue;
564 : }
565 :
566 40863055 : if (!mark_last_stmt_necessary (cd_bb))
567 6206399 : mark_control_dependent_edges_necessary (cd_bb, false);
568 : }
569 :
570 27969040 : if (!skipped)
571 27898638 : bitmap_set_bit (visited_control_parents, bb->index);
572 : }
573 :
574 :
575 : /* Find obviously necessary statements. These are things like most function
576 : calls, and stores to file level variables.
577 :
578 : If EL is NULL, control statements are conservatively marked as
579 : necessary. Otherwise it contains the list of edges used by control
580 : dependence analysis. */
581 :
582 : static void
583 8056990 : find_obviously_necessary_stmts (bool aggressive)
584 : {
585 8056990 : basic_block bb;
586 8056990 : gimple_stmt_iterator gsi;
587 8056990 : edge e;
588 8056990 : gimple *phi, *stmt;
589 8056990 : int flags;
590 :
591 88907361 : FOR_EACH_BB_FN (bb, cfun)
592 : {
593 : /* PHI nodes are never inherently necessary. */
594 115917725 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
595 : {
596 35067354 : phi = gsi_stmt (gsi);
597 35067354 : gimple_set_plf (phi, STMT_NECESSARY, false);
598 : }
599 :
600 : /* Check all statements in the block. */
601 754557595 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
602 : {
603 592856853 : stmt = gsi_stmt (gsi);
604 592856853 : gimple_set_plf (stmt, STMT_NECESSARY, false);
605 592856853 : mark_stmt_if_obviously_necessary (stmt, aggressive);
606 : }
607 : }
608 :
609 : /* Pure and const functions are finite and thus have no infinite loops in
610 : them. */
611 8056990 : flags = flags_from_decl_or_type (current_function_decl);
612 8056990 : if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
613 951408 : return;
614 :
615 : /* Prevent the empty possibly infinite loops from being removed. This is
616 : needed to make the logic in remove_dead_stmt work to identify the
617 : correct edge to keep when removing a controlling condition. */
618 7105582 : if (aggressive)
619 : {
620 3299684 : if (mark_irreducible_loops ())
621 244763 : FOR_EACH_BB_FN (bb, cfun)
622 : {
623 239426 : edge_iterator ei;
624 602803 : FOR_EACH_EDGE (e, ei, bb->succs)
625 363377 : if ((e->flags & EDGE_DFS_BACK)
626 26401 : && (e->flags & EDGE_IRREDUCIBLE_LOOP))
627 : {
628 10390 : if (dump_file)
629 0 : fprintf (dump_file, "Marking back edge of irreducible "
630 0 : "loop %i->%i\n", e->src->index, e->dest->index);
631 10390 : mark_control_dependent_edges_necessary (e->dest, false);
632 : }
633 : }
634 :
635 11671925 : for (auto loop : loops_list (cfun, 0))
636 : /* For loops without an exit do not mark any condition. */
637 1772873 : if (loop->exits->next->e && !finite_loop_p (loop))
638 : {
639 269500 : if (dump_file)
640 1 : fprintf (dump_file, "cannot prove finiteness of loop %i\n",
641 : loop->num);
642 269500 : mark_control_dependent_edges_necessary (loop->latch, false);
643 3299684 : }
644 : }
645 : }
646 :
647 :
648 : /* Return true if REF is based on an aliased base, otherwise false. */
649 :
650 : static bool
651 95559335 : ref_may_be_aliased (tree ref)
652 : {
653 95559335 : if (TREE_CODE (ref) == WITH_SIZE_EXPR)
654 0 : ref = TREE_OPERAND (ref, 0);
655 171173612 : while (handled_component_p (ref))
656 75614277 : ref = TREE_OPERAND (ref, 0);
657 95559335 : if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
658 95559335 : && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
659 15392758 : ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
660 95559335 : return !(DECL_P (ref)
661 63633942 : && !may_be_aliased (ref));
662 : }
663 :
664 : static bitmap visited = NULL;
665 : static unsigned int longest_chain = 0;
666 : static unsigned int total_chain = 0;
667 : static unsigned int nr_walks = 0;
668 : static bool chain_ovfl = false;
669 :
670 : /* Worker for the walker that marks reaching definitions of REF,
671 : which is based on a non-aliased decl, necessary. It returns
672 : true whenever the defining statement of the current VDEF is
673 : a kill for REF, as no dominating may-defs are necessary for REF
674 : anymore. DATA points to the basic-block that contains the
675 : stmt that refers to REF. */
676 :
677 : static bool
678 39513868 : mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
679 : {
680 39513868 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
681 :
682 : /* All stmts we visit are necessary. */
683 39513868 : if (! gimple_clobber_p (def_stmt))
684 39347523 : mark_operand_necessary (vdef);
685 :
686 : /* If the stmt lhs kills ref, then we can stop walking. */
687 39513868 : if (gimple_has_lhs (def_stmt)
688 28681461 : && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
689 : /* The assignment is not necessarily carried out if it can throw
690 : and we can catch it in the current function where we could inspect
691 : the previous value.
692 : ??? We only need to care about the RHS throwing. For aggregate
693 : assignments or similar calls and non-call exceptions the LHS
694 : might throw as well. */
695 37254706 : && !stmt_can_throw_internal (cfun, def_stmt))
696 : {
697 24594685 : tree base, lhs = gimple_get_lhs (def_stmt);
698 24594685 : poly_int64 size, offset, max_size;
699 24594685 : bool reverse;
700 24594685 : ao_ref_base (ref);
701 24594685 : base
702 24594685 : = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
703 : /* We can get MEM[symbol: sZ, index: D.8862_1] here,
704 : so base == refd->base does not always hold. */
705 24594685 : if (base == ref->base)
706 : {
707 : /* For a must-alias check we need to be able to constrain
708 : the accesses properly. */
709 22570521 : if (known_eq (size, max_size)
710 22570521 : && known_subrange_p (ref->offset, ref->max_size, offset, size))
711 8371128 : return true;
712 : /* Or they need to be exactly the same. */
713 14200346 : else if (ref->ref
714 : /* Make sure there is no induction variable involved
715 : in the references (gcc.c-torture/execute/pr42142.c).
716 : The simplest way is to check if the kill dominates
717 : the use. */
718 : /* But when both are in the same block we cannot
719 : easily tell whether we came from a backedge
720 : unless we decide to compute stmt UIDs
721 : (see PR58246). */
722 14200346 : && (basic_block) data != gimple_bb (def_stmt)
723 6775867 : && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
724 6775867 : gimple_bb (def_stmt))
725 18397583 : && operand_equal_p (ref->ref, lhs, 0))
726 : return true;
727 : }
728 : }
729 :
730 : /* Otherwise keep walking. */
731 : return false;
732 : }
733 :
734 : static void
735 13285013 : mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
736 : {
737 : /* Should have been caught before calling this function. */
738 13285013 : gcc_checking_assert (!keep_all_vdefs_p ());
739 :
740 13285013 : unsigned int chain;
741 13285013 : ao_ref refd;
742 13285013 : gcc_assert (!chain_ovfl);
743 13285013 : ao_ref_init (&refd, ref);
744 26570026 : chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
745 : mark_aliased_reaching_defs_necessary_1,
746 13285013 : gimple_bb (stmt), NULL);
747 13285013 : if (chain > longest_chain)
748 2177858 : longest_chain = chain;
749 13285013 : total_chain += chain;
750 13285013 : nr_walks++;
751 13285013 : }
752 :
753 : /* Worker for the walker that marks reaching definitions of REF, which
754 : is not based on a non-aliased decl. For simplicity we need to end
755 : up marking all may-defs necessary that are not based on a non-aliased
756 : decl. The only job of this walker is to skip may-defs based on
757 : a non-aliased decl. */
758 :
759 : static bool
760 74024231 : mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
761 : tree vdef, void *data ATTRIBUTE_UNUSED)
762 : {
763 74024231 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
764 :
765 : /* We have to skip already visited (and thus necessary) statements
766 : to make the chaining work after we dropped back to simple mode. */
767 74024231 : if (chain_ovfl
768 74024231 : && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
769 : {
770 2575757 : gcc_assert (gimple_nop_p (def_stmt)
771 : || gimple_plf (def_stmt, STMT_NECESSARY));
772 : return false;
773 : }
774 :
775 : /* We want to skip stores to non-aliased variables. */
776 71448474 : if (!chain_ovfl
777 71448474 : && gimple_assign_single_p (def_stmt))
778 : {
779 47586467 : tree lhs = gimple_assign_lhs (def_stmt);
780 47586467 : if (!ref_may_be_aliased (lhs))
781 : return false;
782 : }
783 :
784 : /* We want to skip statments that do not constitute stores but have
785 : a virtual definition. */
786 59583424 : if (gcall *call = dyn_cast <gcall *> (def_stmt))
787 : {
788 23014251 : tree callee = gimple_call_fndecl (call);
789 23014251 : if (callee != NULL_TREE
790 23014251 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
791 4069162 : switch (DECL_FUNCTION_CODE (callee))
792 : {
793 : case BUILT_IN_MALLOC:
794 : case BUILT_IN_ALIGNED_ALLOC:
795 : case BUILT_IN_CALLOC:
796 : CASE_BUILT_IN_ALLOCA:
797 : case BUILT_IN_STRDUP:
798 : case BUILT_IN_STRNDUP:
799 : case BUILT_IN_FREE:
800 : case BUILT_IN_GOMP_ALLOC:
801 : case BUILT_IN_GOMP_FREE:
802 : return false;
803 :
804 : default:;
805 : }
806 :
807 22339405 : if (callee != NULL_TREE
808 21134372 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
809 20786137 : || DECL_IS_OPERATOR_DELETE_P (callee))
810 23301291 : && gimple_call_from_new_or_delete (call))
811 : return false;
812 21384624 : if (is_removable_cxa_atexit_call (call))
813 : return false;
814 : }
815 :
816 57953609 : if (! gimple_clobber_p (def_stmt))
817 53198053 : mark_operand_necessary (vdef);
818 :
819 : return false;
820 : }
821 :
822 : static void
823 69339265 : mark_all_reaching_defs_necessary (gimple *stmt)
824 : {
825 : /* Should have been caught before calling this function. */
826 69339265 : gcc_checking_assert (!keep_all_vdefs_p ());
827 138678530 : walk_aliased_vdefs (NULL, gimple_vuse (stmt),
828 : mark_all_reaching_defs_necessary_1, NULL, &visited);
829 69339265 : }
830 :
831 : /* Return true for PHI nodes with one or identical arguments
832 : can be removed. */
833 : static bool
834 19036115 : degenerate_phi_p (gimple *phi)
835 : {
836 19036115 : unsigned int i;
837 19036115 : tree op = gimple_phi_arg_def (phi, 0);
838 19970183 : for (i = 1; i < gimple_phi_num_args (phi); i++)
839 17261341 : if (gimple_phi_arg_def (phi, i) != op)
840 : return false;
841 : return true;
842 : }
843 :
844 : /* Return that NEW_CALL and DELETE_CALL are a valid pair of new
845 : and delete operators. */
846 :
847 : static bool
848 50838 : valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
849 : {
850 50838 : tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
851 50838 : tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
852 50838 : return valid_new_delete_pair_p (new_asm, delete_asm);
853 : }
854 :
/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   AGGRESSIVE is true when running control-dependence based DCE
   (CD-DCE); in conservative mode control dependences are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statement of the basic blocks on which the block
	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (visited_control_parents, bb->index))
	    mark_control_dependent_edges_necessary (bb, false);
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && !virtual_operand_p (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  gphi *phi = as_a <gphi *> (stmt);
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
	    {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  /* For PHI operands it matters from where the control flow arrives
	     to the BB.  Consider the following example:

	     a=exp1;
	     b=exp2;
	     if (test)
		;
	     else
		;
	     c=PHI(a,b)

	     We need to mark control dependence of the empty basic blocks, since they
	     contain computation of PHI operands.

	     Doing so is too restrictive in the case the predecessor block is in
	     the loop.  Consider:

	     if (b)
	       {
		 int i;
		 for (i = 0; i<1000; ++i)
		   ;
		 j = 0;
	       }
	     return j;

	     There is PHI for J in the BB containing return statement.
	     In this case the control dependence of predecessor block (that is
	     within the empty loop) also contains the block determining number
	     of iterations of the block that would prevent removing of empty
	     loop in this case.

	     This scenario can be avoided by splitting critical edges.
	     To save the critical edge splitting pass we identify how the control
	     dependence would look like if the edge was split.

	     Consider the modified CFG created from current CFG by splitting
	     edge B->C.  In the postdominance tree of modified CFG, C' is
	     always child of C.  There are two cases how children of C' can look
	     like:

	     1) C' is leaf

		In this case the only basic block C' is control dependent on is B.

	     2) C' has single child that is B

		In this case control dependence of C' is same as control
		dependence of B in original CFG except for block B itself.
		(since C' postdominate B in modified CFG)

	     Now how to decide what case happens?  There are two basic options:

	     a) C postdominate B.  Then C immediately postdominate B and
		case 2 happens iff there is no other way from B to C except
		the edge B->C.

		There is other way from B to C iff there is successor of B that
		is not postdominated by B.  Testing this condition is somewhat
		expensive, because we need to iterate all successors of B.
		We are safe to assume that this does not happen: we will mark B
		as needed when processing the other path from B to C that is
		control dependent on B and marking control dependencies of B
		itself is harmless because they will be processed anyway after
		processing control statement in B.

	     b) C does not postdominate B.  Always case 1 happens since there is
		path from C to exit that does not go through B and thus also C'.  */

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

		  if (gimple_bb (stmt)
		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
		      /* Case b) above: mark the control statement of
			 ARG_BB itself; if it is already necessary fall
			 back to marking its control dependences.  */
		      if (!mark_last_stmt_necessary (arg_bb))
			mark_control_dependent_edges_necessary (arg_bb, false);
		    }
		  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			   && !bitmap_bit_p (visited_control_parents,
					     arg_bb->index))
		    mark_control_dependent_edges_necessary (arg_bb, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  /* If this is a call to free which is directly fed by an
	     allocation function do not mark that necessary through
	     processing the argument.  */
	  bool is_delete_operator
	    = (is_gimple_call (stmt)
	       && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
	       && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
	  if (is_delete_operator
	      || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
	      || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      gcall *def_stmt;
	      /* If the pointer we free is defined by an allocation
		 function do not add the call to the worklist.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
		  && is_removable_allocation_p (def_stmt, false))
		{
		  /* A mismatched new/delete pair still needs its pointer
		     argument kept live.  */
		  if (is_delete_operator
		      && !valid_new_delete_pair_p (def_stmt, stmt))
		    mark_operand_necessary (gimple_call_arg (stmt, 0));

		  /* Delete operators can have alignment and (or) size
		     as next arguments.  When being a SSA_NAME, they
		     must be marked as necessary.  Similarly GOMP_free.  */
		  if (gimple_call_num_args (stmt) >= 2)
		    for (unsigned i = 1; i < gimple_call_num_args (stmt);
			 i++)
		      {
			tree arg = gimple_call_arg (stmt, i);
			if (TREE_CODE (arg) == SSA_NAME)
			  mark_operand_necessary (arg);
		      }

		  continue;
		}
	    }

	  if (checks_return_value_of_removable_allocation_p (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* No need to search for vdefs if we intrinsically keep them all.  */
	  if (keep_all_vdefs_p ())
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
		 based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, but non-aliased ones which we handle
	     in 1).  By keeping a global visited bitmap for references
	     we walk for 2) we avoid quadratic behavior for those.  */

	  if (gcall *call = dyn_cast <gcall *> (stmt))
	    {
	      tree callee = gimple_call_fndecl (call);
	      unsigned i;

	      /* Calls to functions that are merely acting as the new/delete
		 operators are never necessary through their VUSE.  */
	      if (callee != NULL_TREE
		  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
		      || DECL_IS_OPERATOR_DELETE_P (callee))
		  && gimple_call_from_new_or_delete (call))
		continue;

	      if (is_removable_cxa_atexit_call (call))
		continue;

	      bool all_refs = false;
	      /* Calls implicitly load from memory, their arguments
		 in addition may explicitly perform memory loads.  */
	      for (i = 0; i < gimple_call_num_args (call); ++i)
		{
		  tree arg = gimple_call_arg (call, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (call, arg);
		  else
		    all_refs = true;
		}

	      if (!all_refs && ipa_modref_callee_reads_no_memory_p (call))
		continue;
	      mark_all_reaching_defs_necessary (call);
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      tree rhs = gimple_return_retval (return_stmt);
	      /* A return statement may perform a load.  */
	      if (rhs
		  && TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs)
		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
	    {
	      /* The beginning of a transaction is a memory barrier.  */
	      /* ??? If we were really cool, we'd only be a barrier
		 for the memories touched within the transaction.  */
	      mark_all_reaching_defs_necessary (stmt);
	    }
	  else
	    gcc_unreachable ();

	  /* If we over-used our alias oracle budget drop to simple
	     mode.  The cost metric allows quadratic behavior
	     (number of uses times number of may-defs queries) up to
	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}
1193 :
/* Remove dead PHI nodes from block BB.  Returns true if any PHI node
   was actually deleted.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
	 very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (!loops_state_satisfies_p (LOOP_CLOSED_SSA)
	      && degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      /* Redirect every use of the PHI result to the single
		 incoming virtual operand; the PHI then becomes
		 unnecessary and is removed below.  */
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple *use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  /* remove_phi_node advances GSI, so do not gsi_next here.  */
	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}
1253 :
1254 :
/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  Edges that become
   removable are not deleted here but queued in TO_REMOVE_EDGES so the
   caller can remove them after all statements are processed.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
		  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to update
     the CFG.  We can choose any of edges out of BB as long as we are sure to
     not close infinite loops.  This is done by always choosing the edge closer
     to exit in inverted_rev_post_order_compute order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
	e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to bb with live statement in it.
	 To be able to choose one, we compute inverted post order starting from
	 all BBs with live statements.  */
      if (!e)
	{
	  /* The postorder cache is computed lazily on the first removed
	     control statement and shared across calls; it is freed by the
	     caller at the end of the pass.  */
	  if (!bb_postorder)
	    {
	      int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      int n = inverted_rev_post_order_compute (cfun, rpo,
						       &bb_contains_live_stmts);
	      bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
	      for (int i = 0; i < n; ++i)
		bb_postorder[rpo[i]] = i;
	      free (rpo);
	    }
	  FOR_EACH_EDGE (e2, ei, bb->succs)
	    if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
		|| bb_postorder [e->dest->index]
		   >= bb_postorder [e2->dest->index])
	      e = e2;
	}
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.
	 We are also safe to drop EH/ABNORMAL flags and turn them into
	 normal control flow, because we know that all the destinations
	 (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
	if (e != e2)
	  {
	    /* If we made a BB unconditionally exit a loop or removed
	       an entry into an irreducible region, then this transform
	       alters the set of BBs in the loop.  Schedule a fixup.  */
	    if (loop_exit_edge_p (bb->loop_father, e)
		|| (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
	      loops_state_set (LOOPS_NEED_FIXUP);
	    to_remove_edges.safe_push (e2);
	  }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
	  && !DECL_IGNORED_P (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs))
	  && !is_global_var (lhs)
	  && !DECL_HAS_VALUE_EXPR_P (lhs))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  gdebug *note
	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
	  gsi_insert_after (i, note, GSI_SAME_STMT);
	}
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
1358 :
1359 : /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1360 : uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1361 :
1362 : static tree
1363 26 : find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1364 : {
1365 26 : if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1366 0 : *walk_subtrees = 0;
1367 26 : if (*tp == (tree) data)
1368 13 : return *tp;
1369 : return NULL_TREE;
1370 : }
1371 :
/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  GSI points at the internal-function call; SUBCODE is the plain
   arithmetic code (PLUS_EXPR, MINUS_EXPR or MULT_EXPR) to use instead.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
			       enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  /* Classify all immediate uses of the complex result: debug binds,
     REALPART_EXPR extractions, and anything else (which blocks the
     transformation, e.g. an IMAGPART_EXPR overflow check).  */
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
	has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
	       && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
	       && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
	has_realpart_uses = true;
      else
	{
	  has_other_uses = true;
	  break;
	}
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  /* Build the replacement: compute in the corresponding unsigned type
     (where wrap-around is well defined) and convert back.  */
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = unsigned_type_for (type);
  tree result = fold_build2_loc (loc, subcode, utype,
				 fold_convert_loc (loc, utype, arg0),
				 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      /* Reset debug binds that reference LHS other than through a
	 REALPART_EXPR; they cannot be rewritten in terms of the plain
	 arithmetic result.  */
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
	{
	  if (!gimple_debug_bind_p (use_stmt))
	    continue;
	  tree v = gimple_debug_bind_get_value (use_stmt);
	  if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	}
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  /* Repackage the value as a complex with a zero imaginary (overflow)
     part so existing REALPART_EXPR uses keep working unchanged.  */
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
			 ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimplify_and_update_call_from_tree (gsi, result);
}
1458 :
/* Returns whether the control parents of BB are preserved, i.e. whether
   every control statement BB is control dependent on has been marked
   necessary.  Used during CD-DCE to decide if a clobber in BB may be
   kept.  */

static bool
control_parents_preserved_p (basic_block bb)
{
  /* If we marked the control parents from BB they are preserved.  */
  if (bitmap_bit_p (visited_control_parents, bb->index))
    return true;

  /* But they can also end up being marked from elsewhere.  Check each
     edge BB is control dependent on for a necessary last statement in
     its source block.  */
  bitmap_iterator bi;
  unsigned edge_number;
  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);
      if (cd_bb != bb
	  && !bitmap_bit_p (last_stmt_necessary, cd_bb->index))
	return false;
    }
  /* And cache the result.  */
  bitmap_set_bit (visited_control_parents, bb->index);
  return true;
}
1483 :
/* If basic block is empty, we can remove conditionals that controls
   its execution.  However in some cases the empty BB can stay live
   (such as when it was a header of empty loop).  In this case we
   need to update its count.  Since regions of dead BBs are acyclic
   we simply propagate counts forward from live BBs to dead ones,
   processing each dead BB once all its dead predecessors are done
   (a topological order maintained via per-block pending counters).  */

static void
propagate_counts ()
{
  basic_block bb;
  auto_vec<basic_block, 16> queue;
  /* Maps each dead BB to the number of dead predecessors whose counts
     have not been finalized yet.  */
  hash_map <basic_block, int> cnt;

  FOR_EACH_BB_FN (bb, cfun)
    if (!bitmap_bit_p (bb_contains_live_stmts, bb->index))
      {
	int n = 0;
	for (edge e : bb->preds)
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && !bitmap_bit_p (bb_contains_live_stmts, e->src->index))
	    n++;
	/* Dead BBs whose predecessors are all live (or entry) can be
	   processed immediately.  */
	if (!n)
	  queue.safe_push (bb);
	cnt.put (bb, n);
      }
  while (!queue.is_empty ())
    {
      basic_block bb = queue.pop ();
      profile_count sum = profile_count::zero ();

      for (edge e : bb->preds)
	{
	  sum += e->count ();
	  /* All predecessors must already have final counts.  */
	  gcc_checking_assert (!cnt.get (e->src));
	}
      /* If we have partial profile and some counts of incoming edges are
	 unknown, it is probably better to keep the existing count.
	 We could also propagate bi-directionally.  */
      if (sum.initialized_p () && !(sum == bb->count))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Updating count of empty bb %i from ",
		       bb->index);
	      bb->count.dump (dump_file);
	      fprintf (dump_file, " to ");
	      sum.dump (dump_file);
	      fprintf (dump_file, "\n");
	    }
	  bb->count = sum;
	}
      cnt.remove (bb);
      /* Release successors whose last pending predecessor this was.  */
      for (edge e : bb->succs)
	if (int *n = cnt.get (e->dest))
	  {
	    (*n)--;
	    if (!*n)
	      queue.safe_push (e->dest);
	  }
    }
  /* Do not check that all blocks have been processed, since for
     empty infinite loops this is not the case.  */
}
1547 :
1548 : /* Eliminate unnecessary statements. Any instruction not marked as necessary
1549 : contributes nothing to the program, and can be deleted. */
1550 :
1551 : static bool
1552 8056990 : eliminate_unnecessary_stmts (bool aggressive)
1553 : {
1554 8056990 : bool something_changed = false;
1555 8056990 : basic_block bb;
1556 8056990 : gimple_stmt_iterator gsi, psi;
1557 8056990 : gimple *stmt;
1558 8056990 : auto_vec<edge> to_remove_edges;
1559 :
1560 8056990 : if (dump_file && (dump_flags & TDF_DETAILS))
1561 208 : fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1562 :
1563 8056990 : bool had_setjmp = cfun->calls_setjmp;
1564 8056990 : clear_special_calls ();
1565 :
1566 : /* Walking basic blocks and statements in reverse order avoids
1567 : releasing SSA names before any other DEFs that refer to them are
1568 : released. This helps avoid loss of debug information, as we get
1569 : a chance to propagate all RHSs of removed SSAs into debug uses,
1570 : rather than only the latest ones. E.g., consider:
1571 :
1572 : x_3 = y_1 + z_2;
1573 : a_5 = x_3 - b_4;
1574 : # DEBUG a => a_5
1575 :
1576 : If we were to release x_3 before a_5, when we reached a_5 and
1577 : tried to substitute it into the debug stmt, we'd see x_3 there,
1578 : but x_3's DEF, type, etc would have already been disconnected.
1579 : By going backwards, the debug stmt first changes to:
1580 :
1581 : # DEBUG a => x_3 - b_4
1582 :
1583 : and then to:
1584 :
1585 : # DEBUG a => y_1 + z_2 - b_4
1586 :
1587 : as desired. */
1588 8056990 : gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1589 8056990 : auto_vec<basic_block> h;
1590 8056990 : h = get_all_dominated_blocks (CDI_DOMINATORS,
1591 16113980 : single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1592 :
1593 88907471 : while (h.length ())
1594 : {
1595 80850481 : bb = h.pop ();
1596 :
1597 : /* Remove dead statements. */
1598 80850481 : auto_bitmap debug_seen;
1599 80850481 : hash_set<int_hash <location_t, 0>> locs_seen;
1600 754557815 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1601 : {
1602 592856853 : stmt = gsi_stmt (gsi);
1603 :
1604 592856853 : psi = gsi;
1605 592856853 : gsi_prev (&psi);
1606 :
1607 592856853 : stats.total++;
1608 :
1609 : /* We can mark a call to free as not necessary if the
1610 : defining statement of its argument is not necessary
1611 : (and thus is getting removed). */
1612 592856853 : if (gimple_plf (stmt, STMT_NECESSARY)
1613 592856853 : && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1614 590100403 : || (is_gimple_call (stmt)
1615 36722245 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
1616 1256819 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
1617 : {
1618 1200661 : tree ptr = gimple_call_arg (stmt, 0);
1619 1200661 : if (TREE_CODE (ptr) == SSA_NAME)
1620 : {
1621 1199758 : gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1622 1199758 : if (!gimple_nop_p (def_stmt)
1623 1199758 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1624 6366 : gimple_set_plf (stmt, STMT_NECESSARY, false);
1625 : }
1626 : }
1627 : /* Conditional checking that return value of allocation is non-NULL
1628 : can be turned to constant if the allocation itself
1629 : is unnecesary. */
1630 592856853 : if (gimple_plf (stmt, STMT_NECESSARY)
1631 590408885 : && gimple_code (stmt) == GIMPLE_COND
1632 623838410 : && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
1633 : {
1634 30977437 : gimple *def_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
1635 30977437 : if (!gimple_nop_p (def_stmt)
1636 30977437 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1637 : {
1638 3135 : gcc_checking_assert
1639 : (checks_return_value_of_removable_allocation_p (stmt));
1640 3135 : gimple_cond_set_lhs (as_a <gcond *>(stmt),
1641 : build_one_cst
1642 3135 : (TREE_TYPE (gimple_cond_rhs (stmt))));
1643 3135 : update_stmt (stmt);
1644 : }
1645 : }
1646 :
1647 : /* If GSI is not necessary then remove it. */
1648 592856853 : if (!gimple_plf (stmt, STMT_NECESSARY))
1649 : {
1650 : /* Keep clobbers that we can keep live live. */
1651 2447968 : if (gimple_clobber_p (stmt))
1652 : {
1653 849522 : ssa_op_iter iter;
1654 849522 : use_operand_p use_p;
1655 849522 : bool dead = false;
1656 1696518 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1657 : {
1658 849522 : tree name = USE_FROM_PTR (use_p);
1659 849522 : if (!SSA_NAME_IS_DEFAULT_DEF (name)
1660 849522 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1661 : {
1662 : dead = true;
1663 : break;
1664 : }
1665 : }
1666 1693459 : if (!dead
1667 : /* When doing CD-DCE we have to ensure all controls
1668 : of the stmt are still live. */
1669 849522 : && (!aggressive || control_parents_preserved_p (bb)))
1670 : {
1671 843937 : bitmap_clear (debug_seen);
1672 843937 : continue;
1673 : }
1674 : }
1675 1604031 : if (!is_gimple_debug (stmt))
1676 1226463 : something_changed = true;
1677 1604031 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1678 1604031 : continue;
1679 1604031 : }
1680 590408885 : else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1681 : {
1682 37030727 : tree name = gimple_call_lhs (call_stmt);
1683 :
1684 37030727 : notice_special_calls (call_stmt);
1685 :
1686 : /* When LHS of var = call (); is dead, simplify it into
1687 : call (); saving one operand. */
1688 37030727 : if (name
1689 14404630 : && TREE_CODE (name) == SSA_NAME
1690 12137027 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1691 : /* Avoid doing so for allocation calls which we
1692 : did not mark as necessary, it will confuse the
1693 : special logic we apply to malloc/free pair removal. */
1694 37095009 : && !is_removable_allocation_p (call_stmt, false))
1695 : {
1696 64191 : something_changed = true;
1697 64191 : if (dump_file && (dump_flags & TDF_DETAILS))
1698 : {
1699 1 : fprintf (dump_file, "Deleting LHS of call: ");
1700 1 : print_gimple_stmt (dump_file, call_stmt, 0, TDF_SLIM);
1701 1 : fprintf (dump_file, "\n");
1702 : }
1703 :
1704 64191 : gimple_call_set_lhs (call_stmt, NULL_TREE);
1705 64191 : maybe_clean_or_replace_eh_stmt (call_stmt, call_stmt);
1706 64191 : update_stmt (call_stmt);
1707 64191 : release_ssa_name (name);
1708 :
1709 : /* __builtin_stack_save without lhs is not needed. */
1710 64191 : if (gimple_call_builtin_p (call_stmt, BUILT_IN_STACK_SAVE))
1711 37 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1712 : /* GOMP_SIMD_LANE (unless three argument) or ASAN_POISON
1713 : without lhs is not needed. */
1714 64154 : else if (gimple_call_internal_p (call_stmt))
1715 7532 : switch (gimple_call_internal_fn (call_stmt))
1716 : {
1717 762 : case IFN_GOMP_SIMD_LANE:
1718 762 : if (gimple_call_num_args (call_stmt) >= 3
1719 800 : && !integer_nonzerop
1720 38 : (gimple_call_arg (call_stmt, 2)))
1721 : break;
1722 : /* FALLTHRU */
1723 1012 : case IFN_ASAN_POISON:
1724 1012 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1725 1012 : break;
1726 : default:
1727 : break;
1728 : }
1729 : }
1730 36966536 : else if (gimple_call_internal_p (call_stmt))
1731 981389 : switch (gimple_call_internal_fn (call_stmt))
1732 : {
1733 87061 : case IFN_ADD_OVERFLOW:
1734 87061 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1735 87061 : break;
1736 95458 : case IFN_SUB_OVERFLOW:
1737 95458 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1738 95458 : break;
1739 96683 : case IFN_MUL_OVERFLOW:
1740 96683 : maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1741 96683 : break;
1742 25604 : case IFN_UADDC:
1743 25604 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1744 2475 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1745 : break;
1746 14661 : case IFN_USUBC:
1747 14661 : if (integer_zerop (gimple_call_arg (call_stmt, 2)))
1748 2004 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1749 : break;
1750 : default:
1751 : break;
1752 : }
1753 : }
1754 553378158 : else if (gimple_debug_bind_p (stmt))
1755 : {
1756 : /* We are only keeping the last debug-bind of a
1757 : non-DEBUG_EXPR_DECL variable in a series of
1758 : debug-bind stmts. */
1759 267581188 : tree var = gimple_debug_bind_get_var (stmt);
1760 267581188 : if (TREE_CODE (var) != DEBUG_EXPR_DECL
1761 267581188 : && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1762 6581270 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1763 267581188 : continue;
1764 267581188 : }
1765 285796970 : else if (gimple_debug_begin_stmt_p (stmt))
1766 : {
1767 : /* We are only keeping the last debug-begin in a series of
1768 : debug-begin stmts. */
1769 27195328 : if (locs_seen.add (gimple_location (stmt)))
1770 20665 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1771 27195328 : continue;
1772 : }
1773 295632369 : locs_seen.empty ();
1774 295632369 : bitmap_clear (debug_seen);
1775 : }
1776 :
1777 : /* Remove dead PHI nodes. */
1778 80850481 : something_changed |= remove_dead_phis (bb);
1779 80850481 : }
1780 :
1781 : /* First remove queued edges. */
1782 8056990 : if (!to_remove_edges.is_empty ())
1783 : {
1784 : /* Remove edges. We've delayed this to not get bogus debug stmts
1785 : during PHI node removal. */
1786 57246 : for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1787 37664 : remove_edge (to_remove_edges[i]);
1788 19582 : cfg_altered = true;
1789 : }
1790 : /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1791 : ??? We'd like to assert that setjmp calls do not pop out of nothing
1792 : but we currently lack a per-stmt way of noting whether a call was
1793 : recognized as returns-twice (or rather receives-control). */
1794 8056990 : if (!cfun->calls_setjmp && had_setjmp)
1795 : {
1796 : /* Make sure we only remove the edges, not dominated blocks. Using
1797 : gimple_purge_dead_abnormal_call_edges would do that and we
1798 : cannot free dominators yet. */
1799 3757 : FOR_EACH_BB_FN (bb, cfun)
1800 9678 : if (gcall *stmt = safe_dyn_cast <gcall *> (*gsi_last_bb (bb)))
1801 2080 : if (!stmt_can_make_abnormal_goto (stmt))
1802 : {
1803 736 : edge_iterator ei;
1804 736 : edge e;
1805 994 : for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1806 : {
1807 258 : if (e->flags & EDGE_ABNORMAL)
1808 : {
1809 112 : if (e->flags & EDGE_FALLTHRU)
1810 0 : e->flags &= ~EDGE_ABNORMAL;
1811 : else
1812 112 : remove_edge (e);
1813 112 : cfg_altered = true;
1814 : }
1815 : else
1816 146 : ei_next (&ei);
1817 : }
1818 : }
1819 : }
1820 :
1821 : /* Now remove the unreachable blocks. */
1822 8056990 : if (cfg_altered)
1823 : {
1824 19623 : basic_block prev_bb;
1825 :
1826 19623 : find_unreachable_blocks ();
1827 :
1828 : /* Delete all unreachable basic blocks in reverse dominator order. */
1829 19623 : for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1830 736068 : bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1831 : {
1832 716445 : prev_bb = bb->prev_bb;
1833 :
1834 716445 : if ((bb_contains_live_stmts
1835 716399 : && !bitmap_bit_p (bb_contains_live_stmts, bb->index))
1836 1243497 : || !(bb->flags & BB_REACHABLE))
1837 : {
1838 : /* Since we don't track liveness of virtual PHI nodes, it is
1839 : possible that we rendered some PHI nodes unreachable while
1840 : they are still in use. Mark them for renaming. */
1841 206815 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1842 17421 : gsi_next (&gsi))
1843 52261 : if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1844 : {
1845 17419 : bool found = false;
1846 17419 : imm_use_iterator iter;
1847 :
1848 35019 : FOR_EACH_IMM_USE_STMT (stmt, iter,
1849 : gimple_phi_result (gsi.phi ()))
1850 : {
1851 16809 : if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1852 155 : continue;
1853 16654 : if (gimple_code (stmt) == GIMPLE_PHI
1854 16654 : || gimple_plf (stmt, STMT_NECESSARY))
1855 : {
1856 : found = true;
1857 : break;
1858 : }
1859 17419 : }
1860 17419 : if (found)
1861 16628 : mark_virtual_phi_result_for_renaming (gsi.phi ());
1862 : }
1863 :
1864 189394 : if (!(bb->flags & BB_REACHABLE))
1865 : {
1866 : /* Speed up the removal of blocks that don't
1867 : dominate others. Walking backwards, this should
1868 : be the common case. ??? Do we need to recompute
1869 : dominators because of cfg_altered? */
1870 41770 : if (!first_dom_son (CDI_DOMINATORS, bb))
1871 41258 : delete_basic_block (bb);
1872 : else
1873 : {
1874 512 : h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1875 :
1876 2214 : while (h.length ())
1877 : {
1878 1702 : bb = h.pop ();
1879 1702 : prev_bb = bb->prev_bb;
1880 : /* Rearrangements to the CFG may have failed
1881 : to update the dominators tree, so that
1882 : formerly-dominated blocks are now
1883 : otherwise reachable. */
1884 1702 : if (!!(bb->flags & BB_REACHABLE))
1885 0 : continue;
1886 1702 : delete_basic_block (bb);
1887 : }
1888 :
1889 512 : h.release ();
1890 : }
1891 : }
1892 : }
1893 : }
1894 19623 : if (bb_contains_live_stmts)
1895 19615 : propagate_counts ();
1896 : }
1897 :
1898 8056990 : if (bb_postorder)
1899 19582 : free (bb_postorder);
1900 8056990 : bb_postorder = NULL;
1901 :
1902 8056990 : return something_changed;
1903 8056990 : }
1904 :
1905 :
1906 : /* Print out removed statement statistics. */
1907 :
1908 : static void
1909 214 : print_stats (void)
1910 : {
1911 214 : float percg;
1912 :
1913 214 : percg = ((float) stats.removed / (float) stats.total) * 100;
1914 214 : fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1915 : stats.removed, stats.total, (int) percg);
1916 :
1917 214 : if (stats.total_phis == 0)
1918 : percg = 0;
1919 : else
1920 41 : percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1921 :
1922 214 : fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1923 : stats.removed_phis, stats.total_phis, (int) percg);
1924 214 : }
1925 :
1926 : /* Initialization for this pass. Set up the used data structures. */
1927 :
1928 : static void
1929 8056990 : tree_dce_init (bool aggressive)
1930 : {
1931 8056990 : memset ((void *) &stats, 0, sizeof (stats));
1932 :
1933 8056990 : if (aggressive)
1934 : {
1935 3489609 : last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1936 3489609 : bitmap_clear (last_stmt_necessary);
1937 3489609 : bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1938 3489609 : bitmap_clear (bb_contains_live_stmts);
1939 : }
1940 :
1941 16113980 : processed = sbitmap_alloc (num_ssa_names + 1);
1942 8056990 : bitmap_clear (processed);
1943 :
1944 8056990 : worklist.create (64);
1945 8056990 : cfg_altered = false;
1946 8056990 : }
1947 :
1948 : /* Cleanup after this pass. */
1949 :
1950 : static void
1951 8056990 : tree_dce_done (bool aggressive)
1952 : {
1953 8056990 : if (aggressive)
1954 : {
1955 3489609 : delete cd;
1956 3489609 : sbitmap_free (visited_control_parents);
1957 3489609 : sbitmap_free (last_stmt_necessary);
1958 3489609 : sbitmap_free (bb_contains_live_stmts);
1959 3489609 : bb_contains_live_stmts = NULL;
1960 : }
1961 :
1962 8056990 : sbitmap_free (processed);
1963 :
1964 8056990 : worklist.release ();
1965 8056990 : }
1966 :
1967 :
1968 : /* Main routine to eliminate dead code.
1969 :
1970 : AGGRESSIVE controls the aggressiveness of the algorithm.
1971 : In conservative mode, we ignore control dependence and simply declare
1972 : all but the most trivially dead branches necessary. This mode is fast.
1973 : In aggressive mode, control dependences are taken into account, which
1974 : results in more dead code elimination, but at the cost of some time. */
1975 :
1976 : static unsigned int
1977 8056990 : perform_tree_ssa_dce (bool aggressive)
1978 : {
1979 8056990 : bool something_changed = 0;
1980 8056990 : unsigned todo = 0;
1981 :
1982 : /* Preheaders are needed for SCEV to work.
1983 : Simple lateches and recorded exits improve chances that loop will
1984 : proved to be finite in testcases such as in loop-15.c and loop-24.c */
1985 8056990 : bool in_loop_pipeline = scev_initialized_p ();
1986 8056990 : if (aggressive && ! in_loop_pipeline)
1987 : {
1988 3266213 : loop_optimizer_init (LOOPS_NORMAL
1989 : | LOOPS_HAVE_RECORDED_EXITS);
1990 3266213 : scev_initialize ();
1991 : }
1992 :
1993 8056990 : if (aggressive && make_forwarders_with_degenerate_phis (cfun))
1994 : todo |= TODO_cleanup_cfg;
1995 :
1996 8056990 : calculate_dominance_info (CDI_DOMINATORS);
1997 :
1998 8056990 : tree_dce_init (aggressive);
1999 :
2000 8056990 : if (aggressive)
2001 : {
2002 : /* Compute control dependence. */
2003 3489609 : calculate_dominance_info (CDI_POST_DOMINATORS);
2004 3489609 : cd = new control_dependences ();
2005 :
2006 6979218 : visited_control_parents =
2007 3489609 : sbitmap_alloc (last_basic_block_for_fn (cfun));
2008 3489609 : bitmap_clear (visited_control_parents);
2009 :
2010 3489609 : mark_dfs_back_edges ();
2011 : }
2012 :
2013 8056990 : find_obviously_necessary_stmts (aggressive);
2014 :
2015 8056990 : if (aggressive && ! in_loop_pipeline)
2016 : {
2017 3266213 : scev_finalize ();
2018 3266213 : loop_optimizer_finalize ();
2019 : }
2020 :
2021 8056990 : longest_chain = 0;
2022 8056990 : total_chain = 0;
2023 8056990 : nr_walks = 0;
2024 8056990 : chain_ovfl = false;
2025 8056990 : visited = BITMAP_ALLOC (NULL);
2026 8056990 : propagate_necessity (aggressive);
2027 8056990 : BITMAP_FREE (visited);
2028 :
2029 8056990 : something_changed |= eliminate_unnecessary_stmts (aggressive);
2030 8056990 : something_changed |= cfg_altered;
2031 :
2032 : /* We do not update postdominators, so free them unconditionally. */
2033 8056990 : free_dominance_info (CDI_POST_DOMINATORS);
2034 :
2035 : /* If we removed paths in the CFG, then we need to update
2036 : dominators as well. I haven't investigated the possibility
2037 : of incrementally updating dominators. */
2038 8056990 : if (cfg_altered)
2039 19623 : free_dominance_info (CDI_DOMINATORS);
2040 :
2041 8056990 : statistics_counter_event (cfun, "Statements deleted", stats.removed);
2042 8056990 : statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
2043 :
2044 : /* Debugging dumps. */
2045 8056990 : if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
2046 214 : print_stats ();
2047 :
2048 8056990 : tree_dce_done (aggressive);
2049 :
2050 8056990 : if (something_changed)
2051 : {
2052 815087 : free_numbers_of_iterations_estimates (cfun);
2053 815087 : if (in_loop_pipeline)
2054 65506 : scev_reset ();
2055 : todo |= TODO_update_ssa | TODO_cleanup_cfg;
2056 : }
2057 8056990 : return todo;
2058 : }
2059 :
2060 : namespace {
2061 :
/* Pass descriptor for the conservative (non-control-dependent) DCE
   pass "dce".  */
const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2074 :
/* Common base class for the "dce" and "cddce" passes.  Carries the two
   boolean pass parameters (set through set_pass_param) that make
   execute_dce additionally request TODO_update_address_taken and/or
   TODO_remove_unused_locals from the pass manager.  */

class pass_dce_base : public gimple_opt_pass
{
public:
  /* opt_pass methods: */
  bool gate (function *) final override { return flag_tree_dce != 0; }
  /* Parameter 0 requests updating of address-taken flags, parameter 1
     removal of unused locals.  */
  void set_pass_param (unsigned n, bool param) final override
  {
    gcc_assert (n == 0 || n == 1);
    if (n == 0)
      update_address_taken_p = param;
    else if (n == 1)
      remove_unused_locals_p = param;
  }

protected:
  pass_dce_base (const pass_data &data, gcc::context *ctxt)
    : gimple_opt_pass (data, ctxt)
  {}
  /* Run DCE and fold the configured extra TODO flags into its
     result.  */
  unsigned int execute_dce (function *, bool aggressive)
  {
    return (perform_tree_ssa_dce (aggressive)
	    | (remove_unused_locals_p ? TODO_remove_unused_locals : 0)
	    | (update_address_taken_p ? TODO_update_address_taken : 0));
  }

private:
  bool update_address_taken_p = false;
  bool remove_unused_locals_p = false;
}; // class pass_dce_base
2104 :
2105 :
/* The conservative flavor of DCE: runs perform_tree_ssa_dce with
   aggressive == false, i.e. without control-dependence analysis.  */

class pass_dce : public pass_dce_base
{
public:
  pass_dce (gcc::context *ctxt)
    : pass_dce_base (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_dce (m_ctxt); }
  unsigned int execute (function *func) final override
  {
    return execute_dce (func, /*aggressive=*/false);
  }

}; // class pass_dce
2121 :
2122 : } // anon namespace
2123 :
/* Factory function for the conservative DCE pass.  */

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}
2129 :
2130 : namespace {
2131 :
/* Pass descriptor for the control-dependence-based DCE pass
   "cddce".  */
const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2144 :
/* The control-dependent flavor of DCE: aggressive mode is enabled at
   -O2 and above, otherwise it degrades to the conservative
   algorithm.  */

class pass_cd_dce : public pass_dce_base
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : pass_dce_base (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
  unsigned int execute (function *func) final override
  {
    return execute_dce (func, /*aggressive=*/optimize >= 2);
  }

}; // class pass_cd_dce
2160 :
2161 : } // anon namespace
2162 :
/* Factory function for the control-dependence-based DCE pass.  */

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}
2168 :
2169 :
2170 : /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
2171 : is consumed by this function. The function has linear complexity in
2172 : the number of dead stmts with a constant factor like the average SSA
2173 : use operands number. */
2174 :
void
simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
{
  int phiremoved = 0;
  int stmtremoved = 0;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_clear_first_set_bit (worklist);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.
	 Note use in itself for a phi node is not counted as still in use.  */
      if (!def)
	continue;
      if (!has_zero_uses (def))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (def);

	  /* A non-PHI with remaining real uses is live; for a PHI we
	     still check below whether all uses are the PHI itself
	     (a self-referencing, otherwise dead cycle).  */
	  if (gimple_code (def_stmt) != GIMPLE_PHI)
	    continue;

	  imm_use_iterator use_iter;
	  use_operand_p use_p;
	  bool canremove = true;

	  FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
	    {
	      gimple *use_stmt = USE_STMT (use_p);
	      /* Ignore debug statements. */
	      if (is_gimple_debug (use_stmt))
		continue;
	      /* Any real use outside the defining PHI keeps it live.  */
	      if (use_stmt != def_stmt)
		{
		  canremove = false;
		  break;
		}
	    }
	  if (!canremove)
	    continue;
	}

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
	{
	  /* A side-effecting statement must stay, but a call's unused
	     LHS can at least be dropped and its SSA name released.  */
	  if (gcall *call = dyn_cast <gcall *> (t))
	    {
	      gimple_call_set_lhs (call, NULL_TREE);
	      update_stmt (call);
	      release_ssa_name (def);
	    }
	  continue;
	}

      /* The defining statement needs to be defining only this name.
	 ASM is the only statement that can define more than one
	 name. */
      if (is_a<gasm *>(t)
	  && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
	continue;

      /* Don't remove statements that are needed for non-call
	 eh to work. */
      if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
	continue;

      /* Tell the caller that we removed a statement that might
	 throw so it could cleanup the cfg for that block. */
      if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
	bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);

      /* Add uses to the worklist.  Done before removal so the uses of
	 this statement become candidates once their last use goes
	 away; this is what makes the pass linear in the number of
	 dead statements.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  if (TREE_CODE (use) == SSA_NAME
	      && ! SSA_NAME_IS_DEFAULT_DEF (use))
	    bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	}

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt:");
	  print_gimple_stmt (dump_file, t, 0);
	}
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  remove_phi_node (&gsi, true);
	  phiremoved++;
	}
      else
	{
	  unlink_stmt_vdef (t);
	  gsi_remove (&gsi, true);
	  release_defs (t);
	  stmtremoved++;
	}
    }
  statistics_counter_event (cfun, "PHIs removed",
			    phiremoved);
  statistics_counter_event (cfun, "Statements removed",
			    stmtremoved);
}
|