Branch data Line data Source code
1 : : /* Dead code elimination pass for the GNU compiler.
2 : : Copyright (C) 2002-2024 Free Software Foundation, Inc.
3 : : Contributed by Ben Elliston <bje@redhat.com>
4 : : and Andrew MacLeod <amacleod@redhat.com>
5 : : Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6 : :
7 : : This file is part of GCC.
8 : :
9 : : GCC is free software; you can redistribute it and/or modify it
10 : : under the terms of the GNU General Public License as published by the
11 : : Free Software Foundation; either version 3, or (at your option) any
12 : : later version.
13 : :
14 : : GCC is distributed in the hope that it will be useful, but WITHOUT
15 : : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 : : for more details.
18 : :
19 : : You should have received a copy of the GNU General Public License
20 : : along with GCC; see the file COPYING3. If not see
21 : : <http://www.gnu.org/licenses/>. */
22 : :
23 : : /* Dead code elimination.
24 : :
25 : : References:
26 : :
27 : : Building an Optimizing Compiler,
28 : : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29 : :
30 : : Advanced Compiler Design and Implementation,
31 : : Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32 : :
33 : : Dead-code elimination is the removal of statements which have no
34 : : impact on the program's output. "Dead statements" have no impact
35 : : on the program's output, while "necessary statements" may have
36 : : impact on the output.
37 : :
38 : : The algorithm consists of three phases:
39 : : 1. Marking as necessary all statements known to be necessary,
40 : : e.g. most function calls, writing a value to memory, etc;
41 : : 2. Propagating necessary statements, e.g., the statements
42 : : giving values to operands in necessary statements; and
43 : : 3. Removing dead statements. */
44 : :
45 : : #include "config.h"
46 : : #include "system.h"
47 : : #include "coretypes.h"
48 : : #include "backend.h"
49 : : #include "rtl.h"
50 : : #include "tree.h"
51 : : #include "gimple.h"
52 : : #include "cfghooks.h"
53 : : #include "tree-pass.h"
54 : : #include "ssa.h"
55 : : #include "gimple-pretty-print.h"
56 : : #include "fold-const.h"
57 : : #include "calls.h"
58 : : #include "cfganal.h"
59 : : #include "tree-eh.h"
60 : : #include "gimplify.h"
61 : : #include "gimple-iterator.h"
62 : : #include "tree-cfg.h"
63 : : #include "tree-ssa-loop-niter.h"
64 : : #include "tree-into-ssa.h"
65 : : #include "tree-dfa.h"
66 : : #include "cfgloop.h"
67 : : #include "tree-scalar-evolution.h"
68 : : #include "tree-ssa-propagate.h"
69 : : #include "gimple-fold.h"
70 : : #include "tree-ssa.h"
71 : :
/* Statistics gathered for the dump file: how many statements and PHI
   nodes the pass saw and how many of each it removed.  */
static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

/* Pass-local statement flag recording that a statement is necessary.  */
#define STMT_NECESSARY GF_PLF_1

/* Worklist of statements already marked necessary whose operands still
   have to be propagated from.  */
static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL holds map from basic block index into the postorder.  */
static int *bb_postorder;
118 : :
/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  /* When optimizing for debug experience (-Og) keep every store so the
     values stay inspectable in the debugger.  */
  return optimize_debug;
}
126 : :
127 : : /* If STMT is not already marked necessary, mark it, and add it to the
128 : : worklist if ADD_TO_WORKLIST is true. */
129 : :
130 : : static inline void
131 : 359876605 : mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
132 : : {
133 : 359876605 : gcc_assert (stmt);
134 : :
135 : 359876605 : if (gimple_plf (stmt, STMT_NECESSARY))
136 : : return;
137 : :
138 : 359876416 : if (dump_file && (dump_flags & TDF_DETAILS))
139 : : {
140 : 802 : fprintf (dump_file, "Marking useful stmt: ");
141 : 802 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
142 : 802 : fprintf (dump_file, "\n");
143 : : }
144 : :
145 : 359876416 : gimple_set_plf (stmt, STMT_NECESSARY, true);
146 : 359876416 : if (add_to_worklist)
147 : 96076268 : worklist.safe_push (stmt);
148 : 96076268 : if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
149 : 33290562 : bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
150 : : }
151 : :
152 : :
/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  /* If OP was handled before, its definition must already be marked
     necessary (or be a GIMPLE_NOP, i.e. a default definition).  */
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Mark the defining statement necessary and queue it so that the
     statements feeding its own operands get marked too.  */
  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}
192 : :
193 : :
194 : : /* Mark STMT as necessary if it obviously is. Add it to the worklist if
195 : : it can make other statements necessary.
196 : :
197 : : If AGGRESSIVE is false, control statements are conservatively marked as
198 : : necessary. */
199 : :
200 : : static void
201 : 481752398 : mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
202 : : {
203 : : /* Statements that are implicitly live. Most function calls, asm
204 : : and return statements are required. Labels and GIMPLE_BIND nodes
205 : : are kept because they are control flow, and we have no way of
206 : : knowing whether they can be removed. DCE can eliminate all the
207 : : other statements in a block, and CFG can then remove the block
208 : : and labels. */
209 : 481752398 : switch (gimple_code (stmt))
210 : : {
211 : 6248038 : case GIMPLE_PREDICT:
212 : 6248038 : case GIMPLE_LABEL:
213 : 6248038 : mark_stmt_necessary (stmt, false);
214 : 6248038 : return;
215 : :
216 : 9272906 : case GIMPLE_ASM:
217 : 9272906 : case GIMPLE_RESX:
218 : 9272906 : case GIMPLE_RETURN:
219 : 9272906 : mark_stmt_necessary (stmt, true);
220 : 9272906 : return;
221 : :
222 : 33900505 : case GIMPLE_CALL:
223 : 33900505 : {
224 : : /* Never elide a noreturn call we pruned control-flow for. */
225 : 33900505 : if ((gimple_call_flags (stmt) & ECF_NORETURN)
226 : 33900505 : && gimple_call_ctrl_altering_p (stmt))
227 : : {
228 : 4339497 : mark_stmt_necessary (stmt, true);
229 : 4339497 : return;
230 : : }
231 : :
232 : 29561008 : tree callee = gimple_call_fndecl (stmt);
233 : 29561008 : if (callee != NULL_TREE
234 : 29561008 : && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
235 : 6263043 : switch (DECL_FUNCTION_CODE (callee))
236 : : {
237 : : case BUILT_IN_MALLOC:
238 : : case BUILT_IN_ALIGNED_ALLOC:
239 : : case BUILT_IN_CALLOC:
240 : : CASE_BUILT_IN_ALLOCA:
241 : : case BUILT_IN_STRDUP:
242 : : case BUILT_IN_STRNDUP:
243 : : case BUILT_IN_GOMP_ALLOC:
244 : : return;
245 : :
246 : : default:;
247 : : }
248 : :
249 : 29188425 : if (callee != NULL_TREE
250 : 27367519 : && flag_allocation_dce
251 : 56554911 : && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
252 : : return;
253 : :
254 : : /* IFN_GOACC_LOOP calls are necessary in that they are used to
255 : : represent parameter (i.e. step, bound) of a lowered OpenACC
256 : : partitioned loop. But this kind of partitioned loop might not
257 : : survive from aggressive loop removal for it has loop exit and
258 : : is assumed to be finite. Therefore, we need to explicitly mark
259 : : these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
260 : 28997096 : if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
261 : : {
262 : 25522 : mark_stmt_necessary (stmt, true);
263 : 25522 : return;
264 : : }
265 : : break;
266 : : }
267 : :
268 : 257746098 : case GIMPLE_DEBUG:
269 : : /* Debug temps without a value are not useful. ??? If we could
270 : : easily locate the debug temp bind stmt for a use thereof,
271 : : would could refrain from marking all debug temps here, and
272 : : mark them only if they're used. */
273 : 257746098 : if (gimple_debug_nonbind_marker_p (stmt)
274 : 193757613 : || !gimple_debug_bind_p (stmt)
275 : 193113810 : || gimple_debug_bind_has_value_p (stmt)
276 : 342625515 : || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
277 : 257552110 : mark_stmt_necessary (stmt, false);
278 : : return;
279 : :
280 : 1871 : case GIMPLE_GOTO:
281 : 1871 : gcc_assert (!simple_goto_p (stmt));
282 : 1871 : mark_stmt_necessary (stmt, true);
283 : 1871 : return;
284 : :
285 : 26989792 : case GIMPLE_COND:
286 : 26989792 : gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
287 : : /* Fall through. */
288 : :
289 : 27133751 : case GIMPLE_SWITCH:
290 : 27133751 : if (! aggressive)
291 : 17952734 : mark_stmt_necessary (stmt, true);
292 : : break;
293 : :
294 : 147406553 : case GIMPLE_ASSIGN:
295 : : /* Mark indirect CLOBBERs to be lazily removed if their SSA operands
296 : : do not prevail. That also makes control flow leading to them
297 : : not necessary in aggressive mode. */
298 : 157133189 : if (gimple_clobber_p (stmt) && !zero_ssa_operands (stmt, SSA_OP_USE))
299 : : return;
300 : : break;
301 : :
302 : : default:
303 : : break;
304 : : }
305 : :
306 : : /* If the statement has volatile operands, it needs to be preserved.
307 : : Same for statements that can alter control flow in unpredictable
308 : : ways. */
309 : 202293944 : if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
310 : : {
311 : 37949874 : mark_stmt_necessary (stmt, true);
312 : 37949874 : return;
313 : : }
314 : :
315 : : /* If a statement could throw, it can be deemed necessary unless we
316 : : are allowed to remove dead EH. Test this after checking for
317 : : new/delete operators since we always elide their EH. */
318 : 164344070 : if (!cfun->can_delete_dead_exceptions
319 : 164344070 : && stmt_could_throw_p (cfun, stmt))
320 : : {
321 : 5827340 : mark_stmt_necessary (stmt, true);
322 : 5827340 : return;
323 : : }
324 : :
325 : 198180332 : if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
326 : 198161213 : || stmt_may_clobber_global_p (stmt, false))
327 : : {
328 : 11554024 : mark_stmt_necessary (stmt, true);
329 : 11554024 : return;
330 : : }
331 : :
332 : : return;
333 : : }
334 : :
335 : :
336 : : /* Mark the last statement of BB as necessary. */
337 : :
338 : : static bool
339 : 38223775 : mark_last_stmt_necessary (basic_block bb)
340 : : {
341 : 38223775 : if (!bitmap_set_bit (last_stmt_necessary, bb->index))
342 : : return true;
343 : :
344 : 15034066 : bitmap_set_bit (bb_contains_live_stmts, bb->index);
345 : :
346 : : /* We actually mark the statement only if it is a control statement. */
347 : 15034066 : gimple *stmt = *gsi_last_bb (bb);
348 : 15034066 : if (stmt && is_ctrl_stmt (stmt))
349 : : {
350 : 9152689 : mark_stmt_necessary (stmt, true);
351 : 9152689 : return true;
352 : : }
353 : : return false;
354 : : }
355 : :
356 : :
/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* The entry block is control dependent on nothing.  */
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
			    0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
	{
	  skipped = true;
	  continue;
	}

      /* If the controlling block was not handled yet, recurse so its
	 own control dependences get marked as well.  */
      if (!mark_last_stmt_necessary (cd_bb))
	mark_control_dependent_edges_necessary (cd_bb, false);
    }

  /* Only record BB as fully processed if nothing was skipped above.  */
  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}
392 : :
393 : :
/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, aggressive);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent the empty possibly infinite loops from being removed.  This is
     needed to make the logic in remove_dead_stmt work to identify the
     correct edge to keep when removing a controlling condition.  */
  if (aggressive)
    {
      /* Keep back edges of irreducible loops alive.  */
      if (mark_irreducible_loops ())
	FOR_EACH_BB_FN (bb, cfun)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
		  if (dump_file)
		    fprintf (dump_file, "Marking back edge of irreducible "
			     "loop %i->%i\n", e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, false);
		}
	  }

      for (auto loop : loops_list (cfun, 0))
	/* For loops without an exit do not mark any condition.  */
	if (loop->exits->next->e && !finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "cannot prove finiteness of loop %i\n",
		       loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, false);
	  }
    }
}
465 : :
466 : :
467 : : /* Return true if REF is based on an aliased base, otherwise false. */
468 : :
469 : : static bool
470 : 89138356 : ref_may_be_aliased (tree ref)
471 : : {
472 : 89138356 : if (TREE_CODE (ref) == WITH_SIZE_EXPR)
473 : 0 : ref = TREE_OPERAND (ref, 0);
474 : 162311555 : while (handled_component_p (ref))
475 : 73173199 : ref = TREE_OPERAND (ref, 0);
476 : 89138356 : if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
477 : 89138356 : && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
478 : 12662853 : ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
479 : 89138356 : return !(DECL_P (ref)
480 : 60169196 : && !may_be_aliased (ref));
481 : : }
482 : :
/* Bitmap of VDEF SSA versions already visited, shared across calls to
   mark_all_reaching_defs_necessary.  */
static bitmap visited = NULL;
/* Walk-length statistics gathered by mark_aliased_reaching_defs_necessary;
   presumably used elsewhere to decide when to fall back to simple mode
   (see chain_ovfl) -- the consumer is outside this excerpt.  */
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
/* True once the pass dropped back to marking all reaching defs.  */
static bool chain_ovfl = false;
488 : :
/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary (clobbers are removed lazily).  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == refd->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (known_eq (size, max_size)
	      && known_subrange_p (ref->offset, ref->max_size, offset, size))
	    return true;
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   /* But when both are in the same block we cannot
		      easily tell whether we came from a backedge
		      unless we decide to compute stmt UIDs
		      (see PR58246).  */
		   && (basic_block) data != gimple_bb (def_stmt)
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}
552 : :
553 : : static void
554 : 12982741 : mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
555 : : {
556 : : /* Should have been caught before calling this function. */
557 : 12982741 : gcc_checking_assert (!keep_all_vdefs_p ());
558 : :
559 : 12982741 : unsigned int chain;
560 : 12982741 : ao_ref refd;
561 : 12982741 : gcc_assert (!chain_ovfl);
562 : 12982741 : ao_ref_init (&refd, ref);
563 : 25965482 : chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
564 : : mark_aliased_reaching_defs_necessary_1,
565 : 12982741 : gimple_bb (stmt), NULL);
566 : 12982741 : if (chain > longest_chain)
567 : 2007393 : longest_chain = chain;
568 : 12982741 : total_chain += chain;
569 : 12982741 : nr_walks++;
570 : 12982741 : }
571 : :
/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (gcall *call = dyn_cast <gcall *> (def_stmt))
    {
      tree callee = gimple_call_fndecl (call);
      if (callee != NULL_TREE
	  && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MALLOC:
	  case BUILT_IN_ALIGNED_ALLOC:
	  case BUILT_IN_CALLOC:
	  CASE_BUILT_IN_ALLOCA:
	  case BUILT_IN_FREE:
	  case BUILT_IN_GOMP_ALLOC:
	  case BUILT_IN_GOMP_FREE:
	    return false;

	  default:;
	  }

      /* Likewise skip replaceable operator new/delete calls.  */
      if (callee != NULL_TREE
	  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
	      || DECL_IS_OPERATOR_DELETE_P (callee))
	  && gimple_call_from_new_or_delete (call))
	return false;
    }

  /* Everything else is a may-def we must keep (clobbers excepted).  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}
636 : :
/* Mark all virtual definitions reaching STMT as necessary, except those
   based on non-aliased decls (skipped by the _1 worker).  The shared
   VISITED bitmap prevents re-walking definitions across calls.  */

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
645 : :
646 : : /* Return true for PHI nodes with one or identical arguments
647 : : can be removed. */
648 : : static bool
649 : 17493463 : degenerate_phi_p (gimple *phi)
650 : : {
651 : 17493463 : unsigned int i;
652 : 17493463 : tree op = gimple_phi_arg_def (phi, 0);
653 : 18449407 : for (i = 1; i < gimple_phi_num_args (phi); i++)
654 : 15917720 : if (gimple_phi_arg_def (phi, i) != op)
655 : : return false;
656 : : return true;
657 : : }
658 : :
659 : : /* Return that NEW_CALL and DELETE_CALL are a valid pair of new
660 : : and delete operators. */
661 : :
662 : : static bool
663 : 20559 : valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
664 : : {
665 : 20559 : tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
666 : 20559 : tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
667 : 20559 : return valid_new_delete_pair_p (new_asm, delete_asm);
668 : : }
669 : :
670 : : /* Propagate necessity using the operands of necessary statements.
671 : : Process the uses on each statement in the worklist, and add all
672 : : feeding statements which contribute to the calculation of this
673 : : value to the worklist.
674 : :
675 : : In conservative mode, EL is NULL. */
676 : :
677 : : static void
678 : 7461431 : propagate_necessity (bool aggressive)
679 : : {
680 : 7461431 : gimple *stmt;
681 : :
682 : 7461431 : if (dump_file && (dump_flags & TDF_DETAILS))
683 : 213 : fprintf (dump_file, "\nProcessing worklist:\n");
684 : :
685 : 239258908 : while (worklist.length () > 0)
686 : : {
687 : : /* Take STMT from worklist. */
688 : 231797477 : stmt = worklist.pop ();
689 : :
690 : 231797477 : if (dump_file && (dump_flags & TDF_DETAILS))
691 : : {
692 : 1157 : fprintf (dump_file, "processing: ");
693 : 1157 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
694 : 1157 : fprintf (dump_file, "\n");
695 : : }
696 : :
697 : 231797477 : if (aggressive)
698 : : {
699 : : /* Mark the last statement of the basic blocks on which the block
700 : : containing STMT is control dependent, but only if we haven't
701 : : already done so. */
702 : 79863146 : basic_block bb = gimple_bb (stmt);
703 : 79863146 : if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
704 : 79863146 : && !bitmap_bit_p (visited_control_parents, bb->index))
705 : 16491727 : mark_control_dependent_edges_necessary (bb, false);
706 : : }
707 : :
708 : 231797477 : if (gimple_code (stmt) == GIMPLE_PHI
709 : : /* We do not process virtual PHI nodes nor do we track their
710 : : necessity. */
711 : 265189993 : && !virtual_operand_p (gimple_phi_result (stmt)))
712 : : {
713 : : /* PHI nodes are somewhat special in that each PHI alternative has
714 : : data and control dependencies. All the statements feeding the
715 : : PHI node's arguments are always necessary. In aggressive mode,
716 : : we also consider the control dependent edges leading to the
717 : : predecessor block associated with each PHI alternative as
718 : : necessary. */
719 : 16696258 : gphi *phi = as_a <gphi *> (stmt);
720 : 16696258 : size_t k;
721 : :
722 : 54818059 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
723 : : {
724 : 38121801 : tree arg = PHI_ARG_DEF (stmt, k);
725 : 38121801 : if (TREE_CODE (arg) == SSA_NAME)
726 : 28120483 : mark_operand_necessary (arg);
727 : : }
728 : :
729 : : /* For PHI operands it matters from where the control flow arrives
730 : : to the BB. Consider the following example:
731 : :
732 : : a=exp1;
733 : : b=exp2;
734 : : if (test)
735 : : ;
736 : : else
737 : : ;
738 : : c=PHI(a,b)
739 : :
740 : : We need to mark control dependence of the empty basic blocks, since they
741 : : contains computation of PHI operands.
742 : :
743 : :                Doing so is too restrictive in the case the predecessor block is in
744 : : the loop. Consider:
745 : :
746 : : if (b)
747 : : {
748 : : int i;
749 : : for (i = 0; i<1000; ++i)
750 : : ;
751 : : j = 0;
752 : : }
753 : : return j;
754 : :
755 : : There is PHI for J in the BB containing return statement.
756 : :                In this case the control dependence of predecessor block (that is
757 : : within the empty loop) also contains the block determining number
758 : : of iterations of the block that would prevent removing of empty
759 : : loop in this case.
760 : :
761 : : This scenario can be avoided by splitting critical edges.
762 : : To save the critical edge splitting pass we identify how the control
763 : : dependence would look like if the edge was split.
764 : :
765 : : Consider the modified CFG created from current CFG by splitting
766 : : edge B->C. In the postdominance tree of modified CFG, C' is
767 : :                always child of C.  There are two cases how children of C' can look
768 : : like:
769 : :
770 : : 1) C' is leaf
771 : :
772 : : In this case the only basic block C' is control dependent on is B.
773 : :
774 : : 2) C' has single child that is B
775 : :
776 : : In this case control dependence of C' is same as control
777 : : dependence of B in original CFG except for block B itself.
778 : : (since C' postdominate B in modified CFG)
779 : :
780 : : Now how to decide what case happens? There are two basic options:
781 : :
782 : : a) C postdominate B. Then C immediately postdominate B and
783 : : case 2 happens iff there is no other way from B to C except
784 : : the edge B->C.
785 : :
786 : :                There is other way from B to C iff there is a successor of B that
787 : :                is not postdominated by B.  Testing this condition is somewhat
788 : :                expensive, because we need to iterate all successors of B.
789 : : We are safe to assume that this does not happen: we will mark B
790 : : as needed when processing the other path from B to C that is
791 : :                control dependent on B and marking control dependencies of B
792 : : itself is harmless because they will be processed anyway after
793 : : processing control statement in B.
794 : :
795 : : b) C does not postdominate B. Always case 1 happens since there is
796 : : path from C to exit that does not go through B and thus also C'. */
797 : :
798 : 28154160 : if (aggressive && !degenerate_phi_p (stmt))
799 : : {
800 : 17451452 : for (k = 0; k < gimple_phi_num_args (stmt); k++)
801 : : {
802 : 12213096 : basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
803 : :
804 : 12213096 : if (gimple_bb (stmt)
805 : 12213096 : != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
806 : : {
807 : 1318205 : if (!mark_last_stmt_necessary (arg_bb))
808 : 2148 : mark_control_dependent_edges_necessary (arg_bb, false);
809 : : }
810 : 10894891 : else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
811 : 10894891 : && !bitmap_bit_p (visited_control_parents,
812 : : arg_bb->index))
813 : 5524022 : mark_control_dependent_edges_necessary (arg_bb, true);
814 : : }
815 : : }
816 : : }
817 : : else
818 : : {
819 : : /* Propagate through the operands. Examine all the USE, VUSE and
820 : : VDEF operands in this statement. Mark all the statements
821 : : which feed this statement's uses as necessary. */
822 : 215101219 : ssa_op_iter iter;
823 : 215101219 : tree use;
824 : :
825 : : /* If this is a call to free which is directly fed by an
826 : : allocation function do not mark that necessary through
827 : : processing the argument. */
828 : 215101219 : bool is_delete_operator
829 : 215101219 : = (is_gimple_call (stmt)
830 : 33876534 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
831 : 215269134 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
832 : 214994489 : if (is_delete_operator
833 : 214994489 : || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
834 : 214750777 : || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
835 : : {
836 : 350442 : tree ptr = gimple_call_arg (stmt, 0);
837 : 350442 : gcall *def_stmt;
838 : 350442 : tree def_callee;
839 : : /* If the pointer we free is defined by an allocation
840 : : function do not add the call to the worklist. */
841 : 350442 : if (TREE_CODE (ptr) == SSA_NAME
842 : 349888 : && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
843 : 135991 : && (def_callee = gimple_call_fndecl (def_stmt))
844 : 486327 : && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
845 : 102746 : && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
846 : 102740 : || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
847 : 3433 : || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
848 : 2800 : || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
849 : 35880 : || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
850 : 20510 : && gimple_call_from_new_or_delete (def_stmt))))
851 : : {
852 : 120497 : if (is_delete_operator
853 : 120497 : && !valid_new_delete_pair_p (def_stmt, stmt))
854 : 136 : mark_operand_necessary (gimple_call_arg (stmt, 0));
855 : :
856 : : /* Delete operators can have alignment and (or) size
857 : : as next arguments. When being a SSA_NAME, they
858 : : must be marked as necessary. Similarly GOMP_free. */
859 : 120497 : if (gimple_call_num_args (stmt) >= 2)
860 : 25776 : for (unsigned i = 1; i < gimple_call_num_args (stmt);
861 : : i++)
862 : : {
863 : 12888 : tree arg = gimple_call_arg (stmt, i);
864 : 12888 : if (TREE_CODE (arg) == SSA_NAME)
865 : 64 : mark_operand_necessary (arg);
866 : : }
867 : :
868 : 91344154 : continue;
869 : 120497 : }
870 : : }
871 : :
872 : 401919351 : FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
873 : 186938629 : mark_operand_necessary (use);
874 : :
875 : 214980722 : use = gimple_vuse (stmt);
876 : 186475767 : if (!use)
877 : 86542837 : continue;
878 : :
879 : : /* No need to search for vdefs if we intrinsically keep them all. */
880 : 128437885 : if (keep_all_vdefs_p ())
881 : 69762 : continue;
882 : :
883 : : /* If we dropped to simple mode make all immediately
884 : : reachable definitions necessary. */
885 : 128368123 : if (chain_ovfl)
886 : : {
887 : 3742609 : mark_all_reaching_defs_necessary (stmt);
888 : 3742609 : continue;
889 : : }
890 : :
891 : : /* For statements that may load from memory (have a VUSE) we
892 : : have to mark all reaching (may-)definitions as necessary.
893 : : We partition this task into two cases:
894 : : 1) explicit loads based on decls that are not aliased
895 : : 2) implicit loads (like calls) and explicit loads not
896 : : based on decls that are not aliased (like indirect
897 : : references or loads from globals)
898 : : For 1) we mark all reaching may-defs as necessary, stopping
899 : : at dominating kills. For 2) we want to mark all dominating
900 : : references necessary, but non-aliased ones which we handle
901 : : in 1). By keeping a global visited bitmap for references
902 : : we walk for 2) we avoid quadratic behavior for those. */
903 : :
904 : 124625514 : if (gcall *call = dyn_cast <gcall *> (stmt))
905 : : {
906 : 31045003 : tree callee = gimple_call_fndecl (call);
907 : 31045003 : unsigned i;
908 : :
909 : : /* Calls to functions that are merely acting as barriers
910 : : or that only store to memory do not make any previous
911 : : stores necessary. */
912 : 31767401 : if (callee != NULL_TREE
913 : 29727016 : && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
914 : 37781841 : && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
915 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
916 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
917 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
918 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
919 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
920 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
921 : : || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
922 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
923 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
924 : : || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
925 : 722398 : continue;
926 : :
927 : 30468656 : if (callee != NULL_TREE
928 : 29004618 : && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
929 : 28813728 : || DECL_IS_OPERATOR_DELETE_P (callee))
930 : 31197081 : && gimple_call_from_new_or_delete (call))
931 : 146051 : continue;
932 : :
933 : : /* Calls implicitly load from memory, their arguments
934 : : in addition may explicitly perform memory loads. */
935 : 30176554 : mark_all_reaching_defs_necessary (call);
936 : 92358163 : for (i = 0; i < gimple_call_num_args (call); ++i)
937 : : {
938 : 62181609 : tree arg = gimple_call_arg (call, i);
939 : 120523149 : if (TREE_CODE (arg) == SSA_NAME
940 : 62181609 : || is_gimple_min_invariant (arg))
941 : 58341540 : continue;
942 : 3840069 : if (TREE_CODE (arg) == WITH_SIZE_EXPR)
943 : 660 : arg = TREE_OPERAND (arg, 0);
944 : 3840069 : if (!ref_may_be_aliased (arg))
945 : 3478214 : mark_aliased_reaching_defs_necessary (call, arg);
946 : : }
947 : : }
948 : 93580511 : else if (gimple_assign_single_p (stmt))
949 : : {
950 : 86113850 : tree rhs;
951 : : /* If this is a load mark things necessary. */
952 : 86113850 : rhs = gimple_assign_rhs1 (stmt);
953 : 86113850 : if (TREE_CODE (rhs) != SSA_NAME
954 : 69985573 : && !is_gimple_min_invariant (rhs)
955 : 135328213 : && TREE_CODE (rhs) != CONSTRUCTOR)
956 : : {
957 : 40268372 : if (!ref_may_be_aliased (rhs))
958 : 8880617 : mark_aliased_reaching_defs_necessary (stmt, rhs);
959 : : else
960 : 31387755 : mark_all_reaching_defs_necessary (stmt);
961 : : }
962 : : }
963 : 7466661 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
964 : : {
965 : 7297784 : tree rhs = gimple_return_retval (return_stmt);
966 : : /* A return statement may perform a load. */
967 : 7297784 : if (rhs
968 : 4044002 : && TREE_CODE (rhs) != SSA_NAME
969 : 1309115 : && !is_gimple_min_invariant (rhs)
970 : 7930634 : && TREE_CODE (rhs) != CONSTRUCTOR)
971 : : {
972 : 632850 : if (!ref_may_be_aliased (rhs))
973 : 618407 : mark_aliased_reaching_defs_necessary (stmt, rhs);
974 : : else
975 : 14443 : mark_all_reaching_defs_necessary (stmt);
976 : : }
977 : : }
978 : 168877 : else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
979 : : {
980 : 167557 : unsigned i;
981 : 167557 : mark_all_reaching_defs_necessary (stmt);
982 : : /* Inputs may perform loads. */
983 : 287354 : for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
984 : : {
985 : 119797 : tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
986 : 119797 : if (TREE_CODE (op) != SSA_NAME
987 : 78624 : && !is_gimple_min_invariant (op)
988 : 41340 : && TREE_CODE (op) != CONSTRUCTOR
989 : 161137 : && !ref_may_be_aliased (op))
990 : 5503 : mark_aliased_reaching_defs_necessary (stmt, op);
991 : : }
992 : : }
993 : 1320 : else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
994 : : {
995 : : /* The beginning of a transaction is a memory barrier. */
996 : : /* ??? If we were really cool, we'd only be a barrier
997 : : for the memories touched within the transaction. */
998 : 1320 : mark_all_reaching_defs_necessary (stmt);
999 : : }
1000 : : else
1001 : 0 : gcc_unreachable ();
1002 : :
1003 : : /* If we over-used our alias oracle budget drop to simple
1004 : : mode. The cost metric allows quadratic behavior
1005 : : (number of uses times number of may-defs queries) up to
1006 : : a constant maximal number of queries and after that falls back to
1007 : : super-linear complexity. */
1008 : 123757065 : if (/* Constant but quadratic for small functions. */
1009 : 123757065 : total_chain > 128 * 128
1010 : : /* Linear in the number of may-defs. */
1011 : 1104586 : && total_chain > 32 * longest_chain
1012 : : /* Linear in the number of uses. */
1013 : 4720 : && total_chain > nr_walks * 32)
1014 : : {
1015 : 4559 : chain_ovfl = true;
1016 : 4559 : if (visited)
1017 : 4559 : bitmap_clear (visited);
1018 : : }
1019 : : }
1020 : : }
1021 : 7461431 : }
1022 : :
1023 : : /* Remove dead PHI nodes from block BB. */
1024 : :
1025 : : static bool
1026 : 72852033 : remove_dead_phis (basic_block bb)
1027 : : {
1028 : 72852033 : bool something_changed = false;
1029 : 72852033 : gphi *phi;
1030 : 72852033 : gphi_iterator gsi;
1031 : :
1032 : 104748246 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
1033 : : {
1034 : 31896213 : stats.total_phis++;
1035 : 31896213 : phi = gsi.phi ();
1036 : :
1037 : : /* We do not track necessity of virtual PHI nodes. Instead do
1038 : : very simple dead PHI removal here. */
1039 : 63792426 : if (virtual_operand_p (gimple_phi_result (phi)))
1040 : : {
1041 : : /* Virtual PHI nodes with one or identical arguments
1042 : : can be removed. */
1043 : 14951029 : if (!loops_state_satisfies_p (LOOP_CLOSED_SSA)
1044 : 14951029 : && degenerate_phi_p (phi))
1045 : : {
1046 : 1718852 : tree vdef = gimple_phi_result (phi);
1047 : 1718852 : tree vuse = gimple_phi_arg_def (phi, 0);
1048 : :
1049 : 1718852 : use_operand_p use_p;
1050 : 1718852 : imm_use_iterator iter;
1051 : 1718852 : gimple *use_stmt;
1052 : 4398828 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1053 : 8134154 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1054 : 4445941 : SET_USE (use_p, vuse);
1055 : 1718852 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
1056 : 1718852 : && TREE_CODE (vuse) == SSA_NAME)
1057 : 461 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1058 : : }
1059 : : else
1060 : 13232177 : gimple_set_plf (phi, STMT_NECESSARY, true);
1061 : : }
1062 : :
1063 : 31896213 : if (!gimple_plf (phi, STMT_NECESSARY))
1064 : : {
1065 : 1967778 : something_changed = true;
1066 : 1967778 : if (dump_file && (dump_flags & TDF_DETAILS))
1067 : : {
1068 : 17 : fprintf (dump_file, "Deleting : ");
1069 : 17 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1070 : 17 : fprintf (dump_file, "\n");
1071 : : }
1072 : :
1073 : 1967778 : remove_phi_node (&gsi, true);
1074 : 1967778 : stats.removed_phis++;
1075 : 1967778 : continue;
1076 : : }
1077 : :
1078 : 29928435 : gsi_next (&gsi);
1079 : : }
1080 : 72852033 : return something_changed;
1081 : : }
1082 : :
1083 : :
1084 : : /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1085 : : containing I so that we don't have to look it up. */
1086 : :
1087 : : static void
1088 : 5442982 : remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
1089 : : vec<edge> &to_remove_edges)
1090 : : {
1091 : 5442982 : gimple *stmt = gsi_stmt (*i);
1092 : :
1093 : 5442982 : if (dump_file && (dump_flags & TDF_DETAILS))
1094 : : {
1095 : 121 : fprintf (dump_file, "Deleting : ");
1096 : 121 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1097 : 121 : fprintf (dump_file, "\n");
1098 : : }
1099 : :
1100 : 5442982 : stats.removed++;
1101 : :
1102 : : /* If we have determined that a conditional branch statement contributes
1103 : : nothing to the program, then we not only remove it, but we need to update
1104 : : the CFG. We can choose any of the edges out of BB as long as we are sure to not
1105 : : close infinite loops. This is done by always choosing the edge closer to
1106 : : exit in inverted_rev_post_order_compute order. */
1107 : 5442982 : if (is_ctrl_stmt (stmt))
1108 : : {
1109 : 28517 : edge_iterator ei;
1110 : 28517 : edge e = NULL, e2;
1111 : :
1112 : : /* See if there is only one non-abnormal edge. */
1113 : 28517 : if (single_succ_p (bb))
1114 : 4 : e = single_succ_edge (bb);
1115 : : /* Otherwise choose one that is closer to bb with a live statement in it.
1116 : : To be able to choose one, we compute inverted post order starting from
1117 : : all BBs with live statements. */
1118 : 4 : if (!e)
1119 : : {
1120 : 28513 : if (!bb_postorder)
1121 : : {
1122 : 16770 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
1123 : 16770 : int n = inverted_rev_post_order_compute (cfun, rpo,
1124 : : &bb_contains_live_stmts);
1125 : 16770 : bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1126 : 631084 : for (int i = 0; i < n; ++i)
1127 : 614314 : bb_postorder[rpo[i]] = i;
1128 : 16770 : free (rpo);
1129 : : }
1130 : 85555 : FOR_EACH_EDGE (e2, ei, bb->succs)
1131 : 57042 : if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1132 : 28529 : || bb_postorder [e->dest->index]
1133 : 28529 : >= bb_postorder [e2->dest->index])
1134 : 45531 : e = e2;
1135 : : }
1136 : 28513 : gcc_assert (e);
1137 : 28517 : e->probability = profile_probability::always ();
1138 : :
1139 : : /* The edge is no longer associated with a conditional, so it does
1140 : : not have TRUE/FALSE flags.
1141 : : We are also safe to drop EH/ABNORMAL flags and turn them into
1142 : : normal control flow, because we know that all the destinations (including
1143 : : those odd edges) are equivalent for program execution. */
1144 : 28517 : e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1145 : :
1146 : : /* The lone outgoing edge from BB will be a fallthru edge. */
1147 : 28517 : e->flags |= EDGE_FALLTHRU;
1148 : :
1149 : : /* Remove the remaining outgoing edges. */
1150 : 85563 : FOR_EACH_EDGE (e2, ei, bb->succs)
1151 : 57046 : if (e != e2)
1152 : : {
1153 : : /* If we made a BB unconditionally exit a loop or removed
1154 : : an entry into an irreducible region, then this transform
1155 : : alters the set of BBs in the loop. Schedule a fixup. */
1156 : 28529 : if (loop_exit_edge_p (bb->loop_father, e)
1157 : 28529 : || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1158 : 16462 : loops_state_set (LOOPS_NEED_FIXUP);
1159 : 28529 : to_remove_edges.safe_push (e2);
1160 : : }
1161 : : }
1162 : :
1163 : : /* If this is a store into a variable that is being optimized away,
1164 : : add a debug bind stmt if possible. */
1165 : 5442982 : if (MAY_HAVE_DEBUG_BIND_STMTS
1166 : 4855894 : && gimple_assign_single_p (stmt)
1167 : 5971592 : && is_gimple_val (gimple_assign_rhs1 (stmt)))
1168 : : {
1169 : 379480 : tree lhs = gimple_assign_lhs (stmt);
1170 : 288951 : if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1171 : 90948 : && !DECL_IGNORED_P (lhs)
1172 : 55318 : && is_gimple_reg_type (TREE_TYPE (lhs))
1173 : 131 : && !is_global_var (lhs)
1174 : 379611 : && !DECL_HAS_VALUE_EXPR_P (lhs))
1175 : : {
1176 : 131 : tree rhs = gimple_assign_rhs1 (stmt);
1177 : 131 : gdebug *note
1178 : 131 : = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1179 : 131 : gsi_insert_after (i, note, GSI_SAME_STMT);
1180 : : }
1181 : : }
1182 : :
1183 : 5442982 : unlink_stmt_vdef (stmt);
1184 : 5442982 : gsi_remove (i, true);
1185 : 5442982 : release_defs (stmt);
1186 : 5442982 : }
1187 : :
1188 : : /* Helper for maybe_optimize_arith_overflow. Find in *TP if there are any
1189 : : uses of data (SSA_NAME) other than REALPART_EXPR referencing it. */
1190 : :
1191 : : static tree
1192 : 40 : find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1193 : : {
1194 : 40 : if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1195 : 0 : *walk_subtrees = 0;
1196 : 40 : if (*tp == (tree) data)
1197 : 20 : return *tp;
1198 : : return NULL_TREE;
1199 : : }
1200 : :
1201 : : /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1202 : : but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1203 : : into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1204 : : uses. */
1205 : :
1206 : : static void
1207 : 285401 : maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1208 : : enum tree_code subcode)
1209 : : {
1210 : 285401 : gimple *stmt = gsi_stmt (*gsi);
1211 : 285401 : tree lhs = gimple_call_lhs (stmt);
1212 : :
1213 : 285401 : if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1214 : 285257 : return;
1215 : :
1216 : 285401 : imm_use_iterator imm_iter;
1217 : 285401 : use_operand_p use_p;
1218 : 285401 : bool has_debug_uses = false;
1219 : 285401 : bool has_realpart_uses = false;
1220 : 285401 : bool has_other_uses = false;
1221 : 291604 : FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1222 : : {
1223 : 291460 : gimple *use_stmt = USE_STMT (use_p);
1224 : 291460 : if (is_gimple_debug (use_stmt))
1225 : : has_debug_uses = true;
1226 : 289597 : else if (is_gimple_assign (use_stmt)
1227 : 289491 : && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1228 : 293937 : && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1229 : : has_realpart_uses = true;
1230 : : else
1231 : : {
1232 : : has_other_uses = true;
1233 : : break;
1234 : : }
1235 : : }
1236 : :
1237 : 285401 : if (!has_realpart_uses || has_other_uses)
1238 : : return;
1239 : :
1240 : 144 : tree arg0 = gimple_call_arg (stmt, 0);
1241 : 144 : tree arg1 = gimple_call_arg (stmt, 1);
1242 : 144 : location_t loc = gimple_location (stmt);
1243 : 144 : tree type = TREE_TYPE (TREE_TYPE (lhs));
1244 : 144 : tree utype = unsigned_type_for (type);
1245 : 144 : tree result = fold_build2_loc (loc, subcode, utype,
1246 : : fold_convert_loc (loc, utype, arg0),
1247 : : fold_convert_loc (loc, utype, arg1));
1248 : 144 : result = fold_convert_loc (loc, type, result);
1249 : :
1250 : 144 : if (has_debug_uses)
1251 : : {
1252 : 20 : gimple *use_stmt;
1253 : 60 : FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1254 : : {
1255 : 40 : if (!gimple_debug_bind_p (use_stmt))
1256 : 20 : continue;
1257 : 20 : tree v = gimple_debug_bind_get_value (use_stmt);
1258 : 20 : if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1259 : : {
1260 : 20 : gimple_debug_bind_reset_value (use_stmt);
1261 : 20 : update_stmt (use_stmt);
1262 : : }
1263 : 20 : }
1264 : : }
1265 : :
1266 : 144 : if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1267 : 0 : result = drop_tree_overflow (result);
1268 : 144 : tree overflow = build_zero_cst (type);
1269 : 144 : tree ctype = build_complex_type (type);
1270 : 144 : if (TREE_CODE (result) == INTEGER_CST)
1271 : 0 : result = build_complex (ctype, result, overflow);
1272 : : else
1273 : 144 : result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1274 : : ctype, result, overflow);
1275 : :
1276 : 144 : if (dump_file && (dump_flags & TDF_DETAILS))
1277 : : {
1278 : 0 : fprintf (dump_file, "Transforming call: ");
1279 : 0 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1280 : 0 : fprintf (dump_file, "because the overflow result is never used into: ");
1281 : 0 : print_generic_stmt (dump_file, result, TDF_SLIM);
1282 : 0 : fprintf (dump_file, "\n");
1283 : : }
1284 : :
1285 : 144 : gimplify_and_update_call_from_tree (gsi, result);
1286 : : }
1287 : :
1288 : : /* Returns whether the control parents of BB are preserved. */
1289 : :
1290 : : static bool
1291 : 600962 : control_parents_preserved_p (basic_block bb)
1292 : : {
1293 : : /* If we marked the control parents from BB they are preserved. */
1294 : 600962 : if (bitmap_bit_p (visited_control_parents, bb->index))
1295 : : return true;
1296 : :
1297 : : /* But they can also end up being marked from elsewhere. */
1298 : 5651 : bitmap_iterator bi;
1299 : 5651 : unsigned edge_number;
1300 : 8903 : EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
1301 : : 0, edge_number, bi)
1302 : : {
1303 : 5731 : basic_block cd_bb = cd->get_edge_src (edge_number);
1304 : 5731 : if (cd_bb != bb
1305 : 5731 : && !bitmap_bit_p (last_stmt_necessary, cd_bb->index))
1306 : : return false;
1307 : : }
1308 : : /* And cache the result. */
1309 : 3172 : bitmap_set_bit (visited_control_parents, bb->index);
1310 : 3172 : return true;
1311 : : }
1312 : :
1313 : : /* Eliminate unnecessary statements. Any instruction not marked as necessary
1314 : : contributes nothing to the program, and can be deleted. */
1315 : :
1316 : : static bool
1317 : 7461431 : eliminate_unnecessary_stmts (bool aggressive)
1318 : : {
1319 : 7461431 : bool something_changed = false;
1320 : 7461431 : basic_block bb;
1321 : 7461431 : gimple_stmt_iterator gsi, psi;
1322 : 7461431 : gimple *stmt;
1323 : 7461431 : tree call;
1324 : 7461431 : auto_vec<edge> to_remove_edges;
1325 : :
1326 : 7461431 : if (dump_file && (dump_flags & TDF_DETAILS))
1327 : 213 : fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1328 : :
1329 : 7461431 : bool had_setjmp = cfun->calls_setjmp;
1330 : 7461431 : clear_special_calls ();
1331 : :
1332 : : /* Walking basic blocks and statements in reverse order avoids
1333 : : releasing SSA names before any other DEFs that refer to them are
1334 : : released. This helps avoid loss of debug information, as we get
1335 : : a chance to propagate all RHSs of removed SSAs into debug uses,
1336 : : rather than only the latest ones. E.g., consider:
1337 : :
1338 : : x_3 = y_1 + z_2;
1339 : : a_5 = x_3 - b_4;
1340 : : # DEBUG a => a_5
1341 : :
1342 : : If we were to release x_3 before a_5, when we reached a_5 and
1343 : : tried to substitute it into the debug stmt, we'd see x_3 there,
1344 : : but x_3's DEF, type, etc would have already been disconnected.
1345 : : By going backwards, the debug stmt first changes to:
1346 : :
1347 : : # DEBUG a => x_3 - b_4
1348 : :
1349 : : and then to:
1350 : :
1351 : : # DEBUG a => y_1 + z_2 - b_4
1352 : :
1353 : : as desired. */
1354 : 7461431 : gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1355 : 7461431 : auto_vec<basic_block> h;
1356 : 7461431 : h = get_all_dominated_blocks (CDI_DOMINATORS,
1357 : 14922862 : single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1358 : :
1359 : 80313464 : while (h.length ())
1360 : : {
1361 : 72852033 : bb = h.pop ();
1362 : :
1363 : : /* Remove dead statements. */
1364 : 72852033 : auto_bitmap debug_seen;
1365 : 627456464 : for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1366 : : {
1367 : 481752398 : stmt = gsi_stmt (gsi);
1368 : :
1369 : 481752398 : psi = gsi;
1370 : 481752398 : gsi_prev (&psi);
1371 : :
1372 : 481752398 : stats.total++;
1373 : :
1374 : : /* We can mark a call to free as not necessary if the
1375 : : defining statement of its argument is not necessary
1376 : : (and thus is getting removed). */
1377 : 481752398 : if (gimple_plf (stmt, STMT_NECESSARY)
1378 : 481752398 : && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1379 : 478657655 : || (is_gimple_call (stmt)
1380 : 33632822 : && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
1381 : 167915 : && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
1382 : : {
1383 : 350442 : tree ptr = gimple_call_arg (stmt, 0);
1384 : 350442 : if (TREE_CODE (ptr) == SSA_NAME)
1385 : : {
1386 : 349888 : gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1387 : 349888 : if (!gimple_nop_p (def_stmt)
1388 : 349888 : && !gimple_plf (def_stmt, STMT_NECESSARY))
1389 : 2268 : gimple_set_plf (stmt, STMT_NECESSARY, false);
1390 : : }
1391 : : }
1392 : :
1393 : : /* If GSI is not necessary then remove it. */
1394 : 481752398 : if (!gimple_plf (stmt, STMT_NECESSARY))
1395 : : {
1396 : : /* Keep clobbers that we can keep live live. */
1397 : 2853299 : if (gimple_clobber_p (stmt))
1398 : : {
1399 : 1260610 : ssa_op_iter iter;
1400 : 1260610 : use_operand_p use_p;
1401 : 1260610 : bool dead = false;
1402 : 2519191 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1403 : : {
1404 : 1260610 : tree name = USE_FROM_PTR (use_p);
1405 : 1260610 : if (!SSA_NAME_IS_DEFAULT_DEF (name)
1406 : 1260610 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1407 : : {
1408 : : dead = true;
1409 : : break;
1410 : : }
1411 : : }
1412 : 2516712 : if (!dead
1413 : : /* When doing CD-DCE we have to ensure all controls
1414 : : of the stmt are still live. */
1415 : 1260610 : && (!aggressive || control_parents_preserved_p (bb)))
1416 : : {
1417 : 1256102 : bitmap_clear (debug_seen);
1418 : 1256102 : continue;
1419 : : }
1420 : : }
1421 : 1597197 : if (!is_gimple_debug (stmt))
1422 : 1403209 : something_changed = true;
1423 : 1597197 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1424 : 1597197 : continue;
1425 : 1597197 : }
1426 : 478899099 : else if (is_gimple_call (stmt))
1427 : : {
1428 : 33874266 : tree name = gimple_call_lhs (stmt);
1429 : :
1430 : 33874266 : notice_special_calls (as_a <gcall *> (stmt));
1431 : :
1432 : : /* When LHS of var = call (); is dead, simplify it into
1433 : : call (); saving one operand. */
1434 : 33874266 : if (name
1435 : 13539954 : && TREE_CODE (name) == SSA_NAME
1436 : 11287547 : && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1437 : : /* Avoid doing so for allocation calls which we
1438 : : did not mark as necessary, it will confuse the
1439 : : special logic we apply to malloc/free pair removal. */
1440 : 33959270 : && (!(call = gimple_call_fndecl (stmt))
1441 : 74956 : || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1442 : 14227 : || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1443 : : && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1444 : : && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1445 : : && !ALLOCA_FUNCTION_CODE_P
1446 : : (DECL_FUNCTION_CODE (call))))
1447 : 73729 : && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
1448 : : {
1449 : 83571 : something_changed = true;
1450 : 83571 : if (dump_file && (dump_flags & TDF_DETAILS))
1451 : : {
1452 : 0 : fprintf (dump_file, "Deleting LHS of call: ");
1453 : 0 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1454 : 0 : fprintf (dump_file, "\n");
1455 : : }
1456 : :
1457 : 83571 : gimple_call_set_lhs (stmt, NULL_TREE);
1458 : 83571 : maybe_clean_or_replace_eh_stmt (stmt, stmt);
1459 : 83571 : update_stmt (stmt);
1460 : 83571 : release_ssa_name (name);
1461 : :
1462 : : /* GOMP_SIMD_LANE (unless three argument) or ASAN_POISON
1463 : : without lhs is not needed. */
1464 : 83571 : if (gimple_call_internal_p (stmt))
1465 : 7654 : switch (gimple_call_internal_fn (stmt))
1466 : : {
1467 : 761 : case IFN_GOMP_SIMD_LANE:
1468 : 761 : if (gimple_call_num_args (stmt) >= 3
1469 : 761 : && !integer_nonzerop (gimple_call_arg (stmt, 2)))
1470 : : break;
1471 : : /* FALLTHRU */
1472 : 825 : case IFN_ASAN_POISON:
1473 : 825 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1474 : 825 : break;
1475 : : default:
1476 : : break;
1477 : : }
1478 : : }
1479 : 33790695 : else if (gimple_call_internal_p (stmt))
1480 : 779374 : switch (gimple_call_internal_fn (stmt))
1481 : : {
1482 : 88654 : case IFN_ADD_OVERFLOW:
1483 : 88654 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1484 : 88654 : break;
1485 : 95174 : case IFN_SUB_OVERFLOW:
1486 : 95174 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1487 : 95174 : break;
1488 : 97224 : case IFN_MUL_OVERFLOW:
1489 : 97224 : maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1490 : 97224 : break;
1491 : 23986 : case IFN_UADDC:
1492 : 23986 : if (integer_zerop (gimple_call_arg (stmt, 2)))
1493 : 2386 : maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1494 : : break;
1495 : 13628 : case IFN_USUBC:
1496 : 13628 : if (integer_zerop (gimple_call_arg (stmt, 2)))
1497 : 1963 : maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1498 : : break;
1499 : : default:
1500 : : break;
1501 : : }
1502 : : }
1503 : 445024833 : else if (gimple_debug_bind_p (stmt))
1504 : : {
1505 : : /* We are only keeping the last debug-bind of a
1506 : : non-DEBUG_EXPR_DECL variable in a series of
1507 : : debug-bind stmts. */
1508 : 192919822 : tree var = gimple_debug_bind_get_var (stmt);
1509 : 192919822 : if (TREE_CODE (var) != DEBUG_EXPR_DECL
1510 : 192919822 : && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1511 : 3844960 : remove_dead_stmt (&gsi, bb, to_remove_edges);
1512 : 192919822 : continue;
1513 : 192919822 : }
1514 : 285979277 : bitmap_clear (debug_seen);
1515 : : }
1516 : :
1517 : : /* Remove dead PHI nodes. */
1518 : 72852033 : something_changed |= remove_dead_phis (bb);
1519 : 72852033 : }
1520 : :
1521 : : /* First remove queued edges. */
1522 : 7461431 : if (!to_remove_edges.is_empty ())
1523 : : {
1524 : : /* Remove edges. We've delayed this to not get bogus debug stmts
1525 : : during PHI node removal. */
1526 : 45299 : for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1527 : 28529 : remove_edge (to_remove_edges[i]);
1528 : 16770 : cfg_altered = true;
1529 : : }
1530 : : /* When we cleared calls_setjmp we can purge all abnormal edges. Do so.
1531 : : ??? We'd like to assert that setjmp calls do not pop out of nothing
1532 : : but we currently lack a per-stmt way of noting whether a call was
1533 : : recognized as returns-twice (or rather receives-control). */
1534 : 7461431 : if (!cfun->calls_setjmp && had_setjmp)
1535 : : {
1536 : : /* Make sure we only remove the edges, not dominated blocks. Using
1537 : : gimple_purge_dead_abnormal_call_edges would do that and we
1538 : : cannot free dominators yet. */
1539 : 3654 : FOR_EACH_BB_FN (bb, cfun)
1540 : 9384 : if (gcall *stmt = safe_dyn_cast <gcall *> (*gsi_last_bb (bb)))
1541 : 2071 : if (!stmt_can_make_abnormal_goto (stmt))
1542 : : {
1543 : 725 : edge_iterator ei;
1544 : 725 : edge e;
1545 : 966 : for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1546 : : {
1547 : 241 : if (e->flags & EDGE_ABNORMAL)
1548 : : {
1549 : 100 : if (e->flags & EDGE_FALLTHRU)
1550 : 0 : e->flags &= ~EDGE_ABNORMAL;
1551 : : else
1552 : 100 : remove_edge (e);
1553 : 100 : cfg_altered = true;
1554 : : }
1555 : : else
1556 : 141 : ei_next (&ei);
1557 : : }
1558 : : }
1559 : : }
1560 : :
1561 : : /* Now remove the unreachable blocks. */
1562 : 7461431 : if (cfg_altered)
1563 : : {
1564 : 16805 : basic_block prev_bb;
1565 : :
1566 : 16805 : find_unreachable_blocks ();
1567 : :
1568 : : /* Delete all unreachable basic blocks in reverse dominator order. */
1569 : 16805 : for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1570 : 596638 : bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1571 : : {
1572 : 579833 : prev_bb = bb->prev_bb;
1573 : :
1574 : 579833 : if ((bb_contains_live_stmts
1575 : 579787 : && !bitmap_bit_p (bb_contains_live_stmts, bb->index))
1576 : 1011690 : || !(bb->flags & BB_REACHABLE))
1577 : : {
1578 : : /* Since we don't track liveness of virtual PHI nodes, it is
1579 : : possible that we rendered some PHI nodes unreachable while
1580 : : they are still in use. Mark them for renaming. */
1581 : 166434 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1582 : 18463 : gsi_next (&gsi))
1583 : 55387 : if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1584 : : {
1585 : 18461 : bool found = false;
1586 : 18461 : imm_use_iterator iter;
1587 : :
1588 : 18648 : FOR_EACH_IMM_USE_STMT (stmt, iter,
1589 : : gimple_phi_result (gsi.phi ()))
1590 : : {
1591 : 18072 : if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1592 : 162 : continue;
1593 : 17910 : if (gimple_code (stmt) == GIMPLE_PHI
1594 : 17910 : || gimple_plf (stmt, STMT_NECESSARY))
1595 : : {
1596 : : found = true;
1597 : : break;
1598 : : }
1599 : 18461 : }
1600 : 18461 : if (found)
1601 : 17885 : mark_virtual_phi_result_for_renaming (gsi.phi ());
1602 : : }
1603 : :
1604 : 147971 : if (!(bb->flags & BB_REACHABLE))
1605 : : {
1606 : : /* Speed up the removal of blocks that don't
1607 : : dominate others. Walking backwards, this should
1608 : : be the common case. ??? Do we need to recompute
1609 : : dominators because of cfg_altered? */
1610 : 30381 : if (!first_dom_son (CDI_DOMINATORS, bb))
1611 : 29908 : delete_basic_block (bb);
1612 : : else
1613 : : {
1614 : 473 : h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1615 : :
1616 : 2059 : while (h.length ())
1617 : : {
1618 : 1586 : bb = h.pop ();
1619 : 1586 : prev_bb = bb->prev_bb;
1620 : : /* Rearrangements to the CFG may have failed
1621 : : to update the dominators tree, so that
1622 : : formerly-dominated blocks are now
1623 : : otherwise reachable. */
1624 : 1586 : if (!!(bb->flags & BB_REACHABLE))
1625 : 0 : continue;
1626 : 1586 : delete_basic_block (bb);
1627 : : }
1628 : :
1629 : 473 : h.release ();
1630 : : }
1631 : : }
1632 : : }
1633 : : }
1634 : : }
1635 : :
1636 : 7461431 : if (bb_postorder)
1637 : 16770 : free (bb_postorder);
1638 : 7461431 : bb_postorder = NULL;
1639 : :
1640 : 7461431 : return something_changed;
1641 : 7461431 : }
1642 : :
1643 : :
1644 : : /* Print out removed statement statistics. */
1645 : :
1646 : : static void
1647 : 219 : print_stats (void)
1648 : : {
1649 : 219 : float percg;
1650 : :
1651 : 219 : percg = ((float) stats.removed / (float) stats.total) * 100;
1652 : 219 : fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1653 : : stats.removed, stats.total, (int) percg);
1654 : :
1655 : 219 : if (stats.total_phis == 0)
1656 : : percg = 0;
1657 : : else
1658 : 27 : percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1659 : :
1660 : 219 : fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1661 : : stats.removed_phis, stats.total_phis, (int) percg);
1662 : 219 : }
1663 : :
1664 : : /* Initialization for this pass. Set up the used data structures. */
1665 : :
1666 : : static void
1667 : 7461431 : tree_dce_init (bool aggressive)
1668 : : {
1669 : 7461431 : memset ((void *) &stats, 0, sizeof (stats));
1670 : :
1671 : 7461431 : if (aggressive)
1672 : : {
1673 : 3186762 : last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1674 : 3186762 : bitmap_clear (last_stmt_necessary);
1675 : 3186762 : bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1676 : 3186762 : bitmap_clear (bb_contains_live_stmts);
1677 : : }
1678 : :
1679 : 14922862 : processed = sbitmap_alloc (num_ssa_names + 1);
1680 : 7461431 : bitmap_clear (processed);
1681 : :
1682 : 7461431 : worklist.create (64);
1683 : 7461431 : cfg_altered = false;
1684 : 7461431 : }
1685 : :
1686 : : /* Cleanup after this pass. */
1687 : :
1688 : : static void
1689 : 7461431 : tree_dce_done (bool aggressive)
1690 : : {
1691 : 7461431 : if (aggressive)
1692 : : {
1693 : 3186762 : delete cd;
1694 : 3186762 : sbitmap_free (visited_control_parents);
1695 : 3186762 : sbitmap_free (last_stmt_necessary);
1696 : 3186762 : sbitmap_free (bb_contains_live_stmts);
1697 : 3186762 : bb_contains_live_stmts = NULL;
1698 : : }
1699 : :
1700 : 7461431 : sbitmap_free (processed);
1701 : :
1702 : 7461431 : worklist.release ();
1703 : 7461431 : }
1704 : :
1705 : : /* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1706 : :
1707 : : static int
1708 : 26119500 : sort_phi_args (const void *a_, const void *b_)
1709 : : {
1710 : 26119500 : auto *a = (const std::pair<edge, hashval_t> *) a_;
1711 : 26119500 : auto *b = (const std::pair<edge, hashval_t> *) b_;
1712 : 26119500 : hashval_t ha = a->second;
1713 : 26119500 : hashval_t hb = b->second;
1714 : 26119500 : if (ha < hb)
1715 : : return -1;
1716 : 16108634 : else if (ha > hb)
1717 : : return 1;
1718 : 7592282 : else if (a->first->dest_idx < b->first->dest_idx)
1719 : : return -1;
1720 : 3978789 : else if (a->first->dest_idx > b->first->dest_idx)
1721 : : return 1;
1722 : : else
1723 : 0 : return 0;
1724 : : }
1725 : :
/* Look for non-virtual PHIs and make a forwarder block when all PHIs
   have the same argument on a set of edges.  This is to not consider
   control dependences of individual edges for same values but only for
   the common set.  Returns TODO_cleanup_cfg when any forwarder was
   created, zero otherwise.  */

static unsigned
make_forwarders_with_degenerate_phis (function *fn)
{
  unsigned todo = 0;

  basic_block bb;
  FOR_EACH_BB_FN (bb, fn)
    {
      /* Only PHIs with three or more arguments have opportunities.  */
      if (EDGE_COUNT (bb->preds) < 3)
	continue;
      /* Do not touch loop headers or blocks with abnormal predecessors.
	 ??? This is to avoid creating valid loops here, see PR103458.
	 We might want to improve things to either explicitely add those
	 loops or at least consider blocks with no backedges.  */
      if (bb->loop_father->header == bb
	  || bb_has_abnormal_pred (bb))
	continue;

      /* Take one PHI node as template to look for identical
	 arguments.  Build a vector of candidates forming sets
	 of argument edges with equal values.  Note optimality
	 depends on the particular choice of the template PHI
	 since equal arguments are unordered leaving other PHIs
	 with more than one set of equal arguments within this
	 argument range unsorted.  We'd have to break ties by
	 looking at other PHI nodes.  */
      gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
      if (gsi_end_p (gsi))
	continue;
      gphi *phi = gsi.phi ();
      auto_vec<std::pair<edge, hashval_t>, 8> args;
      bool need_resort = false;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  edge e = gimple_phi_arg_edge (phi, i);
	  /* Skip abnormal edges since we cannot redirect them.  */
	  if (e->flags & EDGE_ABNORMAL)
	    continue;
	  /* Skip loop exit edges when we are in loop-closed SSA form
	     since the forwarder we'd create does not have a PHI node.  */
	  if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
	      && loop_exit_edge_p (e->src->loop_father, e))
	    continue;

	  tree arg = gimple_phi_arg_def (phi, i);
	  /* Hashes of decls and other non-constant, non-SSA arguments
	     are not stable across -g/-g0, so remember that a second
	     sort on dest_idx will be needed below.  */
	  if (!CONSTANT_CLASS_P (arg) && TREE_CODE (arg) != SSA_NAME)
	    need_resort = true;
	  args.safe_push (std::make_pair (e, iterative_hash_expr (arg, 0)));
	}
      if (args.length () < 2)
	continue;
      args.qsort (sort_phi_args);
      /* The above sorting can be different between -g and -g0, as e.g. decls
	 can have different uids (-g could have bigger gaps in between them).
	 So, only use that to determine which args are equal, then change
	 second from hash value to smallest dest_idx of the edges which have
	 equal argument and sort again.  If all the phi arguments are
	 constants or SSA_NAME, there is no need for the second sort, the hash
	 values are stable in that case.  */
      hashval_t hash = args[0].second;
      args[0].second = args[0].first->dest_idx;
      bool any_equal = false;
      for (unsigned i = 1; i < args.length (); ++i)
	if (hash == args[i].second
	    && operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[i - 1].first),
				PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
	  {
	    /* Equal to the previous arg: inherit its canonical index so
	       the set sorts contiguously.  */
	    args[i].second = args[i - 1].second;
	    any_equal = true;
	  }
	else
	  {
	    hash = args[i].second;
	    args[i].second = args[i].first->dest_idx;
	  }
      if (!any_equal)
	continue;
      if (need_resort)
	args.qsort (sort_phi_args);

      /* From the candidates vector now verify true candidates for
	 forwarders and create them.  */
      gphi *vphi = get_virtual_phi (bb);
      unsigned start = 0;
      while (start < args.length () - 1)
	{
	  unsigned i;
	  /* Find the end of the run of entries equal to args[start].  */
	  for (i = start + 1; i < args.length (); ++i)
	    if (args[start].second != args[i].second)
	      break;
	  /* args[start]..args[i-1] are equal.  */
	  if (start != i - 1)
	    {
	      /* Check all PHI nodes for argument equality.  */
	      bool equal = true;
	      gphi_iterator gsi2 = gsi;
	      gsi_next (&gsi2);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  gphi *phi2 = gsi2.phi ();
		  if (virtual_operand_p (gimple_phi_result (phi2)))
		    continue;
		  tree start_arg
		    = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
		  for (unsigned j = start + 1; j < i; ++j)
		    {
		      if (!operand_equal_p (start_arg,
					    PHI_ARG_DEF_FROM_EDGE
					      (phi2, args[j].first)))
			{
			  /* Another PHI might have a shorter set of
			     equivalent args.  Go for that.  */
			  i = j;
			  if (j == start + 1)
			    equal = false;
			  break;
			}
		    }
		  if (!equal)
		    break;
		}
	      if (equal)
		{
		  /* If we are asked to forward all edges the block
		     has all degenerate PHIs.  Do nothing in that case.  */
		  if (start == 0
		      && i == args.length ()
		      && args.length () == gimple_phi_num_args (phi))
		    break;
		  /* Instead of using make_forwarder_block we are
		     rolling our own variant knowing that the forwarder
		     does not need PHI nodes apart from eventually
		     a virtual one.  */
		  auto_vec<tree, 8> vphi_args;
		  if (vphi)
		    {
		      /* Save the virtual PHI arguments of the edge set
			 before redirection invalidates the edges.  */
		      vphi_args.reserve_exact (i - start);
		      for (unsigned j = start; j < i; ++j)
			vphi_args.quick_push
			  (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
		    }
		  /* split_edge and the redirections below invalidate
		     the dominator tree.  */
		  free_dominance_info (fn, CDI_DOMINATORS);
		  basic_block forwarder = split_edge (args[start].first);
		  /* Redirect the remaining edges of the set to the new
		     forwarder, accumulating their profile counts.  */
		  profile_count count = profile_count::zero ();
		  for (unsigned j = start + 1; j < i; ++j)
		    {
		      edge e = args[j].first;
		      redirect_edge_and_branch_force (e, forwarder);
		      redirect_edge_var_map_clear (e);
		      count += e->count ();
		    }
		  forwarder->count = count;
		  if (vphi)
		    {
		      /* Re-materialize the virtual PHI in the forwarder
			 and feed its result into the original one.  */
		      tree def = copy_ssa_name (vphi_args[0]);
		      gphi *vphi_copy = create_phi_node (def, forwarder);
		      for (unsigned j = start; j < i; ++j)
			add_phi_arg (vphi_copy, vphi_args[j - start],
				     args[j].first, UNKNOWN_LOCATION);
		      SET_PHI_ARG_DEF
			(vphi, single_succ_edge (forwarder)->dest_idx, def);
		    }
		  todo |= TODO_cleanup_cfg;
		}
	    }
	  /* Continue searching for more opportunities.  */
	  start = i;
	}
    }
  return todo;
}
1903 : :
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   Returns a set of TODO_* flags for the pass manager.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = 0;
  unsigned todo = 0;

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that a loop will be
     proved to be finite in testcases such as in loop-15.c and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_init (LOOPS_NORMAL
			   | LOOPS_HAVE_RECORDED_EXITS);
      scev_initialize ();
    }

  /* Merge edges carrying identical PHI arguments first so control
     dependence is computed on the common edge set.  */
  if (aggressive)
    todo |= make_forwarders_with_degenerate_phis (cfun);

  calculate_dominance_info (CDI_DOMINATORS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
	sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  /* Phase 1: seed the worklist with inherently necessary statements.  */
  find_obviously_necessary_stmts (aggressive);

  /* SCEV/loop info was only needed by the marking phase above.  */
  if (aggressive && ! in_loop_pipeline)
    {
      scev_finalize ();
      loop_optimizer_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  /* Phase 2: propagate necessity to the operands of necessary stmts.  */
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  /* Phase 3: remove everything not marked necessary.  */
  something_changed |= eliminate_unnecessary_stmts (aggressive);
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      /* Removed statements may invalidate niter estimates and SCEV.  */
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
	scev_reset ();
      todo |= TODO_update_ssa | TODO_cleanup_cfg;
    }
  return todo;
}
2001 : :
2002 : : /* Pass entry points. */
2003 : : static unsigned int
2004 : 4093303 : tree_ssa_dce (void)
2005 : : {
2006 : 0 : return perform_tree_ssa_dce (/*aggressive=*/false);
2007 : : }
2008 : :
2009 : : static unsigned int
2010 : 3368128 : tree_ssa_cd_dce (void)
2011 : : {
2012 : 0 : return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
2013 : : }
2014 : :
2015 : : namespace {
2016 : :
/* Pass descriptor for the plain DCE pass (class pass_dce).  */

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2029 : :
/* The plain (non-control-dependence) DCE pass.  Its single pass
   parameter selects whether TODO_update_address_taken is additionally
   requested after execution.  */

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt), update_address_taken_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_dce (m_ctxt); }
  /* The only supported parameter (N == 0) toggles the address-taken
     update after the pass.  */
  void set_pass_param (unsigned n, bool param) final override
    {
      gcc_assert (n == 0);
      update_address_taken_p = param;
    }
  bool gate (function *) final override { return flag_tree_dce != 0; }
  unsigned int execute (function *) final override
    {
      return (tree_ssa_dce ()
	      | (update_address_taken_p ? TODO_update_address_taken : 0));
    }

private:
  /* Whether to OR TODO_update_address_taken into the returned flags.  */
  bool update_address_taken_p;
}; // class pass_dce
2054 : :
2055 : : } // anon namespace
2056 : :
2057 : : gimple_opt_pass *
2058 : 285617 : make_pass_dce (gcc::context *ctxt)
2059 : : {
2060 : 285617 : return new pass_dce (ctxt);
2061 : : }
2062 : :
2063 : : namespace {
2064 : :
/* Pass descriptor for the control-dependence DCE pass
   (class pass_cd_dce).  */

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2077 : :
/* The control-dependence based DCE pass.  Like pass_dce, its single
   pass parameter selects whether TODO_update_address_taken is
   additionally requested after execution.  */

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_cd_dce (m_ctxt); }
  /* The only supported parameter (N == 0) toggles the address-taken
     update after the pass.  */
  void set_pass_param (unsigned n, bool param) final override
    {
      gcc_assert (n == 0);
      update_address_taken_p = param;
    }
  bool gate (function *) final override { return flag_tree_dce != 0; }
  unsigned int execute (function *) final override
    {
      return (tree_ssa_cd_dce ()
	      | (update_address_taken_p ? TODO_update_address_taken : 0));
    }

private:
  /* Whether to OR TODO_update_address_taken into the returned flags.  */
  bool update_address_taken_p;
}; // class pass_cd_dce
2102 : :
2103 : : } // anon namespace
2104 : :
2105 : : gimple_opt_pass *
2106 : 285617 : make_pass_cd_dce (gcc::context *ctxt)
2107 : : {
2108 : 285617 : return new pass_cd_dce (ctxt);
2109 : : }
2110 : :
2111 : :
/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts with a constant factor like the average SSA
   use operands number.  If NEED_EH_CLEANUP is non-NULL, the index of
   each block from which a possibly-throwing statement was removed is
   recorded in it so the caller can clean up the CFG.  */

void
simple_dce_from_worklist (bitmap worklist, bitmap need_eh_cleanup)
{
  int phiremoved = 0;
  int stmtremoved = 0;
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_clear_first_set_bit (worklist);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.
	 Note use in itself for a phi node is not counted as still in use.  */
      if (!def)
	continue;
      if (!has_zero_uses (def))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (def);

	  /* Only a PHI may be dead while still having uses, namely when
	     all remaining uses are by the PHI itself.  */
	  if (gimple_code (def_stmt) != GIMPLE_PHI)
	    continue;

	  gimple *use_stmt;
	  imm_use_iterator use_iter;
	  bool canremove = true;

	  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
	    {
	      /* Ignore debug statements.  */
	      if (is_gimple_debug (use_stmt))
		continue;
	      /* Any non-debug use outside the defining PHI keeps it live.  */
	      if (use_stmt != def_stmt)
		{
		  canremove = false;
		  break;
		}
	    }
	  if (!canremove)
	    continue;
	}

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
	continue;

      /* The defining statement needs to be defining only this name.
	 ASM is the only statement that can define more than one
	 name.  */
      if (is_a<gasm *>(t)
	  && !single_ssa_def_operand (t, SSA_OP_ALL_DEFS))
	continue;

      /* Don't remove statements that are needed for non-call
	 eh to work.  */
      if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
	continue;

      /* Tell the caller that we removed a statement that might
	 throw so it could cleanup the cfg for that block.  */
      if (need_eh_cleanup && stmt_could_throw_p (cfun, t))
	bitmap_set_bit (need_eh_cleanup, gimple_bb (t)->index);

      /* Add uses to the worklist.  Operands that become unused by this
	 removal will be reconsidered on a later iteration.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  if (TREE_CODE (use) == SSA_NAME
	      && ! SSA_NAME_IS_DEFAULT_DEF (use))
	    bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
	}

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt:");
	  print_gimple_stmt (dump_file, t, 0);
	}
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
	{
	  remove_phi_node (&gsi, true);
	  phiremoved++;
	}
      else
	{
	  unlink_stmt_vdef (t);
	  gsi_remove (&gsi, true);
	  release_defs (t);
	  stmtremoved++;
	}
    }
  statistics_counter_event (cfun, "PHIs removed",
			    phiremoved);
  statistics_counter_event (cfun, "Statements removed",
			    stmtremoved);
}
|