Branch data Line data Source code
1 : : /* CFG cleanup for trees.
2 : : Copyright (C) 2001-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify
7 : : it under the terms of the GNU General Public License as published by
8 : : the Free Software Foundation; either version 3, or (at your option)
9 : : any later version.
10 : :
11 : : GCC is distributed in the hope that it will be useful,
12 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
13 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 : : GNU General Public License for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : #include "config.h"
21 : : #include "system.h"
22 : : #include "coretypes.h"
23 : : #include "backend.h"
24 : : #include "rtl.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "cfghooks.h"
28 : : #include "tree-pass.h"
29 : : #include "ssa.h"
30 : : #include "diagnostic-core.h"
31 : : #include "fold-const.h"
32 : : #include "cfganal.h"
33 : : #include "cfgcleanup.h"
34 : : #include "tree-eh.h"
35 : : #include "gimplify.h"
36 : : #include "gimple-iterator.h"
37 : : #include "tree-cfg.h"
38 : : #include "tree-ssa-loop-manip.h"
39 : : #include "tree-dfa.h"
40 : : #include "tree-ssa.h"
41 : : #include "cfgloop.h"
42 : : #include "tree-scalar-evolution.h"
43 : : #include "gimple-match.h"
44 : : #include "gimple-fold.h"
45 : : #include "tree-ssa-loop-niter.h"
46 : : #include "cgraph.h"
47 : : #include "tree-into-ssa.h"
48 : : #include "tree-cfgcleanup.h"
49 : : #include "gimple-pretty-print.h"
50 : : #include "target.h"
51 : :
52 : :
53 : : /* The set of blocks in that at least one of the following changes happened:
54 : : -- the statement at the end of the block was changed
55 : : -- the block was newly created
56 : : -- the set of the predecessors of the block changed
57 : : -- the set of the successors of the block changed
58 : : ??? Maybe we could track these changes separately, since they determine
59 : : what cleanups it makes sense to try on the block. */
60 : : bitmap cfgcleanup_altered_bbs;
61 : :
62 : : /* Remove any fallthru edge from EV. Return true if an edge was removed. */
63 : :
64 : : static bool
65 : 22454075 : remove_fallthru_edge (vec<edge, va_gc> *ev)
66 : : {
67 : 22454075 : edge_iterator ei;
68 : 22454075 : edge e;
69 : :
70 : 25195590 : FOR_EACH_EDGE (e, ei, ev)
71 : 2773835 : if ((e->flags & EDGE_FALLTHRU) != 0)
72 : : {
73 : 32320 : if (e->flags & EDGE_COMPLEX)
74 : 0 : e->flags &= ~EDGE_FALLTHRU;
75 : : else
76 : 32320 : remove_edge_and_dominated_blocks (e);
77 : 32320 : return true;
78 : : }
79 : : return false;
80 : : }
81 : :
/* Convert a SWTCH with a single non-default case into a gcond and replace
   it at GSI.  Returns true on success, false if SWTCH has more than one
   non-default case or the range-check type is unavailable.  */

static bool
convert_single_case_switch (gswitch *swtch, gimple_stmt_iterator &gsi)
{
  /* Label 0 is the default; exactly two labels means one real case.  */
  if (gimple_switch_num_labels (swtch) != 2)
    return false;

  tree index = gimple_switch_index (swtch);
  tree label = gimple_switch_label (swtch, 1);
  tree low = CASE_LOW (label);
  tree high = CASE_HIGH (label);

  basic_block default_bb = gimple_switch_default_bb (cfun, swtch);
  basic_block case_bb = label_to_block (cfun, CASE_LABEL (label));

  basic_block bb = gimple_bb (swtch);
  gcond *cond;

  /* Replace switch statement with condition statement.  A CASE_HIGH
     means the case covers a range [low, high], so build a range test;
     otherwise compare the index for equality with the single value.  */
  if (high)
    {
      tree lhs, rhs;
      /* generate_range_test needs a type in which the shifted range
	 comparison can be done; bail out if there is none.  */
      if (range_check_type (TREE_TYPE (index)) == NULL_TREE)
	return false;
      generate_range_test (bb, index, low, high, &lhs, &rhs);
      cond = gimple_build_cond (LE_EXPR, lhs, rhs, NULL_TREE, NULL_TREE);
    }
  else
    cond = gimple_build_cond (EQ_EXPR, index,
			      fold_convert (TREE_TYPE (index), low),
			      NULL_TREE, NULL_TREE);

  gsi_replace (&gsi, cond, true);

  /* Update edges: the case edge becomes the condition-true edge and
     the default edge the condition-false edge.  */
  edge case_edge = find_edge (bb, case_bb);
  edge default_edge = find_edge (bb, default_bb);

  case_edge->flags |= EDGE_TRUE_VALUE;
  default_edge->flags |= EDGE_FALSE_VALUE;
  return true;
}
126 : :
127 : : /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 of STMT in BB by
128 : : swapping edges of the BB. */
129 : : bool
130 : 181415008 : canonicalize_bool_cond (gcond *stmt, basic_block bb)
131 : : {
132 : 181415008 : tree rhs1 = gimple_cond_lhs (stmt);
133 : 181415008 : tree rhs2 = gimple_cond_rhs (stmt);
134 : 181415008 : enum tree_code code = gimple_cond_code (stmt);
135 : 181415008 : if (code != EQ_EXPR && code != NE_EXPR)
136 : : return false;
137 : 137171625 : if (TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE
138 : 137171625 : && (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
139 : 79460724 : || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1))
140 : : return false;
141 : :
142 : : /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
143 : 24287016 : if (code == EQ_EXPR && !integer_zerop (rhs2))
144 : : return false;
145 : 24154067 : if (code == NE_EXPR && !integer_onep (rhs2))
146 : : return false;
147 : :
148 : 135556 : gimple_cond_set_code (stmt, NE_EXPR);
149 : 135556 : gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
150 : 135556 : EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
151 : 135556 : EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
152 : :
153 : 135556 : if (dump_file)
154 : : {
155 : 1 : fprintf (dump_file, " Swapped '");
156 : 1 : print_gimple_expr (dump_file, stmt, 0);
157 : 1 : fprintf (dump_file, "'\n");
158 : : }
159 : : return true;
160 : : }
161 : :
/* Disconnect an unreachable block in the control expression starting
   at block BB.  GSI points at the control statement (GIMPLE_COND or
   GIMPLE_SWITCH) ending BB.  If the condition/switch index can be
   resolved to a constant, keep only the taken edge, remove the control
   statement, and return true if any edge was removed.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple *stmt = gsi_stmt (gsi);

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      tree val = NULL_TREE;

      /* Try to convert a switch with just a single non-default case to
	 GIMPLE condition.  On success GSI now points at the new gcond.  */
      if (gimple_code (stmt) == GIMPLE_SWITCH
	  && convert_single_case_switch (as_a<gswitch *> (stmt), gsi))
	stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_COND)
	canonicalize_bool_cond (as_a<gcond*> (stmt), bb);

      /* Simplification below may fold away overflow checks; defer the
	 warnings so we only warn when an edge is actually removed.  */
      fold_defer_overflow_warnings ();
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    /* See whether the condition folds to a constant without
	       following SSA edges (cheap, local simplification only).  */
	    gimple_match_op res_op;
	    if (gimple_simplify (stmt, &res_op, NULL, no_follow_ssa_edges,
				 no_follow_ssa_edges)
		&& res_op.code == INTEGER_CST)
	      val = res_op.ops[0];
	  }
	  break;

	case GIMPLE_SWITCH:
	  val = gimple_switch_index (as_a <gswitch *> (stmt));
	  break;

	default:
	  ;
	}
      /* find_taken_edge returns NULL unless VAL pins down a unique
	 successor.  */
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	{
	  fold_undefer_and_ignore_overflow_warnings ();
	  return false;
	}

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      /* Emit the deferred overflow warning once, now that we
		 know the simplification changes the CFG.  */
	      if (!warned)
		{
		  fold_undefer_overflow_warnings
		    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
		  warned = true;
		}

	      /* Preserve the profile: fold the removed edge's count
		 into the surviving edge.  */
	      taken_edge->probability += e->probability;
	      remove_edge_and_dominated_blocks (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (!warned)
	fold_undefer_and_ignore_overflow_warnings ();
    }
  else
    taken_edge = single_succ_edge (bb);

  /* The control statement is now redundant; drop it and make the
     surviving edge a plain fallthru.  */
  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
247 : :
/* Cleanup the GF_CALL_CTRL_ALTERING flag according to the updated
   gimple_call_flags.  BB_END is the statement ending BB; clear its
   ctrl-altering flag when the call provably cannot alter control flow
   anymore.  */

static void
cleanup_call_ctrl_altering_flag (basic_block bb, gimple *bb_end)
{
  /* Only calls currently marked ctrl-altering are of interest, and
     IFN_UNIQUE markers must stay ctrl-altering.  */
  if (!is_gimple_call (bb_end)
      || !gimple_call_ctrl_altering_p (bb_end)
      || (/* IFN_UNIQUE should be the last insn, to make checking for it
	     as cheap as possible.  */
	  gimple_call_internal_p (bb_end)
	  && gimple_call_internal_unique_p (bb_end)))
    return;

  int flags = gimple_call_flags (bb_end);
  /* A call that does not return abnormally and is const/pure (and not
     looping-const-or-pure) or a leaf call cannot alter control flow.  */
  if (!(flags & ECF_NORETURN)
      && (((flags & (ECF_CONST | ECF_PURE))
	   && !(flags & ECF_LOOPING_CONST_OR_PURE))
	  || (flags & ECF_LEAF)))
    gimple_call_set_ctrl_altering (bb_end, false);
  else
    {
      /* Otherwise inspect the outgoing edges: a fallthru edge with no
	 abnormal edge means control simply continues past the call.  */
      edge_iterator ei;
      edge e;
      bool found = false;
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  found = true;
	else if (e->flags & EDGE_ABNORMAL)
	  {
	    found = false;
	    break;
	  }
      /* If there's no abnormal edge and a fallthru edge the call
	 isn't control-altering anymore.  */
      if (found)
	gimple_call_set_ctrl_altering (bb_end, false);
    }
}
287 : :
/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  Handles: dead EH edges, constant-foldable
   conditions/switches, computed gotos with known destinations, and calls
   that became noreturn.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple *stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  /* Try to cleanup ctrl altering flag for call which ends bb.  */
  cleanup_call_ctrl_altering_flag (bb, stmt);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      /* A control stmt must be the very last stmt, even past debug stmts.  */
      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      retval |= cleanup_control_expr_graph (bb, gsi);
    }
  else if (gimple_code (stmt) == GIMPLE_GOTO
	   && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
	       == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
	 destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      /* First look at all the outgoing edges.  Delete any outgoing
	 edges which do not go to the right block.  For the one
	 edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      /* Labels from another function (e.g. via nested functions) cannot
	 be resolved here.  */
      if (DECL_CONTEXT (label) != cfun->decl)
	return retval;
      target_block = label_to_block (cfun, label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->dest != target_block)
	    remove_edge_and_dominated_blocks (e);
	  else
	    {
	      /* Turn off the EDGE_ABNORMAL flag.  */
	      e->flags &= ~EDGE_ABNORMAL;

	      /* And set EDGE_FALLTHRU.  */
	      e->flags |= EDGE_FALLTHRU;
	      ei_next (&ei);
	    }
	}

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
	 relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
	   && gimple_call_noreturn_p (stmt))
    {
      /* If there are debug stmts after the noreturn call, remove them
	 now, they should be all unreachable anyway.  */
      for (gsi_next (&gsi); !gsi_end_p (gsi); )
	gsi_remove (&gsi, true);
      if (remove_fallthru_edge (bb->succs))
	retval = true;
      /* Unless the LHS needs to be removed separately, mark the call
	 as ending the block.  */
      tree lhs = gimple_call_lhs (stmt);
      if (!lhs
	  || !should_remove_lhs_p (lhs))
	gimple_call_set_ctrl_altering (stmt, true);
    }

  return retval;
}
380 : :
381 : : /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
382 : : those alternatives are equal in each of the PHI nodes, then return
383 : : true, else return false. */
384 : :
385 : : bool
386 : 4415463 : phi_alternatives_equal (basic_block dest, edge e1, edge e2)
387 : : {
388 : 4415463 : int n1 = e1->dest_idx;
389 : 4415463 : int n2 = e2->dest_idx;
390 : 4415463 : gphi_iterator gsi;
391 : :
392 : 4946831 : for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
393 : : {
394 : 4891137 : gphi *phi = gsi.phi ();
395 : 4891137 : tree val1 = gimple_phi_arg_def (phi, n1);
396 : 4891137 : tree val2 = gimple_phi_arg_def (phi, n2);
397 : :
398 : 4891137 : gcc_assert (val1 != NULL_TREE);
399 : 4891137 : gcc_assert (val2 != NULL_TREE);
400 : :
401 : 4891137 : if (!operand_equal_for_phi_arg_p (val1, val2))
402 : : return false;
403 : : }
404 : :
405 : : return true;
406 : : }
407 : :
/* Move debug stmts from the forwarder block SRC to DEST or PRED.
   DEST_SINGLE_PRED_P says whether DEST has a single predecessor,
   PRED_SINGLE_SUCC_P whether PRED has a single successor; those
   determine where the debug stmts remain unconditionally valid.  */

static void
move_debug_stmts_from_forwarder (basic_block src,
				 basic_block dest, bool dest_single_pred_p,
				 basic_block pred, bool pred_single_succ_p)
{
  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If we cannot move to the destination but to the predecessor do that.  */
  if (!dest_single_pred_p && pred_single_succ_p)
    {
      gimple_stmt_iterator gsi_to = gsi_last_bb (pred);
      /* Only append when PRED doesn't end in a control statement.  */
      if (gsi_end_p (gsi_to) || !stmt_ends_bb_p (gsi_stmt (gsi_to)))
	{
	  for (gimple_stmt_iterator gsi = gsi_after_labels (src);
	       !gsi_end_p (gsi);)
	    {
	      gimple *debug = gsi_stmt (gsi);
	      gcc_assert (is_gimple_debug (debug));
	      gsi_move_after (&gsi, &gsi_to);
	    }
	  return;
	}
    }

  /* Else move to DEST or drop/reset them.  */
  gimple_stmt_iterator gsi_to = gsi_after_labels (dest);
  for (gimple_stmt_iterator gsi = gsi_after_labels (src); !gsi_end_p (gsi);)
    {
      gimple *debug = gsi_stmt (gsi);
      gcc_assert (is_gimple_debug (debug));
      /* Move debug binds anyway, but not anything else like begin-stmt
	 markers unless they are always valid at the destination.  */
      if (dest_single_pred_p
	  || gimple_debug_bind_p (debug))
	{
	  gsi_move_before (&gsi, &gsi_to);
	  /* Reset debug-binds that are not always valid at the destination.
	     Simply dropping them can cause earlier values to become live,
	     generating wrong debug information.
	     ??? There are several things we could improve here.  For
	     one we might be able to move stmts to the predecessor.
	     For another, if the debug stmt is immediately followed by a
	     (debug) definition in the destination (on a post-dominated path?)
	     we can elide it without any bad effects.  */
	  if (!dest_single_pred_p)
	    {
	      gimple_debug_bind_reset_value (debug);
	      update_stmt (debug);
	    }
	}
      else
	gsi_next (&gsi);
    }
}
465 : :
/* Return true if basic block BB does nothing except pass control
   flow to another block, we can safely insert a label at the start
   of the successor block, and BB was removed.

   As a precondition, we require that BB be not equal to
   the entry block.
   If CAN_SPLIT is true, we can split an edge to have
   another bb with the phi.  */

static bool
maybe_remove_forwarder_block (basic_block bb, bool can_split = false)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (!single_succ_p (bb)
      /* BB may not be a predecessor of the exit block.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
	return false;
      /* If goto_locus of any of the edges differs, prevent removing
	 the forwarder block when not optimizing.  */
      else if (!optimize
	       && (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
		   || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
	       && e->goto_locus != locus)
	return false;
  }

  /* If this bb has a single predecessor and that predecessor
     has a single successor, this bb will be merged with the
     predecessor so ignore it for removing of the forwarder block.  */
  if (single_pred_p (bb)
      && single_succ_p (single_pred_edge (bb)->src))
    return false;

  bool has_phi = !gimple_seq_empty_p (phi_nodes (bb));
  basic_block dest = single_succ_edge (bb)->dest;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.
     If the BB has phi, we don't want to deal with abnormal edges either.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
	  || !gimple_seq_empty_p (phi_nodes (dest))
	  || has_phi))
    return false;

  /* When we have a phi, we have to feed into another
     basic block with PHI nodes.  */
  if (has_phi && gimple_seq_empty_p (phi_nodes (dest)))
    return false;

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
	{
	case GIMPLE_LABEL:
	  /* Nonlocal labels and EH landing pads pin the block.  */
	  if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt)))
	      || EH_LANDING_PAD_NR (gimple_label_label (as_a <glabel *> (stmt))))
	    return false;
	  /* At -O0 keep the block if merging would lose location info.  */
	  if (!optimize
	      && (gimple_has_location (stmt)
		  || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
	      && gimple_location (stmt) != locus)
	    return false;
	  break;

	  /* ??? For now, hope there's a corresponding debug
	     assignment at the destination.  */
	case GIMPLE_DEBUG:
	  break;

	default:
	  return false;
	}
    }
  /* If BB has PHIs and does not dominate DEST,
     then the PHI nodes at DEST must be the only
     users of the results of the PHI nodes at BB.
     So only check when BB dominates dest.  */
  if (has_phi
      && dominated_by_p (CDI_DOMINATORS, dest, bb))
    {
      gphi_iterator gsi;
      unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

      /* BB dominates DEST.  There may be many users of the PHI
	 nodes in BB.  However, there is still a trivial case we
	 can handle.  If the result of every PHI in BB is used
	 only by a PHI in DEST, then we can trivially merge the
	 PHI nodes from BB into DEST.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree result = gimple_phi_result (phi);
	  use_operand_p imm_use;
	  gimple *use_stmt;

	  /* If the PHI's result is never used, then we can just
	     ignore it.  */
	  if (has_zero_uses (result))
	    continue;

	  /* Get the single use of the result of this PHI node.  */
	  if (!single_imm_use (result, &imm_use, &use_stmt)
	      || gimple_code (use_stmt) != GIMPLE_PHI
	      || gimple_bb (use_stmt) != dest
	      || gimple_phi_arg_def (use_stmt, dest_idx) != result)
	    return false;
	}
    }

  if (current_loops)
    {
      /* Protect loop headers.  */
      if (bb_loop_header_p (bb))
	return false;

      /* Protect loop preheaders and latches if requested.  */
      if (dest->loop_father->header == dest)
	{
	  if (bb->loop_father == dest->loop_father)
	    {
	      if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
		return false;
	      /* If bb doesn't have a single predecessor we'd make this
		 loop have multiple latches.  Don't do that if that
		 would in turn require disambiguating them.  */
	      if (!single_pred_p (bb)
		  && !loops_state_satisfies_p
		       (LOOPS_MAY_HAVE_MULTIPLE_LATCHES))
		return false;
	    }
	  /* cleanup_tree_cfg_noloop just created the loop preheader, don't
	     remove it if it has phis.  */
	  else if (bb->loop_father == loop_outer (dest->loop_father)
		   && !has_phi
		   && !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
	    ;
	  else
	    /* Always preserve other edges into loop headers that are
	       not simple latches or preheaders.  */
	    return false;
	}
    }

  edge succ = single_succ_edge (bb), e, s;
  gimple *stmt;
  gimple_stmt_iterator gsi_to;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.
     Otherwise we have to split the edge and that becomes
     a "forwarder" again.  */
  if ((!can_split || !has_phi)
      && !gimple_seq_empty_p (phi_nodes (dest)))
    {
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  s = find_edge (e->src, dest);
	  if (!s)
	    continue;

	  if (!phi_alternatives_equal (dest, succ, s))
	    return false;
	}
    }

  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);
  bool dest_single_pred_p = single_pred_p (dest);

  /* Redirect the edges.  */
  for (edge_iterator ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      s = find_edge (e->src, dest);

      /* See if we can split the edge if we already have an edge from src
	 to dest.  */
      if (can_split && has_phi)
	/* PHI arguments are different.  Create a forwarder block by
	   splitting E so that we can merge PHI arguments on E to
	   DEST.  */
	if (s && !phi_alternatives_equal (dest, s, succ))
	  e = single_succ_edge (split_edge (e));

      if (e->flags & EDGE_ABNORMAL)
	{
	  /* If there is an abnormal edge, redirect it anyway, and
	     move the labels to the new block to make it legal.  */
	  s = redirect_edge_succ_nodup (e, dest);
	}
      else
	s = redirect_edge_and_branch (e, dest);

      if (s == e)
	{
	  /* If we merge the forwarder with phis into a loop header
	     verify if we are creating another loop latch edge.
	     If so, reset number of iteration information of the loop.  */
	  if (has_phi
	      && dest->loop_father
	      && dest->loop_father->header == dest
	      && dominated_by_p (CDI_DOMINATORS, e->src, dest))
	    {
	      dest->loop_father->any_upper_bound = false;
	      dest->loop_father->any_likely_upper_bound = false;
	      free_numbers_of_iterations_estimates (dest->loop_father);
	    }
	  /* Copy arguments for the phi nodes, since the edge was not
	     here before.  */
	  copy_phi_arg_into_existing_phi (succ, s, has_phi);
	}
      else
	redirect_edge_var_map_clear (s);
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	break;

      /* Forwarder blocks can only contain labels and debug stmts, and
	 labels must come first, so if we get to this point, we know
	 we're looking at a label.  */
      tree decl = gimple_label_label (as_a <glabel *> (stmt));
      if (EH_LANDING_PAD_NR (decl) != 0
	  || DECL_NONLOCAL (decl)
	  || FORCED_LABEL (decl)
	  || !DECL_ARTIFICIAL (decl))
	gsi_move_before (&gsi, &gsi_to);
      else
	gsi_next (&gsi);
    }

  /* Move debug statements.  Reset them if the destination does not
     have a single predecessor.  */
  move_debug_stmts_from_forwarder (bb, dest, dest_single_pred_p,
				   pred, pred && single_succ_p (pred));

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  basic_block dom, dombb, domdest;

  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
	 nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
778 : :
/* STMT is a call that has been discovered noreturn.  Split the
   block to prepare fixing up the CFG and remove LHS.
   Return true if cleanup-cfg needs to run.  */

bool
fixup_noreturn_call (gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  /* __builtin_return is handled by the RTL expander and must stay put.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    {
      if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
	{
	  /* Don't split if there are only debug stmts
	     after stmt, that can result in -fcompare-debug
	     failures.  Remove the debug stmts instead,
	     they should be all unreachable anyway.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  for (gsi_next (&gsi); !gsi_end_p (gsi); )
	    gsi_remove (&gsi, true);
	}
      else
	{
	  split_block (bb, stmt);
	  changed = true;
	}
    }

  /* If there is an LHS, remove it, but only if its type has fixed size.
     The LHS will need to be recreated during RTL expansion and creating
     temporaries of variable-sized types is not supported.  Also don't
     do this with TREE_ADDRESSABLE types, as assign_temp will abort.
     Drop LHS regardless of TREE_ADDRESSABLE, if the function call
     has been changed into a call that does not return a value, like
     __builtin_unreachable or __cxa_pure_virtual.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (should_remove_lhs_p (lhs)
	  || VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))))
    {
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to fix up the SSA name to avoid checking errors.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Give the orphaned SSA name a fresh variable and make it a
	     default definition so the SSA verifier stays happy.  */
	  tree new_var = create_tmp_reg (TREE_TYPE (lhs));
	  SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
	  SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
	  set_ssa_default_def (cfun, new_var, lhs);
	}

      update_stmt (stmt);
    }

  /* Mark the call as altering control flow.  */
  if (!gimple_call_ctrl_altering_p (stmt))
    {
      gimple_call_set_ctrl_altering (stmt, true);
      changed = true;
    }

  return changed;
}
847 : :
848 : : /* Return true if we want to merge BB1 and BB2 into a single block. */
849 : :
850 : : static bool
851 : 424050047 : want_merge_blocks_p (basic_block bb1, basic_block bb2)
852 : : {
853 : 424050047 : if (!can_merge_blocks_p (bb1, bb2))
854 : : return false;
855 : 28894301 : gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb1);
856 : 28894301 : if (gsi_end_p (gsi) || !stmt_can_terminate_bb_p (gsi_stmt (gsi)))
857 : 25647875 : return true;
858 : 3246426 : return bb1->count.ok_for_merging (bb2->count);
859 : : }
860 : :
861 : :
/* Tries to cleanup cfg in basic block BB by merging blocks.  Returns
   true if anything changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  if (maybe_remove_forwarder_block (bb))
    return true;

  /* If there is a merge opportunity with the predecessor
     do nothing now but wait until we process the predecessor.
     This happens when we visit BBs in a non-optimal order and
     avoids quadratic behavior with adjusting stmts BB pointer.  */
  if (single_pred_p (bb)
      && want_merge_blocks_p (single_pred (bb), bb))
    /* But make sure we _do_ visit it.  When we remove unreachable paths
       ending in a backedge we fail to mark the destinations predecessors
       as changed.  */
    bitmap_set_bit (cfgcleanup_altered_bbs, single_pred (bb)->index);

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  else if (single_succ_p (bb)
	   && want_merge_blocks_p (bb, single_succ (bb)))
    {
      /* BB absorbs its single successor here.  */
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return false;
}
894 : :
/* Return true if E is an EDGE_ABNORMAL edge for returns_twice calls,
   i.e. one going from .ABNORMAL_DISPATCHER to basic block which doesn't
   start with a forced or nonlocal label.  Calls which return twice can return
   the second time only if they are called normally the first time, so basic
   blocks which can be only entered through these abnormal edges but not
   normally are effectively unreachable as well.  Additionally ignore
   __builtin_setjmp_receiver starting blocks, which have one FORCED_LABEL
   and which are always only reachable through EDGE_ABNORMAL edge.  They are
   handled in cleanup_control_flow_pre.  */

static bool
maybe_dead_abnormal_edge_p (edge e)
{
  /* Only consider pure abnormal edges (not abnormal-EH).  */
  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL)
    return false;

  /* The source must be an .ABNORMAL_DISPATCHER block.  */
  gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (e->src);
  gimple *g = gsi_stmt (gsi);
  if (!g || !gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
    return false;

  /* Scan the leading labels of the destination.  A nonlocal label or
     more than one forced label means the block can be entered
     abnormally by other means, so it is not dead.  */
  tree target = NULL_TREE;
  for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
      {
	tree this_target = gimple_label_label (label_stmt);
	if (DECL_NONLOCAL (this_target))
	  return false;
	if (FORCED_LABEL (this_target))
	  {
	    if (target)
	      return false;
	    target = this_target;
	  }
      }
    else
      break;

  if (target)
    {
      /* If there was a single FORCED_LABEL, check for
	 __builtin_setjmp_receiver with address of that label.  */
      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi))
	return false;
      if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_RECEIVER))
	return false;

      tree arg = gimple_call_arg (gsi_stmt (gsi), 0);
      if (TREE_CODE (arg) != ADDR_EXPR || TREE_OPERAND (arg, 0) != target)
	return false;
    }
  return true;
}
950 : :
/* If BB is a basic block ending with __builtin_setjmp_setup, return edge
   from .ABNORMAL_DISPATCHER basic block to corresponding
   __builtin_setjmp_receiver basic block, otherwise return NULL.  */
static edge
builtin_setjmp_setup_bb (basic_block bb)
{
  /* BB must have exactly two successors, at least one of which is a
     pure abnormal (non-EH) edge.  */
  if (EDGE_COUNT (bb->succs) != 2
      || ((EDGE_SUCC (bb, 0)->flags
	   & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
	  && (EDGE_SUCC (bb, 1)->flags
	      & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL))
    return NULL;

  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi)
      || !gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_SETUP))
    return NULL;

  /* The second argument of __builtin_setjmp_setup is the address of
     the receiver's label.  */
  tree arg = gimple_call_arg (gsi_stmt (gsi), 1);
  if (TREE_CODE (arg) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (arg, 0)) != LABEL_DECL)
    return NULL;

  /* The receiver block must be entered solely by an abnormal edge whose
     source is one of BB's successors (the dispatcher).  */
  basic_block recv_bb = label_to_block (cfun, TREE_OPERAND (arg, 0));
  if (EDGE_COUNT (recv_bb->preds) != 1
      || (EDGE_PRED (recv_bb, 0)->flags
	  & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
      || (EDGE_SUCC (bb, 0)->dest != EDGE_PRED (recv_bb, 0)->src
	  && EDGE_SUCC (bb, 1)->dest != EDGE_PRED (recv_bb, 0)->src))
    return NULL;

  /* EDGE_PRED (recv_bb, 0)->src should be the .ABNORMAL_DISPATCHER bb.  */
  return EDGE_PRED (recv_bb, 0);
}
985 : :
/* Do cleanup_control_flow_bb in PRE order.  Visits all blocks reachable
   from the entry block with an explicit DFS stack, then deletes every
   block that was not visited.  Returns true if anything changed.  */

static bool
cleanup_control_flow_pre ()
{
  bool retval = false;

  /* We want remove_edge_and_dominated_blocks to only remove edges,
     not dominated blocks which it does when dom info isn't available.
     Pretend so.  */
  dom_state saved_state = dom_info_state (CDI_DOMINATORS);
  set_dom_info_availability (CDI_DOMINATORS, DOM_NONE);

  /* Explicit DFS stack of edge iterators; VISITED tracks reached
     blocks by index.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
  auto_sbitmap visited (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);

  /* Extra edge container for dispatcher->receiver edges of
     __builtin_setjmp pairs; shares the stack with regular succ lists.  */
  vec<edge, va_gc> *setjmp_vec = NULL;
  auto_vec<basic_block, 4> abnormal_dispatchers;

  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (! stack.is_empty ())
    {
      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      basic_block dest = ei_edge (ei)->dest;

      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !bitmap_bit_p (visited, dest->index)
	  && (ei_container (ei) == setjmp_vec
	      || !maybe_dead_abnormal_edge_p (ei_edge (ei))))
	{
	  bitmap_set_bit (visited, dest->index);
	  /* We only possibly remove edges from DEST here, leaving
	     possibly unreachable code in the IL.  */
	  retval |= cleanup_control_flow_bb (dest);

	  /* Check for __builtin_setjmp_setup.  Edges from .ABNORMAL_DISPATCH
	     to __builtin_setjmp_receiver will be normally ignored by
	     maybe_dead_abnormal_edge_p.  If DEST is a visited
	     __builtin_setjmp_setup, queue edge from .ABNORMAL_DISPATCH
	     to __builtin_setjmp_receiver, so that it will be visited too.  */
	  if (edge e = builtin_setjmp_setup_bb (dest))
	    {
	      vec_safe_push (setjmp_vec, e);
	      if (vec_safe_length (setjmp_vec) == 1)
		stack.quick_push (ei_start (setjmp_vec));
	    }

	  /* Remember .ABNORMAL_DISPATCHER blocks entered through a pure
	     abnormal edge; they are post-processed below.  */
	  if ((ei_edge (ei)->flags
	       & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
	    {
	      gimple_stmt_iterator gsi
		= gsi_start_nondebug_after_labels_bb (dest);
	      gimple *g = gsi_stmt (gsi);
	      if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
		abnormal_dispatchers.safe_push (dest);
	    }

	  if (EDGE_COUNT (dest->succs) > 0)
	    stack.quick_push (ei_start (dest->succs));
	}
      else
	{
	  /* Advance to the next out-edge, or pop when the current
	     container is exhausted.  */
	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    {
	      if (ei_container (ei) == setjmp_vec)
		vec_safe_truncate (setjmp_vec, 0);
	      stack.pop ();
	    }
	}
    }

  vec_free (setjmp_vec);

  /* If we've marked .ABNORMAL_DISPATCHER basic block(s) as visited
     above, but haven't marked any of their successors as visited,
     unmark them now, so that they can be removed as useless.  */
  for (basic_block dispatcher_bb : abnormal_dispatchers)
    {
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, dispatcher_bb->succs)
	if (bitmap_bit_p (visited, e->dest->index))
	  break;
      if (e == NULL)
	bitmap_clear_bit (visited, dispatcher_bb->index);
    }

  set_dom_info_availability (CDI_DOMINATORS, saved_state);

  /* We are deleting BBs in non-reverse dominator order, make sure
     insert_debug_temps_for_defs is prepared for that.  */
  if (retval)
    free_dominance_info (CDI_DOMINATORS);

  /* Remove all now (and previously) unreachable blocks.  */
  for (int i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb && !bitmap_bit_p (visited, bb->index))
	{
	  if (!retval)
	    free_dominance_info (CDI_DOMINATORS);
	  delete_basic_block (bb);
	  retval = true;
	}
    }

  return retval;
}
1100 : :
1101 : : static bool
1102 : 163529 : mfb_keep_latches (edge e)
1103 : : {
1104 : 163529 : return !((dom_info_available_p (CDI_DOMINATORS)
1105 : 27230 : && dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1106 : 154376 : || (e->flags & EDGE_DFS_BACK));
1107 : : }
1108 : :
/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.
   SSA_UPDATE_FLAGS, when nonzero, are passed to update_ssa after the
   initial control-flow cleanup.  */

static bool
cleanup_tree_cfg_noloop (unsigned ssa_update_flags)
{
  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Ensure that we have single entries into loop headers.  Otherwise
     if one of the entries is becoming a latch due to CFG cleanup
     (from formerly being part of an irreducible region) then we mess
     up loop fixup and associate the old loop with a different region
     which makes niter upper bounds invalid.  See for example PR80549.
     This needs to be done before we remove trivially dead edges as
     we need to capture the dominance state before the pending transform.  */
  if (current_loops)
    {
      /* This needs backedges or dominators.  */
      if (!dom_info_available_p (CDI_DOMINATORS))
	mark_dfs_back_edges ();

      for (loop_p loop : *get_loops (cfun))
	if (loop && loop->header)
	  {
	    basic_block bb = loop->header;
	    edge_iterator ei;
	    edge e;
	    bool found_latch = false;
	    bool any_abnormal = false;
	    unsigned n = 0;
	    /* We are only interested in preserving existing loops, but
	       we need to check whether they are still real and of course
	       if we need to add a preheader at all.  */
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		if (e->flags & EDGE_ABNORMAL)
		  {
		    any_abnormal = true;
		    break;
		  }
		if ((dom_info_available_p (CDI_DOMINATORS)
		     && dominated_by_p (CDI_DOMINATORS, e->src, bb))
		    || (e->flags & EDGE_DFS_BACK))
		  {
		    found_latch = true;
		    continue;
		  }
		/* Count the non-latch entry edges.  */
		n++;
	      }
	    /* If we have more than one entry to the loop header
	       create a forwarder.  */
	    if (found_latch && ! any_abnormal && n > 1)
	      {
		edge fallthru = make_forwarder_block (bb, mfb_keep_latches,
						      NULL);
		loop->header = fallthru->dest;
		if (! loops_state_satisfies_p (LOOPS_NEED_FIXUP))
		  {
		    /* The loop updating from the CFG hook is incomplete
		       when we have multiple latches, fixup manually.  */
		    remove_bb_from_loops (fallthru->src);
		    loop_p cloop = loop;
		    FOR_EACH_EDGE (e, ei, fallthru->src->preds)
		      cloop = find_common_loop (cloop, e->src->loop_father);
		    add_bb_to_loop (fallthru->src, cloop);
		  }
	      }
	  }
    }

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* Start by iterating over all basic blocks in PRE order looking for
     edge removal opportunities.  Do this first because incoming SSA form
     may be invalid and we want to avoid performing SSA related tasks such
     as propagating out a PHI node during BB merging in that state.  This
     also gets rid of unreachable blocks.  */
  bool changed = cleanup_control_flow_pre ();

  /* After doing the above SSA form should be valid (or an update SSA
     should be required).  */
  if (ssa_update_flags)
    {
      /* Don't charge the SSA update to the CFG-cleanup timevar.  */
      timevar_pop (TV_TREE_CLEANUP_CFG);
      update_ssa (ssa_update_flags);
      timevar_push (TV_TREE_CLEANUP_CFG);
    }

  /* Compute dominator info which we need for the iterative process below.
     Avoid computing the fast query DFS numbers since any block merging
     done will invalidate them anyway.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    calculate_dominance_info (CDI_DOMINATORS, false);
  else
    checking_verify_dominators (CDI_DOMINATORS);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Continue by iterating over all basic blocks looking for BB merging
     opportunities.  We cannot use FOR_EACH_BB_FN for the BB iteration
     since the basic blocks may get removed.  */
  unsigned n = last_basic_block_for_fn (cfun);
  for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	changed |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      unsigned i = bitmap_clear_first_set_bit (cfgcleanup_altered_bbs);
      if (i < NUM_FIXED_BLOCKS)
	continue;

      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (!bb)
	continue;

      /* BB merging done by cleanup_tree_cfg_bb can end up propagating
	 out single-argument PHIs which in turn can expose
	 cleanup_control_flow_bb opportunities so we have to repeat
	 that here.  */
      changed |= cleanup_control_flow_bb (bb);
      changed |= cleanup_tree_cfg_bb (bb);
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));

  /* Do not renumber blocks if the SCEV cache is active, it is indexed by
     basic-block numbers.  */
  if (! scev_initialized_p ())
    compact_blocks ();

  checking_verify_flow_info ();

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    {
      /* Removing edges and/or blocks may make recorded bounds refer
	 to stale GIMPLE stmts now, so clear them.  */
      free_numbers_of_iterations_estimates (cfun);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  return changed;
}
1265 : :
/* Repairs loop structures.  Recomputes dominators, fixes the loop tree
   and, when loop-closed SSA was in effect and anything changed, rewrites
   into loop-closed SSA form again.  Also resets the SCEV cache.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_or_deleted_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_or_deleted_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
      && (!bitmap_empty_p (changed_bbs) || n_new_or_deleted_loops))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

  checking_verify_loop_structure ();
  /* Loop structure changed, so cached scalar evolutions are stale.  */
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}
1296 : :
1297 : : /* Cleanup cfg and repair loop structures. */
1298 : :
1299 : : bool
1300 : 25738101 : cleanup_tree_cfg (unsigned ssa_update_flags)
1301 : : {
1302 : 25738101 : bool changed = cleanup_tree_cfg_noloop (ssa_update_flags);
1303 : :
1304 : 25738101 : if (current_loops != NULL
1305 : 25738101 : && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
1306 : 7927423 : repair_loop_structures ();
1307 : :
1308 : 25738101 : return changed;
1309 : : }
1310 : :
/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

namespace {

/* Pass registration data for the "mergephi" pass; requires CFG and
   SSA form, provides and destroys nothing.  */
const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_merge_phi (m_ctxt); }
  unsigned int execute (function *) final override;

}; // class pass_merge_phi
1363 : :
1364 : : unsigned int
1365 : 4567877 : pass_merge_phi::execute (function *fun)
1366 : : {
1367 : 4567877 : int forwarder_removed = 0;
1368 : 4567877 : calculate_dominance_info (CDI_DOMINATORS);
1369 : :
1370 : : /* Find all PHI nodes that we may be able to merge. */
1371 : 4567877 : unsigned n = last_basic_block_for_fn (fun);
1372 : 38615175 : for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
1373 : : {
1374 : 34047298 : basic_block bb = BASIC_BLOCK_FOR_FN (fun, i);
1375 : 34047298 : if (!bb)
1376 : 68442 : continue;
1377 : :
1378 : : /* Look for a forwarder block with PHI nodes. */
1379 : 33978856 : if (maybe_remove_forwarder_block (bb, true))
1380 : 211354 : forwarder_removed++;
1381 : : }
1382 : :
1383 : : /* Removing forwarder blocks can cause formerly irreducible loops
1384 : : to become reducible if we merged two entry blocks. */
1385 : 4567877 : if (forwarder_removed != 0
1386 : 92917 : && current_loops)
1387 : 92917 : loops_state_set (LOOPS_NEED_FIXUP);
1388 : :
1389 : 4567877 : statistics_counter_event (fun, "Forwarder blocks removed",
1390 : : forwarder_removed);
1391 : 4567877 : return 0;
1392 : : }
1393 : :
1394 : : } // anon namespace
1395 : :
/* Create a new instance of the mergephi pass.  */

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}
1401 : :
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  Returns the TODO flags to run afterwards.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = execute_fixup_cfg ();
  if (cleanup_tree_cfg ())
    {
      /* The CFG is clean now; an SSA update may be required instead.  */
      todo &= ~TODO_cleanup_cfg;
      todo |= TODO_update_ssa;
    }
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  if (group_case_labels ())
    todo |= TODO_cleanup_cfg;

  basic_block bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (bb);
  /* If the first (and only) bb and the only non debug
     statement is __builtin_unreachable call, then replace it with a trap
     so the function is at least one instruction in size.  */
  if (!gsi_end_p (gsi)
      && gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    {
      if (targetm.have_trap ())
	{
	  gimple_call_set_fndecl (gsi_stmt (gsi), builtin_decl_implicit (BUILT_IN_UNREACHABLE_TRAP));
	  update_stmt (gsi_stmt (gsi));
	}
      /* If the target does not have a trap, convert it into an infinite loop.  */
      else
	{
	  gsi_remove (&gsi, true);
	  make_single_succ_edge (bb, bb, EDGE_FALLTHRU);
	  fix_loop_structure (NULL);
	}
    }

  /* For -fcompare-debug, append the enumerated decls to the final-insns
     dump file so the two compilations can be compared.  */
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
	{
	  error ("could not open final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
      else
	{
	  /* Dump with unnumbered/noaddr so the output is stable across
	     the compared compilations.  */
	  int save_unnumbered = flag_dump_unnumbered;
	  int save_noaddr = flag_dump_noaddr;

	  flag_dump_noaddr = flag_dump_unnumbered = 1;
	  fprintf (final_output, "\n");
	  dump_enumerated_decls (final_output,
				 dump_flags | TDF_SLIM | TDF_NOUID);
	  flag_dump_noaddr = save_noaddr;
	  flag_dump_unnumbered = save_unnumbered;
	  if (fclose (final_output))
	    {
	      error ("could not close final insn dump file %qs: %m",
		     flag_dump_final_insns);
	      flag_dump_final_insns = NULL;
	    }
	}
    }
  return todo;
}
1475 : :
namespace {

/* Pass registration data for the final "optimized" CFG cleanup pass;
   requires only a CFG and requests removal of unused locals when done.  */
const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return execute_cleanup_cfg_post_optimizing ();
  }

}; // class pass_cleanup_cfg_post_optimizing
1505 : :
1506 : : } // anon namespace
1507 : :
/* Create a new instance of the post-optimizing CFG cleanup pass.  */

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}
1513 : :
1514 : :
/* Delete all unreachable basic blocks and update callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline function that become unreachable.  DST_NODE is the cgraph
   node of the current function; when UPDATE_CLONES is set its clone tree
   is updated as well.  Returns true if any block was deleted.  */

bool
delete_unreachable_blocks_update_callgraph (cgraph_node *dst_node,
					    bool update_clones)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      /* Remember the successor now; B may be deleted below.  */
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
	  gimple_stmt_iterator bsi;

	  for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      dst_node->remove_stmt_references (gsi_stmt (bsi));

	      /* Drop the call-graph edge for every call stmt we remove;
		 for an already-inlined callee remove the whole inline
		 clone instead.  */
	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  &&(e = dst_node->get_edge (gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    e->callee->remove_symbol_and_inline_clones (dst_node);
		  else
		    cgraph_edge::remove (e);
		}
	      if (update_clones && dst_node->clones)
		for (node = dst_node->clones; node != dst_node;)
		  {
		    /* Mirror the same updates in every clone.  */
		    node->remove_stmt_references (gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  e->callee->remove_symbol_and_inline_clones (dst_node);
			else
			  cgraph_edge::remove (e);
		      }

		    /* Advance NODE through the clone tree: descend into
		       clones first, then siblings, then back up via
		       clone_of until a sibling is found or we return
		       to DST_NODE.  */
		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != dst_node && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}
1587 : :
|