LCOV - code coverage report
Current view: top level - gcc - tree-cfgcleanup.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 95.9 % 653 626
Test Date: 2026-05-11 19:44:49 Functions: 100.0 % 26 26
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* CFG cleanup for trees.
       2              :    Copyright (C) 2001-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "rtl.h"
      25              : #include "tree.h"
      26              : #include "gimple.h"
      27              : #include "cfghooks.h"
      28              : #include "tree-pass.h"
      29              : #include "ssa.h"
      30              : #include "diagnostic-core.h"
      31              : #include "fold-const.h"
      32              : #include "cfganal.h"
      33              : #include "cfgcleanup.h"
      34              : #include "tree-eh.h"
      35              : #include "gimplify.h"
      36              : #include "gimple-iterator.h"
      37              : #include "tree-cfg.h"
      38              : #include "tree-ssa-loop-manip.h"
      39              : #include "tree-dfa.h"
      40              : #include "tree-ssa.h"
      41              : #include "cfgloop.h"
      42              : #include "tree-scalar-evolution.h"
      43              : #include "gimple-match.h"
      44              : #include "gimple-fold.h"
      45              : #include "tree-ssa-loop-niter.h"
      46              : #include "cgraph.h"
      47              : #include "tree-into-ssa.h"
      48              : #include "tree-cfgcleanup.h"
      49              : #include "gimple-pretty-print.h"
      50              : #include "target.h"
      51              : 
      52              : 
      53              : /* The set of blocks in that at least one of the following changes happened:
      54              :    -- the statement at the end of the block was changed
      55              :    -- the block was newly created
      56              :    -- the set of the predecessors of the block changed
      57              :    -- the set of the successors of the block changed
      58              :    ??? Maybe we could track these changes separately, since they determine
      59              :        what cleanups it makes sense to try on the block.  */
      60              : bitmap cfgcleanup_altered_bbs;
      61              : 
      62              : /* Remove any fallthru edge from EV.  Return true if an edge was removed.  */
      63              : 
      64              : static bool
      65     22555162 : remove_fallthru_edge (vec<edge, va_gc> *ev)
      66              : {
      67     22555162 :   edge_iterator ei;
      68     22555162 :   edge e;
      69              : 
      70     25247271 :   FOR_EACH_EDGE (e, ei, ev)
      71      2726123 :     if ((e->flags & EDGE_FALLTHRU) != 0)
      72              :       {
      73        34014 :         if (e->flags & EDGE_COMPLEX)
      74            0 :           e->flags &= ~EDGE_FALLTHRU;
      75              :         else
      76        34014 :           remove_edge_and_dominated_blocks (e);
      77        34014 :         return true;
      78              :       }
      79              :   return false;
      80              : }
      81              : 
      82              : /* Convert a SWTCH with single non-default case to gcond and replace it
      83              :    at GSI.  */
      84              : 
      85              : static bool
      86       733247 : convert_single_case_switch (gswitch *swtch, gimple_stmt_iterator &gsi)
      87              : {
      88       733247 :   if (gimple_switch_num_labels (swtch) != 2)
      89              :     return false;
      90              : 
      91        11805 :   tree index = gimple_switch_index (swtch);
      92        11805 :   tree label = gimple_switch_label (swtch, 1);
      93        11805 :   tree low = CASE_LOW (label);
      94        11805 :   tree high = CASE_HIGH (label);
      95              : 
      96        11805 :   basic_block default_bb = gimple_switch_default_bb (cfun, swtch);
      97        11805 :   basic_block case_bb = label_to_block (cfun, CASE_LABEL (label));
      98              : 
      99        11805 :   basic_block bb = gimple_bb (swtch);
     100        11805 :   gcond *cond;
     101              : 
     102              :   /* Replace switch statement with condition statement.  */
     103        11805 :   if (high)
     104              :     {
     105         1025 :       tree lhs, rhs;
     106         1025 :       if (range_check_type (TREE_TYPE (index)) == NULL_TREE)
     107            0 :         return false;
     108         1025 :       generate_range_test (bb, index, low, high, &lhs, &rhs);
     109         1025 :       cond = gimple_build_cond (LE_EXPR, lhs, rhs, NULL_TREE, NULL_TREE);
     110              :     }
     111              :   else
     112        10780 :     cond = gimple_build_cond (EQ_EXPR, index,
     113        10780 :                               fold_convert (TREE_TYPE (index), low),
     114              :                               NULL_TREE, NULL_TREE);
     115              : 
     116        11805 :   gsi_replace (&gsi, cond, true);
     117              : 
     118              :   /* Update edges.  */
     119        11805 :   edge case_edge = find_edge (bb, case_bb);
     120        11805 :   edge default_edge = find_edge (bb, default_bb);
     121              : 
     122        11805 :   case_edge->flags |= EDGE_TRUE_VALUE;
     123        11805 :   default_edge->flags |= EDGE_FALSE_VALUE;
     124        11805 :   return true;
     125              : }
     126              : 
     127              : /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 of STMT in BB by
     128              :    swapping edges of the BB.  */
     129              : bool
     130    178213717 : canonicalize_bool_cond (gcond *stmt, basic_block bb)
     131              : {
     132    178213717 :   tree rhs1 = gimple_cond_lhs (stmt);
     133    178213717 :   tree rhs2 = gimple_cond_rhs (stmt);
     134    178213717 :   enum tree_code code = gimple_cond_code (stmt);
     135    178213717 :   if (code != EQ_EXPR && code != NE_EXPR)
     136              :     return false;
     137    133787219 :   if (TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE
     138    133787219 :       && (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
     139     77110709 :            || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1))
     140              :     return false;
     141              : 
     142              :   /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
     143     23544604 :   if (code == EQ_EXPR && !integer_zerop (rhs2))
     144              :     return false;
     145     23398490 :   if (code == NE_EXPR && !integer_onep (rhs2))
     146              :     return false;
     147              : 
     148       131586 :   gimple_cond_set_code (stmt, NE_EXPR);
     149       131586 :   gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
     150       131586 :   EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
     151       131586 :   EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
     152              : 
     153       131586 :   if (dump_file)
     154              :     {
     155            1 :       fprintf (dump_file, "  Swapped '");
     156            1 :       print_gimple_expr (dump_file, stmt, 0);
     157            1 :       fprintf (dump_file, "'\n");
     158              :     }
     159              :   return true;
     160              : }
     161              : 
     162              : /* Disconnect an unreachable block in the control expression starting
     163              :    at block BB.  */
     164              : 
     165              : static bool
     166    160015409 : cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
     167              : {
     168    160015409 :   edge taken_edge;
     169    160015409 :   bool retval = false;
     170    160015409 :   gimple *stmt = gsi_stmt (gsi);
     171              : 
     172    160015409 :   if (!single_succ_p (bb))
     173              :     {
     174    160009208 :       edge e;
     175    160009208 :       edge_iterator ei;
     176    160009208 :       bool warned;
     177    160009208 :       tree val = NULL_TREE;
     178              : 
     179              :       /* Try to convert a switch with just a single non-default case to
     180              :          GIMPLE condition.  */
     181    160009208 :       if (gimple_code (stmt) == GIMPLE_SWITCH
     182    160009208 :           && convert_single_case_switch (as_a<gswitch *> (stmt), gsi))
     183        11805 :         stmt = gsi_stmt (gsi);
     184              : 
     185    160009208 :       if (gimple_code (stmt) == GIMPLE_COND)
     186    159287766 :         canonicalize_bool_cond (as_a<gcond*> (stmt), bb);
     187              : 
     188    160009208 :       fold_defer_overflow_warnings ();
     189    160009208 :       switch (gimple_code (stmt))
     190              :         {
     191    159287766 :         case GIMPLE_COND:
     192    159287766 :           {
     193    159287766 :             gimple_match_op res_op;
     194    159287766 :             if (gimple_simplify (stmt, &res_op, NULL, no_follow_ssa_edges,
     195              :                                  no_follow_ssa_edges)
     196    159287766 :                 && res_op.code == INTEGER_CST)
     197      2640169 :               val = res_op.ops[0];
     198              :           }
     199    159287766 :           break;
     200              : 
     201       721442 :         case GIMPLE_SWITCH:
     202       721442 :           val = gimple_switch_index (as_a <gswitch *> (stmt));
     203       721442 :           break;
     204              : 
     205    160009208 :         default:
     206    160009208 :           ;
     207              :         }
     208    160009208 :       taken_edge = find_taken_edge (bb, val);
     209    160009208 :       if (!taken_edge)
     210              :         {
     211    157358129 :           fold_undefer_and_ignore_overflow_warnings ();
     212    157358129 :           return false;
     213              :         }
     214              : 
     215              :       /* Remove all the edges except the one that is always executed.  */
     216      2651079 :       warned = false;
     217      7980677 :       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
     218              :         {
     219      5329598 :           if (e != taken_edge)
     220              :             {
     221      2678519 :               if (!warned)
     222              :                 {
     223      2651079 :                   fold_undefer_overflow_warnings
     224      2651079 :                     (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
     225      2651079 :                   warned = true;
     226              :                 }
     227              : 
     228      2678519 :               taken_edge->probability += e->probability;
     229      2678519 :               remove_edge_and_dominated_blocks (e);
     230      2678519 :               retval = true;
     231              :             }
     232              :           else
     233      2651079 :             ei_next (&ei);
     234              :         }
     235      2651079 :       if (!warned)
     236            0 :         fold_undefer_and_ignore_overflow_warnings ();
     237              :     }
     238              :   else
     239         6201 :     taken_edge = single_succ_edge (bb);
     240              : 
     241      2657280 :   bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
     242      2657280 :   gsi_remove (&gsi, true);
     243      2657280 :   taken_edge->flags &= ~(EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
     244      2657280 :   taken_edge->flags |= EDGE_FALLTHRU;
     245              : 
     246      2657280 :   return retval;
     247              : }
     248              : 
     249              : /* Cleanup the GF_CALL_CTRL_ALTERING flag according to
     250              :    to updated gimple_call_flags.  */
     251              : 
     252              : static void
     253    357476857 : cleanup_call_ctrl_altering_flag (basic_block bb, gimple *bb_end)
     254              : {
     255    357476857 :   if (!is_gimple_call (bb_end)
     256     67484514 :       || !gimple_call_ctrl_altering_p (bb_end)
     257    380497967 :       || (/* IFN_UNIQUE should be the last insn, to make checking for it
     258              :              as cheap as possible.  */
     259     23021110 :           gimple_call_internal_p (bb_end)
     260       438417 :           && gimple_call_internal_unique_p (bb_end)))
     261              :     return;
     262              : 
     263     22608228 :   int flags = gimple_call_flags (bb_end);
     264     22608228 :   if (!(flags & ECF_NORETURN)
     265        84333 :       && (((flags & (ECF_CONST | ECF_PURE))
     266         1940 :            && !(flags & ECF_LOOPING_CONST_OR_PURE))
     267        84163 :           || (flags & ECF_LEAF)))
     268          170 :     gimple_call_set_ctrl_altering (bb_end, false);
     269              :   else
     270              :     {
     271     22608058 :       edge_iterator ei;
     272     22608058 :       edge e;
     273     22608058 :       bool found = false;
     274     25375421 :       FOR_EACH_EDGE (e, ei, bb->succs)
     275      2884636 :         if (e->flags & EDGE_FALLTHRU)
     276              :           found = true;
     277      2771757 :         else if (e->flags & EDGE_ABNORMAL)
     278              :           {
     279              :             found = false;
     280              :             break;
     281              :           }
     282              :       /* If there's no abnormal edge and a fallthru edge the call
     283              :          isn't control-altering anymore.  */
     284     22608058 :       if (found)
     285        31228 :         gimple_call_set_ctrl_altering (bb_end, false);
     286              :     }
     287              : }
     288              : 
     289              : /* Try to remove superfluous control structures in basic block BB.  Returns
     290              :    true if anything changes.  */
     291              : 
     292              : static bool
     293    397280256 : cleanup_control_flow_bb (basic_block bb)
     294              : {
     295    397280256 :   gimple_stmt_iterator gsi;
     296    397280256 :   bool retval = false;
     297    397280256 :   gimple *stmt;
     298              : 
     299              :   /* If the last statement of the block could throw and now cannot,
     300              :      we need to prune cfg.  */
     301    397280256 :   retval |= gimple_purge_dead_eh_edges (bb);
     302              : 
     303    397280256 :   gsi = gsi_last_nondebug_bb (bb);
     304    397280256 :   if (gsi_end_p (gsi))
     305              :     return retval;
     306              : 
     307    357476857 :   stmt = gsi_stmt (gsi);
     308              : 
     309              :   /* Try to cleanup ctrl altering flag for call which ends bb.  */
     310    357476857 :   cleanup_call_ctrl_altering_flag (bb, stmt);
     311              : 
     312    357476857 :   if (gimple_code (stmt) == GIMPLE_COND
     313    357476857 :       || gimple_code (stmt) == GIMPLE_SWITCH)
     314              :     {
     315    320030818 :       gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
     316    160015409 :       retval |= cleanup_control_expr_graph (bb, gsi);
     317              :     }
     318    197461448 :   else if (gimple_code (stmt) == GIMPLE_GOTO
     319         6248 :            && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
     320    197461821 :            && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
     321              :                == LABEL_DECL))
     322              :     {
     323              :       /* If we had a computed goto which has a compile-time determinable
     324              :          destination, then we can eliminate the goto.  */
     325          171 :       edge e;
     326          171 :       tree label;
     327          171 :       edge_iterator ei;
     328          171 :       basic_block target_block;
     329              : 
     330          342 :       gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
     331              :       /* First look at all the outgoing edges.  Delete any outgoing
     332              :          edges which do not go to the right block.  For the one
     333              :          edge which goes to the right block, fix up its flags.  */
     334          171 :       label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
     335          171 :       if (DECL_CONTEXT (label) != cfun->decl)
     336            8 :         return retval;
     337          163 :       target_block = label_to_block (cfun, label);
     338          411 :       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
     339              :         {
     340          248 :           if (e->dest != target_block)
     341           91 :             remove_edge_and_dominated_blocks (e);
     342              :           else
     343              :             {
     344              :               /* Turn off the EDGE_ABNORMAL flag.  */
     345          157 :               e->flags &= ~EDGE_ABNORMAL;
     346              : 
     347              :               /* And set EDGE_FALLTHRU.  */
     348          157 :               e->flags |= EDGE_FALLTHRU;
     349          157 :               ei_next (&ei);
     350              :             }
     351              :         }
     352              : 
     353          163 :       bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
     354          163 :       bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);
     355              : 
     356              :       /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
     357              :          relevant information we need.  */
     358          163 :       gsi_remove (&gsi, true);
     359          163 :       retval = true;
     360              :     }
     361              : 
     362              :   /* Check for indirect calls that have been turned into
     363              :      noreturn calls.  */
     364    197461277 :   else if (is_gimple_call (stmt)
     365    197461277 :            && gimple_call_noreturn_p (stmt))
     366              :     {
     367              :       /* If there are debug stmts after the noreturn call, remove them
     368              :          now, they should be all unreachable anyway.  */
     369     22555339 :       for (gsi_next (&gsi); !gsi_end_p (gsi); )
     370          177 :         gsi_remove (&gsi, true);
     371     22555162 :       if (remove_fallthru_edge (bb->succs))
     372        34014 :         retval = true;
     373     22555162 :       tree lhs = gimple_call_lhs (stmt);
     374     22555162 :       if (!lhs
     375     22555162 :           || !should_remove_lhs_p (lhs))
     376     22555143 :         gimple_call_set_ctrl_altering (stmt, true);
     377              :     }
     378              : 
     379              :   return retval;
     380              : }
     381              : 
     382              : /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
     383              :    those alternatives are equal in each of the PHI nodes, then return
     384              :    true, else return false.  */
     385              : 
     386              : bool
     387      3880094 : phi_alternatives_equal (basic_block dest, edge e1, edge e2)
     388              : {
     389      3880094 :   int n1 = e1->dest_idx;
     390      3880094 :   int n2 = e2->dest_idx;
     391      3880094 :   gphi_iterator gsi;
     392              : 
     393      4249551 :   for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
     394              :     {
     395      4190047 :       gphi *phi = gsi.phi ();
     396      4190047 :       tree val1 = gimple_phi_arg_def (phi, n1);
     397      4190047 :       tree val2 = gimple_phi_arg_def (phi, n2);
     398              : 
     399      4190047 :       gcc_assert (val1 != NULL_TREE);
     400      4190047 :       gcc_assert (val2 != NULL_TREE);
     401              : 
     402      4190047 :       if (!operand_equal_for_phi_arg_p (val1, val2))
     403              :         return false;
     404              :     }
     405              : 
     406              :   return true;
     407              : }
     408              : 
     409              : /* Move debug stmts from the forwarder block SRC to DEST or PRED.  */
     410              : 
     411              : static void
     412     27735378 : move_debug_stmts_from_forwarder (basic_block src,
     413              :                                  basic_block dest, bool dest_single_pred_p,
     414              :                                  basic_block pred, bool pred_single_succ_p)
     415              : {
     416     27735378 :   if (!MAY_HAVE_DEBUG_STMTS)
     417      9841463 :     return;
     418              : 
     419              :   /* If we cannot move to the destination but to the predecessor do that.  */
     420     17967190 :   if (!dest_single_pred_p && pred_single_succ_p)
     421              :     {
     422        73280 :       gimple_stmt_iterator gsi_to = gsi_last_bb (pred);
     423        73280 :       if (gsi_end_p (gsi_to) || !stmt_ends_bb_p (gsi_stmt (gsi_to)))
     424              :         {
     425        73275 :           for (gimple_stmt_iterator gsi = gsi_after_labels (src);
     426       251339 :                !gsi_end_p (gsi);)
     427              :             {
     428       178064 :               gimple *debug = gsi_stmt (gsi);
     429       178064 :               gcc_assert (is_gimple_debug (debug));
     430       178064 :               gsi_move_after (&gsi, &gsi_to);
     431              :             }
     432        73275 :           return;
     433              :         }
     434              :     }
     435              : 
     436              :   /* Else move to DEST or drop/reset them.  */
     437     17893915 :   gimple_stmt_iterator gsi_to = gsi_after_labels (dest);
     438     33806567 :   for (gimple_stmt_iterator gsi = gsi_after_labels (src); !gsi_end_p (gsi);)
     439              :     {
     440     15912652 :       gimple *debug = gsi_stmt (gsi);
     441     15912652 :       gcc_assert (is_gimple_debug (debug));
     442              :       /* Move debug binds anyway, but not anything else like begin-stmt
     443              :          markers unless they are always valid at the destination.  */
     444     15912652 :       if (dest_single_pred_p
     445     15912652 :           || gimple_debug_bind_p (debug))
     446              :         {
     447     14854614 :           gsi_move_before (&gsi, &gsi_to);
     448              :           /* Reset debug-binds that are not always valid at the destination.
     449              :              Simply dropping them can cause earlier values to become live,
     450              :              generating wrong debug information.
     451              :              ???  There are several things we could improve here.  For
     452              :              one we might be able to move stmts to the predecessor.
     453              :              For anther, if the debug stmt is immediately followed by a
     454              :              (debug) definition in the destination (on a post-dominated path?)
     455              :              we can elide it without any bad effects.  */
     456     14854614 :           if (!dest_single_pred_p)
     457              :             {
     458      6252109 :               gimple_debug_bind_reset_value (debug);
     459      6252109 :               update_stmt (debug);
     460              :             }
     461              :         }
     462              :       else
     463      1058038 :         gsi_next (&gsi);
     464              :     }
     465              : }
     466              : 
     467              : /* Return true if basic block BB does nothing except pass control
     468              :    flow to another block and that we can safely insert a label at
     469              :    the start of the successor block and was removed.
     470              : 
     471              :    As a precondition, we require that BB be not equal to
     472              :    the entry block.
     473              :    If CAN_SPLIT is true, we can split the edge to have
     474              :    another bb with with the phi.  */
     475              : 
     476              : static bool
     477    423366836 : maybe_remove_forwarder_block (basic_block bb, bool can_split = false)
     478              : {
     479    423366836 :   gimple_stmt_iterator gsi;
     480    423366836 :   location_t locus;
     481              : 
     482              :   /* BB must have a single outgoing edge.  */
     483    806804668 :   if (!single_succ_p (bb)
     484              :       /* BB may not be a predecessor of the exit block.  */
     485    197308635 :       || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
     486              :       /* Nor should this be an infinite loop.  */
     487    165430548 :       || single_succ (bb) == bb
     488              :       /* BB may not have an abnormal outgoing edge.  */
     489    576503387 :       || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
     490              :     return false;
     491              : 
     492    165292641 :   gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));
     493              : 
     494    165292641 :   locus = single_succ_edge (bb)->goto_locus;
     495              : 
     496              :   /* There should not be an edge coming from entry, or an EH edge.  */
     497    165292641 :   {
     498    165292641 :     edge_iterator ei;
     499    165292641 :     edge e;
     500              : 
     501    343708392 :     FOR_EACH_EDGE (e, ei, bb->preds)
     502    200971549 :       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
     503     22555798 :         return false;
     504              :       /* If goto_locus of any of the edges differs, prevent removing
     505              :          the forwarder block when not optimizing.  */
     506    179810809 :       else if (!optimize
     507      5250331 :                && (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
     508      4777220 :                    || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
     509    181206006 :                && e->goto_locus != locus)
     510              :         return false;
     511              :   }
     512              : 
     513              :   /* If this bb has a single predecessor and that predecssor
     514              :      has a single successor, this bb will be merged with the
     515              :      predecessor so ignore it for removing of the forwarder block. */
     516    142736843 :   if (single_pred_p (bb)
     517    142736843 :       && single_succ_p (single_pred_edge (bb)->src))
     518              :     return false;
     519              : 
     520    134469542 :   bool has_phi = !gimple_seq_empty_p (phi_nodes (bb));
     521    134469542 :   basic_block dest = single_succ_edge (bb)->dest;
     522              : 
     523              :   /* If the destination block consists of a nonlocal label or is a
     524              :      EH landing pad, do not merge it.  */
     525    134469542 :   if (glabel *label_stmt = safe_dyn_cast <glabel *> (first_stmt (dest)))
     526      8310141 :     if (DECL_NONLOCAL (gimple_label_label (label_stmt))
     527      8310141 :         || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)))
     528              :     return false;
     529              : 
     530              :   /* If there is an abnormal edge to basic block BB, but not into
     531              :      dest, problems might occur during removal of the phi node at out
     532              :      of ssa due to overlapping live ranges of registers.
     533              : 
     534              :      If there is an abnormal edge in DEST, the problems would occur
     535              :      anyway since cleanup_dead_labels would then merge the labels for
     536              :      two different eh regions, and rest of exception handling code
     537              :      does not like it.
     538              : 
     539              :      So if there is an abnormal edge to BB, proceed only if there is
     540              :      no abnormal edge to DEST and there are no phi nodes in DEST.
     541              :      If the BB has phi, we don't want to deal with abnormal edges either. */
     542    131042337 :   if (bb_has_abnormal_pred (bb)
     543    131042337 :       && (bb_has_abnormal_pred (dest)
     544        60728 :           || !gimple_seq_empty_p (phi_nodes (dest))
     545        53634 :           || has_phi))
     546              :     return false;
     547              : 
     548              :   /* When we have a phi, we have to feed into another
     549              :      basic block with PHI nodes.  */
     550    131031305 :   if (has_phi && gimple_seq_empty_p (phi_nodes (dest)))
     551              :     return false;
     552              : 
     553              :   /* Now walk through the statements backward.  We can ignore labels,
     554              :      anything else means this is not a forwarder block.  */
     555    536235952 :   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     556              :     {
     557    232382294 :       gimple *stmt = gsi_stmt (gsi);
     558              : 
     559    232382294 :       switch (gimple_code (stmt))
     560              :         {
     561       871802 :         case GIMPLE_LABEL:
     562       871802 :           if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt)))
     563       871802 :               || EH_LANDING_PAD_NR (gimple_label_label (as_a <glabel *> (stmt))))
     564              :             return false;
     565       871495 :           if (!optimize
     566        29652 :               && (gimple_has_location (stmt)
     567         2184 :                   || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
     568       898963 :               && gimple_location (stmt) != locus)
     569              :             return false;
     570              :           break;
     571              : 
     572              :           /* ??? For now, hope there's a corresponding debug
     573              :              assignment at the destination.  */
     574              :         case GIMPLE_DEBUG:
     575              :           break;
     576              : 
     577              :         default:
     578              :           return false;
     579              :         }
     580              :     }
     581              :   /* If BB has PHIs and does not dominate DEST,
     582              :      then the PHI nodes at DEST must be the only
     583              :      users of the results of the PHI nodes at BB.
     584              :      So only check when BB dominates dest.  */
     585     35735682 :   if (has_phi
     586     35735682 :       && dominated_by_p (CDI_DOMINATORS, dest, bb))
     587              :     {
     588      2276886 :       gphi_iterator gsi;
     589      2276886 :       unsigned int dest_idx = single_succ_edge (bb)->dest_idx;
     590              : 
     591              :       /* BB dominates DEST.  There may be many users of the PHI
     592              :          nodes in BB.  However, there is still a trivial case we
     593              :          can handle.  If the result of every PHI in BB is used
     594              :          only by a PHI in DEST, then we can trivially merge the
     595              :          PHI nodes from BB into DEST.  */
     596      4673375 :       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
     597      2396489 :            gsi_next (&gsi))
     598              :         {
     599      3207064 :           gphi *phi = gsi.phi ();
     600      3207064 :           tree result = gimple_phi_result (phi);
     601      3207064 :           use_operand_p imm_use;
     602      3207064 :           gimple *use_stmt;
     603              : 
     604              :           /* If the PHI's result is never used, then we can just
     605              :              ignore it an.  */
     606      3207064 :           if (has_zero_uses (result))
     607        71599 :             continue;
     608              : 
     609              :           /* Get the single use of the result of this PHI node.  */
     610      3135465 :           if (!single_imm_use (result, &imm_use, &use_stmt)
     611      2671879 :               || gimple_code (use_stmt) != GIMPLE_PHI
     612      2397165 :               || gimple_bb (use_stmt) != dest
     613      5461569 :               || gimple_phi_arg_def (use_stmt, dest_idx) != result)
     614       810575 :             return false;
     615              :         }
     616              :     }
     617              : 
     618     34925107 :   if (current_loops)
     619              :     {
     620              :       /* Protect loop headers.  */
     621     34031542 :       if (bb_loop_header_p (bb))
     622              :         return false;
     623              : 
     624              :       /* Protect loop preheaders and latches if requested.  */
     625     33874043 :       if (dest->loop_father->header == dest)
     626              :         {
     627     12311514 :           if (bb->loop_father == dest->loop_father)
     628              :             {
     629      7357546 :               if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
     630              :                 return false;
     631              :               /* If bb doesn't have a single predecessor we'd make this
     632              :                  loop have multiple latches.  Don't do that if that
     633              :                  would in turn require disambiguating them over again.  */
     634    401212335 :               if (!single_pred_p (bb))
     635              :                 return false;
     636              :             }
     637              :           /* cleanup_tree_cfg_noloop just created the loop preheader, don't
     638              :              remove it if it has phis.  */
     639      4953968 :           else if (bb->loop_father == loop_outer (dest->loop_father)
     640      4941693 :                    && !has_phi
     641      8782394 :                    && !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
     642              :             ;
     643              :           else
     644              :             /* Always preserve other edges into loop headers that are
     645              :                not simple latches or preheaders.  */
     646              :             return false;
     647              :         }
     648              :     }
     649              : 
     650     31445101 :   edge succ = single_succ_edge (bb), e, s;
     651     31445101 :   gimple *stmt;
     652     31445101 :   gimple_stmt_iterator gsi_to;
     653              : 
     654              :   /* If there are phi nodes in DEST, and some of the blocks that are
     655              :      predecessors of BB are also predecessors of DEST, check that the
     656              :      phi node arguments match.
     657              :      Otherwise we have to split the edge and that becomes
     658              :      a "forwarder" again.  */
     659     31445101 :   if ((!can_split || !has_phi)
     660     31445101 :       && !gimple_seq_empty_p (phi_nodes (dest)))
     661              :     {
     662     25640545 :       edge_iterator ei;
     663     50085853 :       FOR_EACH_EDGE (e, ei, bb->preds)
     664              :         {
     665     28155031 :           s = find_edge (e->src, dest);
     666     28155031 :           if (!s)
     667     24386315 :             continue;
     668              : 
     669      3768716 :           if (!phi_alternatives_equal (dest, succ, s))
     670      3709723 :             return false;
     671              :         }
     672              :     }
     673              : 
     674     27735378 :   basic_block pred = NULL;
     675     27735378 :   if (single_pred_p (bb))
     676     26142705 :     pred = single_pred (bb);
     677     27735378 :   bool dest_single_pred_p = single_pred_p (dest);
     678              : 
     679              :   /* Redirect the edges.  */
     680     58073623 :   for (edge_iterator ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
     681              :     {
     682     30338245 :       if (cfgcleanup_altered_bbs)
     683     30108389 :         bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
     684     30338245 :       s = find_edge (e->src, dest);
     685              : 
     686              :       /* See if we can split the edge if we already have an edge from src to dest.  */
     687     30338245 :       if (can_split && has_phi)
     688              :         /* PHI arguments are different.  Create a forwarder block by
     689              :            splitting E so that we can merge PHI arguments on E to
     690              :            DEST.  */
     691        44281 :         if (s && !phi_alternatives_equal (dest, s, succ))
     692        16377 :           e = single_succ_edge (split_edge (e));
     693              : 
     694     30338245 :       if (e->flags & EDGE_ABNORMAL)
     695              :         {
     696              :           /* If there is an abnormal edge, redirect it anyway, and
     697              :              move the labels to the new block to make it legal.  */
     698          183 :           s = redirect_edge_succ_nodup (e, dest);
     699              :         }
     700              :       else
     701     30338062 :         s = redirect_edge_and_branch (e, dest);
     702              : 
     703     30338245 :       if (s == e)
     704              :         {
     705              :           /* If we merge the forwarder with phis into a loop header
     706              :              verify if we are creating another loop latch edge.
     707              :              If so, reset number of iteration information of the loop.  */
     708     30220753 :           if (has_phi
     709      2803915 :               && dest->loop_father
     710      2803915 :               && dest->loop_father->header == dest
     711     30244476 :               && dominated_by_p (CDI_DOMINATORS, e->src, dest))
     712              :             {
     713        23723 :               dest->loop_father->any_upper_bound = false;
     714        23723 :               dest->loop_father->any_likely_upper_bound = false;
     715        23723 :               free_numbers_of_iterations_estimates (dest->loop_father);
     716              :             }
     717              :           /* Copy arguments for the phi nodes, since the edge was not
     718              :              here before.  */
     719     30220753 :           copy_phi_arg_into_existing_phi (succ, s, has_phi);
     720              :         }
     721              :       else
     722       117492 :         redirect_edge_var_map_clear (s);
     723              :     }
     724              : 
     725              :   /* Move nonlocal labels and computed goto targets as well as user
     726              :      defined labels and labels with an EH landing pad number to the
     727              :      new block, so that the redirection of the abnormal edges works,
     728              :      jump targets end up in a sane place and debug information for
     729              :      labels is retained.  */
     730     27735378 :   gsi_to = gsi_start_bb (dest);
     731     55732759 :   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
     732              :     {
     733      2971386 :       stmt = gsi_stmt (gsi);
     734      2971386 :       if (is_gimple_debug (stmt))
     735              :         break;
     736              : 
     737              :       /* Forwarder blocks can only contain labels and debug stmts, and
     738              :          labels must come first, so if we get to this point, we know
     739              :          we're looking at a label.  */
     740       262003 :       tree decl = gimple_label_label (as_a <glabel *> (stmt));
     741       262003 :       if (EH_LANDING_PAD_NR (decl) != 0
     742       262003 :           || DECL_NONLOCAL (decl)
     743       262003 :           || FORCED_LABEL (decl)
     744       517473 :           || !DECL_ARTIFICIAL (decl))
     745        31083 :         gsi_move_before (&gsi, &gsi_to);
     746              :       else
     747       230920 :         gsi_next (&gsi);
     748              :     }
     749              : 
     750              :   /* Move debug statements.  Reset them if the destination does not
     751              :      have a single predecessor.  */
     752     55470756 :   move_debug_stmts_from_forwarder (bb, dest, dest_single_pred_p,
     753     81506185 :                                    pred, pred && single_succ_p (pred));
     754              : 
     755     27735378 :   if (cfgcleanup_altered_bbs)
     756     27532700 :     bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);
     757              : 
     758              :   /* Update the dominators.  */
     759     27735378 :   basic_block dom, dombb, domdest;
     760              : 
     761     27735378 :   dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
     762     27735378 :   domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
     763     27735378 :   if (domdest == bb)
     764              :     {
     765              :       /* Shortcut to avoid calling (relatively expensive)
     766              :          nearest_common_dominator unless necessary.  */
     767              :       dom = dombb;
     768              :     }
     769              :   else
     770     20711645 :     dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
     771              : 
     772     27735378 :   set_immediate_dominator (CDI_DOMINATORS, dest, dom);
     773              : 
     774              :   /* Adjust latch infomation of BB's parent loop as otherwise
     775              :      the cfg hook has a hard time not to kill the loop.  */
     776     27735378 :   if (current_loops && bb->loop_father->latch == bb)
     777      5573816 :     bb->loop_father->latch = pred;
     778              : 
     779              :   /* And kill the forwarder block.  */
     780     27735378 :   delete_basic_block (bb);
     781              : 
     782     27735378 :   return true;
     783              : }
     784              : 
     785              : /* STMT is a call that has been discovered noreturn.  Split the
     786              :    block to prepare fixing up the CFG and remove LHS.
     787              :    Return true if cleanup-cfg needs to run.  */
     788              : 
     789              : bool
     790      6340285 : fixup_noreturn_call (gimple *stmt)
     791              : {
     792      6340285 :   basic_block bb = gimple_bb (stmt);
     793      6340285 :   bool changed = false;
     794              : 
     795      6340285 :   if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
     796              :     return false;
     797              : 
     798              :   /* First split basic block if stmt is not last.  */
     799     12676810 :   if (stmt != gsi_stmt (gsi_last_bb (bb)))
     800              :     {
     801        45858 :       if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
     802              :         {
     803              :           /* Don't split if there are only debug stmts
     804              :              after stmt, that can result in -fcompare-debug
     805              :              failures.  Remove the debug stmts instead,
     806              :              they should be all unreachable anyway.  */
     807         6709 :           gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     808        58917 :           for (gsi_next (&gsi); !gsi_end_p (gsi); )
     809        52208 :             gsi_remove (&gsi, true);
     810              :         }
     811              :       else
     812              :         {
     813        39149 :           split_block (bb, stmt);
     814        39149 :           changed = true;
     815              :         }
     816              :     }
     817              : 
     818              :   /* If there is an LHS, remove it, but only if its type has fixed size.
     819              :      The LHS will need to be recreated during RTL expansion and creating
     820              :      temporaries of variable-sized types is not supported.  Also don't
     821              :      do this with TREE_ADDRESSABLE types, as assign_temp will abort.
     822              :      Drop LHS regardless of TREE_ADDRESSABLE, if the function call
     823              :      has been changed into a call that does not return a value, like
     824              :      __builtin_unreachable or __cxa_pure_virtual.  */
     825      6338405 :   tree lhs = gimple_call_lhs (stmt);
     826      6338405 :   if (lhs
     827      6338405 :       && (should_remove_lhs_p (lhs)
     828          406 :           || VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))))
     829              :     {
     830         4128 :       gimple_call_set_lhs (stmt, NULL_TREE);
     831              : 
     832              :       /* We need to fix up the SSA name to avoid checking errors.  */
     833         4128 :       if (TREE_CODE (lhs) == SSA_NAME)
     834              :         {
     835         3927 :           tree new_var = create_tmp_reg (TREE_TYPE (lhs));
     836         3927 :           SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
     837         3927 :           SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
     838         3927 :           set_ssa_default_def (cfun, new_var, lhs);
     839              :         }
     840              : 
     841         4128 :       update_stmt (stmt);
     842              :     }
     843              : 
     844              :   /* Mark the call as altering control flow.  */
     845      6338405 :   if (!gimple_call_ctrl_altering_p (stmt))
     846              :     {
     847        87609 :       gimple_call_set_ctrl_altering (stmt, true);
     848        87609 :       changed = true;
     849              :     }
     850              : 
     851              :   return changed;
     852              : }
     853              : 
     854              : /* Return true if we want to merge BB1 and BB2 into a single block.  */
     855              : 
     856              : static bool
     857    409446075 : want_merge_blocks_p (basic_block bb1, basic_block bb2)
     858              : {
     859    409446075 :   if (!can_merge_blocks_p (bb1, bb2))
     860              :     return false;
     861     28292386 :   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb1);
     862     28292386 :   if (gsi_end_p (gsi) || !stmt_can_terminate_bb_p (gsi_stmt (gsi)))
     863     25100170 :     return true;
     864      3192216 :   return bb1->count.ok_for_merging (bb2->count);
     865              : }
     866              : 
     867              : 
     868              : /* Tries to cleanup cfg in basic block BB by merging blocks.  Returns
     869              :    true if anything changes.  */
     870              : 
     871              : static bool
     872    390013228 : cleanup_tree_cfg_bb (basic_block bb)
     873              : {
     874    390013228 :   if (maybe_remove_forwarder_block (bb))
     875              :     return true;
     876              : 
     877              :   /* If there is a merge opportunity with the predecessor
     878              :      do nothing now but wait until we process the predecessor.
     879              :      This happens when we visit BBs in a non-optimal order and
     880              :      avoids quadratic behavior with adjusting stmts BB pointer.  */
     881    362480528 :   if (single_pred_p (bb)
     882    625680904 :       && want_merge_blocks_p (single_pred (bb), bb))
     883              :     /* But make sure we _do_ visit it.  When we remove unreachable paths
     884              :        ending in a backedge we fail to mark the destinations predecessors
     885              :        as changed.  */
     886     11596805 :     bitmap_set_bit (cfgcleanup_altered_bbs, single_pred (bb)->index);
     887              : 
     888              :   /* Merging the blocks may create new opportunities for folding
     889              :      conditional branches (due to the elimination of single-valued PHI
     890              :      nodes).  */
     891    350883723 :   else if (single_succ_p (bb)
     892    486462267 :            && want_merge_blocks_p (bb, single_succ (bb)))
     893              :     {
     894     16695517 :       merge_blocks (bb, single_succ (bb));
     895     16695517 :       return true;
     896              :     }
     897              : 
     898              :   return false;
     899              : }
     900              : 
     901              : /* Return true if E is an EDGE_ABNORMAL edge for returns_twice calls,
     902              :    i.e. one going from .ABNORMAL_DISPATCHER to basic block which doesn't
     903              :    start with a forced or nonlocal label.  Calls which return twice can return
     904              :    the second time only if they are called normally the first time, so basic
     905              :    blocks which can be only entered through these abnormal edges but not
     906              :    normally are effectively unreachable as well.  Additionally ignore
     907              :    __builtin_setjmp_receiver starting blocks, which have one FORCED_LABEL
     908              :    and which are always only reachable through EDGE_ABNORMAL edge.  They are
     909              :    handled in cleanup_control_flow_pre.  */
     910              : 
     911              : static bool
     912    344411791 : maybe_dead_abnormal_edge_p (edge e)
     913              : {
     914    344411791 :   if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL)
     915              :     return false;
     916              : 
     917       147448 :   gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (e->src);
     918       147448 :   gimple *g = gsi_stmt (gsi);
     919       147448 :   if (!g || !gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
     920              :     return false;
     921              : 
     922        19565 :   tree target = NULL_TREE;
     923        51123 :   for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     924        31551 :     if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
     925              :       {
     926        17904 :         tree this_target = gimple_label_label (label_stmt);
     927        17904 :         if (DECL_NONLOCAL (this_target))
     928              :           return false;
     929        11993 :         if (FORCED_LABEL (this_target))
     930              :           {
     931        11993 :             if (target)
     932              :               return false;
     933              :             target = this_target;
     934              :           }
     935              :       }
     936              :     else
     937              :       break;
     938              : 
     939        13654 :   if (target)
     940              :     {
     941              :       /* If there was a single FORCED_LABEL, check for
     942              :          __builtin_setjmp_receiver with address of that label.  */
     943        11993 :       if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
     944            0 :         gsi_next_nondebug (&gsi);
     945        11993 :       if (gsi_end_p (gsi))
     946              :         return false;
     947        11993 :       if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_RECEIVER))
     948              :         return false;
     949              : 
     950        11993 :       tree arg = gimple_call_arg (gsi_stmt (gsi), 0);
     951        11993 :       if (TREE_CODE (arg) != ADDR_EXPR || TREE_OPERAND (arg, 0) != target)
     952              :         return false;
     953              :     }
     954              :   return true;
     955              : }
     956              : 
     957              : /* If BB is a basic block ending with __builtin_setjmp_setup, return edge
     958              :    from .ABNORMAL_DISPATCHER basic block to corresponding
     959              :    __builtin_setjmp_receiver basic block, otherwise return NULL.  */
     960              : static edge
     961    344410106 : builtin_setjmp_setup_bb (basic_block bb)
     962              : {
     963    344410106 :   if (EDGE_COUNT (bb->succs) != 2
     964    333802128 :       || ((EDGE_SUCC (bb, 0)->flags
     965    152799393 :            & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
     966    152703978 :           && (EDGE_SUCC (bb, 1)->flags
     967    152703978 :               & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL))
     968              :     return NULL;
     969              : 
     970       167244 :   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
     971       167244 :   if (gsi_end_p (gsi)
     972       167244 :       || !gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_SETUP))
     973       155275 :     return NULL;
     974              : 
     975        11969 :   tree arg = gimple_call_arg (gsi_stmt (gsi), 1);
     976        11969 :   if (TREE_CODE (arg) != ADDR_EXPR
     977        11969 :       || TREE_CODE (TREE_OPERAND (arg, 0)) != LABEL_DECL)
     978              :     return NULL;
     979              : 
     980        11969 :   basic_block recv_bb = label_to_block (cfun, TREE_OPERAND (arg, 0));
     981    344410106 :   if (EDGE_COUNT (recv_bb->preds) != 1
     982        11969 :       || (EDGE_PRED (recv_bb, 0)->flags
     983        11969 :           & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
     984        23938 :       || (EDGE_SUCC (bb, 0)->dest != EDGE_PRED (recv_bb, 0)->src
     985        11969 :           && EDGE_SUCC (bb, 1)->dest != EDGE_PRED (recv_bb, 0)->src))
     986              :     return NULL;
     987              : 
     988              :   /* EDGE_PRED (recv_bb, 0)->src should be the .ABNORMAL_DISPATCHER bb.  */
     989              :   return EDGE_PRED (recv_bb, 0);
     990              : }
     991              : 
     992              : /* Do cleanup_control_flow_bb in PRE order.  */
     993              : 
     994              : static bool
     995     25088474 : cleanup_control_flow_pre ()
     996              : {
     997     25088474 :   bool retval = false;
     998              : 
     999              :   /* We want remove_edge_and_dominated_blocks to only remove edges,
    1000              :      not dominated blocks which it does when dom info isn't available.
    1001              :      Pretend so.  */
    1002     25088474 :   dom_state saved_state = dom_info_state (CDI_DOMINATORS);
    1003     25088474 :   set_dom_info_availability (CDI_DOMINATORS, DOM_NONE);
    1004              : 
    1005     25088474 :   auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
    1006     25088474 :   auto_sbitmap visited (last_basic_block_for_fn (cfun));
    1007     25088474 :   bitmap_clear (visited);
    1008              : 
    1009     25088474 :   vec<edge, va_gc> *setjmp_vec = NULL;
    1010     25088474 :   auto_vec<basic_block, 4> abnormal_dispatchers;
    1011              : 
    1012     25088474 :   stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
    1013              : 
    1014    872080645 :   while (! stack.is_empty ())
    1015              :     {
    1016              :       /* Look at the edge on the top of the stack.  */
    1017    846992171 :       edge_iterator ei = stack.last ();
    1018    846992171 :       basic_block dest = ei_edge (ei)->dest;
    1019              : 
    1020    846992171 :       if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
    1021    822293018 :           && !bitmap_bit_p (visited, dest->index)
    1022   1191415931 :           && (ei_container (ei) == setjmp_vec
    1023    344411791 :               || !maybe_dead_abnormal_edge_p (ei_edge (ei))))
    1024              :         {
    1025    344410106 :           bitmap_set_bit (visited, dest->index);
    1026              :           /* We only possibly remove edges from DEST here, leaving
    1027              :              possibly unreachable code in the IL.  */
    1028    344410106 :           retval |= cleanup_control_flow_bb (dest);
    1029              : 
    1030              :           /* Check for __builtin_setjmp_setup.  Edges from .ABNORMAL_DISPATCH
    1031              :              to __builtin_setjmp_receiver will be normally ignored by
    1032              :              maybe_dead_abnormal_edge_p.  If DEST is a visited
    1033              :              __builtin_setjmp_setup, queue edge from .ABNORMAL_DISPATCH
    1034              :              to __builtin_setjmp_receiver, so that it will be visited too.  */
    1035    344410106 :           if (edge e = builtin_setjmp_setup_bb (dest))
    1036              :             {
    1037        11969 :               vec_safe_push (setjmp_vec, e);
    1038        11969 :               if (vec_safe_length (setjmp_vec) == 1)
    1039         6318 :                 stack.quick_push (ei_start (setjmp_vec));
    1040              :             }
    1041              : 
    1042    344410106 :           if ((ei_edge (ei)->flags
    1043    344410106 :                & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
    1044              :             {
    1045       145763 :               gimple_stmt_iterator gsi
    1046       145763 :                 = gsi_start_nondebug_after_labels_bb (dest);
    1047       145763 :               gimple *g = gsi_stmt (gsi);
    1048       145763 :               if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
    1049        25446 :                 abnormal_dispatchers.safe_push (dest);
    1050              :             }
    1051              : 
    1052   1191402277 :           if (EDGE_COUNT (dest->succs) > 0)
    1053    321894956 :             stack.quick_push (ei_start (dest->succs));
    1054              :         }
    1055              :       else
    1056              :         {
    1057    502582065 :           if (!ei_one_before_end_p (ei))
    1058    155592317 :             ei_next (&stack.last ());
    1059              :           else
    1060              :             {
    1061    346989748 :               if (ei_container (ei) == setjmp_vec)
    1062         6318 :                 vec_safe_truncate (setjmp_vec, 0);
    1063    346989748 :               stack.pop ();
    1064              :             }
    1065              :         }
    1066              :     }
    1067              : 
    1068     25088474 :   vec_free (setjmp_vec);
    1069              : 
    1070              :   /* If we've marked .ABNORMAL_DISPATCHER basic block(s) as visited
    1071              :      above, but haven't marked any of their successors as visited,
    1072              :      unmark them now, so that they can be removed as useless.  */
    1073     75290868 :   for (basic_block dispatcher_bb : abnormal_dispatchers)
    1074              :     {
    1075        25446 :       edge e;
    1076        25446 :       edge_iterator ei;
    1077        25537 :       FOR_EACH_EDGE (e, ei, dispatcher_bb->succs)
    1078        25446 :         if (bitmap_bit_p (visited, e->dest->index))
    1079              :           break;
    1080        25446 :       if (e == NULL)
    1081           91 :         bitmap_clear_bit (visited, dispatcher_bb->index);
    1082              :     }
    1083              : 
    1084     25088474 :   set_dom_info_availability (CDI_DOMINATORS, saved_state);
    1085              : 
    1086              :   /* We are deleting BBs in non-reverse dominator order, make sure
    1087              :      insert_debug_temps_for_defs is prepared for that.  */
    1088     25088474 :   if (retval)
    1089       869032 :     free_dominance_info (CDI_DOMINATORS);
    1090              : 
    1091              :   /* Remove all now (and previously) unreachable blocks.  */
    1092    393020339 :   for (int i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); ++i)
    1093              :     {
    1094    367931865 :       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
    1095    367931865 :       if (bb && !bitmap_bit_p (visited, bb->index))
    1096              :         {
    1097      4739413 :           if (!retval)
    1098       751442 :             free_dominance_info (CDI_DOMINATORS);
    1099      4739413 :           delete_basic_block (bb);
    1100      4739413 :           retval = true;
    1101              :         }
    1102              :     }
    1103              : 
    1104     25088474 :   return retval;
    1105     25088474 : }
    1106              : 
    1107              : /* Callback function for make_forwarder_block which returns
    1108              :    true when E is not a latch.  */
    1109              : 
    1110              : static bool
    1111       151368 : mfb_keep_latches (edge e, void*)
    1112              : {
    1113       151368 :   return !((dom_info_available_p (CDI_DOMINATORS)
    1114        23548 :             && dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
    1115       144056 :            || (e->flags & EDGE_DFS_BACK));
    1116              : }
    1117              : 
    1118              : /* Remove unreachable blocks and other miscellaneous clean up work.
    1119              :    Return true if the flowgraph was modified, false otherwise.  */
    1120              : 
    1121              : static bool
    1122     25088474 : cleanup_tree_cfg_noloop (unsigned ssa_update_flags)
    1123              : {
    1124     25088474 :   timevar_push (TV_TREE_CLEANUP_CFG);
    1125              : 
    1126              :   /* Ensure that we have single entries into loop headers.  Otherwise
    1127              :      if one of the entries is becoming a latch due to CFG cleanup
    1128              :      (from formerly being part of an irreducible region) then we mess
    1129              :      up loop fixup and associate the old loop with a different region
    1130              :      which makes niter upper bounds invalid.  See for example PR80549.
    1131              :      This needs to be done before we remove trivially dead edges as
    1132              :      we need to capture the dominance state before the pending transform.  */
    1133     25088474 :   if (current_loops)
    1134              :     {
    1135              :       /* This needs backedges or dominators.  */
    1136     22215402 :       if (!dom_info_available_p (CDI_DOMINATORS))
    1137      6727749 :         mark_dfs_back_edges ();
    1138              : 
    1139     92511353 :       for (loop_p loop : *get_loops (cfun))
    1140     48080549 :         if (loop && loop->header)
    1141              :           {
    1142     40999336 :             basic_block bb = loop->header;
    1143     40999336 :             edge_iterator ei;
    1144     40999336 :             edge e;
    1145     40999336 :             bool found_latch = false;
    1146     40999336 :             bool any_abnormal = false;
    1147     40999336 :             unsigned n = 0;
    1148              :             /* We are only interested in preserving existing loops, but
    1149              :                we need to check whether they are still real and of course
    1150              :                if we need to add a preheader at all.  */
    1151     78722092 :             FOR_EACH_EDGE (e, ei, bb->preds)
    1152              :               {
    1153     37722756 :                 if (e->flags & EDGE_ABNORMAL)
    1154              :                   {
    1155              :                     any_abnormal = true;
    1156              :                     break;
    1157              :                   }
    1158     37722756 :                 if ((dom_info_available_p (CDI_DOMINATORS)
    1159     27535203 :                      && dominated_by_p (CDI_DOMINATORS, e->src, bb))
    1160     51475565 :                     || (e->flags & EDGE_DFS_BACK))
    1161              :                   {
    1162     18883740 :                     found_latch = true;
    1163     18883740 :                     continue;
    1164              :                   }
    1165     18839016 :                 n++;
    1166              :               }
    1167              :             /* If we have more than one entry to the loop header
    1168              :                create a forwarder.  */
    1169     40999336 :             if (found_latch && ! any_abnormal && n > 1)
    1170              :               {
    1171        46801 :                 edge fallthru = make_forwarder_block (bb, mfb_keep_latches, NULL);
    1172        46801 :                 loop->header = fallthru->dest;
    1173        46801 :                 if (! loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    1174              :                   {
    1175              :                     /* The loop updating from the CFG hook is incomplete
    1176              :                        when we have multiple latches, fixup manually.  */
    1177        26454 :                     remove_bb_from_loops (fallthru->src);
    1178        26454 :                     loop_p cloop = loop;
    1179        83080 :                     FOR_EACH_EDGE (e, ei, fallthru->src->preds)
    1180        56626 :                       cloop = find_common_loop (cloop, e->src->loop_father);
    1181        26454 :                     add_bb_to_loop (fallthru->src, cloop);
    1182              :                   }
    1183              :               }
    1184              :           }
    1185              :     }
    1186              : 
    1187              :   /* Prepare the worklists of altered blocks.  */
    1188     25088474 :   cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);
    1189              : 
    1190              :   /* Start by iterating over all basic blocks in PRE order looking for
    1191              :      edge removal opportunities.  Do this first because incoming SSA form
    1192              :      may be invalid and we want to avoid performing SSA related tasks such
    1193              :      as propgating out a PHI node during BB merging in that state.  This
    1194              :      also gets rid of unreachable blocks.  */
    1195     25088474 :   bool changed = cleanup_control_flow_pre ();
    1196              : 
    1197              :   /* After doing the above SSA form should be valid (or an update SSA
    1198              :      should be required).  */
    1199     25088474 :   if (ssa_update_flags)
    1200              :     {
    1201     16436088 :       timevar_pop (TV_TREE_CLEANUP_CFG);
    1202     16436088 :       update_ssa (ssa_update_flags);
    1203     16436088 :       timevar_push (TV_TREE_CLEANUP_CFG);
    1204              :     }
    1205              : 
    1206              :   /* Compute dominator info which we need for the iterative process below.
    1207              :      Avoid computing the fast query DFS numbers since any block merging
    1208              :      done will invalidate them anyway.  */
    1209     25088474 :   if (!dom_info_available_p (CDI_DOMINATORS))
    1210      8498172 :     calculate_dominance_info (CDI_DOMINATORS, false);
    1211              :   else
    1212     16590302 :     checking_verify_dominators (CDI_DOMINATORS);
    1213              : 
    1214              :   /* During forwarder block cleanup, we may redirect edges out of
    1215              :      SWITCH_EXPRs, which can get expensive.  So we want to enable
    1216              :      recording of edge to CASE_LABEL_EXPR.  */
    1217     25088474 :   start_recording_case_labels ();
    1218              : 
    1219              :   /* Continue by iterating over all basic blocks looking for BB merging
    1220              :      opportunities.  We cannot use FOR_EACH_BB_FN for the BB iteration
    1221              :      since the basic blocks may get removed.  */
    1222     25088474 :   unsigned n = last_basic_block_for_fn (cfun);
    1223    393020339 :   for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
    1224              :     {
    1225    367931865 :       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
    1226    367931865 :       if (bb)
    1227    337143078 :         changed |= cleanup_tree_cfg_bb (bb);
    1228              :     }
    1229              : 
    1230              :   /* Now process the altered blocks, as long as any are available.  */
    1231     86125081 :   while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    1232              :     {
    1233     61036607 :       unsigned i = bitmap_clear_first_set_bit (cfgcleanup_altered_bbs);
    1234     61036607 :       if (i < NUM_FIXED_BLOCKS)
    1235            0 :         continue;
    1236              : 
    1237     61036607 :       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
    1238     61036607 :       if (!bb)
    1239      8166457 :         continue;
    1240              : 
    1241              :       /* BB merging done by cleanup_tree_cfg_bb can end up propagating
    1242              :          out single-argument PHIs which in turn can expose
    1243              :          cleanup_control_flow_bb opportunities so we have to repeat
    1244              :          that here.  */
    1245     52870150 :       changed |= cleanup_control_flow_bb (bb);
    1246     52870150 :       changed |= cleanup_tree_cfg_bb (bb);
    1247              :     }
    1248              : 
    1249     25088474 :   end_recording_case_labels ();
    1250     25088474 :   BITMAP_FREE (cfgcleanup_altered_bbs);
    1251              : 
    1252     25088474 :   gcc_assert (dom_info_available_p (CDI_DOMINATORS));
    1253              : 
    1254              :   /* Do not renumber blocks if the SCEV cache is active, it is indexed by
    1255              :      basic-block numbers.  */
    1256     25088474 :   if (! scev_initialized_p ())
    1257     24783281 :     compact_blocks ();
    1258              : 
    1259     25088474 :   checking_verify_flow_info ();
    1260              : 
    1261     25088474 :   timevar_pop (TV_TREE_CLEANUP_CFG);
    1262              : 
    1263     25088474 :   if (changed && current_loops)
    1264              :     {
    1265              :       /* Removing edges and/or blocks may make recorded bounds refer
    1266              :          to stale GIMPLE stmts now, so clear them.  */
    1267      5994182 :       free_numbers_of_iterations_estimates (cfun);
    1268      5994182 :       loops_state_set (LOOPS_NEED_FIXUP);
    1269              :     }
    1270              : 
    1271     25088474 :   return changed;
    1272              : }
    1273              : 
    1274              : /* Repairs loop structures.  */
    1275              : 
    1276              : static void
    1277      7659803 : repair_loop_structures (void)
    1278              : {
    1279      7659803 :   bitmap changed_bbs;
    1280      7659803 :   unsigned n_new_or_deleted_loops;
    1281              : 
    1282      7659803 :   calculate_dominance_info (CDI_DOMINATORS);
    1283              : 
    1284      7659803 :   timevar_push (TV_REPAIR_LOOPS);
    1285      7659803 :   changed_bbs = BITMAP_ALLOC (NULL);
    1286      7659803 :   n_new_or_deleted_loops = fix_loop_structure (changed_bbs);
    1287              : 
    1288              :   /* This usually does nothing.  But sometimes parts of cfg that originally
    1289              :      were inside a loop get out of it due to edge removal (since they
    1290              :      become unreachable by back edges from latch).  Also a former
    1291              :      irreducible loop can become reducible - in this case force a full
    1292              :      rewrite into loop-closed SSA form.  */
    1293      7659803 :   if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
    1294      7659803 :       && (!bitmap_empty_p (changed_bbs) || n_new_or_deleted_loops))
    1295        25279 :     rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
    1296              : 
    1297      7659803 :   BITMAP_FREE (changed_bbs);
    1298              : 
    1299      7659803 :   checking_verify_loop_structure ();
    1300      7659803 :   scev_reset ();
    1301              : 
    1302      7659803 :   timevar_pop (TV_REPAIR_LOOPS);
    1303      7659803 : }
    1304              : 
    1305              : /* Cleanup cfg and repair loop structures.  */
    1306              : 
    1307              : bool
    1308     25088474 : cleanup_tree_cfg (unsigned ssa_update_flags)
    1309              : {
    1310     25088474 :   bool changed = cleanup_tree_cfg_noloop (ssa_update_flags);
    1311              : 
    1312     25088474 :   if (current_loops != NULL
    1313     25088474 :       && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    1314      7659803 :     repair_loop_structures ();
    1315              : 
    1316     25088474 :   return changed;
    1317              : }
    1318              : 
    1319              : /* This pass merges PHI nodes if one feeds into another.  For example,
    1320              :    suppose we have the following:
    1321              : 
    1322              :   goto <bb 9> (<L9>);
    1323              : 
    1324              : <L8>:;
    1325              :   tem_17 = foo ();
    1326              : 
    1327              :   # tem_6 = PHI <tem_17(8), tem_23(7)>;
    1328              : <L9>:;
    1329              : 
    1330              :   # tem_3 = PHI <tem_6(9), tem_2(5)>;
    1331              : <L10>:;
    1332              : 
    1333              :   Then we merge the first PHI node into the second one like so:
    1334              : 
    1335              :   goto <bb 9> (<L10>);
    1336              : 
    1337              : <L8>:;
    1338              :   tem_17 = foo ();
    1339              : 
    1340              :   # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
    1341              : <L10>:;
    1342              : */
    1343              : 
    1344              : namespace {
    1345              : 
    1346              : const pass_data pass_data_merge_phi =
    1347              : {
    1348              :   GIMPLE_PASS, /* type */
    1349              :   "mergephi", /* name */
    1350              :   OPTGROUP_NONE, /* optinfo_flags */
    1351              :   TV_TREE_MERGE_PHI, /* tv_id */
    1352              :   ( PROP_cfg | PROP_ssa ), /* properties_required */
    1353              :   0, /* properties_provided */
    1354              :   0, /* properties_destroyed */
    1355              :   0, /* todo_flags_start */
    1356              :   0, /* todo_flags_finish */
    1357              : };
    1358              : 
    1359              : class pass_merge_phi : public gimple_opt_pass
    1360              : {
    1361              : public:
    1362       864141 :   pass_merge_phi (gcc::context *ctxt)
    1363      1728282 :     : gimple_opt_pass (pass_data_merge_phi, ctxt)
    1364              :   {}
    1365              : 
    1366              :   /* opt_pass methods: */
    1367       576094 :   opt_pass * clone () final override { return new pass_merge_phi (m_ctxt); }
    1368              :   unsigned int execute (function *) final override;
    1369              : 
    1370              : }; // class pass_merge_phi
    1371              : 
    1372              : unsigned int
    1373      4490560 : pass_merge_phi::execute (function *fun)
    1374              : {
    1375      4490560 :   int forwarder_removed = 0;
    1376      4490560 :   calculate_dominance_info (CDI_DOMINATORS);
    1377              : 
    1378              :   /* Find all PHI nodes that we may be able to merge.  */
    1379      4490560 :   unsigned n = last_basic_block_for_fn (fun);
    1380     37915665 :   for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
    1381              :     {
    1382     33425105 :       basic_block bb = BASIC_BLOCK_FOR_FN (fun, i);
    1383     33425105 :       if (!bb)
    1384        71497 :         continue;
    1385              : 
    1386              :       /* Look for a forwarder block with PHI nodes.  */
    1387     33353608 :       if (maybe_remove_forwarder_block (bb, true))
    1388       202678 :         forwarder_removed++;
    1389              :     }
    1390              : 
    1391              :   /* Removing forwarder blocks can cause formerly irreducible loops
    1392              :      to become reducible if we merged two entry blocks.  */
    1393      4490560 :   if (forwarder_removed != 0
    1394        86969 :       && current_loops)
    1395        86969 :     loops_state_set (LOOPS_NEED_FIXUP);
    1396              : 
    1397      4490560 :   statistics_counter_event (fun, "Forwarder blocks removed",
    1398              :                             forwarder_removed);
    1399      4490560 :   return 0;
    1400              : }
    1401              : 
    1402              : } // anon namespace
    1403              : 
    1404              : gimple_opt_pass *
    1405       288047 : make_pass_merge_phi (gcc::context *ctxt)
    1406              : {
    1407       288047 :   return new pass_merge_phi (ctxt);
    1408              : }
    1409              : 
    1410              : /* Pass: cleanup the CFG just before expanding trees to RTL.
    1411              :    This is just a round of label cleanups and case node grouping
    1412              :    because after the tree optimizers have run such cleanups may
    1413              :    be necessary.  */
    1414              : 
    1415              : static unsigned int
    1416      1475189 : execute_cleanup_cfg_post_optimizing (void)
    1417              : {
    1418      1475189 :   unsigned int todo = execute_fixup_cfg ();
    1419      1475189 :   if (cleanup_tree_cfg ())
    1420       394718 :     todo |= TODO_update_ssa;
    1421      1475189 :   maybe_remove_unreachable_handlers ();
    1422      1475189 :   cleanup_dead_labels ();
    1423      1475189 :   if (group_case_labels () && cleanup_tree_cfg ())
    1424            0 :     todo |= TODO_update_ssa;
    1425              : 
    1426              :   /* When optimizing undo the merging of forwarder blocks
    1427              :      that phis for better out of ssa expansion.  */
    1428      1475189 :   if (optimize)
    1429      1041826 :     make_forwarders_with_degenerate_phis (cfun, true);
    1430              : 
    1431              :   /* Make sure todo does not have cleanup cfg as we don't want
    1432              :      remove the forwarder blocks we just created. cleanup cfg
    1433              :      has already happened.  */
    1434      1475189 :   todo &= ~TODO_cleanup_cfg;
    1435              : 
    1436      1475189 :   basic_block bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
    1437      1475189 :   gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (bb);
    1438              :   /* If the first (and only) bb and the only non debug
    1439              :      statement is __builtin_unreachable call, then replace it with a trap
    1440              :      so the function is at least one instruction in size.  */
    1441      1475189 :   if (!gsi_end_p (gsi)
    1442      1475189 :       && gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    1443              :     {
    1444          348 :       if (targetm.have_trap ())
    1445              :         {
    1446          696 :           gimple_call_set_fndecl (gsi_stmt (gsi), builtin_decl_implicit (BUILT_IN_UNREACHABLE_TRAP));
    1447          348 :           update_stmt (gsi_stmt (gsi));
    1448              :         }
    1449              :       /* If the target does not have a trap, convert it into an infinite loop. */
    1450              :       else
    1451              :         {
    1452            0 :           gsi_remove (&gsi, true);
    1453            0 :           make_single_succ_edge (bb, bb, EDGE_FALLTHRU);
    1454            0 :           fix_loop_structure (NULL);
    1455              :         }
    1456              :     }
    1457              : 
    1458      1475189 :   if ((flag_compare_debug_opt || flag_compare_debug)
    1459         3912 :       && flag_dump_final_insns)
    1460              :     {
    1461         3912 :       FILE *final_output = fopen (flag_dump_final_insns, "a");
    1462              : 
    1463         3912 :       if (!final_output)
    1464              :         {
    1465            0 :           error ("could not open final insn dump file %qs: %m",
    1466              :                  flag_dump_final_insns);
    1467            0 :           flag_dump_final_insns = NULL;
    1468              :         }
    1469              :       else
    1470              :         {
    1471         3912 :           int save_unnumbered = flag_dump_unnumbered;
    1472         3912 :           int save_noaddr = flag_dump_noaddr;
    1473              : 
    1474         3912 :           flag_dump_noaddr = flag_dump_unnumbered = 1;
    1475         3912 :           fprintf (final_output, "\n");
    1476         3912 :           dump_enumerated_decls (final_output,
    1477              :                                  dump_flags | TDF_SLIM | TDF_NOUID);
    1478         3912 :           flag_dump_noaddr = save_noaddr;
    1479         3912 :           flag_dump_unnumbered = save_unnumbered;
    1480         3912 :           if (fclose (final_output))
    1481              :             {
    1482            0 :               error ("could not close final insn dump file %qs: %m",
    1483              :                      flag_dump_final_insns);
    1484            0 :               flag_dump_final_insns = NULL;
    1485              :             }
    1486              :         }
    1487              :     }
    1488      1475189 :   return todo;
    1489              : }
    1490              : 
    1491              : namespace {
    1492              : 
    1493              : const pass_data pass_data_cleanup_cfg_post_optimizing =
    1494              : {
    1495              :   GIMPLE_PASS, /* type */
    1496              :   "optimized", /* name */
    1497              :   OPTGROUP_NONE, /* optinfo_flags */
    1498              :   TV_TREE_CLEANUP_CFG, /* tv_id */
    1499              :   PROP_cfg, /* properties_required */
    1500              :   0, /* properties_provided */
    1501              :   0, /* properties_destroyed */
    1502              :   0, /* todo_flags_start */
    1503              :   TODO_remove_unused_locals, /* todo_flags_finish */
    1504              : };
    1505              : 
    1506              : class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
    1507              : {
    1508              : public:
    1509       288047 :   pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    1510       576094 :     : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
    1511              :   {}
    1512              : 
    1513              :   /* opt_pass methods: */
    1514      1475189 :   unsigned int execute (function *) final override
    1515              :     {
    1516      1475189 :       return execute_cleanup_cfg_post_optimizing ();
    1517              :     }
    1518              : 
    1519              : }; // class pass_cleanup_cfg_post_optimizing
    1520              : 
    1521              : } // anon namespace
    1522              : 
    1523              : gimple_opt_pass *
    1524       288047 : make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    1525              : {
    1526       288047 :   return new pass_cleanup_cfg_post_optimizing (ctxt);
    1527              : }
    1528              : 
    1529              : 
    1530              : /* Delete all unreachable basic blocks and update callgraph.
    1531              :    Doing so is somewhat nontrivial because we need to update all clones and
    1532              :    remove inline function that become unreachable.  */
    1533              : 
    1534              : bool
    1535      1562580 : delete_unreachable_blocks_update_callgraph (cgraph_node *dst_node,
    1536              :                                             bool update_clones)
    1537              : {
    1538      1562580 :   bool changed = false;
    1539      1562580 :   basic_block b, next_bb;
    1540              : 
    1541      1562580 :   find_unreachable_blocks ();
    1542              : 
    1543              :   /* Delete all unreachable basic blocks.  */
    1544              : 
    1545      1562580 :   for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
    1546     30088484 :        != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    1547              :     {
    1548     28525904 :       next_bb = b->next_bb;
    1549              : 
    1550     28525904 :       if (!(b->flags & BB_REACHABLE))
    1551              :         {
    1552        37676 :           gimple_stmt_iterator bsi;
    1553              : 
    1554       146241 :           for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
    1555              :             {
    1556        70889 :               struct cgraph_edge *e;
    1557        70889 :               struct cgraph_node *node;
    1558              : 
    1559        70889 :               dst_node->remove_stmt_references (gsi_stmt (bsi));
    1560              : 
    1561        70889 :               if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
    1562        70889 :                   &&(e = dst_node->get_edge (gsi_stmt (bsi))) != NULL)
    1563              :                 {
    1564          892 :                   if (!e->inline_failed)
    1565            0 :                     e->callee->remove_symbol_and_inline_clones (dst_node);
    1566              :                   else
    1567          892 :                     cgraph_edge::remove (e);
    1568              :                 }
    1569        70889 :               if (update_clones && dst_node->clones)
    1570            0 :                 for (node = dst_node->clones; node != dst_node;)
    1571              :                   {
    1572            0 :                     node->remove_stmt_references (gsi_stmt (bsi));
    1573            0 :                     if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
    1574            0 :                         && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
    1575              :                       {
    1576            0 :                         if (!e->inline_failed)
    1577            0 :                           e->callee->remove_symbol_and_inline_clones (dst_node);
    1578              :                         else
    1579            0 :                           cgraph_edge::remove (e);
    1580              :                       }
    1581              : 
    1582            0 :                     if (node->clones)
    1583              :                       node = node->clones;
    1584            0 :                     else if (node->next_sibling_clone)
    1585              :                       node = node->next_sibling_clone;
    1586              :                     else
    1587              :                       {
    1588            0 :                         while (node != dst_node && !node->next_sibling_clone)
    1589            0 :                           node = node->clone_of;
    1590            0 :                         if (node != dst_node)
    1591            0 :                           node = node->next_sibling_clone;
    1592              :                       }
    1593              :                   }
    1594              :             }
    1595        37676 :           delete_basic_block (b);
    1596        37676 :           changed = true;
    1597              :         }
    1598              :     }
    1599              : 
    1600      1562580 :   return changed;
    1601              : }
    1602              : 
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.