LCOV - code coverage report
Current view: top level - gcc - tree-cfgcleanup.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 95.9 % 652 625
Test Date: 2026-02-28 14:20:25 Functions: 100.0 % 26 26
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* CFG cleanup for trees.
       2              :    Copyright (C) 2001-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "rtl.h"
      25              : #include "tree.h"
      26              : #include "gimple.h"
      27              : #include "cfghooks.h"
      28              : #include "tree-pass.h"
      29              : #include "ssa.h"
      30              : #include "diagnostic-core.h"
      31              : #include "fold-const.h"
      32              : #include "cfganal.h"
      33              : #include "cfgcleanup.h"
      34              : #include "tree-eh.h"
      35              : #include "gimplify.h"
      36              : #include "gimple-iterator.h"
      37              : #include "tree-cfg.h"
      38              : #include "tree-ssa-loop-manip.h"
      39              : #include "tree-dfa.h"
      40              : #include "tree-ssa.h"
      41              : #include "cfgloop.h"
      42              : #include "tree-scalar-evolution.h"
      43              : #include "gimple-match.h"
      44              : #include "gimple-fold.h"
      45              : #include "tree-ssa-loop-niter.h"
      46              : #include "cgraph.h"
      47              : #include "tree-into-ssa.h"
      48              : #include "tree-cfgcleanup.h"
      49              : #include "gimple-pretty-print.h"
      50              : #include "target.h"
      51              : 
      52              : 
       53              : /* The set of blocks in which at least one of the following changes happened:
      54              :    -- the statement at the end of the block was changed
      55              :    -- the block was newly created
      56              :    -- the set of the predecessors of the block changed
      57              :    -- the set of the successors of the block changed
      58              :    ??? Maybe we could track these changes separately, since they determine
      59              :        what cleanups it makes sense to try on the block.  */
      60              : bitmap cfgcleanup_altered_bbs;
      61              : 
       62              : /* Remove any fallthru edge from EV.  Return true if an edge was removed.  */
       63              : 
       64              : static bool
       65     22677940 : remove_fallthru_edge (vec<edge, va_gc> *ev)
       66              : {
       67     22677940 :   edge_iterator ei;
       68     22677940 :   edge e;
       69              : 
                       :   /* A block has at most one fallthru successor, so we can stop at the
                       :      first one found.  */
       70     25385366 :   FOR_EACH_EDGE (e, ei, ev)
       71      2741271 :     if ((e->flags & EDGE_FALLTHRU) != 0)
       72              :       {
                       :         /* A complex (abnormal/EH) edge cannot simply be deleted; just
                       :            clear its fallthru-ness instead.  */
       73        33845 :         if (e->flags & EDGE_COMPLEX)
       74            0 :           e->flags &= ~EDGE_FALLTHRU;
       75              :         else
       76        33845 :           remove_edge_and_dominated_blocks (e);
       77        33845 :         return true;
       78              :       }
       79              :   return false;
       80              : }
      81              : 
       82              : /* Convert a SWTCH with single non-default case to gcond and replace it
       83              :    at GSI.  Return true if the replacement was done.  */
       84              : 
       85              : static bool
       86       813854 : convert_single_case_switch (gswitch *swtch, gimple_stmt_iterator &gsi)
       87              : {
                       :   /* Label 0 is the default, so two labels means exactly one real case.  */
       88       813854 :   if (gimple_switch_num_labels (swtch) != 2)
       89              :     return false;
       90              : 
       91        11643 :   tree index = gimple_switch_index (swtch);
       92        11643 :   tree label = gimple_switch_label (swtch, 1);
       93        11643 :   tree low = CASE_LOW (label);
                       :   /* CASE_HIGH is non-NULL only for a range case (low ... high).  */
       94        11643 :   tree high = CASE_HIGH (label);
       95              : 
       96        11643 :   basic_block default_bb = gimple_switch_default_bb (cfun, swtch);
       97        11643 :   basic_block case_bb = label_to_block (cfun, CASE_LABEL (label));
       98              : 
       99        11643 :   basic_block bb = gimple_bb (swtch);
      100        11643 :   gcond *cond;
      101              : 
      102              :   /* Replace switch statement with condition statement.  */
      103        11643 :   if (high)
      104              :     {
      105         1027 :       tree lhs, rhs;
                       :       /* Bail out if no suitable unsigned type exists for the range check.  */
      106         1027 :       if (range_check_type (TREE_TYPE (index)) == NULL_TREE)
      107            0 :         return false;
                       :       /* generate_range_test emits (index - low) into LHS and the range
                       :          length into RHS so a single unsigned LE test covers the range.  */
      108         1027 :       generate_range_test (bb, index, low, high, &lhs, &rhs);
      109         1027 :       cond = gimple_build_cond (LE_EXPR, lhs, rhs, NULL_TREE, NULL_TREE);
      110              :     }
      111              :   else
      112        10616 :     cond = gimple_build_cond (EQ_EXPR, index,
      113        10616 :                               fold_convert (TREE_TYPE (index), low),
      114              :                               NULL_TREE, NULL_TREE);
      115              : 
      116        11643 :   gsi_replace (&gsi, cond, true);
      117              : 
                       :   /* Update edges: the case edge becomes the true edge of the new
                       :      condition, the default edge the false edge.  */
      118              :   /* Update edges.  */
      119        11643 :   edge case_edge = find_edge (bb, case_bb);
      120        11643 :   edge default_edge = find_edge (bb, default_bb);
      121              : 
      122        11643 :   case_edge->flags |= EDGE_TRUE_VALUE;
      123        11643 :   default_edge->flags |= EDGE_FALSE_VALUE;
      124        11643 :   return true;
      125              : }
     126              : 
      127              : /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 of STMT in BB by
      128              :    swapping edges of the BB.  Return true if STMT was changed.  */
      129              : bool
      130    180061011 : canonicalize_bool_cond (gcond *stmt, basic_block bb)
      131              : {
      132    180061011 :   tree rhs1 = gimple_cond_lhs (stmt);
      133    180061011 :   tree rhs2 = gimple_cond_rhs (stmt);
      134    180061011 :   enum tree_code code = gimple_cond_code (stmt);
      135    180061011 :   if (code != EQ_EXPR && code != NE_EXPR)
      136              :     return false;
                       :   /* Only boolean-like operands: BOOLEAN_TYPE or a 1-bit integral type.  */
      137    135235925 :   if (TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE
      138    135235925 :       && (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
      139     78003796 :            || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1))
      140              :     return false;
      141              : 
      142              :   /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
      143     23623304 :   if (code == EQ_EXPR && !integer_zerop (rhs2))
      144              :     return false;
      145     23497284 :   if (code == NE_EXPR && !integer_onep (rhs2))
      146              :     return false;
      147              : 
      148       133103 :   gimple_cond_set_code (stmt, NE_EXPR);
      149       133103 :   gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
                       :   /* The rewritten condition is the inverse of the original, so exchange
                       :      the true/false roles of both successor edges by toggling the flags.  */
      150       133103 :   EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      151       133103 :   EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      152              : 
      153       133103 :   if (dump_file)
      154              :     {
      155            1 :       fprintf (dump_file, "  Swapped '");
      156            1 :       print_gimple_expr (dump_file, stmt, 0);
      157            1 :       fprintf (dump_file, "'\n");
      158              :     }
      159              :   return true;
      160              : }
     161              : 
      162              : /* Disconnect an unreachable block in the control expression starting
      163              :    at block BB.  Return true if any edge was removed.  The control
      164              :    statement at GSI is deleted in any case where a taken edge is known.  */
      165              : static bool
      166    161831543 : cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
      167              : {
      168    161831543 :   edge taken_edge;
      169    161831543 :   bool retval = false;
      170    161831543 :   gimple *stmt = gsi_stmt (gsi);
      171              : 
      172    161831543 :   if (!single_succ_p (bb))
      173              :     {
      174    161825416 :       edge e;
      175    161825416 :       edge_iterator ei;
      176    161825416 :       bool warned;
      177    161825416 :       tree val = NULL_TREE;
      178              : 
      179              :       /* Try to convert a switch with just a single non-default case to
      180              :          GIMPLE condition.  */
      181    161825416 :       if (gimple_code (stmt) == GIMPLE_SWITCH
      182    161825416 :           && convert_single_case_switch (as_a<gswitch *> (stmt), gsi))
      183        11643 :         stmt = gsi_stmt (gsi);
      184              : 
      185    161825416 :       if (gimple_code (stmt) == GIMPLE_COND)
      186    161023205 :         canonicalize_bool_cond (as_a<gcond*> (stmt), bb);
      187              : 
                       :       /* Defer overflow warnings: only emit them if folding the predicate
                       :          actually leads to edge removal below.  */
      188    161825416 :       fold_defer_overflow_warnings ();
      189    161825416 :       switch (gimple_code (stmt))
      190              :         {
      191    161023205 :         case GIMPLE_COND:
      192    161023205 :           {
                       :             /* Fold the condition without following SSA def chains; a
                       :              constant result identifies the single taken edge.  */
      193    161023205 :             gimple_match_op res_op;
      194    161023205 :             if (gimple_simplify (stmt, &res_op, NULL, no_follow_ssa_edges,
      195              :                                  no_follow_ssa_edges)
      196    161023205 :                 && res_op.code == INTEGER_CST)
      197      2648092 :               val = res_op.ops[0];
      198              :           }
      199    161023205 :           break;
      200              : 
      201       802211 :         case GIMPLE_SWITCH:
      202       802211 :           val = gimple_switch_index (as_a <gswitch *> (stmt));
      203       802211 :           break;
      204              : 
      205    161825416 :         default:
      206    161825416 :           ;
      207              :         }
      208    161825416 :       taken_edge = find_taken_edge (bb, val);
      209    161825416 :       if (!taken_edge)
      210              :         {
      211    159164749 :           fold_undefer_and_ignore_overflow_warnings ();
      212    159164749 :           return false;
      213              :         }
      214              : 
      215              :       /* Remove all the edges except the one that is always executed.  */
      216      2660667 :       warned = false;
      217      8014561 :       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
      218              :         {
      219      5353894 :           if (e != taken_edge)
      220              :             {
                       :               /* Emit the deferred warnings once, on the first removal.  */
      221      2693227 :               if (!warned)
      222              :                 {
      223      2660667 :                   fold_undefer_overflow_warnings
      224      2660667 :                     (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
      225      2660667 :                   warned = true;
      226              :                 }
      227              : 
                       :               /* Fold the removed edge's probability into the kept edge so
                       :                  outgoing probabilities still sum to one.  */
      228      2693227 :               taken_edge->probability += e->probability;
      229      2693227 :               remove_edge_and_dominated_blocks (e);
      230      2693227 :               retval = true;
      231              :             }
      232              :           else
      233      2660667 :             ei_next (&ei);
      234              :         }
      235      2660667 :       if (!warned)
      236            0 :         fold_undefer_and_ignore_overflow_warnings ();
      237              :     }
      238              :   else
      239         6127 :     taken_edge = single_succ_edge (bb);
      240              : 
                       :   /* The control statement is now redundant; the single surviving edge
                       :      becomes a plain fallthru.  */
      241      2666794 :   bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      242      2666794 :   gsi_remove (&gsi, true);
      243      2666794 :   taken_edge->flags = EDGE_FALLTHRU;
      244              : 
      245      2666794 :   return retval;
      246              : }
     247              : 
      248              : /* Cleanup the GF_CALL_CTRL_ALTERING flag according to
      249              :    updated gimple_call_flags of BB_END, the call ending BB.  */
      250              : 
      251              : static void
      252    360918389 : cleanup_call_ctrl_altering_flag (basic_block bb, gimple *bb_end)
      253              : {
      254    360918389 :   if (!is_gimple_call (bb_end)
      255    67998580 :       || !gimple_call_ctrl_altering_p (bb_end)
      256    384064228 :       || (/* IFN_UNIQUE should be the last insn, to make checking for it
      257              :              as cheap as possible.  */
      258     23145839 :           gimple_call_internal_p (bb_end)
      259       438458 :           && gimple_call_internal_unique_p (bb_end)))
      260              :     return;
      261              : 
                       :   /* A call not marked noreturn that is const/pure (and not possibly
                       :      looping forever) or is a leaf cannot alter control flow.  */
      262     22732969 :   int flags = gimple_call_flags (bb_end);
      263     22732969 :   if (!(flags & ECF_NORETURN)
      264        84368 :       && (((flags & (ECF_CONST | ECF_PURE))
      265         1953 :            && !(flags & ECF_LOOPING_CONST_OR_PURE))
      266        84198 :           || (flags & ECF_LEAF)))
      267          170 :     gimple_call_set_ctrl_altering (bb_end, false);
      268              :   else
      269              :     {
      270     22732799 :       edge_iterator ei;
      271     22732799 :       edge e;
      272     22732799 :       bool found = false;
      273     25515238 :       FOR_EACH_EDGE (e, ei, bb->succs)
      274      2899840 :         if (e->flags & EDGE_FALLTHRU)
      275              :           found = true;
      276      2787109 :         else if (e->flags & EDGE_ABNORMAL)
      277              :           {
      278              :             found = false;
      279              :             break;
      280              :           }
      281              :       /* If there's no abnormal edge and a fallthru edge the call
      282              :          isn't control-altering anymore.  */
      283     22732799 :       if (found)
      284        31045 :         gimple_call_set_ctrl_altering (bb_end, false);
      285              :     }
      286              : }
     287              : 
      288              : /* Try to remove superfluous control structures in basic block BB.  Returns
      289              :    true if anything changes.  Handles three cases for the final statement:
      290              :    GIMPLE_COND/GIMPLE_SWITCH, a computed goto with known target, and a
      291              :    noreturn call followed by a dead fallthru edge.  */
      292              : static bool
      293    401155875 : cleanup_control_flow_bb (basic_block bb)
      294              : {
      295    401155875 :   gimple_stmt_iterator gsi;
      296    401155875 :   bool retval = false;
      297    401155875 :   gimple *stmt;
      298              : 
      299              :   /* If the last statement of the block could throw and now cannot,
      300              :      we need to prune cfg.  */
      301    401155875 :   retval |= gimple_purge_dead_eh_edges (bb);
      302              : 
      303    401155875 :   gsi = gsi_last_nondebug_bb (bb);
      304    401155875 :   if (gsi_end_p (gsi))
      305              :     return retval;
      306              : 
      307    360918389 :   stmt = gsi_stmt (gsi);
      308              : 
      309              :   /* Try to cleanup ctrl altering flag for call which ends bb.  */
      310    360918389 :   cleanup_call_ctrl_altering_flag (bb, stmt);
      311              : 
      312    360918389 :   if (gimple_code (stmt) == GIMPLE_COND
      313    360918389 :       || gimple_code (stmt) == GIMPLE_SWITCH)
      314              :     {
      315    323663086 :       gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      316    161831543 :       retval |= cleanup_control_expr_graph (bb, gsi);
      317              :     }
      318    199086846 :   else if (gimple_code (stmt) == GIMPLE_GOTO
      319         6277 :            && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
      320    199087223 :            && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
      321              :                == LABEL_DECL))
      322              :     {
      323              :       /* If we had a computed goto which has a compile-time determinable
      324              :          destination, then we can eliminate the goto.  */
      325          171 :       edge e;
      326          172 :       tree label;
      327          171 :       edge_iterator ei;
      328          171 :       basic_block target_block;
      329              : 
      330          342 :       gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      331              :       /* First look at all the outgoing edges.  Delete any outgoing
      332              :          edges which do not go to the right block.  For the one
      333              :          edge which goes to the right block, fix up its flags.  */
      334          171 :       label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
                       :       /* A label from a different function (non-local goto target) cannot
                       :          be resolved here.  */
      335          171 :       if (DECL_CONTEXT (label) != cfun->decl)
      336            8 :         return retval;
      337          163 :       target_block = label_to_block (cfun, label);
      338          411 :       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
      339              :         {
      340          248 :           if (e->dest != target_block)
      341           91 :             remove_edge_and_dominated_blocks (e);
      342              :           else
      343              :             {
      344              :               /* Turn off the EDGE_ABNORMAL flag.  */
      345          157 :               e->flags &= ~EDGE_ABNORMAL;
      346              : 
      347              :               /* And set EDGE_FALLTHRU.  */
      348          157 :               e->flags |= EDGE_FALLTHRU;
      349          157 :               ei_next (&ei);
      350              :             }
      351              :         }
      352              : 
      353          163 :       bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      354          163 :       bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);
      355              : 
      356              :       /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
      357              :          relevant information we need.  */
      358          163 :       gsi_remove (&gsi, true);
      359          163 :       retval = true;
      360              :     }
      361              : 
      362              :   /* Check for indirect calls that have been turned into
      363              :      noreturn calls.  */
      364    199086675 :   else if (is_gimple_call (stmt)
      365    199086675 :            && gimple_call_noreturn_p (stmt))
      366              :     {
      367              :       /* If there are debug stmts after the noreturn call, remove them
      368              :          now, they should be all unreachable anyway.  */
      369     22678117 :       for (gsi_next (&gsi); !gsi_end_p (gsi); )
      370          177 :         gsi_remove (&gsi, true);
      371     22677940 :       if (remove_fallthru_edge (bb->succs))
      372        33845 :         retval = true;
                       :       /* Unless the call's LHS is a candidate for removal, keep the call
                       :          marked control-altering so the CFG stays consistent.  */
      373     22677940 :       tree lhs = gimple_call_lhs (stmt);
      374     22677940 :       if (!lhs
      375     22677940 :           || !should_remove_lhs_p (lhs))
      376     22677921 :         gimple_call_set_ctrl_altering (stmt, true);
      377              :     }
      378              : 
      379              :   return retval;
      380              : }
     380              : 
      381              : /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
      382              :    those alternatives are equal in each of the PHI nodes, then return
      383              :    true, else return false.  */
      384              : 
      385              : bool
      386      4075748 : phi_alternatives_equal (basic_block dest, edge e1, edge e2)
      387              : {
                       :   /* dest_idx selects the PHI argument slot corresponding to each edge.  */
      388      4075748 :   int n1 = e1->dest_idx;
      389      4075748 :   int n2 = e2->dest_idx;
      390      4075748 :   gphi_iterator gsi;
      391              : 
      392      4510599 :   for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
      393              :     {
      394      4450057 :       gphi *phi = gsi.phi ();
      395      4450057 :       tree val1 = gimple_phi_arg_def (phi, n1);
      396      4450057 :       tree val2 = gimple_phi_arg_def (phi, n2);
      397              : 
      398      4450057 :       gcc_assert (val1 != NULL_TREE);
      399      4450057 :       gcc_assert (val2 != NULL_TREE);
      400              : 
      401      4450057 :       if (!operand_equal_for_phi_arg_p (val1, val2))
      402              :         return false;
      403              :     }
      404              : 
                       :   /* Vacuously true when DEST has no PHI nodes.  */
      405              :   return true;
      406              : }
     407              : 
      408              : /* Move debug stmts from the forwarder block SRC to DEST or PRED.
      409              :    DEST_SINGLE_PRED_P says whether SRC is DEST's only predecessor,
      410              :    PRED_SINGLE_SUCC_P whether SRC is PRED's only successor.  */
      410              : static void
      411     28140341 : move_debug_stmts_from_forwarder (basic_block src,
      412              :                                  basic_block dest, bool dest_single_pred_p,
      413              :                                  basic_block pred, bool pred_single_succ_p)
      414              : {
      415     28140341 :   if (!MAY_HAVE_DEBUG_STMTS)
      416      9859864 :     return;
      417              : 
      418              :   /* If we cannot move to the destination but to the predecessor do that.  */
      419     18354837 :   if (!dest_single_pred_p && pred_single_succ_p)
      420              :     {
      421        74365 :       gimple_stmt_iterator gsi_to = gsi_last_bb (pred);
                       :       /* Appending after a block-ending statement would be invalid, so
                       :          only use PRED when its last statement does not end the block.  */
      422        74365 :       if (gsi_end_p (gsi_to) || !stmt_ends_bb_p (gsi_stmt (gsi_to)))
      423              :         {
      424        74360 :           for (gimple_stmt_iterator gsi = gsi_after_labels (src);
      425       250786 :                !gsi_end_p (gsi);)
      426              :             {
      427       176426 :               gimple *debug = gsi_stmt (gsi);
      428       176426 :               gcc_assert (is_gimple_debug (debug));
      429       176426 :               gsi_move_after (&gsi, &gsi_to);
      430              :             }
      431        74360 :           return;
      432              :         }
      433              :     }
      434              : 
      435              :   /* Else move to DEST or drop/reset them.  */
      436     18280477 :   gimple_stmt_iterator gsi_to = gsi_after_labels (dest);
      437     34003159 :   for (gimple_stmt_iterator gsi = gsi_after_labels (src); !gsi_end_p (gsi);)
      438              :     {
      439     15722682 :       gimple *debug = gsi_stmt (gsi);
      440     15722682 :       gcc_assert (is_gimple_debug (debug));
      441              :       /* Move debug binds anyway, but not anything else like begin-stmt
      442              :          markers unless they are always valid at the destination.  */
      443     15722682 :       if (dest_single_pred_p
      444     15722682 :           || gimple_debug_bind_p (debug))
      445              :         {
      446     14694745 :           gsi_move_before (&gsi, &gsi_to);
      447              :           /* Reset debug-binds that are not always valid at the destination.
      448              :              Simply dropping them can cause earlier values to become live,
      449              :              generating wrong debug information.
      450              :              ???  There are several things we could improve here.  For
      451              :              one we might be able to move stmts to the predecessor.
      452              :              For anther, if the debug stmt is immediately followed by a
      453              :              (debug) definition in the destination (on a post-dominated path?)
      454              :              we can elide it without any bad effects.  */
      455     14694745 :           if (!dest_single_pred_p)
      456              :             {
      457      6138874 :               gimple_debug_bind_reset_value (debug)
      458      6138874 :               update_stmt (debug);
      459              :             }
      460              :         }
      461              :       else
                       :         /* Non-bind debug stmt staying behind; skip over it.  */
      462      1027937 :         gsi_next (&gsi);
      463              :     }
      464              : }
     465              : 
      466              : /* Return true if basic block BB does nothing except pass control
      467              :    flow to another block, a label can safely be inserted at the
      468              :    start of the successor block, and BB was removed.
      469              : 
      470              :    As a precondition, we require that BB be not equal to
      471              :    the entry block.
      472              :    If CAN_SPLIT is true, we can split the edge to have
      473              :    another bb with the phi.  */
     474              : 
     475              : static bool
     476    427457627 : maybe_remove_forwarder_block (basic_block bb, bool can_split = false)
     477              : {
     478    427457627 :   gimple_stmt_iterator gsi;
     479    427457627 :   location_t locus;
     480              : 
     481              :   /* BB must have a single outgoing edge.  */
     482    814597028 :   if (!single_succ_p (bb)
     483              :       /* BB may not be a predecessor of the exit block.  */
     484    199166083 :       || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
     485              :       /* Nor should this be an infinite loop.  */
     486    167218426 :       || single_succ (bb) == bb
     487              :       /* BB may not have an abnormal outgoing edge.  */
     488    582397357 :       || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
     489              :     return false;
     490              : 
     491    167079982 :   gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));
     492              : 
     493    167079982 :   locus = single_succ_edge (bb)->goto_locus;
     494              : 
     495              :   /* There should not be an edge coming from entry, or an EH edge.  */
     496    167079982 :   {
     497    167079982 :     edge_iterator ei;
     498    167079982 :     edge e;
     499              : 
     500    347392379 :     FOR_EACH_EDGE (e, ei, bb->preds)
     501    202882468 :       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
     502     22570071 :         return false;
     503              :       /* If goto_locus of any of the edges differs, prevent removing
     504              :          the forwarder block when not optimizing.  */
     505    181691392 :       else if (!optimize
     506      5224642 :                && (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
     507      4759844 :                    || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
     508    183070526 :                && e->goto_locus != locus)
     509              :         return false;
     510              :   }
     511              : 
     512              :   /* If this bb has a single predecessor and that predecssor
     513              :      has a single successor, this bb will be merged with the
     514              :      predecessor so ignore it for removing of the forwarder block. */
     515    144509911 :   if (single_pred_p (bb)
     516    144509911 :       && single_succ_p (single_pred_edge (bb)->src))
     517              :     return false;
     518              : 
     519    136290661 :   bool has_phi = !gimple_seq_empty_p (phi_nodes (bb));
     520    136290661 :   basic_block dest = single_succ_edge (bb)->dest;
     521              : 
     522              :   /* If the destination block consists of a nonlocal label or is a
     523              :      EH landing pad, do not merge it.  */
     524    136290661 :   if (glabel *label_stmt = safe_dyn_cast <glabel *> (first_stmt (dest)))
     525      8556925 :     if (DECL_NONLOCAL (gimple_label_label (label_stmt))
     526      8556925 :         || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)))
     527              :     return false;
     528              : 
     529              :   /* If there is an abnormal edge to basic block BB, but not into
     530              :      dest, problems might occur during removal of the phi node at out
     531              :      of ssa due to overlapping live ranges of registers.
     532              : 
     533              :      If there is an abnormal edge in DEST, the problems would occur
     534              :      anyway since cleanup_dead_labels would then merge the labels for
     535              :      two different eh regions, and rest of exception handling code
     536              :      does not like it.
     537              : 
     538              :      So if there is an abnormal edge to BB, proceed only if there is
     539              :      no abnormal edge to DEST and there are no phi nodes in DEST.
     540              :      If the BB has phi, we don't want to deal with abnormal edges either. */
     541    132836014 :   if (bb_has_abnormal_pred (bb)
     542    132836014 :       && (bb_has_abnormal_pred (dest)
     543        60705 :           || !gimple_seq_empty_p (phi_nodes (dest))
     544        53615 :           || has_phi))
     545              :     return false;
     546              : 
     547              :   /* When we have a phi, we have to feed into another
     548              :      basic block with PHI nodes.  */
     549    132824970 :   if (has_phi && gimple_seq_empty_p (phi_nodes (dest)))
     550              :     return false;
     551              : 
     552              :   /* Now walk through the statements backward.  We can ignore labels,
     553              :      anything else means this is not a forwarder block.  */
     554    545351836 :   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     555              :     {
     556    236292499 :       gimple *stmt = gsi_stmt (gsi);
     557              : 
     558    236292499 :       switch (gimple_code (stmt))
     559              :         {
     560       993812 :         case GIMPLE_LABEL:
     561       993812 :           if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt)))
     562       993812 :               || EH_LANDING_PAD_NR (gimple_label_label (as_a <glabel *> (stmt))))
     563              :             return false;
     564       993505 :           if (!optimize
     565        29651 :               && (gimple_has_location (stmt)
     566         2184 :                   || LOCATION_LOCUS (locus) != UNKNOWN_LOCATION)
     567      1020972 :               && gimple_location (stmt) != locus)
     568              :             return false;
     569              :           break;
     570              : 
     571              :           /* ??? For now, hope there's a corresponding debug
     572              :              assignment at the destination.  */
     573              :         case GIMPLE_DEBUG:
     574              :           break;
     575              : 
     576              :         default:
     577              :           return false;
     578              :         }
     579              :     }
     580              :   /* If BB has PHIs and does not dominate DEST,
     581              :      then the PHI nodes at DEST must be the only
     582              :      users of the results of the PHI nodes at BB.
     583              :      So only check when BB dominates dest.  */
     584     36383419 :   if (has_phi
     585     36383419 :       && dominated_by_p (CDI_DOMINATORS, dest, bb))
     586              :     {
     587      2306723 :       gphi_iterator gsi;
     588      2306723 :       unsigned int dest_idx = single_succ_edge (bb)->dest_idx;
     589              : 
     590              :       /* BB dominates DEST.  There may be many users of the PHI
     591              :          nodes in BB.  However, there is still a trivial case we
     592              :          can handle.  If the result of every PHI in BB is used
     593              :          only by a PHI in DEST, then we can trivially merge the
     594              :          PHI nodes from BB into DEST.  */
     595      4739282 :       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
     596      2432559 :            gsi_next (&gsi))
     597              :         {
     598      3268638 :           gphi *phi = gsi.phi ();
     599      3268638 :           tree result = gimple_phi_result (phi);
     600      3268638 :           use_operand_p imm_use;
     601      3268638 :           gimple *use_stmt;
     602              : 
      603              :           /* If the PHI's result is never used, then we can just
      604              :              ignore it.  */
     605      3268638 :           if (has_zero_uses (result))
     606        73389 :             continue;
     607              : 
     608              :           /* Get the single use of the result of this PHI node.  */
     609      3195249 :           if (!single_imm_use (result, &imm_use, &use_stmt)
     610      2721724 :               || gimple_code (use_stmt) != GIMPLE_PHI
     611      2444448 :               || gimple_bb (use_stmt) != dest
     612      5555645 :               || gimple_phi_arg_def (use_stmt, dest_idx) != result)
     613       836079 :             return false;
     614              :         }
     615              :     }
     616              : 
     617     35547340 :   if (current_loops)
     618              :     {
     619              :       /* Protect loop headers.  */
     620     34637613 :       if (bb_loop_header_p (bb))
     621              :         return false;
     622              : 
     623              :       /* Protect loop preheaders and latches if requested.  */
     624     34477666 :       if (dest->loop_father->header == dest)
     625              :         {
     626     12473019 :           if (bb->loop_father == dest->loop_father)
     627              :             {
     628      7448034 :               if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
     629              :                 return false;
     630              :               /* If bb doesn't have a single predecessor we'd make this
     631              :                  loop have multiple latches.  Don't do that if that
     632              :                  would in turn require disambiguating them over again.  */
     633    404971350 :               if (!single_pred_p (bb))
     634              :                 return false;
     635              :             }
     636              :           /* cleanup_tree_cfg_noloop just created the loop preheader, don't
     637              :              remove it if it has phis.  */
     638      5024985 :           else if (bb->loop_father == loop_outer (dest->loop_father)
     639      5012776 :                    && !has_phi
     640      8925497 :                    && !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
     641              :             ;
     642              :           else
     643              :             /* Always preserve other edges into loop headers that are
     644              :                not simple latches or preheaders.  */
     645              :             return false;
     646              :         }
     647              :     }
     648              : 
     649     32044031 :   edge succ = single_succ_edge (bb), e, s;
     650     32044031 :   gimple *stmt;
     651     32044031 :   gimple_stmt_iterator gsi_to;
     652              : 
     653              :   /* If there are phi nodes in DEST, and some of the blocks that are
     654              :      predecessors of BB are also predecessors of DEST, check that the
     655              :      phi node arguments match.
     656              :      Otherwise we have to split the edge and that becomes
     657              :      a "forwarder" again.  */
     658     32044031 :   if ((!can_split || !has_phi)
     659     32044031 :       && !gimple_seq_empty_p (phi_nodes (dest)))
     660              :     {
     661     26246354 :       edge_iterator ei;
     662     51098982 :       FOR_EACH_EDGE (e, ei, bb->preds)
     663              :         {
     664     28756318 :           s = find_edge (e->src, dest);
     665     28756318 :           if (!s)
     666     24792587 :             continue;
     667              : 
     668      3963731 :           if (!phi_alternatives_equal (dest, succ, s))
     669      3903690 :             return false;
     670              :         }
     671              :     }
     672              : 
     673     28140341 :   basic_block pred = NULL;
     674     28140341 :   if (single_pred_p (bb))
     675     26527351 :     pred = single_pred (bb);
     676     28140341 :   bool dest_single_pred_p = single_pred_p (dest);
     677              : 
     678              :   /* Redirect the edges.  */
     679     58893918 :   for (edge_iterator ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
     680              :     {
     681     30753577 :       if (cfgcleanup_altered_bbs)
     682     30521876 :         bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
     683     30753577 :       s = find_edge (e->src, dest);
     684              : 
     685              :       /* See if we can split the edge if we already have an edge from src to dest.  */
     686     30753577 :       if (can_split && has_phi)
     687              :         /* PHI arguments are different.  Create a forwarder block by
     688              :            splitting E so that we can merge PHI arguments on E to
     689              :            DEST.  */
     690        46737 :         if (s && !phi_alternatives_equal (dest, s, succ))
     691        16813 :           e = single_succ_edge (split_edge (e));
     692              : 
     693     30753577 :       if (e->flags & EDGE_ABNORMAL)
     694              :         {
     695              :           /* If there is an abnormal edge, redirect it anyway, and
     696              :              move the labels to the new block to make it legal.  */
     697          183 :           s = redirect_edge_succ_nodup (e, dest);
     698              :         }
     699              :       else
     700     30753394 :         s = redirect_edge_and_branch (e, dest);
     701              : 
     702     30753577 :       if (s == e)
     703              :         {
     704              :           /* If we merge the forwarder with phis into a loop header
     705              :              verify if we are creating another loop latch edge.
     706              :              If so, reset number of iteration information of the loop.  */
     707     30634939 :           if (has_phi
     708      2848476 :               && dest->loop_father
     709      2848476 :               && dest->loop_father->header == dest
     710     30659554 :               && dominated_by_p (CDI_DOMINATORS, e->src, dest))
     711              :             {
     712        24615 :               dest->loop_father->any_upper_bound = false;
     713        24615 :               dest->loop_father->any_likely_upper_bound = false;
     714        24615 :               free_numbers_of_iterations_estimates (dest->loop_father);
     715              :             }
     716              :           /* Copy arguments for the phi nodes, since the edge was not
     717              :              here before.  */
     718     30634939 :           copy_phi_arg_into_existing_phi (succ, s, has_phi);
     719              :         }
     720              :       else
     721       118638 :         redirect_edge_var_map_clear (s);
     722              :     }
     723              : 
     724              :   /* Move nonlocal labels and computed goto targets as well as user
     725              :      defined labels and labels with an EH landing pad number to the
     726              :      new block, so that the redirection of the abnormal edges works,
     727              :      jump targets end up in a sane place and debug information for
     728              :      labels is retained.  */
     729     28140341 :   gsi_to = gsi_start_bb (dest);
     730     56575181 :   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
     731              :     {
     732      2900531 :       stmt = gsi_stmt (gsi);
     733      2900531 :       if (is_gimple_debug (stmt))
     734              :         break;
     735              : 
     736              :       /* Forwarder blocks can only contain labels and debug stmts, and
     737              :          labels must come first, so if we get to this point, we know
     738              :          we're looking at a label.  */
     739       294499 :       tree decl = gimple_label_label (as_a <glabel *> (stmt));
     740       294499 :       if (EH_LANDING_PAD_NR (decl) != 0
     741       294499 :           || DECL_NONLOCAL (decl)
     742       294499 :           || FORCED_LABEL (decl)
     743       583663 :           || !DECL_ARTIFICIAL (decl))
     744        29866 :         gsi_move_before (&gsi, &gsi_to);
     745              :       else
     746       264633 :         gsi_next (&gsi);
     747              :     }
     748              : 
     749              :   /* Move debug statements.  Reset them if the destination does not
     750              :      have a single predecessor.  */
     751     56280682 :   move_debug_stmts_from_forwarder (bb, dest, dest_single_pred_p,
     752     82699835 :                                    pred, pred && single_succ_p (pred));
     753              : 
     754     28140341 :   if (cfgcleanup_altered_bbs)
     755     27936828 :     bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);
     756              : 
     757              :   /* Update the dominators.  */
     758     28140341 :   basic_block dom, dombb, domdest;
     759              : 
     760     28140341 :   dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
     761     28140341 :   domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
     762     28140341 :   if (domdest == bb)
     763              :     {
     764              :       /* Shortcut to avoid calling (relatively expensive)
     765              :          nearest_common_dominator unless necessary.  */
     766              :       dom = dombb;
     767              :     }
     768              :   else
     769     21040140 :     dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
     770              : 
     771     28140341 :   set_immediate_dominator (CDI_DOMINATORS, dest, dom);
     772              : 
      773              :   /* Adjust latch information of BB's parent loop as otherwise
     774              :      the cfg hook has a hard time not to kill the loop.  */
     775     28140341 :   if (current_loops && bb->loop_father->latch == bb)
     776      5646995 :     bb->loop_father->latch = pred;
     777              : 
     778              :   /* And kill the forwarder block.  */
     779     28140341 :   delete_basic_block (bb);
     780              : 
     781     28140341 :   return true;
     782              : }
     783              : 
     784              : /* STMT is a call that has been discovered noreturn.  Split the
     785              :    block to prepare fixing up the CFG and remove LHS.
     786              :    Return true if cleanup-cfg needs to run.  */
     787              : 
     788              : bool
     789      6346998 : fixup_noreturn_call (gimple *stmt)
     790              : {
     791      6346998 :   basic_block bb = gimple_bb (stmt);
     792      6346998 :   bool changed = false;
     793              : 
     794      6346998 :   if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
     795              :     return false;
     796              : 
     797              :   /* First split basic block if stmt is not last.  */
     798     12690212 :   if (stmt != gsi_stmt (gsi_last_bb (bb)))
     799              :     {
     800        51952 :       if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
     801              :         {
     802              :           /* Don't split if there are only debug stmts
     803              :              after stmt, that can result in -fcompare-debug
     804              :              failures.  Remove the debug stmts instead,
     805              :              they should be all unreachable anyway.  */
     806         6723 :           gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     807        59281 :           for (gsi_next (&gsi); !gsi_end_p (gsi); )
     808        52558 :             gsi_remove (&gsi, true);
     809              :         }
     810              :       else
     811              :         {
     812        45229 :           split_block (bb, stmt);
     813        45229 :           changed = true;
     814              :         }
     815              :     }
     816              : 
     817              :   /* If there is an LHS, remove it, but only if its type has fixed size.
     818              :      The LHS will need to be recreated during RTL expansion and creating
     819              :      temporaries of variable-sized types is not supported.  Also don't
     820              :      do this with TREE_ADDRESSABLE types, as assign_temp will abort.
     821              :      Drop LHS regardless of TREE_ADDRESSABLE, if the function call
     822              :      has been changed into a call that does not return a value, like
     823              :      __builtin_unreachable or __cxa_pure_virtual.  */
     824      6345106 :   tree lhs = gimple_call_lhs (stmt);
     825      6345106 :   if (lhs
     826      6345106 :       && (should_remove_lhs_p (lhs)
     827          406 :           || VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))))
     828              :     {
     829         4217 :       gimple_call_set_lhs (stmt, NULL_TREE);
     830              : 
     831              :       /* We need to fix up the SSA name to avoid checking errors.  */
     832         4217 :       if (TREE_CODE (lhs) == SSA_NAME)
     833              :         {
     834         4017 :           tree new_var = create_tmp_reg (TREE_TYPE (lhs));
     835         4017 :           SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
     836         4017 :           SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
     837         4017 :           set_ssa_default_def (cfun, new_var, lhs);
     838              :         }
     839              : 
     840         4217 :       update_stmt (stmt);
     841              :     }
     842              : 
     843              :   /* Mark the call as altering control flow.  */
     844      6345106 :   if (!gimple_call_ctrl_altering_p (stmt))
     845              :     {
     846        95060 :       gimple_call_set_ctrl_altering (stmt, true);
     847        95060 :       changed = true;
     848              :     }
     849              : 
     850              :   return changed;
     851              : }
     852              : 
     853              : /* Return true if we want to merge BB1 and BB2 into a single block.  */
     854              : 
     855              : static bool
     856    412745709 : want_merge_blocks_p (basic_block bb1, basic_block bb2)
     857              : {
     858    412745709 :   if (!can_merge_blocks_p (bb1, bb2))
     859              :     return false;
     860     28201649 :   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb1);
     861     28201649 :   if (gsi_end_p (gsi) || !stmt_can_terminate_bb_p (gsi_stmt (gsi)))
     862     24984520 :     return true;
     863      3217129 :   return bb1->count.ok_for_merging (bb2->count);
     864              : }
     865              : 
     866              : 
     867              : /* Tries to cleanup cfg in basic block BB by merging blocks.  Returns
     868              :    true if anything changes.  */
     869              : 
     870              : static bool
     871    393885086 : cleanup_tree_cfg_bb (basic_block bb)
     872              : {
     873    393885086 :   if (maybe_remove_forwarder_block (bb))
     874              :     return true;
     875              : 
     876              :   /* If there is a merge opportunity with the predecessor
     877              :      do nothing now but wait until we process the predecessor.
     878              :      This happens when we visit BBs in a non-optimal order and
     879              :      avoids quadratic behavior with adjusting stmts BB pointer.  */
     880    365948258 :   if (single_pred_p (bb)
     881    631067612 :       && want_merge_blocks_p (single_pred (bb), bb))
     882              :     /* But make sure we _do_ visit it.  When we remove unreachable paths
     883              :        ending in a backedge we fail to mark the destinations predecessors
     884              :        as changed.  */
     885     11539114 :     bitmap_set_bit (cfgcleanup_altered_bbs, single_pred (bb)->index);
     886              : 
     887              :   /* Merging the blocks may create new opportunities for folding
     888              :      conditional branches (due to the elimination of single-valued PHI
     889              :      nodes).  */
     890    354409144 :   else if (single_succ_p (bb)
     891    491387943 :            && want_merge_blocks_p (bb, single_succ (bb)))
     892              :     {
     893     16662471 :       merge_blocks (bb, single_succ (bb));
     894     16662471 :       return true;
     895              :     }
     896              : 
     897              :   return false;
     898              : }
     899              : 
     900              : /* Return true if E is an EDGE_ABNORMAL edge for returns_twice calls,
     901              :    i.e. one going from .ABNORMAL_DISPATCHER to basic block which doesn't
     902              :    start with a forced or nonlocal label.  Calls which return twice can return
     903              :    the second time only if they are called normally the first time, so basic
     904              :    blocks which can be only entered through these abnormal edges but not
     905              :    normally are effectively unreachable as well.  Additionally ignore
     906              :    __builtin_setjmp_receiver starting blocks, which have one FORCED_LABEL
     907              :    and which are always only reachable through EDGE_ABNORMAL edge.  They are
     908              :    handled in cleanup_control_flow_pre.  */
     909              : 
     910              : static bool
     911    347922441 : maybe_dead_abnormal_edge_p (edge e)
     912              : {
     913    347922441 :   if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL)
     914              :     return false;
     915              : 
     916       147551 :   gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (e->src);
     917       147551 :   gimple *g = gsi_stmt (gsi);
     918       147551 :   if (!g || !gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
     919              :     return false;
     920              : 
     921        19613 :   tree target = NULL_TREE;
     922        51225 :   for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     923        31605 :     if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
     924              :       {
     925        17952 :         tree this_target = gimple_label_label (label_stmt);
     926        17952 :         if (DECL_NONLOCAL (this_target))
     927              :           return false;
     928        11999 :         if (FORCED_LABEL (this_target))
     929              :           {
     930        11999 :             if (target)
     931              :               return false;
     932              :             target = this_target;
     933              :           }
     934              :       }
     935              :     else
     936              :       break;
     937              : 
     938        13660 :   if (target)
     939              :     {
     940              :       /* If there was a single FORCED_LABEL, check for
     941              :          __builtin_setjmp_receiver with address of that label.  */
     942        11999 :       if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
     943            0 :         gsi_next_nondebug (&gsi);
     944        11999 :       if (gsi_end_p (gsi))
     945              :         return false;
     946        11999 :       if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_RECEIVER))
     947              :         return false;
     948              : 
     949        11999 :       tree arg = gimple_call_arg (gsi_stmt (gsi), 0);
     950        11999 :       if (TREE_CODE (arg) != ADDR_EXPR || TREE_OPERAND (arg, 0) != target)
     951              :         return false;
     952              :     }
     953              :   return true;
     954              : }
     955              : 
     956              : /* If BB is a basic block ending with __builtin_setjmp_setup, return edge
     957              :    from .ABNORMAL_DISPATCHER basic block to corresponding
     958              :    __builtin_setjmp_receiver basic block, otherwise return NULL.  */
     959              : static edge
     960    347920756 : builtin_setjmp_setup_bb (basic_block bb)
     961              : {
     962    347920756 :   if (EDGE_COUNT (bb->succs) != 2
     963    337331669 :       || ((EDGE_SUCC (bb, 0)->flags
     964    154363316 :            & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
     965    154267916 :           && (EDGE_SUCC (bb, 1)->flags
     966    154267916 :               & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL))
     967              :     return NULL;
     968              : 
     969       167244 :   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
     970       167244 :   if (gsi_end_p (gsi)
     971       167244 :       || !gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_SETUP))
     972       155269 :     return NULL;
     973              : 
     974        11975 :   tree arg = gimple_call_arg (gsi_stmt (gsi), 1);
     975        11975 :   if (TREE_CODE (arg) != ADDR_EXPR
     976        11975 :       || TREE_CODE (TREE_OPERAND (arg, 0)) != LABEL_DECL)
     977              :     return NULL;
     978              : 
     979        11975 :   basic_block recv_bb = label_to_block (cfun, TREE_OPERAND (arg, 0));
     980    347920756 :   if (EDGE_COUNT (recv_bb->preds) != 1
     981        11975 :       || (EDGE_PRED (recv_bb, 0)->flags
     982        11975 :           & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
     983        23950 :       || (EDGE_SUCC (bb, 0)->dest != EDGE_PRED (recv_bb, 0)->src
     984        11975 :           && EDGE_SUCC (bb, 1)->dest != EDGE_PRED (recv_bb, 0)->src))
     985              :     return NULL;
     986              : 
     987              :   /* EDGE_PRED (recv_bb, 0)->src should be the .ABNORMAL_DISPATCHER bb.  */
     988              :   return EDGE_PRED (recv_bb, 0);
     989              : }
     990              : 
     991              : /* Do cleanup_control_flow_bb in PRE order.  */
     992              : 
     993              : static bool
     994     25135807 : cleanup_control_flow_pre ()
     995              : {
     996     25135807 :   bool retval = false;
     997              : 
     998              :   /* We want remove_edge_and_dominated_blocks to only remove edges,
     999              :      not dominated blocks which it does when dom info isn't available.
    1000              :      Pretend so.  */
    1001     25135807 :   dom_state saved_state = dom_info_state (CDI_DOMINATORS);
    1002     25135807 :   set_dom_info_availability (CDI_DOMINATORS, DOM_NONE);
    1003              : 
    1004     25135807 :   auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
    1005     25135807 :   auto_sbitmap visited (last_basic_block_for_fn (cfun));
    1006     25135807 :   bitmap_clear (visited);
    1007              : 
    1008     25135807 :   vec<edge, va_gc> *setjmp_vec = NULL;
    1009     25135807 :   auto_vec<basic_block, 4> abnormal_dispatchers;
    1010              : 
    1011     25135807 :   stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
    1012              : 
    1013    880993718 :   while (! stack.is_empty ())
    1014              :     {
    1015              :       /* Look at the edge on the top of the stack.  */
    1016    855857911 :       edge_iterator ei = stack.last ();
    1017    855857911 :       basic_block dest = ei_edge (ei)->dest;
    1018              : 
    1019    855857911 :       if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
    1020    831112780 :           && !bitmap_bit_p (visited, dest->index)
    1021   1203792327 :           && (ei_container (ei) == setjmp_vec
    1022    347922441 :               || !maybe_dead_abnormal_edge_p (ei_edge (ei))))
    1023              :         {
    1024    347920756 :           bitmap_set_bit (visited, dest->index);
    1025              :           /* We only possibly remove edges from DEST here, leaving
    1026              :              possibly unreachable code in the IL.  */
    1027    347920756 :           retval |= cleanup_control_flow_bb (dest);
    1028              : 
    1029              :           /* Check for __builtin_setjmp_setup.  Edges from .ABNORMAL_DISPATCH
    1030              :              to __builtin_setjmp_receiver will be normally ignored by
    1031              :              maybe_dead_abnormal_edge_p.  If DEST is a visited
    1032              :              __builtin_setjmp_setup, queue edge from .ABNORMAL_DISPATCH
    1033              :              to __builtin_setjmp_receiver, so that it will be visited too.  */
    1034    347920756 :           if (edge e = builtin_setjmp_setup_bb (dest))
    1035              :             {
    1036        11975 :               vec_safe_push (setjmp_vec, e);
    1037        11975 :               if (vec_safe_length (setjmp_vec) == 1)
    1038         6324 :                 stack.quick_push (ei_start (setjmp_vec));
    1039              :             }
    1040              : 
    1041    347920756 :           if ((ei_edge (ei)->flags
    1042    347920756 :                & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
    1043              :             {
    1044       145866 :               gimple_stmt_iterator gsi
    1045       145866 :                 = gsi_start_nondebug_after_labels_bb (dest);
    1046       145866 :               gimple *g = gsi_stmt (gsi);
    1047       145866 :               if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
    1048        25499 :                 abnormal_dispatchers.safe_push (dest);
    1049              :             }
    1050              : 
    1051   1203778667 :           if (EDGE_COUNT (dest->succs) > 0)
    1052    325277297 :             stack.quick_push (ei_start (dest->succs));
    1053              :         }
    1054              :       else
    1055              :         {
    1056    507937155 :           if (!ei_one_before_end_p (ei))
    1057    157517727 :             ei_next (&stack.last ());
    1058              :           else
    1059              :             {
    1060    350419428 :               if (ei_container (ei) == setjmp_vec)
    1061         6324 :                 vec_safe_truncate (setjmp_vec, 0);
    1062    350419428 :               stack.pop ();
    1063              :             }
    1064              :         }
    1065              :     }
    1066              : 
    1067     25135807 :   vec_free (setjmp_vec);
    1068              : 
    1069              :   /* If we've marked .ABNORMAL_DISPATCHER basic block(s) as visited
    1070              :      above, but haven't marked any of their successors as visited,
    1071              :      unmark them now, so that they can be removed as useless.  */
    1072     75432920 :   for (basic_block dispatcher_bb : abnormal_dispatchers)
    1073              :     {
    1074        25499 :       edge e;
    1075        25499 :       edge_iterator ei;
    1076        25590 :       FOR_EACH_EDGE (e, ei, dispatcher_bb->succs)
    1077        25499 :         if (bitmap_bit_p (visited, e->dest->index))
    1078              :           break;
    1079        25499 :       if (e == NULL)
    1080           91 :         bitmap_clear_bit (visited, dispatcher_bb->index);
    1081              :     }
    1082              : 
    1083     25135807 :   set_dom_info_availability (CDI_DOMINATORS, saved_state);
    1084              : 
    1085              :   /* We are deleting BBs in non-reverse dominator order, make sure
    1086              :      insert_debug_temps_for_defs is prepared for that.  */
    1087     25135807 :   if (retval)
    1088       894041 :     free_dominance_info (CDI_DOMINATORS);
    1089              : 
    1090              :   /* Remove all now (and previously) unreachable blocks.  */
    1091    396611026 :   for (int i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); ++i)
    1092              :     {
    1093    371475219 :       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
    1094    371475219 :       if (bb && !bitmap_bit_p (visited, bb->index))
    1095              :         {
    1096      4777939 :           if (!retval)
    1097       745111 :             free_dominance_info (CDI_DOMINATORS);
    1098      4777939 :           delete_basic_block (bb);
    1099      4777939 :           retval = true;
    1100              :         }
    1101              :     }
    1102              : 
    1103     25135807 :   return retval;
    1104     25135807 : }
    1105              : 
    1106              : static bool
    1107       153633 : mfb_keep_latches (edge e)
    1108              : {
    1109       153633 :   return !((dom_info_available_p (CDI_DOMINATORS)
    1110        23856 :             && dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
    1111       146252 :            || (e->flags & EDGE_DFS_BACK));
    1112              : }
    1113              : 
/* Remove unreachable blocks and other miscellaneous clean up work,
   leaving any required loop-structure repair to the caller (a
   nonzero result together with current_loops makes us request
   LOOPS_NEED_FIXUP below).  SSA_UPDATE_FLAGS, if nonzero, is passed
   to update_ssa once the control-flow pre-pass is done.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (unsigned ssa_update_flags)
{
  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Ensure that we have single entries into loop headers.  Otherwise
     if one of the entries is becoming a latch due to CFG cleanup
     (from formerly being part of an irreducible region) then we mess
     up loop fixup and associate the old loop with a different region
     which makes niter upper bounds invalid.  See for example PR80549.
     This needs to be done before we remove trivially dead edges as
     we need to capture the dominance state before the pending transform.  */
  if (current_loops)
    {
      /* This needs backedges or dominators.  */
      if (!dom_info_available_p (CDI_DOMINATORS))
	mark_dfs_back_edges ();

      for (loop_p loop : *get_loops (cfun))
	if (loop && loop->header)
	  {
	    basic_block bb = loop->header;
	    edge_iterator ei;
	    edge e;
	    bool found_latch = false;
	    bool any_abnormal = false;
	    unsigned n = 0;
	    /* We are only interested in preserving existing loops, but
	       we need to check whether they are still real and of course
	       if we need to add a preheader at all.  */
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      {
		/* Abnormal edges cannot be redirected, so give up on
		   this header entirely.  */
		if (e->flags & EDGE_ABNORMAL)
		  {
		    any_abnormal = true;
		    break;
		  }
		/* Classify E as a latch (back edge) or as one of the
		   N distinct entries into the header.  */
		if ((dom_info_available_p (CDI_DOMINATORS)
		     && dominated_by_p (CDI_DOMINATORS, e->src, bb))
		    || (e->flags & EDGE_DFS_BACK))
		  {
		    found_latch = true;
		    continue;
		  }
		n++;
	      }
	    /* If we have more than one entry to the loop header
	       create a forwarder.  */
	    if (found_latch && ! any_abnormal && n > 1)
	      {
		/* mfb_keep_latches keeps the latch edges on BB; the
		   entry edges move to the new forwarder.  */
		edge fallthru = make_forwarder_block (bb, mfb_keep_latches,
						      NULL);
		loop->header = fallthru->dest;
		if (! loops_state_satisfies_p (LOOPS_NEED_FIXUP))
		  {
		    /* The loop updating from the CFG hook is incomplete
		       when we have multiple latches, fixup manually.  */
		    remove_bb_from_loops (fallthru->src);
		    loop_p cloop = loop;
		    /* Place the forwarder in the deepest loop common to
		       all of its predecessors.  */
		    FOR_EACH_EDGE (e, ei, fallthru->src->preds)
		      cloop = find_common_loop (cloop, e->src->loop_father);
		    add_bb_to_loop (fallthru->src, cloop);
		  }
	      }
	  }
    }

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* Start by iterating over all basic blocks in PRE order looking for
     edge removal opportunities.  Do this first because incoming SSA form
     may be invalid and we want to avoid performing SSA related tasks such
     as propagating out a PHI node during BB merging in that state.  This
     also gets rid of unreachable blocks.  */
  bool changed = cleanup_control_flow_pre ();

  /* After doing the above SSA form should be valid (or an update SSA
     should be required).  */
  if (ssa_update_flags)
    {
      /* Do not account the SSA update to the cleanup timevar.  */
      timevar_pop (TV_TREE_CLEANUP_CFG);
      update_ssa (ssa_update_flags);
      timevar_push (TV_TREE_CLEANUP_CFG);
    }

  /* Compute dominator info which we need for the iterative process below.
     Avoid computing the fast query DFS numbers since any block merging
     done will invalidate them anyway.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    calculate_dominance_info (CDI_DOMINATORS, false);
  else
    checking_verify_dominators (CDI_DOMINATORS);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Continue by iterating over all basic blocks looking for BB merging
     opportunities.  We cannot use FOR_EACH_BB_FN for the BB iteration
     since the basic blocks may get removed.  */
  unsigned n = last_basic_block_for_fn (cfun);
  for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	changed |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      unsigned i = bitmap_clear_first_set_bit (cfgcleanup_altered_bbs);
      /* Entry/exit blocks are never cleaned up.  */
      if (i < NUM_FIXED_BLOCKS)
	continue;

      /* The block may have been removed since it was recorded.  */
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (!bb)
	continue;

      /* BB merging done by cleanup_tree_cfg_bb can end up propagating
	 out single-argument PHIs which in turn can expose
	 cleanup_control_flow_bb opportunities so we have to repeat
	 that here.  */
      changed |= cleanup_control_flow_bb (bb);
      changed |= cleanup_tree_cfg_bb (bb);
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));

  /* Do not renumber blocks if the SCEV cache is active, it is indexed by
     basic-block numbers.  */
  if (! scev_initialized_p ())
    compact_blocks ();

  checking_verify_flow_info ();

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    {
      /* Removing edges and/or blocks may make recorded bounds refer
	 to stale GIMPLE stmts now, so clear them.  */
      free_numbers_of_iterations_estimates (cfun);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  return changed;
}
    1270              : 
/* Repairs loop structures.  Recomputes dominators, rebuilds the loop
   tree, and forces a rewrite into loop-closed SSA form when the loop
   structure actually changed while that form is required.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_or_deleted_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  /* fix_loop_structure records in CHANGED_BBS the blocks it had to
     touch; that and the new/deleted loop count drive the loop-closed
     SSA rewrite decision below.  */
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_or_deleted_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
      && (!bitmap_empty_p (changed_bbs) || n_new_or_deleted_loops))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

  checking_verify_loop_structure ();
  /* Cached SCEV results may refer to the old loop tree; drop them.  */
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}
    1301              : 
    1302              : /* Cleanup cfg and repair loop structures.  */
    1303              : 
    1304              : bool
    1305     25135807 : cleanup_tree_cfg (unsigned ssa_update_flags)
    1306              : {
    1307     25135807 :   bool changed = cleanup_tree_cfg_noloop (ssa_update_flags);
    1308              : 
    1309     25135807 :   if (current_loops != NULL
    1310     25135807 :       && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    1311      7712778 :     repair_loop_structures ();
    1312              : 
    1313     25135807 :   return changed;
    1314              : }
    1315              : 
    1316              : /* This pass merges PHI nodes if one feeds into another.  For example,
    1317              :    suppose we have the following:
    1318              : 
    1319              :   goto <bb 9> (<L9>);
    1320              : 
    1321              : <L8>:;
    1322              :   tem_17 = foo ();
    1323              : 
    1324              :   # tem_6 = PHI <tem_17(8), tem_23(7)>;
    1325              : <L9>:;
    1326              : 
    1327              :   # tem_3 = PHI <tem_6(9), tem_2(5)>;
    1328              : <L10>:;
    1329              : 
    1330              :   Then we merge the first PHI node into the second one like so:
    1331              : 
    1332              :   goto <bb 9> (<L10>);
    1333              : 
    1334              : <L8>:;
    1335              :   tem_17 = foo ();
    1336              : 
    1337              :   # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
    1338              : <L10>:;
    1339              : */
    1340              : 
    1341              : namespace {
    1342              : 
/* Pass descriptor for the mergephi pass implemented below.  */

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
    1355              : 
/* Gimple pass wrapping the PHI-merging transform; it is clonable so
   it can be scheduled more than once in the pass pipeline.  */

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_merge_phi (m_ctxt); }
  unsigned int execute (function *) final override;

}; // class pass_merge_phi
    1368              : 
    1369              : unsigned int
    1370      4495152 : pass_merge_phi::execute (function *fun)
    1371              : {
    1372      4495152 :   int forwarder_removed = 0;
    1373      4495152 :   calculate_dominance_info (CDI_DOMINATORS);
    1374              : 
    1375              :   /* Find all PHI nodes that we may be able to merge.  */
    1376      4495152 :   unsigned n = last_basic_block_for_fn (fun);
    1377     38137855 :   for (unsigned i = NUM_FIXED_BLOCKS; i < n; i++)
    1378              :     {
    1379     33642703 :       basic_block bb = BASIC_BLOCK_FOR_FN (fun, i);
    1380     33642703 :       if (!bb)
    1381        70162 :         continue;
    1382              : 
    1383              :       /* Look for a forwarder block with PHI nodes.  */
    1384     33572541 :       if (maybe_remove_forwarder_block (bb, true))
    1385       203513 :         forwarder_removed++;
    1386              :     }
    1387              : 
    1388              :   /* Removing forwarder blocks can cause formerly irreducible loops
    1389              :      to become reducible if we merged two entry blocks.  */
    1390      4495152 :   if (forwarder_removed != 0
    1391        87339 :       && current_loops)
    1392        87339 :     loops_state_set (LOOPS_NEED_FIXUP);
    1393              : 
    1394      4495152 :   statistics_counter_event (fun, "Forwarder blocks removed",
    1395              :                             forwarder_removed);
    1396      4495152 :   return 0;
    1397              : }
    1398              : 
    1399              : } // anon namespace
    1400              : 
/* Create an instance of the mergephi pass for the pass manager.  */

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}
    1406              : 
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  Returns a set of TODO_* flags for the pass
   manager.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = execute_fixup_cfg ();
  if (cleanup_tree_cfg ())
    todo |= TODO_update_ssa;
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  /* Grouping case labels may expose more cleanup opportunities; only
     request an SSA update when the follow-up cleanup changed things.  */
  if (group_case_labels () && cleanup_tree_cfg ())
    todo |= TODO_update_ssa;

  /* When optimizing undo the merging of forwarder blocks that have
     PHIs, for better out-of-SSA expansion.  */
  if (optimize)
    make_forwarders_with_degenerate_phis (cfun, true);

  /* Make sure todo does not have cleanup cfg as we don't want
     remove the forwarder blocks we just created. cleanup cfg
     has already happened.  */
  todo &= ~TODO_cleanup_cfg;

  basic_block bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (bb);
  /* If the first (and only) bb and the only non debug
     statement is __builtin_unreachable call, then replace it with a trap
     so the function is at least one instruction in size.  */
  if (!gsi_end_p (gsi)
      && gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    {
      if (targetm.have_trap ())
	{
	  /* Retarget the call to the trapping variant of
	     __builtin_unreachable.  */
	  gimple_call_set_fndecl (gsi_stmt (gsi), builtin_decl_implicit (BUILT_IN_UNREACHABLE_TRAP));
	  update_stmt (gsi_stmt (gsi));
	}
      /* If the target does not have a trap, convert it into an infinite loop. */
      else
	{
	  gsi_remove (&gsi, true);
	  make_single_succ_edge (bb, bb, EDGE_FALLTHRU);
	  fix_loop_structure (NULL);
	}
    }

  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      /* Append the -fcompare-debug final dump to the file named by
	 flag_dump_final_insns.  */
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
	{
	  error ("could not open final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
      else
	{
	  /* Temporarily suppress numbering and addresses so the dumps
	     of the two compile runs compare equal.  */
	  int save_unnumbered = flag_dump_unnumbered;
	  int save_noaddr = flag_dump_noaddr;

	  flag_dump_noaddr = flag_dump_unnumbered = 1;
	  fprintf (final_output, "\n");
	  dump_enumerated_decls (final_output,
				 dump_flags | TDF_SLIM | TDF_NOUID);
	  flag_dump_noaddr = save_noaddr;
	  flag_dump_unnumbered = save_unnumbered;
	  /* fclose flushes; a failure here means the dump is unusable,
	     so report it and disable further dumping.  */
	  if (fclose (final_output))
	    {
	      error ("could not close final insn dump file %qs: %m",
		     flag_dump_final_insns);
	      flag_dump_final_insns = NULL;
	    }
	}
    }
  return todo;
}
    1487              : 
    1488              : namespace {
    1489              : 
    1490              : const pass_data pass_data_cleanup_cfg_post_optimizing =
    1491              : {
    1492              :   GIMPLE_PASS, /* type */
    1493              :   "optimized", /* name */
    1494              :   OPTGROUP_NONE, /* optinfo_flags */
    1495              :   TV_TREE_CLEANUP_CFG, /* tv_id */
    1496              :   PROP_cfg, /* properties_required */
    1497              :   0, /* properties_provided */
    1498              :   0, /* properties_destroyed */
    1499              :   0, /* todo_flags_start */
    1500              :   TODO_remove_unused_locals, /* todo_flags_finish */
    1501              : };
    1502              : 
/* Gimple pass running execute_cleanup_cfg_post_optimizing just before
   expansion to RTL.  */

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
    {
      return execute_cleanup_cfg_post_optimizing ();
    }

}; // class pass_cleanup_cfg_post_optimizing
    1517              : 
    1518              : } // anon namespace
    1519              : 
/* Create an instance of the "optimized" cleanup pass for the pass
   manager.  */

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}
    1525              : 
    1526              : 
/* Delete all unreachable basic blocks and update callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline function that become unreachable.  DST_NODE is the
   callgraph node of the current function; when UPDATE_CLONES is set,
   the whole clone tree of DST_NODE is updated as well.  Return true
   if any block was deleted.  */

bool
delete_unreachable_blocks_update_callgraph (cgraph_node *dst_node,
					    bool update_clones)
{
  bool changed = false;
  basic_block b, next_bb;

  /* Compute the BB_REACHABLE flag consulted below.  */
  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      /* Record the successor up front since B may be deleted below.  */
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
	  gimple_stmt_iterator bsi;

	  for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    {
	      struct cgraph_edge *e;
	      struct cgraph_node *node;

	      dst_node->remove_stmt_references (gsi_stmt (bsi));

	      /* For a call statement drop its callgraph edge; a call
		 that was already inlined takes its inline clone tree
		 with it.  */
	      if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
		  &&(e = dst_node->get_edge (gsi_stmt (bsi))) != NULL)
		{
		  if (!e->inline_failed)
		    e->callee->remove_symbol_and_inline_clones (dst_node);
		  else
		    cgraph_edge::remove (e);
		}
	      /* Do the same for every clone of DST_NODE, walking the
		 clone tree in preorder without recursion.  */
	      if (update_clones && dst_node->clones)
		for (node = dst_node->clones; node != dst_node;)
		  {
		    node->remove_stmt_references (gsi_stmt (bsi));
		    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
			&& (e = node->get_edge (gsi_stmt (bsi))) != NULL)
		      {
			if (!e->inline_failed)
			  e->callee->remove_symbol_and_inline_clones (dst_node);
			else
			  cgraph_edge::remove (e);
		      }

		    /* Advance: descend into children first, then visit
		       siblings, else climb back up to the nearest
		       ancestor that still has an unvisited sibling.  */
		    if (node->clones)
		      node = node->clones;
		    else if (node->next_sibling_clone)
		      node = node->next_sibling_clone;
		    else
		      {
			while (node != dst_node && !node->next_sibling_clone)
			  node = node->clone_of;
			if (node != dst_node)
			  node = node->next_sibling_clone;
		      }
		  }
	    }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  return changed;
}
    1599              : 
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.