LCOV - code coverage report
Current view: top level - gcc - cfgcleanup.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 93.2 % 1431 1333
Test Date: 2026-02-28 14:20:25 Functions: 100.0 % 39 39
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Control flow optimization code for GNU compiler.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : /* This file contains optimizer of the control flow.  The main entry point is
      21              :    cleanup_cfg.  Following optimizations are performed:
      22              : 
      23              :    - Unreachable blocks removal
      24              :    - Edge forwarding (edge to the forwarder block is forwarded to its
      25              :      successor.  Simplification of the branch instruction is performed by
      26              :      underlying infrastructure so branch can be converted to simplejump or
      27              :      eliminated).
      28              :    - Cross jumping (tail merging)
      29              :    - Conditional jump-around-simplejump simplification
      30              :    - Basic block merging.  */
      31              : 
      32              : #include "config.h"
      33              : #include "system.h"
      34              : #include "coretypes.h"
      35              : #include "backend.h"
      36              : #include "target.h"
      37              : #include "rtl.h"
      38              : #include "tree.h"
      39              : #include "cfghooks.h"
      40              : #include "df.h"
      41              : #include "memmodel.h"
      42              : #include "tm_p.h"
      43              : #include "insn-config.h"
      44              : #include "emit-rtl.h"
      45              : #include "cselib.h"
      46              : #include "tree-pass.h"
      47              : #include "cfgloop.h"
      48              : #include "cfgrtl.h"
      49              : #include "cfganal.h"
      50              : #include "cfgbuild.h"
      51              : #include "cfgcleanup.h"
      52              : #include "dce.h"
      53              : #include "dbgcnt.h"
      54              : #include "rtl-iter.h"
      55              : #include "regs.h"
      56              : #include "function-abi.h"
      57              : 
      58              : #define FORWARDER_BLOCK_P(BB) ((BB)->flags & BB_FORWARDER_BLOCK)
      59              : 
      60              : /* Set to true when we are running first pass of try_optimize_cfg loop.  */
      61              : static bool first_pass;
      62              : 
      63              : /* Set to true if crossjumps occurred in the latest run of try_optimize_cfg.  */
      64              : static bool crossjumps_occurred;
      65              : 
      66              : /* Set to true if we couldn't run an optimization due to stale liveness
      67              :    information; we should run df_analyze to enable more opportunities.  */
      68              : static bool block_was_dirty;
      69              : 
      70              : static bool try_crossjump_to_edge (int, edge, edge, enum replace_direction);
      71              : static bool try_crossjump_bb (int, basic_block);
      72              : static bool outgoing_edges_match (int, basic_block, basic_block);
      73              : static enum replace_direction old_insns_match_p (int, rtx_insn *, rtx_insn *);
      74              : 
      75              : static void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
      76              : static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
      77              : static bool try_optimize_cfg (int);
      78              : static bool try_simplify_condjump (basic_block);
      79              : static bool try_forward_edges (int, basic_block);
      80              : static edge thread_jump (edge, basic_block);
      81              : static bool mark_effect (rtx, bitmap);
      82              : static void notice_new_block (basic_block);
      83              : static void update_forwarder_flag (basic_block);
      84              : static void merge_memattrs (rtx, rtx);
      85              : 
      86              : /* Set flags for newly created block.  */
      87              : 
      88              : static void
      89        21104 : notice_new_block (basic_block bb)
      90              : {
      91        21104 :   if (!bb)
      92              :     return;
      93              : 
      94        15164 :   if (forwarder_block_p (bb))
      95        15164 :     bb->flags |= BB_FORWARDER_BLOCK;
      96              : }
      97              : 
      98              : /* Recompute forwarder flag after block has been modified.  */
      99              : 
     100              : static void
     101    312584959 : update_forwarder_flag (basic_block bb)
     102              : {
     103    312584959 :   if (forwarder_block_p (bb))
     104     16409003 :     bb->flags |= BB_FORWARDER_BLOCK;
     105              :   else
     106    296175956 :     bb->flags &= ~BB_FORWARDER_BLOCK;
     107    312584959 : }
     108              : 
     109              : /* Simplify a conditional jump around an unconditional jump.
     110              :    Return true if something changed.  */
     111              : 
     112              : static bool
     113     34164653 : try_simplify_condjump (basic_block cbranch_block)
     114              : {
     115     34164653 :   basic_block jump_block, jump_dest_block, cbranch_dest_block;
     116     34164653 :   edge cbranch_jump_edge, cbranch_fallthru_edge;
     117     34164653 :   rtx_insn *cbranch_insn;
     118              : 
     119              :   /* Verify that there are exactly two successors.  */
     120     34164653 :   if (EDGE_COUNT (cbranch_block->succs) != 2)
     121              :     return false;
     122              : 
     123              :   /* Verify that we've got a normal conditional branch at the end
     124              :      of the block.  */
     125     17164160 :   cbranch_insn = BB_END (cbranch_block);
     126     17164160 :   if (!any_condjump_p (cbranch_insn))
     127              :     return false;
     128              : 
     129     15109291 :   cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
     130     15109291 :   cbranch_jump_edge = BRANCH_EDGE (cbranch_block);
     131              : 
     132              :   /* The next block must not have multiple predecessors, must not
     133              :      be the last block in the function, and must contain just the
     134              :      unconditional jump.  */
     135     15109291 :   jump_block = cbranch_fallthru_edge->dest;
     136     49266832 :   if (!single_pred_p (jump_block)
     137     13429527 :       || jump_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
     138     28373870 :       || !FORWARDER_BLOCK_P (jump_block))
     139              :     return false;
     140      1702158 :   jump_dest_block = single_succ (jump_block);
     141              : 
     142              :   /* If we are partitioning hot/cold basic blocks, we don't want to
     143              :      mess up unconditional or indirect jumps that cross between hot
     144              :      and cold sections.
     145              : 
     146              :      Basic block partitioning may result in some jumps that appear to
     147              :      be optimizable (or blocks that appear to be mergeable), but which really
     148              :      must be left untouched (they are required to make it safely across
     149              :      partition boundaries).  See the comments at the top of
     150              :      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
     151              : 
     152      1702158 :   if (BB_PARTITION (jump_block) != BB_PARTITION (jump_dest_block)
     153      1662214 :       || (cbranch_jump_edge->flags & EDGE_CROSSING))
     154              :     return false;
     155              : 
     156              :   /* The conditional branch must target the block after the
     157              :      unconditional branch.  */
     158      1429371 :   cbranch_dest_block = cbranch_jump_edge->dest;
     159              : 
     160      1429371 :   if (cbranch_dest_block == EXIT_BLOCK_PTR_FOR_FN (cfun)
     161      1429371 :       || jump_dest_block == EXIT_BLOCK_PTR_FOR_FN (cfun)
     162      2857910 :       || !can_fallthru (jump_block, cbranch_dest_block))
     163      1422259 :     return false;
     164              : 
     165              :   /* Invert the conditional branch.  */
     166         7112 :   if (!invert_jump (as_a <rtx_jump_insn *> (cbranch_insn),
     167         7112 :                     block_label (jump_dest_block), 0))
     168              :     return false;
     169              : 
     170         7112 :   if (dump_file)
     171            1 :     fprintf (dump_file, "Simplifying condjump %i around jump %i\n",
     172            1 :              INSN_UID (cbranch_insn), INSN_UID (BB_END (jump_block)));
     173              : 
     174              :   /* Success.  Update the CFG to match.  Note that after this point
     175              :      the edge variable names appear backwards; the redirection is done
     176              :      this way to preserve edge profile data.  */
     177         7112 :   cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
     178              :                                                 cbranch_dest_block);
     179         7112 :   cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
     180              :                                                     jump_dest_block);
     181         7112 :   cbranch_jump_edge->flags |= EDGE_FALLTHRU;
     182         7112 :   cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
     183         7112 :   update_br_prob_note (cbranch_block);
     184              : 
     185              :   /* Delete the block with the unconditional jump, and clean up the mess.  */
     186         7112 :   delete_basic_block (jump_block);
     187         7112 :   tidy_fallthru_edge (cbranch_jump_edge);
     188         7112 :   update_forwarder_flag (cbranch_block);
     189              : 
     190         7112 :   return true;
     191              : }
     192              : 
     193              : /* Attempt to prove that operation is NOOP using CSElib or mark the effect
     194              :    on register.  Used by jump threading.  */
     195              : 
     196              : static bool
     197     33309931 : mark_effect (rtx exp, regset nonequal)
     198              : {
     199     33309931 :   rtx dest;
     200     33309931 :   switch (GET_CODE (exp))
     201              :     {
     202              :       /* In case we do clobber the register, mark it as equal, as we know the
     203              :          value is dead so it don't have to match.  */
     204      2470036 :     case CLOBBER:
     205      2470036 :       dest = XEXP (exp, 0);
     206      2470036 :       if (REG_P (dest))
     207      2437748 :         bitmap_clear_range (nonequal, REGNO (dest), REG_NREGS (dest));
     208              :       return false;
     209              : 
     210     11143584 :     case SET:
     211     11143584 :       if (cselib_redundant_set_p (exp))
     212              :         return false;
     213     10959885 :       dest = SET_DEST (exp);
     214     10959885 :       if (dest == pc_rtx)
     215              :         return false;
     216     10952733 :       if (!REG_P (dest))
     217              :         return true;
     218     10372823 :       bitmap_set_range (nonequal, REGNO (dest), REG_NREGS (dest));
     219     10372823 :       return false;
     220              : 
     221              :     default:
     222              :       return false;
     223              :     }
     224              : }
     225              : 
     226              : /* Return true if X contains a register in NONEQUAL.  */
     227              : static bool
     228      3590971 : mentions_nonequal_regs (const_rtx x, regset nonequal)
     229              : {
     230      3590971 :   subrtx_iterator::array_type array;
     231      7197210 :   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
     232              :     {
     233      7190058 :       const_rtx x = *iter;
     234      7190058 :       if (REG_P (x))
     235              :         {
     236      3591212 :           unsigned int end_regno = END_REGNO (x);
     237      3598605 :           for (unsigned int regno = REGNO (x); regno < end_regno; ++regno)
     238      3591212 :             if (REGNO_REG_SET_P (nonequal, regno))
     239      3583819 :               return true;
     240              :         }
     241              :     }
     242         7152 :   return false;
     243      3590971 : }
     244              : 
     245              : /* Attempt to prove that the basic block B will have no side effects and
     246              :    always continues in the same edge if reached via E.  Return the edge
     247              :    if exist, NULL otherwise.  */
     248              : 
     249              : static edge
     250     28229868 : thread_jump (edge e, basic_block b)
     251              : {
     252     28229868 :   rtx set1, set2, cond1, cond2;
     253     28229868 :   rtx_insn *insn;
     254     28229868 :   enum rtx_code code1, code2, reversed_code2;
     255     28229868 :   bool reverse1 = false;
     256     28229868 :   unsigned i;
     257     28229868 :   regset nonequal;
     258     28229868 :   bool failed = false;
     259              : 
     260              :   /* Jump threading may cause fixup_partitions to introduce new crossing edges,
     261              :      which is not allowed after reload.  */
     262     28229868 :   gcc_checking_assert (!reload_completed || !crtl->has_bb_partition);
     263              : 
     264     28229868 :   if (b->flags & BB_NONTHREADABLE_BLOCK)
     265              :     return NULL;
     266              : 
     267              :   /* At the moment, we do handle only conditional jumps, but later we may
     268              :      want to extend this code to tablejumps and others.  */
     269     45256140 :   if (EDGE_COUNT (e->src->succs) != 2)
     270              :     return NULL;
     271     17032819 :   if (EDGE_COUNT (b->succs) != 2)
     272              :     {
     273      7520478 :       b->flags |= BB_NONTHREADABLE_BLOCK;
     274      7520478 :       return NULL;
     275              :     }
     276              : 
     277              :   /* Second branch must end with onlyjump, as we will eliminate the jump.  */
     278      9512341 :   if (!any_condjump_p (BB_END (e->src)))
     279              :     return NULL;
     280              : 
     281      8695579 :   if (!any_condjump_p (BB_END (b)) || !onlyjump_p (BB_END (b)))
     282              :     {
     283       622339 :       b->flags |= BB_NONTHREADABLE_BLOCK;
     284       622339 :       return NULL;
     285              :     }
     286              : 
     287      8073240 :   set1 = pc_set (BB_END (e->src));
     288      8073240 :   set2 = pc_set (BB_END (b));
     289      8073240 :   if (((e->flags & EDGE_FALLTHRU) != 0)
     290      8073240 :       != (XEXP (SET_SRC (set1), 1) == pc_rtx))
     291      3968829 :     reverse1 = true;
     292              : 
     293      8073240 :   cond1 = XEXP (SET_SRC (set1), 0);
     294      8073240 :   cond2 = XEXP (SET_SRC (set2), 0);
     295      8073240 :   if (reverse1)
     296      3968829 :     code1 = reversed_comparison_code (cond1, BB_END (e->src));
     297              :   else
     298      4104411 :     code1 = GET_CODE (cond1);
     299              : 
     300      8073240 :   code2 = GET_CODE (cond2);
     301      8073240 :   reversed_code2 = reversed_comparison_code (cond2, BB_END (b));
     302              : 
     303      8073240 :   if (!comparison_dominates_p (code1, code2)
     304      8073240 :       && !comparison_dominates_p (code1, reversed_code2))
     305              :     return NULL;
     306              : 
     307              :   /* Ensure that the comparison operators are equivalent.
     308              :      ??? This is far too pessimistic.  We should allow swapped operands,
     309              :      different CCmodes, or for example comparisons for interval, that
     310              :      dominate even when operands are not equivalent.  */
     311      6506310 :   if (!rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
     312      6506310 :       || !rtx_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
     313       919428 :     return NULL;
     314              : 
     315              :   /* Punt if BB_END (e->src) is doloop-like conditional jump that modifies
     316              :      the registers used in cond1.  */
     317      5586882 :   if (modified_in_p (cond1, BB_END (e->src)))
     318              :     return NULL;
     319              : 
     320              :   /* Short circuit cases where block B contains some side effects, as we can't
     321              :      safely bypass it.  */
     322     63920781 :   for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b));
     323     52747017 :        insn = NEXT_INSN (insn))
     324     54163018 :     if (INSN_P (insn) && side_effects_p (PATTERN (insn)))
     325              :       {
     326      1416001 :         b->flags |= BB_NONTHREADABLE_BLOCK;
     327      1416001 :         return NULL;
     328              :       }
     329              : 
     330      4170881 :   cselib_init (0);
     331              : 
     332              :   /* First process all values computed in the source basic block.  */
     333     58855809 :   for (insn = NEXT_INSN (BB_HEAD (e->src));
     334     58855809 :        insn != NEXT_INSN (BB_END (e->src));
     335     54684928 :        insn = NEXT_INSN (insn))
     336     54684928 :     if (INSN_P (insn))
     337     50935602 :       cselib_process_insn (insn);
     338              : 
     339      4170881 :   nonequal = BITMAP_ALLOC (NULL);
     340      4170881 :   CLEAR_REG_SET (nonequal);
     341              : 
     342              :   /* Now assume that we've continued by the edge E to B and continue
     343              :      processing as if it were same basic block.
     344              :      Our goal is to prove that whole block is an NOOP.  */
     345              : 
     346     38040997 :   for (insn = NEXT_INSN (BB_HEAD (b));
     347     38040997 :        insn != NEXT_INSN (BB_END (b)) && !failed;
     348     33870116 :        insn = NEXT_INSN (insn))
     349              :     {
     350              :       /* cond2 must not mention any register that is not equal to the
     351              :          former block.  Check this before processing that instruction,
     352              :          as BB_END (b) could contain also clobbers.  */
     353     37453935 :       if (insn == BB_END (b)
     354     37453935 :           && mentions_nonequal_regs (cond2, nonequal))
     355      3583819 :         goto failed_exit;
     356              : 
     357     33870116 :       if (INSN_P (insn))
     358              :         {
     359     30748447 :           rtx pat = PATTERN (insn);
     360              : 
     361     30748447 :           if (GET_CODE (pat) == PARALLEL)
     362              :             {
     363      7522402 :               for (i = 0; i < (unsigned)XVECLEN (pat, 0); i++)
     364      5041943 :                 failed |= mark_effect (XVECEXP (pat, 0, i), nonequal);
     365              :             }
     366              :           else
     367     28267988 :             failed |= mark_effect (pat, nonequal);
     368              :         }
     369              : 
     370     33870116 :       cselib_process_insn (insn);
     371              :     }
     372              : 
     373              :   /* Later we should clear nonequal of dead registers.  So far we don't
     374              :      have life information in cfg_cleanup.  */
     375       587062 :   if (failed)
     376              :     {
     377       579910 :       b->flags |= BB_NONTHREADABLE_BLOCK;
     378       579910 :       goto failed_exit;
     379              :     }
     380              : 
     381         7152 :   if (!REG_SET_EMPTY_P (nonequal))
     382          605 :     goto failed_exit;
     383              : 
     384         6547 :   BITMAP_FREE (nonequal);
     385         6547 :   cselib_finish ();
     386         6547 :   if ((comparison_dominates_p (code1, code2) != 0)
     387         6547 :       != (XEXP (SET_SRC (set2), 1) == pc_rtx))
     388         4458 :     return BRANCH_EDGE (b);
     389              :   else
     390         2089 :     return FALLTHRU_EDGE (b);
     391              : 
     392      4164334 : failed_exit:
     393      4164334 :   BITMAP_FREE (nonequal);
     394      4164334 :   cselib_finish ();
     395      4164334 :   return NULL;
     396              : }
     397              : 
     398              : /* Attempt to forward edges leaving basic block B.
     399              :    Return true if successful.  */
     400              : 
     401              : static bool
     402    400241433 : try_forward_edges (int mode, basic_block b)
     403              : {
     404    400241433 :   bool changed = false;
     405    400241433 :   edge_iterator ei;
     406    400241433 :   edge e, *threaded_edges = NULL;
     407              : 
     408   1003695210 :   for (ei = ei_start (b->succs); (e = ei_safe_edge (ei)); )
     409              :     {
     410    603453777 :       basic_block target, first;
     411    603453777 :       location_t goto_locus;
     412    603453777 :       int counter;
     413    603453777 :       bool threaded = false;
     414    603453777 :       int nthreaded_edges = 0;
     415    603453777 :       bool may_thread = first_pass || (b->flags & BB_MODIFIED) != 0;
     416    603453777 :       bool new_target_threaded = false;
     417              : 
     418              :       /* Skip complex edges because we don't know how to update them.
     419              : 
     420              :          Still handle fallthru edges, as we can succeed to forward fallthru
     421              :          edge to the same place as the branch edge of conditional branch
     422              :          and turn conditional branch to an unconditional branch.  */
     423    603453777 :       if (e->flags & EDGE_COMPLEX)
     424              :         {
     425     33080450 :           ei_next (&ei);
     426     33080450 :           continue;
     427              :         }
     428              : 
     429    570373327 :       target = first = e->dest;
     430    570373327 :       counter = NUM_FIXED_BLOCKS;
     431    570373327 :       goto_locus = e->goto_locus;
     432              : 
     433    600988374 :       while (counter < n_basic_blocks_for_fn (cfun))
     434              :         {
     435    600568975 :           basic_block new_target = NULL;
     436    600568975 :           may_thread |= (target->flags & BB_MODIFIED) != 0;
     437              : 
     438    600568975 :           if (FORWARDER_BLOCK_P (target)
     439    640703562 :               && single_succ (target) != EXIT_BLOCK_PTR_FOR_FN (cfun))
     440              :             {
     441              :               /* Bypass trivial infinite loops.  */
     442     30791825 :               new_target = single_succ (target);
     443     30791825 :               if (target == new_target)
     444              :                 counter = n_basic_blocks_for_fn (cfun);
     445     30514996 :               else if (!optimize)
     446              :                 {
     447              :                   /* When not optimizing, ensure that edges or forwarder
     448              :                      blocks with different locus are not optimized out.  */
     449      1174022 :                   location_t new_locus = single_succ_edge (target)->goto_locus;
     450      1174022 :                   location_t locus = goto_locus;
     451              : 
     452      1174022 :                   if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION
     453       462250 :                       && LOCATION_LOCUS (locus) != UNKNOWN_LOCATION
     454      1476519 :                       && new_locus != locus)
     455              :                     new_target = NULL;
     456              :                   else
     457              :                     {
     458       999350 :                       if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION)
     459       287578 :                         locus = new_locus;
     460              : 
     461       999350 :                       rtx_insn *last = BB_END (target);
     462       999350 :                       if (DEBUG_INSN_P (last))
     463            6 :                         last = prev_nondebug_insn (last);
     464       999350 :                       if (last && INSN_P (last))
     465       495616 :                         new_locus = INSN_LOCATION (last);
     466              :                       else
     467              :                         new_locus = UNKNOWN_LOCATION;
     468              : 
     469       495616 :                       if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION
     470       475149 :                           && LOCATION_LOCUS (locus) != UNKNOWN_LOCATION
     471      1360787 :                           && new_locus != locus)
     472              :                         new_target = NULL;
     473              :                       else
     474              :                         {
     475       991235 :                           if (LOCATION_LOCUS (new_locus) != UNKNOWN_LOCATION)
     476       467034 :                             locus = new_locus;
     477              : 
     478              :                           goto_locus = locus;
     479              :                         }
     480              :                     }
     481              :                 }
     482              :             }
     483              : 
     484              :           /* Allow to thread only over one edge at time to simplify updating
     485              :              of probabilities.  */
     486    569777150 :           else if ((mode & CLEANUP_THREADING) && may_thread)
     487              :             {
     488     28229868 :               edge t = thread_jump (e, target);
     489     28229868 :               if (t)
     490              :                 {
     491         6547 :                   if (!threaded_edges)
     492         6480 :                     threaded_edges = XNEWVEC (edge,
     493              :                                               n_basic_blocks_for_fn (cfun));
     494              :                   else
     495              :                     {
     496              :                       int i;
     497              : 
     498              :                       /* Detect an infinite loop across blocks not
     499              :                          including the start block.  */
     500          168 :                       for (i = 0; i < nthreaded_edges; ++i)
     501          123 :                         if (threaded_edges[i] == t)
     502              :                           break;
     503           67 :                       if (i < nthreaded_edges)
     504              :                         {
     505           22 :                           counter = n_basic_blocks_for_fn (cfun);
     506           22 :                           break;
     507              :                         }
     508              :                     }
     509              : 
     510              :                   /* Detect an infinite loop across the start block.  */
     511         6525 :                   if (t->dest == b)
     512              :                     break;
     513              : 
     514         6009 :                   gcc_assert (nthreaded_edges
     515              :                               < (n_basic_blocks_for_fn (cfun)
     516              :                                  - NUM_FIXED_BLOCKS));
     517         6009 :                   threaded_edges[nthreaded_edges++] = t;
     518              : 
     519         6009 :                   new_target = t->dest;
     520         6009 :                   new_target_threaded = true;
     521              :                 }
     522              :             }
     523              : 
     524     30615047 :           if (!new_target)
     525              :             break;
     526              : 
     527     30615047 :           counter++;
     528              :           /* Do not turn non-crossing jump to crossing.  Depending on target
     529              :              it may require different instruction pattern.  */
     530     30615047 :           if ((e->flags & EDGE_CROSSING)
     531     30582267 :               || BB_PARTITION (first) == BB_PARTITION (new_target))
     532              :             {
     533     15034407 :               target = new_target;
     534     15034407 :               threaded |= new_target_threaded;
     535              :             }
     536              :         }
     537              : 
     538    570373327 :       if (counter >= n_basic_blocks_for_fn (cfun))
     539              :         {
     540       419421 :           if (dump_file)
     541            4 :             fprintf (dump_file, "Infinite loop in BB %i.\n",
     542              :                      target->index);
     543              :         }
     544    569953906 :       else if (target == first)
     545              :         ; /* We didn't do anything.  */
     546              :       else
     547              :         {
     548              :           /* Save the values now, as the edge may get removed.  */
     549     13948840 :           profile_count edge_count = e->count ();
     550     13948840 :           int n = 0;
     551              : 
     552     13948840 :           e->goto_locus = goto_locus;
     553              : 
     554              :           /* Don't force if target is exit block.  */
     555     13948840 :           if (threaded && target != EXIT_BLOCK_PTR_FOR_FN (cfun))
     556              :             {
     557         5940 :               notice_new_block (redirect_edge_and_branch_force (e, target));
     558         5940 :               if (dump_file)
     559            0 :                 fprintf (dump_file, "Conditionals threaded.\n");
     560              :             }
     561     13942900 :           else if (!redirect_edge_and_branch (e, target))
     562              :             {
     563      7424954 :               if (dump_file)
     564          199 :                 fprintf (dump_file,
     565              :                          "Forwarding edge %i->%i to %i failed.\n",
     566          199 :                          b->index, e->dest->index, target->index);
     567      7424954 :               ei_next (&ei);
     568      7424954 :               continue;
     569              :             }
     570              : 
     571              :           /* We successfully forwarded the edge.  Now update profile
     572              :              data: for each edge we traversed in the chain, remove
     573              :              the original edge's execution count.  */
     574      6638170 :           do
     575              :             {
     576      6638170 :               edge t;
     577              : 
     578      6638170 :               if (!single_succ_p (first))
     579              :                 {
     580         5983 :                   gcc_assert (n < nthreaded_edges);
     581         5983 :                   t = threaded_edges [n++];
     582         5983 :                   gcc_assert (t->src == first);
     583         5983 :                   update_bb_profile_for_threading (first, edge_count, t);
     584         5983 :                   update_br_prob_note (first);
     585              :                 }
     586              :               else
     587              :                 {
     588      6632187 :                   first->count -= edge_count;
     589              :                   /* It is possible that as the result of
     590              :                      threading we've removed edge as it is
     591              :                      threaded to the fallthru edge.  Avoid
     592              :                      getting out of sync.  */
     593      6632187 :                   if (n < nthreaded_edges
     594            0 :                       && first == threaded_edges [n]->src)
     595            0 :                     n++;
     596      6632187 :                   t = single_succ_edge (first);
     597              :                 }
     598              : 
     599      6638170 :               first = t->dest;
     600              :             }
     601      6638170 :           while (first != target);
     602              : 
     603      6523886 :           changed = true;
     604      6523886 :           continue;
     605      6523886 :         }
     606    556424487 :       ei_next (&ei);
     607              :     }
     608              : 
     609    400241433 :   free (threaded_edges);
     610    400241433 :   return changed;
     611              : }
     612              : 
     613              : 
     614              : /* Blocks A and B are to be merged into a single block.  A has no incoming
     615              :    fallthru edge, so it can be moved before B without adding or modifying
     616              :    any jumps (aside from the jump from A to B).  */
     617              : 
     618              : static void
     619        21351 : merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
     620              : {
     621        21351 :   rtx_insn *barrier;
     622              : 
     623              :   /* If we are partitioning hot/cold basic blocks, we don't want to
     624              :      mess up unconditional or indirect jumps that cross between hot
     625              :      and cold sections.
     626              : 
     627              :      Basic block partitioning may result in some jumps that appear to
     628              :      be optimizable (or blocks that appear to be mergeable), but which really
     629              :      must be left untouched (they are required to make it safely across
     630              :      partition boundaries).  See the comments at the top of
     631              :      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
     632              : 
     633        21351 :   if (BB_PARTITION (a) != BB_PARTITION (b))
     634              :     return;
     635              : 
     636        21351 :   barrier = next_nonnote_insn (BB_END (a));
     637        21351 :   gcc_assert (BARRIER_P (barrier));
     638        21351 :   delete_insn (barrier);
     639              : 
     640              :   /* Scramble the insn chain.  */
     641        21351 :   if (BB_END (a) != PREV_INSN (BB_HEAD (b)))
     642        21324 :     reorder_insns_nobb (BB_HEAD (a), BB_END (a), PREV_INSN (BB_HEAD (b)));
     643        21351 :   df_set_bb_dirty (a);
     644              : 
     645        21351 :   if (dump_file)
     646            0 :     fprintf (dump_file, "Moved block %d before %d and merged.\n",
     647              :              a->index, b->index);
     648              : 
     649              :   /* Swap the records for the two blocks around.  */
     650              : 
     651        21351 :   unlink_block (a);
     652        21351 :   link_block (a, b->prev_bb);
     653              : 
     654              :   /* Now blocks A and B are contiguous.  Merge them.  */
     655        21351 :   merge_blocks (a, b);
     656              : }
     657              : 
     658              : /* Blocks A and B are to be merged into a single block.  B has no outgoing
     659              :    fallthru edge, so it can be moved after A without adding or modifying
     660              :    any jumps (aside from the jump from A to B).  */
     661              : 
     662              : static void
     663        30840 : merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
     664              : {
     665        30840 :   rtx_insn *barrier, *real_b_end;
     666        30840 :   rtx_insn *label;
     667        30840 :   rtx_jump_table_data *table;
     668              : 
     669              :   /* If we are partitioning hot/cold basic blocks, we don't want to
     670              :      mess up unconditional or indirect jumps that cross between hot
     671              :      and cold sections.
     672              : 
     673              :      Basic block partitioning may result in some jumps that appear to
     674              :      be optimizable (or blocks that appear to be mergeable), but which really
     675              :      must be left untouched (they are required to make it safely across
     676              :      partition boundaries).  See the comments at the top of
     677              :      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
     678              : 
     679        30840 :   if (BB_PARTITION (a) != BB_PARTITION (b))
     680            0 :     return;
     681              : 
     682        30840 :   real_b_end = BB_END (b);
     683              : 
     684              :   /* If there is a jump table following block B temporarily add the jump table
     685              :      to block B so that it will also be moved to the correct location.  */
     686        30840 :   if (tablejump_p (BB_END (b), &label, &table)
     687        30840 :       && prev_active_insn (label) == BB_END (b))
     688              :     {
     689            0 :       BB_END (b) = table;
     690              :     }
     691              : 
     692              :   /* There had better have been a barrier there.  Delete it.  */
     693        30840 :   barrier = NEXT_INSN (BB_END (b));
     694        30840 :   if (barrier && BARRIER_P (barrier))
     695        30840 :     delete_insn (barrier);
     696              : 
     697              : 
     698              :   /* Scramble the insn chain.  */
     699        30840 :   reorder_insns_nobb (BB_HEAD (b), BB_END (b), BB_END (a));
     700              : 
     701              :   /* Restore the real end of b.  */
     702        30840 :   BB_END (b) = real_b_end;
     703              : 
     704        30840 :   if (dump_file)
     705           22 :     fprintf (dump_file, "Moved block %d after %d and merged.\n",
     706              :              b->index, a->index);
     707              : 
     708              :   /* Now blocks A and B are contiguous.  Merge them.  */
     709        30840 :   merge_blocks (a, b);
     710              : }
     711              : 
     712              : /* Attempt to merge basic blocks that are potentially non-adjacent.
     713              :    Return NULL iff the attempt failed, otherwise return basic block
     714              :    where cleanup_cfg should continue.  Because the merging commonly
     715              :    moves basic block away or introduces another optimization
     716              :    possibility, return basic block just before B so cleanup_cfg don't
     717              :    need to iterate.
     718              : 
     719              :    It may be good idea to return basic block before C in the case
     720              :    C has been moved after B and originally appeared earlier in the
     721              :    insn sequence, but we have no information available about the
     722              :    relative ordering of these two.  Hopefully it is not too common.  */
     723              : 
     724              : static basic_block
     725      6780539 : merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
     726              : {
     727      6780539 :   basic_block next;
     728              : 
     729              :   /* If we are partitioning hot/cold basic blocks, we don't want to
     730              :      mess up unconditional or indirect jumps that cross between hot
     731              :      and cold sections.
     732              : 
     733              :      Basic block partitioning may result in some jumps that appear to
     734              :      be optimizable (or blocks that appear to be mergeable), but which really
     735              :      must be left untouched (they are required to make it safely across
     736              :      partition boundaries).  See the comments at the top of
     737              :      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
     738              : 
     739      6780539 :   if (BB_PARTITION (b) != BB_PARTITION (c))
     740              :     return NULL;
     741              : 
     742              :   /* If B has a fallthru edge to C, no need to move anything.  */
     743      6439276 :   if (e->flags & EDGE_FALLTHRU)
     744              :     {
     745      3718096 :       int b_index = b->index, c_index = c->index;
     746              : 
     747              :       /* Protect the loop latches.  */
     748      3718096 :       if (current_loops && c->loop_father->latch == c)
     749              :         return NULL;
     750              : 
     751      3673228 :       merge_blocks (b, c);
     752      3673228 :       update_forwarder_flag (b);
     753              : 
     754      3673228 :       if (dump_file)
     755          829 :         fprintf (dump_file, "Merged %d and %d without moving.\n",
     756              :                  b_index, c_index);
     757              : 
     758      4768906 :       return b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? b : b->prev_bb;
     759              :     }
     760              : 
     761              :   /* Otherwise we will need to move code around.  Do that only if expensive
     762              :      transformations are allowed.  */
     763      2721180 :   else if (mode & CLEANUP_EXPENSIVE)
     764              :     {
     765       911020 :       edge tmp_edge, b_fallthru_edge;
     766       911020 :       bool c_has_outgoing_fallthru;
     767       911020 :       bool b_has_incoming_fallthru;
     768              : 
     769              :       /* Avoid overactive code motion, as the forwarder blocks should be
     770              :          eliminated by edge redirection instead.  One exception might have
     771              :          been if B is a forwarder block and C has no fallthru edge, but
     772              :          that should be cleaned up by bb-reorder instead.  */
     773       911020 :       if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
     774              :         return NULL;
     775              : 
     776              :       /* We must make sure to not munge nesting of lexical blocks,
     777              :          and loop notes.  This is done by squeezing out all the notes
     778              :          and leaving them there to lie.  Not ideal, but functional.  */
     779              : 
     780        54345 :       tmp_edge = find_fallthru_edge (c->succs);
     781        54345 :       c_has_outgoing_fallthru = (tmp_edge != NULL);
     782              : 
     783        54345 :       tmp_edge = find_fallthru_edge (b->preds);
     784        54345 :       b_has_incoming_fallthru = (tmp_edge != NULL);
     785        54345 :       b_fallthru_edge = tmp_edge;
     786        54345 :       next = b->prev_bb;
     787        54345 :       if (next == c)
     788           57 :         next = next->prev_bb;
     789              : 
     790              :       /* Otherwise, we're going to try to move C after B.  If C does
     791              :          not have an outgoing fallthru, then it can be moved
     792              :          immediately after B without introducing or modifying jumps.  */
     793        54345 :       if (! c_has_outgoing_fallthru)
     794              :         {
     795        30840 :           merge_blocks_move_successor_nojumps (b, c);
     796        30840 :           return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
     797              :         }
     798              : 
     799              :       /* If B does not have an incoming fallthru, then it can be moved
     800              :          immediately before C without introducing or modifying jumps.
     801              :          C cannot be the first block, so we do not have to worry about
     802              :          accessing a non-existent block.  */
     803              : 
     804        23505 :       if (b_has_incoming_fallthru)
     805              :         {
     806        21888 :           basic_block bb;
     807              : 
     808        21888 :           if (b_fallthru_edge->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     809              :             return NULL;
     810        19734 :           bb = force_nonfallthru (b_fallthru_edge);
     811        19734 :           if (bb)
     812        15164 :             notice_new_block (bb);
     813              :         }
     814              : 
     815        21351 :       merge_blocks_move_predecessor_nojumps (b, c);
     816        21351 :       return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
     817              :     }
     818              : 
     819              :   return NULL;
     820              : }
     821              : 
     822              : 
     823              : /* Removes the memory attributes of MEM expression
     824              :    if they are not equal.  */
     825              : 
     826              : static void
     827    150695836 : merge_memattrs (rtx x, rtx y)
     828              : {
     829    150695836 :   int i;
     830    150695836 :   int j;
     831    150695836 :   enum rtx_code code;
     832    150695836 :   const char *fmt;
     833              : 
     834    150695836 :   if (x == y)
     835              :     return;
     836    111689819 :   if (x == 0 || y == 0)
     837              :     return;
     838              : 
     839    111649858 :   code = GET_CODE (x);
     840              : 
     841    111649858 :   if (code != GET_CODE (y))
     842              :     return;
     843              : 
     844    111649481 :   if (GET_MODE (x) != GET_MODE (y))
     845              :     return;
     846              : 
     847    111646596 :   if (code == MEM && !mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
     848              :     {
     849        49918 :       if (! MEM_ATTRS (x))
     850          955 :         MEM_ATTRS (y) = 0;
     851        48963 :       else if (! MEM_ATTRS (y))
     852         1334 :         MEM_ATTRS (x) = 0;
     853              :       else
     854              :         {
     855        47629 :           if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
     856              :             {
     857         2796 :               set_mem_alias_set (x, 0);
     858         2796 :               set_mem_alias_set (y, 0);
     859              :             }
     860              : 
     861        47629 :           if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
     862              :             {
     863        46971 :               set_mem_expr (x, 0);
     864        46971 :               set_mem_expr (y, 0);
     865        46971 :               clear_mem_offset (x);
     866        46971 :               clear_mem_offset (y);
     867              :             }
     868          658 :           else if (MEM_OFFSET_KNOWN_P (x) != MEM_OFFSET_KNOWN_P (y)
     869          658 :                    || (MEM_OFFSET_KNOWN_P (x)
     870            0 :                        && maybe_ne (MEM_OFFSET (x), MEM_OFFSET (y))))
     871              :             {
     872            0 :               clear_mem_offset (x);
     873            0 :               clear_mem_offset (y);
     874              :             }
     875              : 
     876        55629 :           if (!MEM_SIZE_KNOWN_P (x))
     877          210 :             clear_mem_size (y);
     878        55218 :           else if (!MEM_SIZE_KNOWN_P (y))
     879            0 :             clear_mem_size (x);
     880        47419 :           else if (known_le (MEM_SIZE (x), MEM_SIZE (y)))
     881        47375 :             set_mem_size (x, MEM_SIZE (y));
     882           44 :           else if (known_le (MEM_SIZE (y), MEM_SIZE (x)))
     883           44 :             set_mem_size (y, MEM_SIZE (x));
     884              :           else
     885              :             {
     886              :               /* The sizes aren't ordered, so we can't merge them.  */
     887              :               clear_mem_size (x);
     888              :               clear_mem_size (y);
     889              :             }
     890              : 
     891        63637 :           set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
     892        55646 :           set_mem_align (y, MEM_ALIGN (x));
     893              :         }
     894              :     }
     895    111646596 :   if (code == MEM)
     896              :     {
     897      6477033 :       if (MEM_READONLY_P (x) != MEM_READONLY_P (y))
     898              :         {
     899            5 :           MEM_READONLY_P (x) = 0;
     900            5 :           MEM_READONLY_P (y) = 0;
     901              :         }
     902      6477033 :       if (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y))
     903              :         {
     904          298 :           MEM_NOTRAP_P (x) = 0;
     905          298 :           MEM_NOTRAP_P (y) = 0;
     906              :         }
     907      6477033 :       if (MEM_VOLATILE_P (x) != MEM_VOLATILE_P (y))
     908              :         {
     909            5 :           MEM_VOLATILE_P (x) = 1;
     910            5 :           MEM_VOLATILE_P (y) = 1;
     911              :         }
     912              :     }
     913              : 
     914    111646596 :   fmt = GET_RTX_FORMAT (code);
     915    337791684 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
     916              :     {
     917    226145088 :       switch (fmt[i])
     918              :         {
     919       374300 :         case 'E':
     920              :           /* Two vectors must have the same length.  */
     921       374300 :           if (XVECLEN (x, i) != XVECLEN (y, i))
     922              :             return;
     923              : 
     924      1104773 :           for (j = 0; j < XVECLEN (x, i); j++)
     925       730473 :             merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));
     926              : 
     927              :           break;
     928              : 
     929    140788510 :         case 'e':
     930    140788510 :           merge_memattrs (XEXP (x, i), XEXP (y, i));
     931              :         }
     932              :     }
     933              :   return;
     934              : }
     935              : 
     936              : 
     937              :  /* Checks if patterns P1 and P2 are equivalent, apart from the possibly
     938              :     different single sets S1 and S2.  */
     939              : 
     940              : static bool
     941            2 : equal_different_set_p (rtx p1, rtx s1, rtx p2, rtx s2)
     942              : {
     943            2 :   int i;
     944            2 :   rtx e1, e2;
     945              : 
     946            2 :   if (p1 == s1 && p2 == s2)
     947              :     return true;
     948              : 
     949            0 :   if (GET_CODE (p1) != PARALLEL || GET_CODE (p2) != PARALLEL)
     950              :     return false;
     951              : 
     952            0 :   if (XVECLEN (p1, 0) != XVECLEN (p2, 0))
     953              :     return false;
     954              : 
     955            0 :   for (i = 0; i < XVECLEN (p1, 0); i++)
     956              :     {
     957            0 :       e1 = XVECEXP (p1, 0, i);
     958            0 :       e2 = XVECEXP (p2, 0, i);
     959            0 :       if (e1 == s1 && e2 == s2)
     960            0 :         continue;
     961            0 :       if (reload_completed
     962            0 :           ? rtx_renumbered_equal_p (e1, e2) : rtx_equal_p (e1, e2))
     963            0 :         continue;
     964              : 
     965              :       return false;
     966              :     }
     967              : 
     968              :   return true;
     969              : }
     970              : 
     971              : 
     972              : /* NOTE1 is the REG_EQUAL note, if any, attached to an insn
     973              :    that is a single_set with a SET_SRC of SRC1.  Similarly
     974              :    for NOTE2/SRC2.
     975              : 
     976              :    So effectively NOTE1/NOTE2 are an alternate form of
     977              :    SRC1/SRC2 respectively.
     978              : 
     979              :    Return nonzero if SRC1 or NOTE1 has the same constant
     980              :    integer value as SRC2 or NOTE2.   Else return zero.  */
     981              : static int
     982      7752320 : values_equal_p (rtx note1, rtx note2, rtx src1, rtx src2)
     983              : {
     984      7752320 :   if (note1
     985      7752320 :       && note2
     986      1281924 :       && CONST_INT_P (XEXP (note1, 0))
     987      7809874 :       && rtx_equal_p (XEXP (note1, 0), XEXP (note2, 0)))
     988              :     return 1;
     989              : 
     990      7752320 :   if (!note1
     991      7752320 :       && !note2
     992      5997542 :       && CONST_INT_P (src1)
     993      2536787 :       && CONST_INT_P (src2)
     994     10192140 :       && rtx_equal_p (src1, src2))
     995              :     return 1;
     996              : 
     997      7752320 :   if (note1
     998      1552243 :       && CONST_INT_P (src2)
     999      7896467 :       && rtx_equal_p (XEXP (note1, 0), src2))
    1000              :     return 1;
    1001              : 
    1002      7752320 :   if (note2
    1003      1484459 :       && CONST_INT_P (src1)
    1004      7826101 :       && rtx_equal_p (XEXP (note2, 0), src1))
    1005              :     return 1;
    1006              : 
    1007              :   return 0;
    1008              : }
    1009              : 
    1010              : /* Examine register notes on I1 and I2 and return:
    1011              :    - dir_forward if I1 can be replaced by I2, or
    1012              :    - dir_backward if I2 can be replaced by I1, or
    1013              :    - dir_both if both are the case.  */
    1014              : 
    1015              : static enum replace_direction
    1016     14031196 : can_replace_by (rtx_insn *i1, rtx_insn *i2)
    1017              : {
    1018     14031196 :   rtx s1, s2, d1, d2, src1, src2, note1, note2;
    1019     14031196 :   bool c1, c2;
    1020              : 
    1021              :   /* Check for 2 sets.  */
    1022     14031196 :   s1 = single_set (i1);
    1023     14031196 :   s2 = single_set (i2);
    1024     14031196 :   if (s1 == NULL_RTX || s2 == NULL_RTX)
    1025              :     return dir_none;
    1026              : 
    1027              :   /* Check that the 2 sets set the same dest.  */
    1028     13276260 :   d1 = SET_DEST (s1);
    1029     13276260 :   d2 = SET_DEST (s2);
    1030     26552520 :   if (!(reload_completed
    1031     13276260 :         ? rtx_renumbered_equal_p (d1, d2) : rtx_equal_p (d1, d2)))
    1032              :     return dir_none;
    1033              : 
    1034              :   /* Find identical req_equiv or reg_equal note, which implies that the 2 sets
    1035              :      set dest to the same value.  */
    1036      7752320 :   note1 = find_reg_equal_equiv_note (i1);
    1037      7752320 :   note2 = find_reg_equal_equiv_note (i2);
    1038              : 
    1039      7752320 :   src1 = SET_SRC (s1);
    1040      7752320 :   src2 = SET_SRC (s2);
    1041              : 
    1042      7752320 :   if (!values_equal_p (note1, note2, src1, src2))
    1043              :     return dir_none;
    1044              : 
    1045            2 :   if (!equal_different_set_p (PATTERN (i1), s1, PATTERN (i2), s2))
    1046              :     return dir_none;
    1047              : 
    1048              :   /* Although the 2 sets set dest to the same value, we cannot replace
    1049              :        (set (dest) (const_int))
    1050              :      by
    1051              :        (set (dest) (reg))
    1052              :      because we don't know if the reg is live and has the same value at the
    1053              :      location of replacement.  */
    1054            2 :   c1 = CONST_INT_P (src1);
    1055            2 :   c2 = CONST_INT_P (src2);
    1056            2 :   if (c1 && c2)
    1057              :     return dir_both;
    1058            2 :   else if (c2)
    1059              :     return dir_forward;
    1060            2 :   else if (c1)
    1061              :     return dir_backward;
    1062              : 
    1063              :   return dir_none;
    1064              : }
    1065              : 
    1066              : /* Merges directions A and B.  */
    1067              : 
    1068              : static enum replace_direction
    1069     24231462 : merge_dir (enum replace_direction a, enum replace_direction b)
    1070              : {
    1071              :   /* Implements the following table:
    1072              :         |bo fw bw no
    1073              :      ---+-----------
    1074              :      bo |bo fw bw no
    1075              :      fw |-- fw no no
    1076              :      bw |-- -- bw no
    1077              :      no |-- -- -- no.  */
    1078              : 
    1079            0 :   if (a == b)
    1080              :     return a;
    1081              : 
    1082     16545287 :   if (a == dir_both)
    1083              :     return b;
    1084      4011089 :   if (b == dir_both)
    1085            0 :     return a;
    1086              : 
    1087              :   return dir_none;
    1088              : }
    1089              : 
    1090              : /* Array of flags indexed by reg note kind, true if the given
    1091              :    reg note is CFA related.  */
    1092              : static const bool reg_note_cfa_p[] = {
    1093              : #undef REG_CFA_NOTE
    1094              : #define DEF_REG_NOTE(NAME) false,
    1095              : #define REG_CFA_NOTE(NAME) true,
    1096              : #include "reg-notes.def"
    1097              : #undef REG_CFA_NOTE
    1098              : #undef DEF_REG_NOTE
    1099              :   false
    1100              : };
    1101              : 
    1102              : /* Return true if I1 and I2 have identical CFA notes (the same order
    1103              :    and equivalent content).  */
    1104              : 
    1105              : static bool
    1106       716885 : insns_have_identical_cfa_notes (rtx_insn *i1, rtx_insn *i2)
    1107              : {
    1108       716885 :   rtx n1, n2;
    1109       716885 :   for (n1 = REG_NOTES (i1), n2 = REG_NOTES (i2); ;
    1110       561863 :        n1 = XEXP (n1, 1), n2 = XEXP (n2, 1))
    1111              :     {
    1112              :       /* Skip over reg notes not related to CFI information.  */
    1113      1867887 :       while (n1 && !reg_note_cfa_p[REG_NOTE_KIND (n1)])
    1114        27276 :         n1 = XEXP (n1, 1);
    1115      1306135 :       while (n2 && !reg_note_cfa_p[REG_NOTE_KIND (n2)])
    1116        27387 :         n2 = XEXP (n2, 1);
    1117      1278748 :       if (n1 == NULL_RTX && n2 == NULL_RTX)
    1118              :         return true;
    1119       698563 :       if (n1 == NULL_RTX || n2 == NULL_RTX)
    1120              :         return false;
    1121       691779 :       if (XEXP (n1, 0) == XEXP (n2, 0))
    1122              :         ;
    1123       691773 :       else if (XEXP (n1, 0) == NULL_RTX || XEXP (n2, 0) == NULL_RTX)
    1124              :         return false;
    1125      1383546 :       else if (!(reload_completed
    1126       691773 :                  ? rtx_renumbered_equal_p (XEXP (n1, 0), XEXP (n2, 0))
    1127            0 :                  : rtx_equal_p (XEXP (n1, 0), XEXP (n2, 0))))
    1128              :         return false;
    1129              :     }
    1130              : }
    1131              : 
    1132              : /* Examine I1 and I2 and return:
    1133              :    - dir_forward if I1 can be replaced by I2, or
    1134              :    - dir_backward if I2 can be replaced by I1, or
    1135              :    - dir_both if both are the case.  */
    1136              : 
    1137              : static enum replace_direction
    1138     40531430 : old_insns_match_p (int mode ATTRIBUTE_UNUSED, rtx_insn *i1, rtx_insn *i2)
    1139              : {
    1140     40531430 :   rtx p1, p2;
    1141              : 
    1142              :   /* Verify that I1 and I2 are equivalent.  */
    1143     40531430 :   if (GET_CODE (i1) != GET_CODE (i2))
    1144              :     return dir_none;
    1145              : 
    1146              :   /* __builtin_unreachable() may lead to empty blocks (ending with
    1147              :      NOTE_INSN_BASIC_BLOCK).  They may be crossjumped. */
    1148     35019746 :   if (NOTE_INSN_BASIC_BLOCK_P (i1) && NOTE_INSN_BASIC_BLOCK_P (i2))
    1149              :     return dir_both;
    1150              : 
    1151              :   /* ??? Do not allow cross-jumping between different stack levels.  */
    1152     34940007 :   p1 = find_reg_note (i1, REG_ARGS_SIZE, NULL);
    1153     34940007 :   p2 = find_reg_note (i2, REG_ARGS_SIZE, NULL);
    1154     34940007 :   if (p1 && p2)
    1155              :     {
    1156      3755047 :       p1 = XEXP (p1, 0);
    1157      3755047 :       p2 = XEXP (p2, 0);
    1158      3755047 :       if (!rtx_equal_p (p1, p2))
    1159              :         return dir_none;
    1160              : 
    1161              :       /* ??? Worse, this adjustment had better be constant lest we
    1162              :          have differing incoming stack levels.  */
    1163      3654003 :       if (!frame_pointer_needed
    1164      3654003 :           && known_eq (find_args_size_adjust (i1), HOST_WIDE_INT_MIN))
    1165           22 :         return dir_none;
    1166              :     }
    1167     31184960 :   else if (p1 || p2)
    1168              :     return dir_none;
    1169              : 
    1170              :   /* Do not allow cross-jumping between frame related insns and other
    1171              :      insns.  */
    1172     33280774 :   if (RTX_FRAME_RELATED_P (i1) != RTX_FRAME_RELATED_P (i2))
    1173              :     return dir_none;
    1174              : 
    1175     32656131 :   p1 = PATTERN (i1);
    1176     32656131 :   p2 = PATTERN (i2);
    1177              : 
    1178     32656131 :   if (GET_CODE (p1) != GET_CODE (p2))
    1179              :     return dir_none;
    1180              : 
    1181              :   /* If this is a CALL_INSN, compare register usage information.
    1182              :      If we don't check this on stack register machines, the two
    1183              :      CALL_INSNs might be merged leaving reg-stack.cc with mismatching
    1184              :      numbers of stack registers in the same basic block.
    1185              :      If we don't check this on machines with delay slots, a delay slot may
    1186              :      be filled that clobbers a parameter expected by the subroutine.
    1187              : 
    1188              :      ??? We take the simple route for now and assume that if they're
    1189              :      equal, they were constructed identically.
    1190              : 
    1191              :      Also check for identical exception regions.  */
    1192              : 
    1193     30293441 :   if (CALL_P (i1))
    1194              :     {
    1195              :       /* Ensure the same EH region.  */
    1196     11435338 :       rtx n1 = find_reg_note (i1, REG_EH_REGION, 0);
    1197     11435338 :       rtx n2 = find_reg_note (i2, REG_EH_REGION, 0);
    1198              : 
    1199     11435338 :       if (!n1 && n2)
    1200              :         return dir_none;
    1201              : 
    1202     11350480 :       if (n1 && (!n2 || XEXP (n1, 0) != XEXP (n2, 0)))
    1203              :         return dir_none;
    1204              : 
    1205     10687519 :       if (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
    1206     10687519 :                         CALL_INSN_FUNCTION_USAGE (i2))
    1207     10687519 :           || SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2))
    1208              :         return dir_none;
    1209              : 
    1210              :       /* For address sanitizer, never crossjump __asan_report_* builtins,
    1211              :          otherwise errors might be reported on incorrect lines.  */
    1212      7053536 :       if (flag_sanitize & SANITIZE_ADDRESS)
    1213              :         {
    1214       279360 :           rtx call = get_call_rtx_from (i1);
    1215       279360 :           if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
    1216              :             {
    1217       279360 :               rtx symbol = XEXP (XEXP (call, 0), 0);
    1218       279360 :               if (SYMBOL_REF_DECL (symbol)
    1219       279360 :                   && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
    1220              :                 {
    1221       279360 :                   if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
    1222       279360 :                        == BUILT_IN_NORMAL)
    1223       277395 :                       && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
    1224              :                          >= BUILT_IN_ASAN_REPORT_LOAD1
    1225       555233 :                       && DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
    1226              :                          <= BUILT_IN_ASAN_STOREN)
    1227              :                     return dir_none;
    1228              :                 }
    1229              :             }
    1230              :         }
    1231              : 
    1232      6778602 :       if (insn_callee_abi (i1) != insn_callee_abi (i2))
    1233              :         return dir_none;
    1234              :     }
    1235              : 
    1236              :   /* If both i1 and i2 are frame related, verify all the CFA notes
    1237              :      in the same order and with the same content.  */
    1238     25580521 :   if (RTX_FRAME_RELATED_P (i1) && !insns_have_identical_cfa_notes (i1, i2))
    1239              :     return dir_none;
    1240              : 
    1241              : #ifdef STACK_REGS
    1242              :   /* If cross_jump_death_matters is not 0, the insn's mode
    1243              :      indicates whether or not the insn contains any stack-like
    1244              :      regs.  */
    1245              : 
    1246     25443821 :   if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
    1247              :     {
    1248              :       /* If register stack conversion has already been done, then
    1249              :          death notes must also be compared before it is certain that
    1250              :          the two instruction streams match.  */
    1251              : 
    1252              :       rtx note;
    1253              :       HARD_REG_SET i1_regset, i2_regset;
    1254              : 
    1255            0 :       CLEAR_HARD_REG_SET (i1_regset);
    1256            0 :       CLEAR_HARD_REG_SET (i2_regset);
    1257              : 
    1258            0 :       for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
    1259            0 :         if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
    1260            0 :           SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
    1261              : 
    1262            0 :       for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
    1263            0 :         if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
    1264            0 :           SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
    1265              : 
    1266            0 :       if (i1_regset != i2_regset)
    1267            0 :         return dir_none;
    1268              :     }
    1269              : #endif
    1270              : 
    1271     25443821 :   if (reload_completed
    1272     25443821 :       ? rtx_renumbered_equal_p (p1, p2) : rtx_equal_p (p1, p2))
    1273              :     return dir_both;
    1274              : 
    1275     14031196 :   return can_replace_by (i1, i2);
    1276              : }
    1277              : 
    1278              : /* When comparing insns I1 and I2 in flow_find_cross_jump or
    1279              :    flow_find_head_matching_sequence, ensure the notes match.  */
    1280              : 
    1281              : static void
    1282      9176853 : merge_notes (rtx_insn *i1, rtx_insn *i2)
    1283              : {
    1284              :   /* If the merged insns have different REG_EQUAL notes, then
    1285              :      remove them.  */
    1286      9176853 :   rtx equiv1 = find_reg_equal_equiv_note (i1);
    1287      9176853 :   rtx equiv2 = find_reg_equal_equiv_note (i2);
    1288              : 
    1289      9176853 :   if (equiv1 && !equiv2)
    1290        24103 :     remove_note (i1, equiv1);
    1291      9152750 :   else if (!equiv1 && equiv2)
    1292         8088 :     remove_note (i2, equiv2);
    1293      9144662 :   else if (equiv1 && equiv2
    1294      9144662 :            && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
    1295              :     {
    1296          486 :       remove_note (i1, equiv1);
    1297          486 :       remove_note (i2, equiv2);
    1298              :     }
    1299      9176853 : }
    1300              : 
    1301              :  /* Walks from I1 in BB1 backward till the next non-debug insn, and returns the
    1302              :     resulting insn in I1, and the corresponding bb in BB1.  At the head of a
    1303              :     bb, if there is a predecessor bb that reaches this bb via fallthru, and
    1304              :     FOLLOW_FALLTHRU, walks further in the predecessor bb and registers this in
    1305              :     DID_FALLTHRU.  Otherwise, stops at the head of the bb.  */
    1306              : 
    1307              : static void
    1308     50248896 : walk_to_nondebug_insn (rtx_insn **i1, basic_block *bb1, bool follow_fallthru,
    1309              :                        bool *did_fallthru)
    1310              : {
    1311     50248896 :   edge fallthru;
    1312              : 
    1313     50248896 :   *did_fallthru = false;
    1314              : 
    1315              :   /* Ignore notes.  */
    1316     68690140 :   while (!NONDEBUG_INSN_P (*i1))
    1317              :     {
    1318     19715869 :       if (*i1 != BB_HEAD (*bb1))
    1319              :         {
    1320     18215078 :           *i1 = PREV_INSN (*i1);
    1321     18215078 :           continue;
    1322              :         }
    1323              : 
    1324      1500791 :       if (!follow_fallthru)
    1325              :         return;
    1326              : 
    1327      1320950 :       fallthru = find_fallthru_edge ((*bb1)->preds);
    1328       579957 :       if (!fallthru || fallthru->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
    1329      1900845 :           || !single_succ_p (fallthru->src))
    1330              :         return;
    1331              : 
    1332       226166 :       *bb1 = fallthru->src;
    1333       226166 :       *i1 = BB_END (*bb1);
    1334       226166 :       *did_fallthru = true;
    1335              :      }
    1336              : }
    1337              : 
    1338              : /* Look through the insns at the end of BB1 and BB2 and find the longest
    1339              :    sequence that are either equivalent, or allow forward or backward
    1340              :    replacement.  Store the first insns for that sequence in *F1 and *F2 and
    1341              :    return the sequence length.
    1342              : 
    1343              :    DIR_P indicates the allowed replacement direction on function entry, and
    1344              :    the actual replacement direction on function exit.  If NULL, only equivalent
    1345              :    sequences are allowed.
    1346              : 
    1347              :    To simplify callers of this function, if the blocks match exactly,
    1348              :    store the head of the blocks in *F1 and *F2.  */
    1349              : 
    1350              : int
    1351     16157522 : flow_find_cross_jump (basic_block bb1, basic_block bb2, rtx_insn **f1,
    1352              :                       rtx_insn **f2, enum replace_direction *dir_p)
    1353              : {
    1354     16157522 :   rtx_insn *i1, *i2, *last1, *last2, *afterlast1, *afterlast2;
    1355     16157522 :   int ninsns = 0;
    1356     16157522 :   enum replace_direction dir, last_dir, afterlast_dir;
    1357     16157522 :   bool follow_fallthru, did_fallthru;
    1358              : 
    1359     16157522 :   if (dir_p)
    1360     16157522 :     dir = *dir_p;
    1361              :   else
    1362              :     dir = dir_both;
    1363     16157522 :   afterlast_dir = dir;
    1364     16157522 :   last_dir = afterlast_dir;
    1365              : 
    1366              :   /* Skip simple jumps at the end of the blocks.  Complex jumps still
    1367              :      need to be compared for equivalence, which we'll do below.  */
    1368              : 
    1369     16157522 :   i1 = BB_END (bb1);
    1370     16157522 :   last1 = afterlast1 = last2 = afterlast2 = NULL;
    1371     16157522 :   if (onlyjump_p (i1)
    1372     16157522 :       || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
    1373              :     {
    1374     14591763 :       last1 = i1;
    1375     14591763 :       i1 = PREV_INSN (i1);
    1376              :     }
    1377              : 
    1378     16157522 :   i2 = BB_END (bb2);
    1379     16157522 :   if (onlyjump_p (i2)
    1380     16157522 :       || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
    1381              :     {
    1382     11818837 :       last2 = i2;
    1383              :       /* Count everything except for unconditional jump as insn.
    1384              :          Don't count any jumps if dir_p is NULL.  */
    1385     11818837 :       if (!simplejump_p (i2) && !returnjump_p (i2) && last1 && dir_p)
    1386              :         ninsns++;
    1387     11818837 :       i2 = PREV_INSN (i2);
    1388              :     }
    1389              : 
    1390     34091374 :   while (true)
    1391              :     {
    1392              :       /* In the following example, we can replace all jumps to C by jumps to A.
    1393              : 
    1394              :          This removes 4 duplicate insns.
    1395              :          [bb A] insn1            [bb C] insn1
    1396              :                 insn2                   insn2
    1397              :          [bb B] insn3                   insn3
    1398              :                 insn4                   insn4
    1399              :                 jump_insn               jump_insn
    1400              : 
    1401              :          We could also replace all jumps to A by jumps to C, but that leaves B
    1402              :          alive, and removes only 2 duplicate insns.  In a subsequent crossjump
    1403              :          step, all jumps to B would be replaced with jumps to the middle of C,
    1404              :          achieving the same result with more effort.
    1405              :          So we allow only the first possibility, which means that we don't allow
    1406              :          fallthru in the block that's being replaced.  */
    1407              : 
    1408     25124448 :       follow_fallthru = dir_p && dir != dir_forward;
    1409     25124448 :       walk_to_nondebug_insn (&i1, &bb1, follow_fallthru, &did_fallthru);
    1410     25124448 :       if (did_fallthru)
    1411        24866 :         dir = dir_backward;
    1412              : 
    1413     25124448 :       follow_fallthru = dir_p && dir != dir_backward;
    1414     25124448 :       walk_to_nondebug_insn (&i2, &bb2, follow_fallthru, &did_fallthru);
    1415     25124448 :       if (did_fallthru)
    1416       201290 :         dir = dir_forward;
    1417              : 
    1418     25124448 :       if (i1 == BB_HEAD (bb1) || i2 == BB_HEAD (bb2))
    1419              :         break;
    1420              : 
    1421              :       /* Do not turn corssing edge to non-crossing or vice versa after
    1422              :          reload. */
    1423     24231462 :       if (BB_PARTITION (BLOCK_FOR_INSN (i1))
    1424     24231462 :           != BB_PARTITION (BLOCK_FOR_INSN (i2))
    1425     24231462 :           && reload_completed)
    1426              :         break;
    1427              : 
    1428     24231462 :       dir = merge_dir (dir, old_insns_match_p (0, i1, i2));
    1429     21501124 :       if (dir == dir_none || (!dir_p && dir != dir_both))
    1430              :         break;
    1431              : 
    1432      8966926 :       merge_memattrs (i1, i2);
    1433              : 
    1434              :       /* Don't begin a cross-jump with a NOTE insn.  */
    1435      8966926 :       if (INSN_P (i1))
    1436              :         {
    1437      8966926 :           merge_notes (i1, i2);
    1438              : 
    1439      8966926 :           afterlast1 = last1, afterlast2 = last2;
    1440      8966926 :           last1 = i1, last2 = i2;
    1441      8966926 :           afterlast_dir = last_dir;
    1442      8966926 :           last_dir = dir;
    1443      8966926 :           if (active_insn_p (i1))
    1444      8860158 :             ninsns++;
    1445              :         }
    1446              : 
    1447      8966926 :       i1 = PREV_INSN (i1);
    1448      8966926 :       i2 = PREV_INSN (i2);
    1449              :     }
    1450              : 
    1451              :   /* Include preceding notes and labels in the cross-jump.  One,
    1452              :      this may bring us to the head of the blocks as requested above.
    1453              :      Two, it keeps line number notes as matched as may be.  */
    1454     16157522 :   if (ninsns)
    1455              :     {
    1456      5360255 :       bb1 = BLOCK_FOR_INSN (last1);
    1457     13666285 :       while (last1 != BB_HEAD (bb1) && !NONDEBUG_INSN_P (PREV_INSN (last1)))
    1458              :         last1 = PREV_INSN (last1);
    1459              : 
    1460     10435931 :       if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1)))
    1461              :         last1 = PREV_INSN (last1);
    1462              : 
    1463      5360255 :       bb2 = BLOCK_FOR_INSN (last2);
    1464     13696345 :       while (last2 != BB_HEAD (bb2) && !NONDEBUG_INSN_P (PREV_INSN (last2)))
    1465              :         last2 = PREV_INSN (last2);
    1466              : 
    1467     10017033 :       if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2)))
    1468              :         last2 = PREV_INSN (last2);
    1469              : 
    1470      5360255 :       *f1 = last1;
    1471      5360255 :       *f2 = last2;
    1472              :     }
    1473              : 
    1474     16157522 :   if (dir_p)
    1475     16157522 :     *dir_p = last_dir;
    1476     16157522 :   return ninsns;
    1477              : }
    1478              : 
    1479              : /* Like flow_find_cross_jump, except start looking for a matching sequence from
    1480              :    the head of the two blocks.  Do not include jumps at the end.
    1481              :    If STOP_AFTER is nonzero, stop after finding that many matching
    1482              :    instructions.  If STOP_AFTER is zero, count all INSN_P insns, if it is
    1483              :    non-zero, only count active insns.  */
    1484              : 
    1485              : int
    1486      4666394 : flow_find_head_matching_sequence (basic_block bb1, basic_block bb2, rtx_insn **f1,
    1487              :                                   rtx_insn **f2, int stop_after)
    1488              : {
    1489      4666394 :   rtx_insn *i1, *i2, *last1, *last2, *beforelast1, *beforelast2;
    1490      4666394 :   int ninsns = 0;
    1491      4666394 :   edge e;
    1492      4666394 :   edge_iterator ei;
    1493      4666394 :   int nehedges1 = 0, nehedges2 = 0;
    1494              : 
    1495     11209821 :   FOR_EACH_EDGE (e, ei, bb1->succs)
    1496      6543427 :     if (e->flags & EDGE_EH)
    1497       310232 :       nehedges1++;
    1498     11527590 :   FOR_EACH_EDGE (e, ei, bb2->succs)
    1499      6861196 :     if (e->flags & EDGE_EH)
    1500       356762 :       nehedges2++;
    1501              : 
    1502      4666394 :   i1 = BB_HEAD (bb1);
    1503      4666394 :   i2 = BB_HEAD (bb2);
    1504      4666394 :   last1 = beforelast1 = last2 = beforelast2 = NULL;
    1505              : 
    1506       195167 :   while (true)
    1507              :     {
    1508              :       /* Ignore notes, except NOTE_INSN_EPILOGUE_BEG.  */
    1509     21688994 :       while (!NONDEBUG_INSN_P (i1) && i1 != BB_END (bb1))
    1510              :         {
    1511     16651662 :           if (NOTE_P (i1) && NOTE_KIND (i1) == NOTE_INSN_EPILOGUE_BEG)
    1512              :             break;
    1513     16632266 :           i1 = NEXT_INSN (i1);
    1514              :         }
    1515              : 
    1516     21581471 :       while (!NONDEBUG_INSN_P (i2) && i2 != BB_END (bb2))
    1517              :         {
    1518     16758141 :           if (NOTE_P (i2) && NOTE_KIND (i2) == NOTE_INSN_EPILOGUE_BEG)
    1519              :             break;
    1520     16719910 :           i2 = NEXT_INSN (i2);
    1521              :         }
    1522              : 
    1523      4861561 :       if ((i1 == BB_END (bb1) && !NONDEBUG_INSN_P (i1))
    1524      4849153 :           || (i2 == BB_END (bb2) && !NONDEBUG_INSN_P (i2)))
    1525              :         break;
    1526              : 
    1527      4843850 :       if (NOTE_P (i1) || NOTE_P (i2)
    1528      4788121 :           || JUMP_P (i1) || JUMP_P (i2))
    1529              :         break;
    1530              : 
    1531              :       /* A sanity check to make sure we're not merging insns with different
    1532              :          effects on EH.  If only one of them ends a basic block, it shouldn't
    1533              :          have an EH edge; if both end a basic block, there should be the same
    1534              :          number of EH edges.  */
    1535      3924408 :       if ((i1 == BB_END (bb1) && i2 != BB_END (bb2)
    1536       221424 :            && nehedges1 > 0)
    1537      3888556 :           || (i2 == BB_END (bb2) && i1 != BB_END (bb1)
    1538       239190 :               && nehedges2 > 0)
    1539      3859993 :           || (i1 == BB_END (bb1) && i2 == BB_END (bb2)
    1540        27616 :               && nehedges1 != nehedges2))
    1541              :         break;
    1542              : 
    1543      3856658 :       if (old_insns_match_p (0, i1, i2) != dir_both)
    1544              :         break;
    1545              : 
    1546       209927 :       merge_memattrs (i1, i2);
    1547              : 
    1548              :       /* Don't begin a cross-jump with a NOTE insn.  */
    1549       209927 :       if (INSN_P (i1))
    1550              :         {
    1551       209927 :           merge_notes (i1, i2);
    1552              : 
    1553       209927 :           beforelast1 = last1, beforelast2 = last2;
    1554       209927 :           last1 = i1, last2 = i2;
    1555       209927 :           if (!stop_after || active_insn_p (i1))
    1556       209927 :             ninsns++;
    1557              :         }
    1558              : 
    1559       209927 :       if (i1 == BB_END (bb1) || i2 == BB_END (bb2)
    1560       195167 :           || (stop_after > 0 && ninsns == stop_after))
    1561              :         break;
    1562              : 
    1563       195167 :       i1 = NEXT_INSN (i1);
    1564       195167 :       i2 = NEXT_INSN (i2);
    1565              :     }
    1566              : 
    1567      4666394 :   if (ninsns)
    1568              :     {
    1569       118028 :       *f1 = last1;
    1570       118028 :       *f2 = last2;
    1571              :     }
    1572              : 
    1573      4666394 :   return ninsns;
    1574              : }
    1575              : 
    1576              : /* Return true iff outgoing edges of BB1 and BB2 match, together with
    1577              :    the branch instruction.  This means that if we commonize the control
    1578              :    flow before end of the basic block, the semantic remains unchanged.
    1579              : 
    1580              :    We may assume that there exists one edge with a common destination.  */
    1581              : 
    1582              : static bool
    1583     47182261 : outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
    1584              : {
    1585     47182261 :   int nehedges1 = 0, nehedges2 = 0;
    1586     47182261 :   edge fallthru1 = 0, fallthru2 = 0;
    1587     47182261 :   edge e1, e2;
    1588     47182261 :   edge_iterator ei;
    1589              : 
    1590              :   /* If we performed shrink-wrapping, edges to the exit block can
    1591              :      only be distinguished for JUMP_INSNs.  The two paths may differ in
    1592              :      whether they went through the prologue.  Sibcalls are fine, we know
    1593              :      that we either didn't need or inserted an epilogue before them.  */
    1594     47182261 :   if (crtl->shrink_wrapped
    1595      1211372 :       && single_succ_p (bb1)
    1596       470258 :       && single_succ (bb1) == EXIT_BLOCK_PTR_FOR_FN (cfun)
    1597       148974 :       && (!JUMP_P (BB_END (bb1))
    1598              :           /* Punt if the only successor is a fake edge to exit, the jump
    1599              :              must be some weird one.  */
    1600        65262 :           || (single_succ_edge (bb1)->flags & EDGE_FAKE) != 0)
    1601     47266053 :       && !(CALL_P (BB_END (bb1)) && SIBLING_CALL_P (BB_END (bb1))))
    1602              :     return false;
    1603              : 
    1604              :   /* If BB1 has only one successor, we may be looking at either an
    1605              :      unconditional jump, or a fake edge to exit.  */
    1606     47133443 :   if (single_succ_p (bb1)
    1607     19319129 :       && (single_succ_edge (bb1)->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
    1608     63081485 :       && (!JUMP_P (BB_END (bb1)) || simplejump_p (BB_END (bb1))))
    1609     15679536 :     return (single_succ_p (bb2)
    1610     14200052 :             && (single_succ_edge (bb2)->flags
    1611     14200052 :                 & (EDGE_COMPLEX | EDGE_FAKE)) == 0
    1612     29822601 :             && (!JUMP_P (BB_END (bb2)) || simplejump_p (BB_END (bb2))));
    1613              : 
    1614              :   /* Match conditional jumps - this may get tricky when fallthru and branch
    1615              :      edges are crossed.  */
    1616     31453907 :   if (EDGE_COUNT (bb1->succs) == 2
    1617     27733636 :       && any_condjump_p (BB_END (bb1))
    1618     50459441 :       && onlyjump_p (BB_END (bb1)))
    1619              :     {
    1620     19005534 :       edge b1, f1, b2, f2;
    1621     19005534 :       bool reverse, match;
    1622     19005534 :       rtx set1, set2, cond1, cond2;
    1623     19005534 :       enum rtx_code code1, code2;
    1624              : 
    1625     19005534 :       if (EDGE_COUNT (bb2->succs) != 2
    1626      8731540 :           || !any_condjump_p (BB_END (bb2))
    1627     27704282 :           || !onlyjump_p (BB_END (bb2)))
    1628     10306786 :         return false;
    1629              : 
    1630      8698748 :       b1 = BRANCH_EDGE (bb1);
    1631      8698748 :       b2 = BRANCH_EDGE (bb2);
    1632      8698748 :       f1 = FALLTHRU_EDGE (bb1);
    1633      8698748 :       f2 = FALLTHRU_EDGE (bb2);
    1634              : 
    1635              :       /* Get around possible forwarders on fallthru edges.  Other cases
    1636              :          should be optimized out already.  */
    1637      8698748 :       if (FORWARDER_BLOCK_P (f1->dest))
    1638      3509775 :         f1 = single_succ_edge (f1->dest);
    1639              : 
    1640      8698748 :       if (FORWARDER_BLOCK_P (f2->dest))
    1641      2617037 :         f2 = single_succ_edge (f2->dest);
    1642              : 
    1643              :       /* To simplify use of this function, return false if there are
    1644              :          unneeded forwarder blocks.  These will get eliminated later
    1645              :          during cleanup_cfg.  */
    1646      8698748 :       if (FORWARDER_BLOCK_P (f1->dest)
    1647      8693941 :           || FORWARDER_BLOCK_P (f2->dest)
    1648      8688461 :           || FORWARDER_BLOCK_P (b1->dest)
    1649      8658200 :           || FORWARDER_BLOCK_P (b2->dest))
    1650              :         return false;
    1651              : 
    1652      8631316 :       if (f1->dest == f2->dest && b1->dest == b2->dest)
    1653              :         reverse = false;
    1654      8433008 :       else if (f1->dest == b2->dest && b1->dest == f2->dest)
    1655              :         reverse = true;
    1656              :       else
    1657              :         return false;
    1658              : 
    1659       317112 :       set1 = pc_set (BB_END (bb1));
    1660       317112 :       set2 = pc_set (BB_END (bb2));
    1661       317112 :       if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
    1662       317112 :           != (XEXP (SET_SRC (set2), 1) == pc_rtx))
    1663            0 :         reverse = !reverse;
    1664              : 
    1665       317112 :       cond1 = XEXP (SET_SRC (set1), 0);
    1666       317112 :       cond2 = XEXP (SET_SRC (set2), 0);
    1667       317112 :       code1 = GET_CODE (cond1);
    1668       317112 :       if (reverse)
    1669       118804 :         code2 = reversed_comparison_code (cond2, BB_END (bb2));
    1670              :       else
    1671       198308 :         code2 = GET_CODE (cond2);
    1672              : 
    1673       317112 :       if (code2 == UNKNOWN)
    1674              :         return false;
    1675              : 
    1676              :       /* Verify codes and operands match.  */
    1677       900640 :       match = ((code1 == code2
    1678       273617 :                 && rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
    1679       271512 :                 && rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
    1680       319217 :                || (code1 == swap_condition (code2)
    1681         4632 :                    && rtx_renumbered_equal_p (XEXP (cond1, 1),
    1682         4632 :                                               XEXP (cond2, 0))
    1683            0 :                    && rtx_renumbered_equal_p (XEXP (cond1, 0),
    1684            0 :                                               XEXP (cond2, 1))));
    1685              : 
    1686              :       /* If we return true, we will join the blocks.  Which means that
    1687              :          we will only have one branch prediction bit to work with.  Thus
    1688              :          we require the existing branches to have probabilities that are
    1689              :          roughly similar.  */
    1690       271512 :       if (match
    1691       271512 :           && optimize_bb_for_speed_p (bb1)
    1692       246511 :           && optimize_bb_for_speed_p (bb2))
    1693              :         {
    1694       235442 :           profile_probability prob2;
    1695              : 
    1696       235442 :           if (b1->dest == b2->dest)
    1697       151071 :             prob2 = b2->probability;
    1698              :           else
    1699              :             /* Do not use f2 probability as f2 may be forwarded.  */
    1700        84371 :             prob2 = b2->probability.invert ();
    1701              : 
    1702              :           /* Fail if the difference in probabilities is greater than 50%.
    1703              :              This rules out two well-predicted branches with opposite
    1704              :              outcomes.  */
    1705       235442 :           if (b1->probability.differs_lot_from_p (prob2))
    1706              :             {
    1707         5096 :               if (dump_file)
    1708              :                 {
    1709            0 :                   fprintf (dump_file,
    1710              :                            "Outcomes of branch in bb %i and %i differ too"
    1711              :                            " much (", bb1->index, bb2->index);
    1712            0 :                   b1->probability.dump (dump_file);
    1713            0 :                   prob2.dump (dump_file);
    1714            0 :                   fprintf (dump_file, ")\n");
    1715              :                 }
    1716         5096 :               return false;
    1717              :             }
    1718              :         }
    1719              : 
    1720       312016 :       if (dump_file && match)
    1721           16 :         fprintf (dump_file, "Conditionals in bb %i and %i match.\n",
    1722              :                  bb1->index, bb2->index);
    1723              : 
    1724       312016 :       return match;
    1725              :     }
    1726              : 
    1727              :   /* Generic case - we are seeing a computed jump, table jump or trapping
    1728              :      instruction.  */
    1729              : 
    1730              :   /* Check whether there are tablejumps in the end of BB1 and BB2.
    1731              :      Return true if they are identical.  */
    1732     12448373 :     {
    1733     12448373 :       rtx_insn *label1, *label2;
    1734     12448373 :       rtx_jump_table_data *table1, *table2;
    1735              : 
    1736     12448373 :       if (tablejump_p (BB_END (bb1), &label1, &table1)
    1737        78151 :           && tablejump_p (BB_END (bb2), &label2, &table2)
    1738     12454422 :           && GET_CODE (PATTERN (table1)) == GET_CODE (PATTERN (table2)))
    1739              :         {
    1740              :           /* The labels should never be the same rtx.  If they really are same
    1741              :              the jump tables are same too. So disable crossjumping of blocks BB1
    1742              :              and BB2 because when deleting the common insns in the end of BB1
    1743              :              by delete_basic_block () the jump table would be deleted too.  */
    1744              :           /* If LABEL2 is referenced in BB1->END do not do anything
    1745              :              because we would loose information when replacing
    1746              :              LABEL1 by LABEL2 and then LABEL2 by LABEL1 in BB1->END.  */
    1747         6049 :           if (label1 != label2 && !rtx_referenced_p (label2, BB_END (bb1)))
    1748              :             {
    1749              :               /* Set IDENTICAL to true when the tables are identical.  */
    1750         6049 :               bool identical = false;
    1751         6049 :               rtx p1, p2;
    1752              : 
    1753         6049 :               p1 = PATTERN (table1);
    1754         6049 :               p2 = PATTERN (table2);
    1755         6049 :               if (GET_CODE (p1) == ADDR_VEC && rtx_equal_p (p1, p2))
    1756              :                 {
    1757              :                   identical = true;
    1758              :                 }
    1759         5069 :               else if (GET_CODE (p1) == ADDR_DIFF_VEC
    1760          202 :                        && (XVECLEN (p1, 1) == XVECLEN (p2, 1))
    1761           95 :                        && rtx_equal_p (XEXP (p1, 2), XEXP (p2, 2))
    1762         5164 :                        && rtx_equal_p (XEXP (p1, 3), XEXP (p2, 3)))
    1763              :                 {
    1764           95 :                   int i;
    1765              : 
    1766           95 :                   identical = true;
    1767          394 :                   for (i = XVECLEN (p1, 1) - 1; i >= 0 && identical; i--)
    1768          299 :                     if (!rtx_equal_p (XVECEXP (p1, 1, i), XVECEXP (p2, 1, i)))
    1769              :                       identical = false;
    1770              :                 }
    1771              : 
    1772           95 :               if (identical)
    1773              :                 {
    1774          986 :                   bool match;
    1775              : 
    1776              :                   /* Temporarily replace references to LABEL1 with LABEL2
    1777              :                      in BB1->END so that we could compare the instructions.  */
    1778          986 :                   replace_label_in_insn (BB_END (bb1), label1, label2, false);
    1779              : 
    1780          986 :                   match = (old_insns_match_p (mode, BB_END (bb1), BB_END (bb2))
    1781              :                            == dir_both);
    1782          986 :                   if (dump_file && match)
    1783            0 :                     fprintf (dump_file,
    1784              :                              "Tablejumps in bb %i and %i match.\n",
    1785              :                              bb1->index, bb2->index);
    1786              : 
    1787              :                   /* Set the original label in BB1->END because when deleting
    1788              :                      a block whose end is a tablejump, the tablejump referenced
    1789              :                      from the instruction is deleted too.  */
    1790          986 :                   replace_label_in_insn (BB_END (bb1), label2, label1, false);
    1791              : 
    1792         6049 :                   return match;
    1793              :                 }
    1794              :             }
    1795         5063 :           return false;
    1796              :         }
    1797              :     }
    1798              : 
    1799              :   /* Find the last non-debug non-note instruction in each bb, except
    1800              :      stop when we see the NOTE_INSN_BASIC_BLOCK, as old_insns_match_p
    1801              :      handles that case specially. old_insns_match_p does not handle
    1802              :      other types of instruction notes.  */
    1803     12442324 :   rtx_insn *last1 = BB_END (bb1);
    1804     12442324 :   rtx_insn *last2 = BB_END (bb2);
    1805     12442415 :   while (!NOTE_INSN_BASIC_BLOCK_P (last1) &&
    1806     12322902 :          (DEBUG_INSN_P (last1) || NOTE_P (last1)))
    1807           91 :     last1 = PREV_INSN (last1);
    1808     12472627 :   while (!NOTE_INSN_BASIC_BLOCK_P (last2) &&
    1809     12355232 :          (DEBUG_INSN_P (last2) || NOTE_P (last2)))
    1810        30303 :     last2 = PREV_INSN (last2);
    1811     12442324 :   gcc_assert (last1 && last2);
    1812              : 
    1813              :   /* First ensure that the instructions match.  There may be many outgoing
    1814              :      edges so this test is generally cheaper.  */
    1815     12442324 :   if (old_insns_match_p (mode, last1, last2) != dir_both)
    1816              :     return false;
    1817              : 
    1818              :   /* Search the outgoing edges, ensure that the counts do match, find possible
    1819              :      fallthru and exception handling edges since these needs more
    1820              :      validation.  */
    1821      6943584 :   if (EDGE_COUNT (bb1->succs) != EDGE_COUNT (bb2->succs))
    1822              :     return false;
    1823              : 
    1824      2314515 :   bool nonfakeedges = false;
    1825      5687052 :   FOR_EACH_EDGE (e1, ei, bb1->succs)
    1826              :     {
    1827      3372537 :       e2 = EDGE_SUCC (bb2, ei.index);
    1828              : 
    1829      3372537 :       if ((e1->flags & EDGE_FAKE) == 0)
    1830      2461235 :         nonfakeedges = true;
    1831              : 
    1832      3372537 :       if (e1->flags & EDGE_EH)
    1833      1096442 :         nehedges1++;
    1834              : 
    1835      3372537 :       if (e2->flags & EDGE_EH)
    1836      1096442 :         nehedges2++;
    1837              : 
    1838      3372537 :       if (e1->flags & EDGE_FALLTHRU)
    1839      1144833 :         fallthru1 = e1;
    1840      3372537 :       if (e2->flags & EDGE_FALLTHRU)
    1841      1144833 :         fallthru2 = e2;
    1842              :     }
    1843              : 
    1844              :   /* If number of edges of various types does not match, fail.  */
    1845      2314515 :   if (nehedges1 != nehedges2
    1846      2314515 :       || (fallthru1 != 0) != (fallthru2 != 0))
    1847              :     return false;
    1848              : 
    1849              :   /* If !ACCUMULATE_OUTGOING_ARGS, bb1 (and bb2) have no successors
    1850              :      and the last real insn doesn't have REG_ARGS_SIZE note, don't
    1851              :      attempt to optimize, as the two basic blocks might have different
    1852              :      REG_ARGS_SIZE depths.  For noreturn calls and unconditional
    1853              :      traps there should be REG_ARG_SIZE notes, they could be missing
    1854              :      for __builtin_unreachable () uses though.  */
    1855      2314515 :   if (!nonfakeedges
    1856       911302 :       && !ACCUMULATE_OUTGOING_ARGS
    1857      3225810 :       && (!INSN_P (last1)
    1858       911295 :           || !find_reg_note (last1, REG_ARGS_SIZE, NULL)))
    1859           11 :     return false;
    1860              : 
    1861              :   /* fallthru edges must be forwarded to the same destination.  */
    1862      2314504 :   if (fallthru1)
    1863              :     {
    1864      1144833 :       basic_block d1 = (FORWARDER_BLOCK_P (fallthru1->dest)
    1865      1144833 :                         ? single_succ (fallthru1->dest) : fallthru1->dest);
    1866      1144833 :       basic_block d2 = (FORWARDER_BLOCK_P (fallthru2->dest)
    1867      1144833 :                         ? single_succ (fallthru2->dest) : fallthru2->dest);
    1868              : 
    1869      1144833 :       if (d1 != d2)
    1870              :         return false;
    1871              :     }
    1872              : 
    1873              :   /* Ensure the same EH region.  */
    1874      1747089 :   {
    1875      1747089 :     rtx n1 = find_reg_note (last1, REG_EH_REGION, 0);
    1876      1747089 :     rtx n2 = find_reg_note (last2, REG_EH_REGION, 0);
    1877              : 
    1878      1747089 :     if (!n1 && n2)
    1879              :       return false;
    1880              : 
    1881      1747089 :     if (n1 && (!n2 || XEXP (n1, 0) != XEXP (n2, 0)))
    1882              :       return false;
    1883              :   }
    1884              : 
    1885              :   /* The same checks as in try_crossjump_to_edge. It is required for RTL
    1886              :      version of sequence abstraction.  */
    1887      3984143 :   FOR_EACH_EDGE (e1, ei, bb2->succs)
    1888              :     {
    1889      2237084 :       edge e2;
    1890      2237084 :       edge_iterator ei;
    1891      2237084 :       basic_block d1 = e1->dest;
    1892              : 
    1893      2237084 :       if (FORWARDER_BLOCK_P (d1))
    1894       595785 :         d1 = EDGE_SUCC (d1, 0)->dest;
    1895              : 
    1896      2728549 :       FOR_EACH_EDGE (e2, ei, bb1->succs)
    1897              :         {
    1898      2728549 :           basic_block d2 = e2->dest;
    1899      2728549 :           if (FORWARDER_BLOCK_P (d2))
    1900       770779 :             d2 = EDGE_SUCC (d2, 0)->dest;
    1901      2728549 :           if (d1 == d2)
    1902              :             break;
    1903              :         }
    1904              : 
    1905      2237084 :       if (!e2)
    1906            0 :         return false;
    1907              :     }
    1908              : 
    1909              :   return true;
    1910              : }
    1911              : 
    1912              : /* Returns true if BB basic block has a preserve label.  */
    1913              : 
    1914              : static bool
    1915       369074 : block_has_preserve_label (basic_block bb)
    1916              : {
    1917       369074 :   return (bb
    1918       369074 :           && block_label (bb)
    1919       687871 :           && LABEL_PRESERVE_P (block_label (bb)));
    1920              : }
    1921              : 
    1922              : /* E1 and E2 are edges with the same destination block.  Search their
    1923              :    predecessors for common code.  If found, redirect control flow from
    1924              :    (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC (dir_forward),
    1925              :    or the other way around (dir_backward).  DIR specifies the allowed
    1926              :    replacement direction.  */
    1927              : 
    1928              : static bool
    1929     51721778 : try_crossjump_to_edge (int mode, edge e1, edge e2,
    1930              :                        enum replace_direction dir)
    1931              : {
    1932     51721778 :   int nmatch;
    1933     51721778 :   basic_block src1 = e1->src, src2 = e2->src;
    1934     51721778 :   basic_block redirect_to, redirect_from, to_remove;
    1935     51721778 :   basic_block osrc1, osrc2, redirect_edges_to, tmp;
    1936     51721778 :   rtx_insn *newpos1, *newpos2;
    1937     51721778 :   edge s;
    1938     51721778 :   edge_iterator ei;
    1939              : 
    1940     51721778 :   newpos1 = newpos2 = NULL;
    1941              : 
    1942              :   /* Search backward through forwarder blocks.  We don't need to worry
    1943              :      about multiple entry or chained forwarders, as they will be optimized
    1944              :      away.  We do this to look past the unconditional jump following a
    1945              :      conditional jump that is required due to the current CFG shape.  */
    1946     51721778 :   if (single_pred_p (src1)
    1947     51721778 :       && FORWARDER_BLOCK_P (src1))
    1948     11864786 :     e1 = single_pred_edge (src1), src1 = e1->src;
    1949              : 
    1950     51721778 :   if (single_pred_p (src2)
    1951     51721571 :       && FORWARDER_BLOCK_P (src2))
    1952      5398748 :     e2 = single_pred_edge (src2), src2 = e2->src;
    1953              : 
    1954              :   /* Nothing to do if we reach ENTRY, or a common source block.  */
    1955     51721778 :   if (src1 == ENTRY_BLOCK_PTR_FOR_FN (cfun) || src2
    1956              :       == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    1957              :     return false;
    1958     51721346 :   if (src1 == src2)
    1959              :     return false;
    1960              : 
    1961              :   /* Seeing more than 1 forwarder blocks would confuse us later...  */
    1962     51721223 :   if (FORWARDER_BLOCK_P (e1->dest)
    1963     51721223 :       && FORWARDER_BLOCK_P (single_succ (e1->dest)))
    1964              :     return false;
    1965              : 
    1966     51707630 :   if (FORWARDER_BLOCK_P (e2->dest)
    1967     51707630 :       && FORWARDER_BLOCK_P (single_succ (e2->dest)))
    1968              :     return false;
    1969              : 
    1970              :   /* Likewise with dead code (possibly newly created by the other optimizations
    1971              :      of cfg_cleanup).  */
    1972     51706818 :   if (EDGE_COUNT (src1->preds) == 0 || EDGE_COUNT (src2->preds) == 0)
    1973              :     return false;
    1974              : 
    1975              :   /* Do not turn corssing edge to non-crossing or vice versa after reload.  */
    1976     47367484 :   if (BB_PARTITION (src1) != BB_PARTITION (src2)
    1977       185223 :       && reload_completed)
    1978              :     return false;
    1979              : 
    1980              :   /* Look for the common insn sequence, part the first ...  */
    1981     47182261 :   if (!outgoing_edges_match (mode, src1, src2))
    1982              :     return false;
    1983              : 
    1984              :   /* ... and part the second.  */
    1985     16157522 :   nmatch = flow_find_cross_jump (src1, src2, &newpos1, &newpos2, &dir);
    1986              : 
    1987     16157522 :   osrc1 = src1;
    1988     16157522 :   osrc2 = src2;
    1989     16157522 :   if (newpos1 != NULL_RTX)
    1990      5360255 :     src1 = BLOCK_FOR_INSN (newpos1);
    1991     16157522 :   if (newpos2 != NULL_RTX)
    1992      5360255 :     src2 = BLOCK_FOR_INSN (newpos2);
    1993              : 
    1994              :   /* Check that SRC1 and SRC2 have preds again.  They may have changed
    1995              :      above due to the call to flow_find_cross_jump.  */
    1996     67542584 :   if (EDGE_COUNT (src1->preds) == 0 || EDGE_COUNT (src2->preds) == 0)
    1997              :     return false;
    1998              : 
    1999     16157522 :   if (dir == dir_backward)
    2000              :     {
    2001          788 :       std::swap (osrc1, osrc2);
    2002          788 :       std::swap (src1, src2);
    2003          788 :       std::swap (e1, e2);
    2004          788 :       std::swap (newpos1, newpos2);
    2005              :     }
    2006              : 
    2007              :   /* Don't proceed with the crossjump unless we found a sufficient number
    2008              :      of matching instructions or the 'from' block was totally matched
    2009              :      (such that its predecessors will hopefully be redirected and the
    2010              :      block removed).  */
    2011     16157522 :   if ((nmatch < param_min_crossjump_insns)
    2012     16038809 :       && (newpos1 != BB_HEAD (src1)))
    2013              :     return false;
    2014              : 
    2015              :   /* Avoid deleting preserve label when redirecting ABNORMAL edges.  */
    2016       369074 :   if (block_has_preserve_label (e1->dest)
    2017       369074 :       && (e1->flags & EDGE_ABNORMAL))
    2018              :     return false;
    2019              : 
    2020              :   /* Here we know that the insns in the end of SRC1 which are common with SRC2
    2021              :      will be deleted.
    2022              :      If we have tablejumps in the end of SRC1 and SRC2
    2023              :      they have been already compared for equivalence in outgoing_edges_match ()
    2024              :      so replace the references to TABLE1 by references to TABLE2.  */
    2025       336716 :   {
    2026       336716 :       rtx_insn *label1, *label2;
    2027       336716 :       rtx_jump_table_data *table1, *table2;
    2028              : 
    2029       336716 :       if (tablejump_p (BB_END (osrc1), &label1, &table1)
    2030            4 :           && tablejump_p (BB_END (osrc2), &label2, &table2)
    2031       336720 :           && label1 != label2)
    2032              :         {
    2033            4 :           rtx_insn *insn;
    2034              : 
    2035              :           /* Replace references to LABEL1 with LABEL2.  */
    2036         9246 :           for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    2037              :             {
    2038              :               /* Do not replace the label in SRC1->END because when deleting
    2039              :                  a block whose end is a tablejump, the tablejump referenced
    2040              :                  from the instruction is deleted too.  */
    2041         9242 :               if (insn != BB_END (osrc1))
    2042         9238 :                 replace_label_in_insn (insn, label1, label2, true);
    2043              :             }
    2044              :         }
    2045              :   }
    2046              : 
    2047              :   /* Avoid splitting if possible.  We must always split when SRC2 has
    2048              :      EH predecessor edges, or we may end up with basic blocks with both
    2049              :      normal and EH predecessor edges.  */
    2050       336716 :   if (newpos2 == BB_HEAD (src2)
    2051       336716 :       && !(EDGE_PRED (src2, 0)->flags & EDGE_EH))
    2052              :     redirect_to = src2;
    2053              :   else
    2054              :     {
    2055        80704 :       if (newpos2 == BB_HEAD (src2))
    2056              :         {
    2057              :           /* Skip possible basic block header.  */
    2058         5432 :           if (LABEL_P (newpos2))
    2059         5432 :             newpos2 = NEXT_INSN (newpos2);
    2060         5432 :           while (DEBUG_INSN_P (newpos2))
    2061            0 :             newpos2 = NEXT_INSN (newpos2);
    2062         5432 :           if (NOTE_P (newpos2))
    2063         5432 :             newpos2 = NEXT_INSN (newpos2);
    2064         5625 :           while (DEBUG_INSN_P (newpos2))
    2065          193 :             newpos2 = NEXT_INSN (newpos2);
    2066              :         }
    2067              : 
    2068        80704 :       if (dump_file)
    2069            0 :         fprintf (dump_file, "Splitting bb %i before %i insns\n",
    2070              :                  src2->index, nmatch);
    2071        80704 :       redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
    2072              :     }
    2073              : 
    2074       336716 :   if (dump_file)
    2075            4 :     fprintf (dump_file,
    2076              :              "Cross jumping from bb %i to bb %i; %i common insns\n",
    2077              :              src1->index, src2->index, nmatch);
    2078              : 
    2079              :   /* We may have some registers visible through the block.  */
    2080       336716 :   df_set_bb_dirty (redirect_to);
    2081              : 
    2082       336716 :   if (osrc2 == src2)
    2083              :     redirect_edges_to = redirect_to;
    2084              :   else
    2085        11346 :     redirect_edges_to = osrc2;
    2086              : 
    2087              :   /* Recompute the counts of destinations of outgoing edges.  */
    2088       739555 :   FOR_EACH_EDGE (s, ei, redirect_edges_to->succs)
    2089              :     {
    2090       402839 :       edge s2;
    2091       402839 :       edge_iterator ei;
    2092       402839 :       basic_block d = s->dest;
    2093              : 
    2094       402839 :       if (FORWARDER_BLOCK_P (d))
    2095        28252 :         d = single_succ (d);
    2096              : 
    2097       469196 :       FOR_EACH_EDGE (s2, ei, src1->succs)
    2098              :         {
    2099       469196 :           basic_block d2 = s2->dest;
    2100       469196 :           if (FORWARDER_BLOCK_P (d2))
    2101        96567 :             d2 = single_succ (d2);
    2102       469196 :           if (d == d2)
    2103              :             break;
    2104              :         }
    2105              : 
    2106              :       /* Take care to update possible forwarder blocks.  We verified
    2107              :          that there is no more than one in the chain, so we can't run
    2108              :          into infinite loop.  */
    2109       402839 :       if (FORWARDER_BLOCK_P (s->dest))
    2110        28252 :         s->dest->count += s->count ();
    2111              : 
    2112       402839 :       if (FORWARDER_BLOCK_P (s2->dest))
    2113        74277 :         s2->dest->count -= s->count ();
    2114              : 
    2115       402839 :       s->probability = s->probability.combine_with_count
    2116       402839 :                           (redirect_edges_to->count,
    2117              :                            s2->probability, src1->count);
    2118              :     }
    2119              : 
    2120              :   /* Adjust count for the block.  An earlier jump
    2121              :      threading pass may have left the profile in an inconsistent
    2122              :      state (see update_bb_profile_for_threading) so we must be
    2123              :      prepared for overflows.  */
    2124              :   tmp = redirect_to;
    2125       360674 :   do
    2126              :     {
    2127       348695 :       tmp->count += src1->count;
    2128       348695 :       if (tmp == redirect_edges_to)
    2129              :         break;
    2130        11979 :       tmp = find_fallthru_edge (tmp->succs)->dest;
    2131              :     }
    2132              :   while (true);
    2133       336716 :   update_br_prob_note (redirect_edges_to);
    2134              : 
    2135              :   /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1.  */
    2136              : 
    2137              :   /* Skip possible basic block header.  */
    2138       336716 :   if (LABEL_P (newpos1))
    2139       136216 :     newpos1 = NEXT_INSN (newpos1);
    2140              : 
    2141       358749 :   while (DEBUG_INSN_P (newpos1))
    2142        22033 :     newpos1 = NEXT_INSN (newpos1);
    2143              : 
    2144       336716 :   if (NOTE_INSN_BASIC_BLOCK_P (newpos1))
    2145       254883 :     newpos1 = NEXT_INSN (newpos1);
    2146              : 
    2147              :   /* Skip also prologue and function markers.  */
    2148       808553 :   while (DEBUG_INSN_P (newpos1)
    2149       808553 :          || (NOTE_P (newpos1)
    2150        13041 :              && (NOTE_KIND (newpos1) == NOTE_INSN_PROLOGUE_END
    2151        13041 :                  || NOTE_KIND (newpos1) == NOTE_INSN_FUNCTION_BEG)))
    2152       471837 :     newpos1 = NEXT_INSN (newpos1);
    2153              : 
    2154       336716 :   redirect_from = split_block (src1, PREV_INSN (newpos1))->src;
    2155       336716 :   to_remove = single_succ (redirect_from);
    2156              : 
    2157       336716 :   redirect_edge_and_branch_force (single_succ_edge (redirect_from), redirect_to);
    2158       336716 :   delete_basic_block (to_remove);
    2159              : 
    2160       336716 :   update_forwarder_flag (redirect_from);
    2161       336716 :   if (redirect_to != src2)
    2162        80704 :     update_forwarder_flag (src2);
    2163              : 
    2164              :   return true;
    2165              : }
    2166              : 
    2167              : /* Search the predecessors of BB for common insn sequences.  When found,
    2168              :    share code between them by redirecting control flow.  Return true if
    2169              :    any changes made.  */
    2170              : 
    2171              : static bool
    2172     30462719 : try_crossjump_bb (int mode, basic_block bb)
    2173              : {
    2174     30462719 :   edge e, e2, fallthru;
    2175     30462719 :   bool changed;
    2176     30462719 :   unsigned max, ix, ix2;
    2177              : 
    2178              :   /* Nothing to do if there is not at least two incoming edges.  */
    2179     30670950 :   if (EDGE_COUNT (bb->preds) < 2)
    2180              :     return false;
    2181              : 
    2182              :   /* Don't crossjump if this block ends in a computed jump,
    2183              :      unless we are optimizing for size.  */
    2184      8397507 :   if (optimize_bb_for_size_p (bb)
    2185      1301319 :       && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
    2186      9660142 :       && computed_jump_p (BB_END (bb)))
    2187              :     return false;
    2188              : 
    2189              :   /* If we are partitioning hot/cold basic blocks, we don't want to
    2190              :      mess up unconditional or indirect jumps that cross between hot
    2191              :      and cold sections.
    2192              : 
    2193              :      Basic block partitioning may result in some jumps that appear to
    2194              :      be optimizable (or blocks that appear to be mergeable), but which really
    2195              :      must be left untouched (they are required to make it safely across
    2196              :      partition boundaries).  See the comments at the top of
    2197              :      bb-reorder.cc:partition_hot_cold_basic_blocks for complete details.  */
    2198              : 
    2199      8397478 :   if (BB_PARTITION (EDGE_PRED (bb, 0)->src) !=
    2200      8397478 :                                         BB_PARTITION (EDGE_PRED (bb, 1)->src)
    2201      8397478 :       || (EDGE_PRED (bb, 0)->flags & EDGE_CROSSING))
    2202              :     return false;
    2203              : 
    2204              :   /* It is always cheapest to redirect a block that ends in a branch to
    2205              :      a block that falls through into BB, as that adds no branches to the
    2206              :      program.  We'll try that combination first.  */
    2207      8192047 :   fallthru = NULL;
    2208      8192047 :   max = param_max_crossjump_edges;
    2209              : 
    2210      8192047 :   if (EDGE_COUNT (bb->preds) > max)
    2211              :     return false;
    2212              : 
    2213      8189276 :   fallthru = find_fallthru_edge (bb->preds);
    2214              : 
    2215      8189276 :   changed = false;
    2216     40201299 :   for (ix = 0; ix < EDGE_COUNT (bb->preds);)
    2217              :     {
    2218     23822747 :       e = EDGE_PRED (bb, ix);
    2219     23822747 :       ix++;
    2220              : 
    2221              :       /* As noted above, first try with the fallthru predecessor (or, a
    2222              :          fallthru predecessor if we are in cfglayout mode).  */
    2223     23822747 :       if (fallthru)
    2224              :         {
    2225              :           /* Don't combine the fallthru edge into anything else.
    2226              :              If there is a match, we'll do it the other way around.  */
    2227     17678132 :           if (e == fallthru)
    2228      6632089 :             continue;
    2229              :           /* If nothing changed since the last attempt, there is nothing
    2230              :              we can do.  */
    2231     11046043 :           if (!first_pass
    2232      4309283 :               && !((e->src->flags & BB_MODIFIED)
    2233      3448406 :                    || (fallthru->src->flags & BB_MODIFIED)))
    2234      3056105 :             continue;
    2235              : 
    2236      7989938 :           if (try_crossjump_to_edge (mode, e, fallthru, dir_forward))
    2237              :             {
    2238       149510 :               changed = true;
    2239       149510 :               ix = 0;
    2240       149510 :               continue;
    2241              :             }
    2242              :         }
    2243              : 
    2244              :       /* Non-obvious work limiting check: Recognize that we're going
    2245              :          to call try_crossjump_bb on every basic block.  So if we have
    2246              :          two blocks with lots of outgoing edges (a switch) and they
    2247              :          share lots of common destinations, then we would do the
    2248              :          cross-jump check once for each common destination.
    2249              : 
    2250              :          Now, if the blocks actually are cross-jump candidates, then
    2251              :          all of their destinations will be shared.  Which means that
    2252              :          we only need check them for cross-jump candidacy once.  We
    2253              :          can eliminate redundant checks of crossjump(A,B) by arbitrarily
    2254              :          choosing to do the check from the block for which the edge
    2255              :          in question is the first successor of A.  */
    2256     13985043 :       if (EDGE_SUCC (e->src, 0) != e)
    2257      2825415 :         continue;
    2258              : 
    2259    167798213 :       for (ix2 = 0; ix2 < EDGE_COUNT (bb->preds); ix2++)
    2260              :         {
    2261    124813768 :           e2 = EDGE_PRED (bb, ix2);
    2262              : 
    2263    124813768 :           if (e2 == e)
    2264     11070882 :             continue;
    2265              : 
    2266              :           /* We've already checked the fallthru edge above.  */
    2267    113742886 :           if (e2 == fallthru)
    2268      6288144 :             continue;
    2269              : 
    2270              :           /* The "first successor" check above only prevents multiple
    2271              :              checks of crossjump(A,B).  In order to prevent redundant
    2272              :              checks of crossjump(B,A), require that A be the block
    2273              :              with the lowest index.  */
    2274    107454742 :           if (e->src->index > e2->src->index)
    2275     53785951 :             continue;
    2276              : 
    2277              :           /* If nothing changed since the last attempt, there is nothing
    2278              :              we can do.  */
    2279     53668791 :           if (!first_pass
    2280     13183602 :               && !((e->src->flags & BB_MODIFIED)
    2281     10806326 :                    || (e2->src->flags & BB_MODIFIED)))
    2282      9936951 :             continue;
    2283              : 
    2284              :           /* Both e and e2 are not fallthru edges, so we can crossjump in either
    2285              :              direction.  */
    2286     43731840 :           if (try_crossjump_to_edge (mode, e, e2, dir_both))
    2287              :             {
    2288              :               changed = true;
    2289              :               ix = 0;
    2290              :               break;
    2291              :             }
    2292              :         }
    2293              :     }
    2294              : 
    2295      8189276 :   if (changed)
    2296       160416 :     crossjumps_occurred = true;
    2297              : 
    2298              :   return changed;
    2299              : }
    2300              : 
    2301              : /* Search the successors of BB for common insn sequences.  When found,
    2302              :    share code between them by moving it across the basic block
    2303              :    boundary.  Return true if any changes made.  */
    2304              : 
    2305              : static bool
    2306     29262285 : try_head_merge_bb (basic_block bb)
    2307              : {
    2308     29262285 :   basic_block final_dest_bb = NULL;
    2309     29262285 :   int max_match = INT_MAX;
    2310     29262285 :   edge e0;
    2311     29262285 :   rtx_insn **headptr, **currptr, **nextptr;
    2312     29262285 :   bool changed, moveall;
    2313     29262285 :   unsigned ix;
    2314     29262285 :   rtx_insn *e0_last_head;
    2315     29262285 :   rtx cond;
    2316     29262285 :   rtx_insn *move_before;
    2317     29262285 :   unsigned nedges = EDGE_COUNT (bb->succs);
    2318     29262285 :   rtx_insn *jump = BB_END (bb);
    2319     29262285 :   regset live, live_union;
    2320              : 
    2321              :   /* Nothing to do if there is not at least two outgoing edges.  */
    2322     29262285 :   if (nedges < 2)
    2323              :     return false;
    2324              : 
    2325              :   /* Don't crossjump if this block ends in a computed jump,
    2326              :      unless we are optimizing for size.  */
    2327     15005446 :   if (optimize_bb_for_size_p (bb)
    2328      1732211 :       && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
    2329     16737657 :       && computed_jump_p (BB_END (bb)))
    2330              :     return false;
    2331              : 
    2332     15005419 :   cond = get_condition (jump, &move_before, true, false);
    2333     15005419 :   if (cond == NULL_RTX)
    2334      1914153 :     move_before = jump;
    2335              : 
    2336     45289570 :   for (ix = 0; ix < nedges; ix++)
    2337     30284151 :     if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    2338              :       return false;
    2339              : 
    2340     28073394 :   for (ix = 0; ix < nedges; ix++)
    2341              :     {
    2342     23411661 :       edge e = EDGE_SUCC (bb, ix);
    2343     23411661 :       basic_block other_bb = e->dest;
    2344              : 
    2345     23411661 :       if (df_get_bb_dirty (other_bb))
    2346              :         {
    2347       551352 :           block_was_dirty = true;
    2348       551352 :           return false;
    2349              :         }
    2350              : 
    2351     22860309 :       if (e->flags & EDGE_ABNORMAL)
    2352              :         return false;
    2353              : 
    2354              :       /* Normally, all destination blocks must only be reachable from this
    2355              :          block, i.e. they must have one incoming edge.
    2356              : 
    2357              :          There is one special case we can handle, that of multiple consecutive
    2358              :          jumps where the first jumps to one of the targets of the second jump.
    2359              :          This happens frequently in switch statements for default labels.
    2360              :          The structure is as follows:
    2361              :          FINAL_DEST_BB
    2362              :          ....
    2363              :          if (cond) jump A;
    2364              :          fall through
    2365              :          BB
    2366              :          jump with targets A, B, C, D...
    2367              :          A
    2368              :          has two incoming edges, from FINAL_DEST_BB and BB
    2369              : 
    2370              :          In this case, we can try to move the insns through BB and into
    2371              :          FINAL_DEST_BB.  */
    2372     21026565 :       if (EDGE_COUNT (other_bb->preds) != 1)
    2373              :         {
    2374      8310872 :           edge incoming_edge, incoming_bb_other_edge;
    2375      8310872 :           edge_iterator ei;
    2376              : 
    2377      8310872 :           if (final_dest_bb != NULL
    2378      8310872 :               || EDGE_COUNT (other_bb->preds) != 2)
    2379      7958590 :             return false;
    2380              : 
    2381              :           /* We must be able to move the insns across the whole block.  */
    2382      4568702 :           move_before = BB_HEAD (bb);
    2383     29964805 :           while (!NONDEBUG_INSN_P (move_before))
    2384     25396103 :             move_before = NEXT_INSN (move_before);
    2385              : 
    2386      4568702 :           if (EDGE_COUNT (bb->preds) != 1)
    2387              :             return false;
    2388      2721498 :           incoming_edge = EDGE_PRED (bb, 0);
    2389      2721498 :           final_dest_bb = incoming_edge->src;
    2390     10188197 :           if (EDGE_COUNT (final_dest_bb->succs) != 2)
    2391              :             return false;
    2392      3217542 :           FOR_EACH_EDGE (incoming_bb_other_edge, ei, final_dest_bb->succs)
    2393      3217542 :             if (incoming_bb_other_edge != incoming_edge)
    2394              :               break;
    2395      2229607 :           if (incoming_bb_other_edge->dest != other_bb)
    2396              :             return false;
    2397              :         }
    2398              :     }
    2399              : 
    2400      4661733 :   e0 = EDGE_SUCC (bb, 0);
    2401      4661733 :   e0_last_head = NULL;
    2402      4661733 :   changed = false;
    2403              : 
    2404      4779761 :   for (ix = 1; ix < nedges; ix++)
    2405              :     {
    2406      4666394 :       edge e = EDGE_SUCC (bb, ix);
    2407      4666394 :       rtx_insn *e0_last, *e_last;
    2408      4666394 :       int nmatch;
    2409              : 
    2410      4666394 :       nmatch = flow_find_head_matching_sequence (e0->dest, e->dest,
    2411              :                                                  &e0_last, &e_last, 0);
    2412      4666394 :       if (nmatch == 0)
    2413      4548366 :         return false;
    2414              : 
    2415       118028 :       if (nmatch < max_match)
    2416              :         {
    2417       113565 :           max_match = nmatch;
    2418       113565 :           e0_last_head = e0_last;
    2419              :         }
    2420              :     }
    2421              : 
    2422              :   /* If we matched an entire block, we probably have to avoid moving the
    2423              :      last insn.  */
    2424       113367 :   if (max_match > 0
    2425       113367 :       && e0_last_head == BB_END (e0->dest)
    2426       122847 :       && (find_reg_note (e0_last_head, REG_EH_REGION, 0)
    2427         7932 :           || control_flow_insn_p (e0_last_head)))
    2428              :     {
    2429         1560 :       max_match--;
    2430         1560 :       if (max_match == 0)
    2431              :         return false;
    2432          245 :       e0_last_head = prev_real_nondebug_insn (e0_last_head);
    2433              :     }
    2434              : 
    2435       112052 :   if (max_match == 0)
    2436              :     return false;
    2437              : 
    2438              :   /* We must find a union of the live registers at each of the end points.  */
    2439       112052 :   live = BITMAP_ALLOC (NULL);
    2440       112052 :   live_union = BITMAP_ALLOC (NULL);
    2441              : 
    2442       112052 :   currptr = XNEWVEC (rtx_insn *, nedges);
    2443       112052 :   headptr = XNEWVEC (rtx_insn *, nedges);
    2444       112052 :   nextptr = XNEWVEC (rtx_insn *, nedges);
    2445              : 
    2446       340494 :   for (ix = 0; ix < nedges; ix++)
    2447              :     {
    2448       228442 :       int j;
    2449       228442 :       basic_block merge_bb = EDGE_SUCC (bb, ix)->dest;
    2450       228442 :       rtx_insn *head = BB_HEAD (merge_bb);
    2451              : 
    2452      1489186 :       while (!NONDEBUG_INSN_P (head))
    2453      1260744 :         head = NEXT_INSN (head);
    2454       228442 :       headptr[ix] = head;
    2455       228442 :       currptr[ix] = head;
    2456              : 
    2457              :       /* Compute the end point and live information  */
    2458       369753 :       for (j = 1; j < max_match; j++)
    2459       201848 :         do
    2460       201848 :           head = NEXT_INSN (head);
    2461       201848 :         while (!NONDEBUG_INSN_P (head));
    2462       228442 :       simulate_backwards_to_point (merge_bb, live, head);
    2463       228442 :       IOR_REG_SET (live_union, live);
    2464              :     }
    2465              : 
    2466              :   /* If we're moving across two blocks, verify the validity of the
    2467              :      first move, then adjust the target and let the loop below deal
    2468              :      with the final move.  */
    2469       112052 :   if (final_dest_bb != NULL)
    2470              :     {
    2471         7242 :       rtx_insn *move_upto;
    2472              : 
    2473         7242 :       moveall = can_move_insns_across (currptr[0], e0_last_head, move_before,
    2474              :                                        jump, e0->dest, live_union,
    2475              :                                        NULL, &move_upto);
    2476         7242 :       if (!moveall)
    2477              :         {
    2478         6021 :           if (move_upto == NULL_RTX)
    2479         5652 :             goto out;
    2480              : 
    2481         1120 :           while (e0_last_head != move_upto)
    2482              :             {
    2483          751 :               df_simulate_one_insn_backwards (e0->dest, e0_last_head,
    2484              :                                               live_union);
    2485          751 :               e0_last_head = PREV_INSN (e0_last_head);
    2486              :             }
    2487              :         }
    2488         1590 :       if (e0_last_head == NULL_RTX)
    2489            0 :         goto out;
    2490              : 
    2491         1590 :       jump = BB_END (final_dest_bb);
    2492         1590 :       cond = get_condition (jump, &move_before, true, false);
    2493         1590 :       if (cond == NULL_RTX)
    2494            0 :         move_before = jump;
    2495              :     }
    2496              : 
    2497       180754 :   do
    2498              :     {
    2499       180754 :       rtx_insn *move_upto;
    2500       180754 :       moveall = can_move_insns_across (currptr[0], e0_last_head,
    2501              :                                        move_before, jump, e0->dest, live_union,
    2502              :                                        NULL, &move_upto);
    2503       180754 :       if (!moveall && move_upto == NULL_RTX)
    2504              :         {
    2505       139597 :           if (jump == move_before)
    2506              :             break;
    2507              : 
    2508              :           /* Try again, using a different insertion point.  */
    2509        70710 :           move_before = jump;
    2510              : 
    2511        70710 :           continue;
    2512              :         }
    2513              : 
    2514        41157 :       if (final_dest_bb && !moveall)
    2515              :         /* We haven't checked whether a partial move would be OK for the first
    2516              :            move, so we have to fail this case.  */
    2517              :         break;
    2518              : 
    2519        60422 :       changed = true;
    2520        60422 :       for (;;)
    2521              :         {
    2522        60422 :           if (currptr[0] == move_upto)
    2523              :             break;
    2524        58219 :           for (ix = 0; ix < nedges; ix++)
    2525              :             {
    2526        38837 :               rtx_insn *curr = currptr[ix];
    2527        59369 :               do
    2528        59369 :                 curr = NEXT_INSN (curr);
    2529        59369 :               while (!NONDEBUG_INSN_P (curr));
    2530        38837 :               currptr[ix] = curr;
    2531              :             }
    2532              :         }
    2533              : 
    2534              :       /* If we can't currently move all of the identical insns, remember
    2535              :          each insn after the range that we'll merge.  */
    2536        41040 :       if (!moveall)
    2537        14361 :         for (ix = 0; ix < nedges; ix++)
    2538              :           {
    2539         9588 :             rtx_insn *curr = currptr[ix];
    2540        15427 :             do
    2541        15427 :               curr = NEXT_INSN (curr);
    2542        15427 :             while (!NONDEBUG_INSN_P (curr));
    2543         9588 :             nextptr[ix] = curr;
    2544              :           }
    2545              : 
    2546        41040 :       reorder_insns (headptr[0], currptr[0], PREV_INSN (move_before));
    2547        41040 :       df_set_bb_dirty (EDGE_SUCC (bb, 0)->dest);
    2548        41040 :       if (final_dest_bb != NULL)
    2549         1468 :         df_set_bb_dirty (final_dest_bb);
    2550        41040 :       df_set_bb_dirty (bb);
    2551       124514 :       for (ix = 1; ix < nedges; ix++)
    2552              :         {
    2553        42434 :           df_set_bb_dirty (EDGE_SUCC (bb, ix)->dest);
    2554        42434 :           delete_insn_chain (headptr[ix], currptr[ix], false);
    2555              :         }
    2556        41040 :       if (!moveall)
    2557              :         {
    2558         4773 :           if (jump == move_before)
    2559              :             break;
    2560              : 
    2561              :           /* For the unmerged insns, try a different insertion point.  */
    2562         3644 :           move_before = jump;
    2563              : 
    2564        10932 :           for (ix = 0; ix < nedges; ix++)
    2565         7288 :             currptr[ix] = headptr[ix] = nextptr[ix];
    2566              :         }
    2567              :     }
    2568       110621 :   while (!moveall);
    2569              : 
    2570        36267 :  out:
    2571       112052 :   free (currptr);
    2572       112052 :   free (headptr);
    2573       112052 :   free (nextptr);
    2574              : 
    2575       112052 :   crossjumps_occurred |= changed;
    2576              : 
    2577       112052 :   return changed;
    2578              : }
    2579              : 
    2580              : /* Return true if BB contains just bb note, or bb note followed
    2581              :    by only DEBUG_INSNs.  */
    2582              : 
    2583              : static bool
    2584     16023131 : trivially_empty_bb_p (basic_block bb)
    2585              : {
    2586     16023131 :   rtx_insn *insn = BB_END (bb);
    2587              : 
    2588         6886 :   while (1)
    2589              :     {
    2590     16030017 :       if (insn == BB_HEAD (bb))
    2591              :         return true;
    2592     16029164 :       if (!DEBUG_INSN_P (insn))
    2593              :         return false;
    2594         6886 :       insn = PREV_INSN (insn);
    2595              :     }
    2596              : }
    2597              : 
    2598              : /* Return true if BB contains just a return and possibly a USE of the
    2599              :    return value.  Fill in *RET and *USE with the return and use insns
    2600              :    if any found, otherwise NULL.  All CLOBBERs are ignored.  */
    2601              : 
    2602              : bool
    2603    405276346 : bb_is_just_return (basic_block bb, rtx_insn **ret, rtx_insn **use)
    2604              : {
    2605    405276346 :   *ret = *use = NULL;
    2606    405276346 :   rtx_insn *insn;
    2607              : 
    2608    405276346 :   if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    2609              :     return false;
    2610              : 
    2611    536345128 :   FOR_BB_INSNS_REVERSE (bb, insn)
    2612    522945462 :     if (NONDEBUG_INSN_P (insn))
    2613              :       {
    2614    400815839 :         rtx pat = PATTERN (insn);
    2615              : 
    2616    400815839 :         if (!*ret && ANY_RETURN_P (pat))
    2617      7524975 :           *ret = insn;
    2618      7863572 :         else if (*ret && !*use && GET_CODE (pat) == USE
    2619      1129335 :             && REG_P (XEXP (pat, 0))
    2620    394420199 :             && REG_FUNCTION_VALUE_P (XEXP (pat, 0)))
    2621      1126442 :           *use = insn;
    2622    392164422 :         else if (GET_CODE (pat) != CLOBBER)
    2623              :           return false;
    2624              :       }
    2625              : 
    2626     13399666 :   return !!*ret;
    2627              : }
    2628              : 
    2629              : /* Do simple CFG optimizations - basic block merging, simplifying of jump
    2630              :    instructions etc.  Return nonzero if changes were made.  */
    2631              : 
    2632              : static bool
    2633     21759693 : try_optimize_cfg (int mode)
    2634              : {
    2635     21759693 :   bool changed_overall = false;
    2636     21759693 :   bool changed;
    2637     21759693 :   int iterations = 0;
    2638     21759693 :   basic_block bb, b, next;
    2639              : 
    2640     21759693 :   if (mode & (CLEANUP_CROSSJUMP | CLEANUP_THREADING))
    2641      2983029 :     clear_bb_flags ();
    2642              : 
    2643     21759693 :   crossjumps_occurred = false;
    2644              : 
    2645    318801364 :   FOR_EACH_BB_FN (bb, cfun)
    2646    297041671 :     update_forwarder_flag (bb);
    2647              : 
    2648     21759693 :   if (! targetm.cannot_modify_jumps_p ())
    2649              :     {
    2650     21759693 :       first_pass = true;
    2651              :       /* Attempt to merge blocks as made possible by edge removal.  If
    2652              :          a block has only one successor, and the successor has only
    2653              :          one predecessor, they may be combined.  */
    2654     51170314 :       do
    2655              :         {
    2656     25585157 :           block_was_dirty = false;
    2657     25585157 :           changed = false;
    2658     25585157 :           iterations++;
    2659              : 
    2660     25585157 :           if (dump_file)
    2661         1763 :             fprintf (dump_file,
    2662              :                      "\n\ntry_optimize_cfg iteration %i\n\n",
    2663              :                      iterations);
    2664              : 
    2665     25585157 :           for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
    2666    431443303 :                != EXIT_BLOCK_PTR_FOR_FN (cfun);)
    2667              :             {
    2668    405858146 :               basic_block c;
    2669    405858146 :               edge s;
    2670    405858146 :               bool changed_here = false;
    2671              : 
    2672              :               /* Delete trivially dead basic blocks.  This is either
    2673              :                  blocks with no predecessors, or empty blocks with no
    2674              :                  successors.  However if the empty block with no
    2675              :                  successors is the successor of the ENTRY_BLOCK, it is
    2676              :                  kept.  This ensures that the ENTRY_BLOCK will have a
    2677              :                  successor which is a precondition for many RTL
    2678              :                  passes.  Empty blocks may result from expanding
    2679              :                  __builtin_unreachable ().  */
    2680    405858146 :               if (EDGE_COUNT (b->preds) == 0
    2681    405858146 :                   || (EDGE_COUNT (b->succs) == 0
    2682     16023131 :                       && trivially_empty_bb_p (b)
    2683          853 :                       && single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest
    2684              :                       != b))
    2685              :                 {
    2686      5479154 :                   c = b->prev_bb;
    2687      5479154 :                   if (EDGE_COUNT (b->preds) > 0)
    2688              :                     {
    2689          853 :                       edge e;
    2690          853 :                       edge_iterator ei;
    2691              : 
    2692          853 :                       if (current_ir_type () == IR_RTL_CFGLAYOUT)
    2693              :                         {
    2694           51 :                           rtx_insn *insn;
    2695           51 :                           for (insn = BB_FOOTER (b);
    2696           51 :                                insn; insn = NEXT_INSN (insn))
    2697           51 :                             if (BARRIER_P (insn))
    2698              :                               break;
    2699           51 :                           if (insn)
    2700          102 :                             FOR_EACH_EDGE (e, ei, b->preds)
    2701           51 :                               if ((e->flags & EDGE_FALLTHRU))
    2702              :                                 {
    2703           51 :                                   if (BB_FOOTER (b)
    2704           51 :                                       && BB_FOOTER (e->src) == NULL)
    2705              :                                     {
    2706           51 :                                       BB_FOOTER (e->src) = BB_FOOTER (b);
    2707           51 :                                       BB_FOOTER (b) = NULL;
    2708              :                                     }
    2709              :                                   else
    2710            0 :                                     emit_barrier_after_bb (e->src);
    2711              :                                 }
    2712              :                         }
    2713              :                       else
    2714              :                         {
    2715          802 :                           rtx_insn *last = get_last_bb_insn (b);
    2716          802 :                           if (last && BARRIER_P (last))
    2717         1604 :                             FOR_EACH_EDGE (e, ei, b->preds)
    2718          802 :                               if ((e->flags & EDGE_FALLTHRU))
    2719          802 :                                 emit_barrier_after (BB_END (e->src));
    2720              :                         }
    2721              :                     }
    2722      5479154 :                   delete_basic_block (b);
    2723      5479154 :                   changed = true;
    2724              :                   /* Avoid trying to remove the exit block.  */
    2725      5479154 :                   b = (c == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? c->next_bb : c);
    2726      5616713 :                   continue;
    2727      5479154 :                 }
    2728              : 
    2729              :               /* Remove code labels no longer used.  */
    2730    400378992 :               if (single_pred_p (b)
    2731    290693678 :                   && (single_pred_edge (b)->flags & EDGE_FALLTHRU)
    2732    204795646 :                   && !(single_pred_edge (b)->flags & EDGE_COMPLEX)
    2733    201852457 :                   && LABEL_P (BB_HEAD (b))
    2734       836524 :                   && !LABEL_PRESERVE_P (BB_HEAD (b))
    2735              :                   /* If the previous block ends with a branch to this
    2736              :                      block, we can't delete the label.  Normally this
    2737              :                      is a condjump that is yet to be simplified, but
    2738              :                      if CASE_DROPS_THRU, this can be a tablejump with
    2739              :                      some element going to the same place as the
    2740              :                      default (fallthru).  */
    2741    401195846 :                   && (single_pred (b) == ENTRY_BLOCK_PTR_FOR_FN (cfun)
    2742       816800 :                       || !JUMP_P (BB_END (single_pred (b)))
    2743       620822 :                       || ! label_is_jump_target_p (BB_HEAD (b),
    2744       620822 :                                                    BB_END (single_pred (b)))))
    2745              :                 {
    2746       810713 :                   delete_insn (BB_HEAD (b));
    2747       810713 :                   if (dump_file)
    2748           37 :                     fprintf (dump_file, "Deleted label in block %i.\n",
    2749              :                              b->index);
    2750              :                 }
    2751              : 
    2752              :               /* If we fall through an empty block, we can remove it.  */
    2753    400516551 :               if (!(mode & (CLEANUP_CFGLAYOUT | CLEANUP_NO_INSN_DEL))
    2754     84963893 :                   && single_pred_p (b)
    2755     62790444 :                   && (single_pred_edge (b)->flags & EDGE_FALLTHRU)
    2756     44375280 :                   && !LABEL_P (BB_HEAD (b))
    2757     44022497 :                   && FORWARDER_BLOCK_P (b)
    2758              :                   /* Note that forwarder_block_p true ensures that
    2759              :                      there is a successor for this block.  */
    2760      5293420 :                   && (single_succ_edge (b)->flags & EDGE_FALLTHRU)
    2761    400602091 :                   && n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS + 1)
    2762              :                 {
    2763       137559 :                   if (dump_file)
    2764           11 :                     fprintf (dump_file,
    2765              :                              "Deleting fallthru block %i.\n",
    2766              :                              b->index);
    2767              : 
    2768         2622 :                   c = ((b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    2769       137559 :                        ? b->next_bb : b->prev_bb);
    2770       137559 :                   redirect_edge_succ_nodup (single_pred_edge (b),
    2771              :                                             single_succ (b));
    2772       137559 :                   delete_basic_block (b);
    2773       137559 :                   changed = true;
    2774       137559 :                   b = c;
    2775       137559 :                   continue;
    2776              :                 }
    2777              : 
    2778              :               /* Merge B with its single successor, if any.  */
    2779    400241433 :               if (single_succ_p (b)
    2780    175331478 :                   && (s = single_succ_edge (b))
    2781    175331478 :                   && !(s->flags & EDGE_COMPLEX)
    2782    166687284 :                   && (c = s->dest) != EXIT_BLOCK_PTR_FOR_FN (cfun)
    2783    409059005 :                   && single_pred_p (c)
    2784    407483993 :                   && b != c)
    2785              :                 {
    2786              :                   /* When not in cfg_layout mode use code aware of reordering
    2787              :                      INSN.  This code possibly creates new basic blocks so it
    2788              :                      does not fit merge_blocks interface and is kept here in
    2789              :                      hope that it will become useless once more of compiler
    2790              :                      is transformed to use cfg_layout mode.  */
    2791              : 
    2792      8817572 :                   if ((mode & CLEANUP_CFGLAYOUT)
    2793      8817572 :                       && can_merge_blocks_p (b, c))
    2794              :                     {
    2795       792952 :                       merge_blocks (b, c);
    2796       792952 :                       update_forwarder_flag (b);
    2797       792952 :                       changed_here = true;
    2798              :                     }
    2799      8024620 :                   else if (!(mode & CLEANUP_CFGLAYOUT)
    2800              :                            /* If the jump insn has side effects,
    2801              :                               we can't kill the edge.  */
    2802      6782485 :                            && (!JUMP_P (BB_END (b))
    2803      2226192 :                                || (reload_completed
    2804      3064650 :                                    ? simplejump_p (BB_END (b))
    2805       838458 :                                    : (onlyjump_p (BB_END (b))
    2806       837667 :                                       && !tablejump_p (BB_END (b),
    2807              :                                                        NULL, NULL))))
    2808     17869809 :                            && (next = merge_blocks_move (s, b, c, mode)))
    2809              :                       {
    2810              :                         b = next;
    2811              :                         changed_here = true;
    2812              :                       }
    2813              :                 }
    2814              : 
    2815              :               /* Try to change a branch to a return to just that return.  */
    2816    400241433 :               rtx_insn *ret, *use;
    2817    400241433 :               if (single_succ_p (b)
    2818    173680786 :                   && onlyjump_p (BB_END (b))
    2819    427606343 :                   && bb_is_just_return (single_succ (b), &ret, &use))
    2820              :                 {
    2821        37623 :                   if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
    2822        37623 :                                      PATTERN (ret), 0))
    2823              :                     {
    2824        37623 :                       if (use)
    2825        21669 :                         emit_insn_before (copy_insn (PATTERN (use)),
    2826        21669 :                                           BB_END (b));
    2827        37623 :                       if (dump_file)
    2828           23 :                         fprintf (dump_file, "Changed jump %d->%d to return.\n",
    2829           23 :                                  b->index, single_succ (b)->index);
    2830        37623 :                       redirect_edge_succ (single_succ_edge (b),
    2831        37623 :                                           EXIT_BLOCK_PTR_FOR_FN (cfun));
    2832        37623 :                       single_succ_edge (b)->flags &= ~EDGE_CROSSING;
    2833        37623 :                       changed_here = true;
    2834              :                     }
    2835              :                 }
    2836              : 
    2837              :               /* Try to change a conditional branch to a return to the
    2838              :                  respective conditional return.  */
    2839    400241433 :               if (EDGE_COUNT (b->succs) == 2
    2840    210050671 :                   && any_condjump_p (BB_END (b))
    2841    584685329 :                   && bb_is_just_return (BRANCH_EDGE (b)->dest, &ret, &use))
    2842              :                 {
    2843       599081 :                   if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
    2844       599081 :                                      PATTERN (ret), 0))
    2845              :                     {
    2846            0 :                       if (use)
    2847            0 :                         emit_insn_before (copy_insn (PATTERN (use)),
    2848            0 :                                           BB_END (b));
    2849            0 :                       if (dump_file)
    2850            0 :                         fprintf (dump_file, "Changed conditional jump %d->%d "
    2851              :                                  "to conditional return.\n",
    2852            0 :                                  b->index, BRANCH_EDGE (b)->dest->index);
    2853            0 :                       redirect_edge_succ (BRANCH_EDGE (b),
    2854            0 :                                           EXIT_BLOCK_PTR_FOR_FN (cfun));
    2855            0 :                       BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
    2856            0 :                       changed_here = true;
    2857              :                     }
    2858              :                 }
    2859              : 
    2860              :               /* Try to flip a conditional branch that falls through to
    2861              :                  a return so that it becomes a conditional return and a
    2862              :                  new jump to the original branch target.  */
    2863    400241433 :               if (EDGE_COUNT (b->succs) == 2
    2864    210050671 :                   && BRANCH_EDGE (b)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
    2865    210050671 :                   && any_condjump_p (BB_END (b))
    2866    584685329 :                   && bb_is_just_return (FALLTHRU_EDGE (b)->dest, &ret, &use))
    2867              :                 {
    2868       158013 :                   if (invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
    2869       158013 :                                    JUMP_LABEL (BB_END (b)), 0))
    2870              :                     {
    2871       158013 :                       basic_block new_ft = BRANCH_EDGE (b)->dest;
    2872       158013 :                       if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
    2873       158013 :                                          PATTERN (ret), 0))
    2874              :                         {
    2875            0 :                           if (use)
    2876            0 :                             emit_insn_before (copy_insn (PATTERN (use)),
    2877            0 :                                               BB_END (b));
    2878            0 :                           if (dump_file)
    2879            0 :                             fprintf (dump_file, "Changed conditional jump "
    2880              :                                      "%d->%d to conditional return, adding "
    2881              :                                      "fall-through jump.\n",
    2882            0 :                                      b->index, BRANCH_EDGE (b)->dest->index);
    2883            0 :                           redirect_edge_succ (BRANCH_EDGE (b),
    2884            0 :                                               EXIT_BLOCK_PTR_FOR_FN (cfun));
    2885            0 :                           BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
    2886            0 :                           std::swap (BRANCH_EDGE (b)->probability,
    2887            0 :                                      FALLTHRU_EDGE (b)->probability);
    2888            0 :                           update_br_prob_note (b);
    2889            0 :                           basic_block jb = force_nonfallthru (FALLTHRU_EDGE (b));
    2890            0 :                           notice_new_block (jb);
    2891            0 :                           if (!redirect_jump (as_a <rtx_jump_insn *> (BB_END (jb)),
    2892            0 :                                               block_label (new_ft), 0))
    2893            0 :                             gcc_unreachable ();
    2894            0 :                           redirect_edge_succ (single_succ_edge (jb), new_ft);
    2895            0 :                           changed_here = true;
    2896              :                         }
    2897              :                       else
    2898              :                         {
    2899              :                           /* Invert the jump back to what it was.  This should
    2900              :                              never fail.  */
    2901       158013 :                           if (!invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
    2902       158013 :                                             JUMP_LABEL (BB_END (b)), 0))
    2903            0 :                             gcc_unreachable ();
    2904              :                         }
    2905              :                     }
    2906              :                 }
    2907              : 
    2908              :               /* Simplify branch over branch.  */
    2909    400241433 :               if ((mode & CLEANUP_EXPENSIVE)
    2910    105943045 :                    && !(mode & CLEANUP_CFGLAYOUT)
    2911    434406086 :                    && try_simplify_condjump (b))
    2912              :                 changed_here = true;
    2913              : 
    2914              :               /* If B has a single outgoing edge, but uses a
    2915              :                  non-trivial jump instruction without side-effects, we
    2916              :                  can either delete the jump entirely, or replace it
    2917              :                  with a simple unconditional jump.  */
    2918    400241433 :               if (single_succ_p (b)
    2919    173680819 :                   && single_succ (b) != EXIT_BLOCK_PTR_FOR_FN (cfun)
    2920    144015434 :                   && onlyjump_p (BB_END (b))
    2921     28967053 :                   && !CROSSING_JUMP_P (BB_END (b))
    2922    426497248 :                   && try_redirect_by_replacing_jump (single_succ_edge (b),
    2923              :                                                      single_succ (b),
    2924     27896187 :                                                      (mode & CLEANUP_CFGLAYOUT) != 0))
    2925              :                 {
    2926      4229989 :                   update_forwarder_flag (b);
    2927      4229989 :                   changed_here = true;
    2928              :                 }
    2929              : 
    2930              :               /* Simplify branch to branch.  */
    2931    400241433 :               if (try_forward_edges (mode, b))
    2932              :                 {
    2933      6422587 :                   update_forwarder_flag (b);
    2934      6422587 :                   changed_here = true;
    2935              :                 }
    2936              : 
    2937              :               /* Look for shared code between blocks.  */
    2938    400241433 :               if ((mode & CLEANUP_CROSSJUMP)
    2939    400241433 :                   && try_crossjump_bb (mode, b))
    2940              :                 changed_here = true;
    2941              : 
    2942    400241433 :               if ((mode & CLEANUP_CROSSJUMP)
    2943              :                   /* This can lengthen register lifetimes.  Do it only after
    2944              :                      reload.  */
    2945     29262285 :                   && reload_completed
    2946    429503718 :                   && try_head_merge_bb (b))
    2947              :                 changed_here = true;
    2948              : 
    2949              :               /* Don't get confused by the index shift caused by
    2950              :                  deleting blocks.  */
    2951    400201295 :               if (!changed_here)
    2952    385511125 :                 b = b->next_bb;
    2953              :               else
    2954              :                 changed = true;
    2955              :             }
    2956              : 
    2957     25585157 :           if ((mode & CLEANUP_CROSSJUMP)
    2958     25585157 :               && try_crossjump_bb (mode, EXIT_BLOCK_PTR_FOR_FN (cfun)))
    2959              :             changed = true;
    2960              : 
    2961     25585157 :           if (block_was_dirty)
    2962              :             {
    2963              :               /* This should only be set by head-merging.  */
    2964       197890 :               gcc_assert (mode & CLEANUP_CROSSJUMP);
    2965       197890 :               df_analyze ();
    2966              :             }
    2967              : 
    2968     25585157 :           if (changed)
    2969              :             {
    2970              :               /* Edge forwarding in particular can cause hot blocks previously
    2971              :                  reached by both hot and cold blocks to become dominated only
    2972              :                  by cold blocks. This will cause the verification below to fail,
    2973              :                  and lead to now cold code in the hot section. This is not easy
    2974              :                  to detect and fix during edge forwarding, and in some cases
    2975              :                  is only visible after newly unreachable blocks are deleted,
    2976              :                  which will be done in fixup_partitions.  */
    2977      3825464 :               if ((mode & CLEANUP_NO_PARTITIONING) == 0)
    2978              :                 {
    2979      3687972 :                   fixup_partitions ();
    2980      3687972 :                   checking_verify_flow_info ();
    2981              :                 }
    2982              :             }
    2983              : 
    2984     25585157 :           changed_overall |= changed;
    2985     25585157 :           first_pass = false;
    2986              :         }
    2987              :       while (changed);
    2988              :     }
    2989              : 
    2990    352230948 :   FOR_ALL_BB_FN (b, cfun)
    2991    330471255 :     b->flags &= ~(BB_FORWARDER_BLOCK | BB_NONTHREADABLE_BLOCK);
    2992              : 
    2993     21759693 :   return changed_overall;
    2994              : }
    2995              : 
    2996              : /* Delete all unreachable basic blocks.  */
    2997              : 
    2998              : bool
    2999     28087669 : delete_unreachable_blocks (void)
    3000              : {
    3001     28087669 :   bool changed = false;
    3002     28087669 :   basic_block b, prev_bb;
    3003              : 
    3004     28087669 :   find_unreachable_blocks ();
    3005              : 
    3006              :   /* When we're in GIMPLE mode and there may be debug bind insns, we
    3007              :      should delete blocks in reverse dominator order, so as to get a
    3008              :      chance to substitute all released DEFs into debug bind stmts.  If
    3009              :      we don't have dominators information, walking blocks backward
    3010              :      gets us a better chance of retaining most debug information than
    3011              :      otherwise.  */
    3012     12717817 :   if (MAY_HAVE_DEBUG_BIND_INSNS && current_ir_type () == IR_GIMPLE
    3013     28336501 :       && dom_info_available_p (CDI_DOMINATORS))
    3014              :     {
    3015            0 :       for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
    3016            0 :            b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
    3017              :         {
    3018            0 :           prev_bb = b->prev_bb;
    3019              : 
    3020            0 :           if (!(b->flags & BB_REACHABLE))
    3021              :             {
    3022              :               /* Speed up the removal of blocks that don't dominate
    3023              :                  others.  Walking backwards, this should be the common
    3024              :                  case.  */
    3025            0 :               if (!first_dom_son (CDI_DOMINATORS, b))
    3026            0 :                 delete_basic_block (b);
    3027              :               else
    3028              :                 {
    3029            0 :                   auto_vec<basic_block> h
    3030            0 :                     = get_all_dominated_blocks (CDI_DOMINATORS, b);
    3031              : 
    3032            0 :                   while (h.length ())
    3033              :                     {
    3034            0 :                       b = h.pop ();
    3035              : 
    3036            0 :                       prev_bb = b->prev_bb;
    3037              : 
    3038            0 :                       gcc_assert (!(b->flags & BB_REACHABLE));
    3039              : 
    3040            0 :                       delete_basic_block (b);
    3041              :                     }
    3042            0 :                 }
    3043              : 
    3044              :               changed = true;
    3045              :             }
    3046              :         }
    3047              :     }
    3048              :   else
    3049              :     {
    3050     28087669 :       for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
    3051    417729776 :            b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
    3052              :         {
    3053    389642107 :           prev_bb = b->prev_bb;
    3054              : 
    3055    389642107 :           if (!(b->flags & BB_REACHABLE))
    3056              :             {
    3057      1613628 :               delete_basic_block (b);
    3058      1613628 :               changed = true;
    3059              :             }
    3060              :         }
    3061              :     }
    3062              : 
    3063     28087669 :   if (changed)
    3064       434470 :     tidy_fallthru_edges ();
    3065     28087669 :   return changed;
    3066              : }
    3067              : 
    3068              : /* Delete any jump tables never referenced.  We can't delete them at the
    3069              :    time of removing tablejump insn as they are referenced by the preceding
    3070              :    insns computing the destination, so we delay deleting and garbagecollect
    3071              :    them once life information is computed.  */
    3072              : void
    3073      9095361 : delete_dead_jumptables (void)
    3074              : {
    3075      9095361 :   basic_block bb;
    3076              : 
    3077              :   /* A dead jump table does not belong to any basic block.  Scan insns
    3078              :      between two adjacent basic blocks.  */
    3079    101693511 :   FOR_EACH_BB_FN (bb, cfun)
    3080              :     {
    3081     92598150 :       rtx_insn *insn, *next;
    3082              : 
    3083     92598150 :       for (insn = NEXT_INSN (BB_END (bb));
    3084    169292793 :            insn && !NOTE_INSN_BASIC_BLOCK_P (insn);
    3085              :            insn = next)
    3086              :         {
    3087     76694643 :           next = NEXT_INSN (insn);
    3088     76694643 :           if (LABEL_P (insn)
    3089     43551884 :               && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
    3090     78529392 :               && JUMP_TABLE_DATA_P (next))
    3091              :             {
    3092            0 :               rtx_insn *label = insn, *jump = next;
    3093              : 
    3094            0 :               if (dump_file)
    3095            0 :                 fprintf (dump_file, "Dead jumptable %i removed\n",
    3096            0 :                          INSN_UID (insn));
    3097              : 
    3098            0 :               next = NEXT_INSN (next);
    3099            0 :               delete_insn (jump);
    3100            0 :               delete_insn (label);
    3101              :             }
    3102              :         }
    3103              :     }
    3104      9095361 : }
    3105              : 
    3106              : 
    3107              : /* Tidy the CFG by deleting unreachable code and whatnot.  */
    3108              : 
    3109              : bool
    3110     19606094 : cleanup_cfg (int mode)
    3111              : {
    3112     19606094 :   bool changed = false;
    3113              : 
    3114              :   /* Set the cfglayout mode flag here.  We could update all the callers
    3115              :      but that is just inconvenient, especially given that we eventually
    3116              :      want to have cfglayout mode as the default.  */
    3117     19606094 :   if (current_ir_type () == IR_RTL_CFGLAYOUT)
    3118     13026090 :     mode |= CLEANUP_CFGLAYOUT;
    3119              : 
    3120     19606094 :   timevar_push (TV_CLEANUP_CFG);
    3121     19606094 :   if (delete_unreachable_blocks ())
    3122              :     {
    3123       154330 :       changed = true;
    3124              :       /* We've possibly created trivially dead code.  Cleanup it right
    3125              :          now to introduce more opportunities for try_optimize_cfg.  */
    3126       154330 :       if (!(mode & (CLEANUP_NO_INSN_DEL))
    3127        41838 :           && !reload_completed)
    3128        41775 :         delete_trivially_dead_insns (get_insns (), max_reg_num ());
    3129              :     }
    3130              : 
    3131     19606094 :   compact_blocks ();
    3132              : 
    3133              :   /* To tail-merge blocks ending in the same noreturn function (e.g.
    3134              :      a call to abort) we have to insert fake edges to exit.  Do this
    3135              :      here once.  The fake edges do not interfere with any other CFG
    3136              :      cleanups.  */
    3137     19606094 :   if (mode & CLEANUP_CROSSJUMP)
    3138       963984 :     add_noreturn_fake_exit_edges ();
    3139              : 
    3140     19606094 :   if (!dbg_cnt (cfg_cleanup))
    3141              :     return changed;
    3142              : 
    3143     21759693 :   while (try_optimize_cfg (mode))
    3144              :     {
    3145      3735418 :       delete_unreachable_blocks (), changed = true;
    3146      3735418 :       if (!(mode & CLEANUP_NO_INSN_DEL))
    3147              :         {
    3148              :           /* Try to remove some trivially dead insns when doing an expensive
    3149              :              cleanup.  But delete_trivially_dead_insns doesn't work after
    3150              :              reload (it only handles pseudos) and run_fast_dce is too costly
    3151              :              to run in every iteration.
    3152              : 
    3153              :              For effective cross jumping, we really want to run a fast DCE to
    3154              :              clean up any dead conditions, or they get in the way of performing
    3155              :              useful tail merges.
    3156              : 
    3157              :              Other transformations in cleanup_cfg are not so sensitive to dead
    3158              :              code, so delete_trivially_dead_insns or even doing nothing at all
    3159              :              is good enough.  */
    3160       636625 :           if ((mode & CLEANUP_EXPENSIVE) && !reload_completed
    3161      2222120 :               && !delete_trivially_dead_insns (get_insns (), max_reg_num ()))
    3162              :             break;
    3163      2153599 :           if ((mode & CLEANUP_CROSSJUMP) && crossjumps_occurred)
    3164              :             {
    3165        81259 :               run_fast_dce ();
    3166        81259 :               mode &= ~CLEANUP_FORCE_FAST_DCE;
    3167              :             }
    3168              :         }
    3169              :       else
    3170              :         break;
    3171              :     }
    3172              : 
    3173     19606094 :   if (mode & CLEANUP_CROSSJUMP)
    3174       963984 :     remove_fake_exit_edges ();
    3175              : 
    3176     19606094 :   if (mode & CLEANUP_FORCE_FAST_DCE)
    3177       115627 :     run_fast_dce ();
    3178              : 
    3179              :   /* Don't call delete_dead_jumptables in cfglayout mode, because
    3180              :      that function assumes that jump tables are in the insns stream.
    3181              :      But we also don't _have_ to delete dead jumptables in cfglayout
    3182              :      mode because we shouldn't even be looking at things that are
    3183              :      not in a basic block.  Dead jumptables are cleaned up when
    3184              :      going out of cfglayout mode.  */
    3185     19606094 :   if (!(mode & CLEANUP_CFGLAYOUT))
    3186      6580004 :     delete_dead_jumptables ();
    3187              : 
    3188              :   /* ???  We probably do this way too often.  */
    3189     19606094 :   if (current_loops
    3190      9129413 :       && (changed
    3191      6739394 :           || (mode & CLEANUP_CFG_CHANGED)))
    3192              :     {
    3193      2480919 :       timevar_push (TV_REPAIR_LOOPS);
    3194              :       /* The above doesn't preserve dominance info if available.  */
    3195      2480919 :       gcc_assert (!dom_info_available_p (CDI_DOMINATORS));
    3196      2480919 :       calculate_dominance_info (CDI_DOMINATORS);
    3197      2480919 :       fix_loop_structure (NULL);
    3198      2480919 :       free_dominance_info (CDI_DOMINATORS);
    3199      2480919 :       timevar_pop (TV_REPAIR_LOOPS);
    3200              :     }
    3201              : 
    3202     19606094 :   timevar_pop (TV_CLEANUP_CFG);
    3203              : 
    3204     19606094 :   return changed;
    3205              : }
    3206              : 
    3207              : namespace {
    3208              : 
    3209              : const pass_data pass_data_jump =
    3210              : {
    3211              :   RTL_PASS, /* type */
    3212              :   "jump", /* name */
    3213              :   OPTGROUP_NONE, /* optinfo_flags */
    3214              :   TV_JUMP, /* tv_id */
    3215              :   0, /* properties_required */
    3216              :   0, /* properties_provided */
    3217              :   0, /* properties_destroyed */
    3218              :   0, /* todo_flags_start */
    3219              :   0, /* todo_flags_finish */
    3220              : };
    3221              : 
    3222              : class pass_jump : public rtl_opt_pass
    3223              : {
    3224              : public:
    3225       285722 :   pass_jump (gcc::context *ctxt)
    3226       571444 :     : rtl_opt_pass (pass_data_jump, ctxt)
    3227              :   {}
    3228              : 
    3229              :   /* opt_pass methods: */
    3230              :   unsigned int execute (function *) final override;
    3231              : 
    3232              : }; // class pass_jump
    3233              : 
    3234              : unsigned int
    3235      1471359 : pass_jump::execute (function *)
    3236              : {
    3237      1471359 :   delete_trivially_dead_insns (get_insns (), max_reg_num ());
    3238      1471359 :   if (dump_file)
    3239           84 :     dump_flow_info (dump_file, dump_flags);
    3240      2942718 :   cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0)
    3241      1043565 :                | (flag_thread_jumps && flag_expensive_optimizations
    3242      1471359 :                   ? CLEANUP_THREADING : 0));
    3243      1471359 :   return 0;
    3244              : }
    3245              : 
    3246              : } // anon namespace
    3247              : 
    3248              : rtl_opt_pass *
    3249       285722 : make_pass_jump (gcc::context *ctxt)
    3250              : {
    3251       285722 :   return new pass_jump (ctxt);
    3252              : }
    3253              : 
    3254              : namespace {
    3255              : 
    3256              : const pass_data pass_data_jump_after_combine =
    3257              : {
    3258              :   RTL_PASS, /* type */
    3259              :   "jump_after_combine", /* name */
    3260              :   OPTGROUP_NONE, /* optinfo_flags */
    3261              :   TV_JUMP, /* tv_id */
    3262              :   0, /* properties_required */
    3263              :   0, /* properties_provided */
    3264              :   0, /* properties_destroyed */
    3265              :   0, /* todo_flags_start */
    3266              :   0, /* todo_flags_finish */
    3267              : };
    3268              : 
    3269              : class pass_jump_after_combine : public rtl_opt_pass
    3270              : {
    3271              : public:
    3272       285722 :   pass_jump_after_combine (gcc::context *ctxt)
    3273       571444 :     : rtl_opt_pass (pass_data_jump_after_combine, ctxt)
    3274              :   {}
    3275              : 
    3276              :   /* opt_pass methods: */
    3277      1471370 :   bool gate (function *) final override
    3278              :   {
    3279      1471370 :     return flag_thread_jumps && flag_expensive_optimizations;
    3280              :   }
    3281              :   unsigned int execute (function *) final override;
    3282              : 
    3283              : }; // class pass_jump_after_combine
    3284              : 
    3285              : unsigned int
    3286       963839 : pass_jump_after_combine::execute (function *)
    3287              : {
    3288              :   /* Jump threading does not keep dominators up-to-date.  */
    3289       963839 :   free_dominance_info (CDI_DOMINATORS);
    3290       963839 :   cleanup_cfg (CLEANUP_THREADING);
    3291       963839 :   return 0;
    3292              : }
    3293              : 
    3294              : } // anon namespace
    3295              : 
    3296              : rtl_opt_pass *
    3297       285722 : make_pass_jump_after_combine (gcc::context *ctxt)
    3298              : {
    3299       285722 :   return new pass_jump_after_combine (ctxt);
    3300              : }
    3301              : 
    3302              : namespace {
    3303              : 
    3304              : const pass_data pass_data_jump2 =
    3305              : {
    3306              :   RTL_PASS, /* type */
    3307              :   "jump2", /* name */
    3308              :   OPTGROUP_NONE, /* optinfo_flags */
    3309              :   TV_JUMP, /* tv_id */
    3310              :   0, /* properties_required */
    3311              :   0, /* properties_provided */
    3312              :   0, /* properties_destroyed */
    3313              :   0, /* todo_flags_start */
    3314              :   0, /* todo_flags_finish */
    3315              : };
    3316              : 
    3317              : class pass_jump2 : public rtl_opt_pass
    3318              : {
    3319              : public:
    3320       285722 :   pass_jump2 (gcc::context *ctxt)
    3321       571444 :     : rtl_opt_pass (pass_data_jump2, ctxt)
    3322              :   {}
    3323              : 
    3324              :   /* opt_pass methods: */
    3325      1471363 :   unsigned int execute (function *) final override
    3326              :     {
    3327      1978742 :       cleanup_cfg (flag_crossjumping ? CLEANUP_CROSSJUMP : 0);
    3328      1471363 :       return 0;
    3329              :     }
    3330              : 
    3331              : }; // class pass_jump2
    3332              : 
    3333              : } // anon namespace
    3334              : 
    3335              : rtl_opt_pass *
    3336       285722 : make_pass_jump2 (gcc::context *ctxt)
    3337              : {
    3338       285722 :   return new pass_jump2 (ctxt);
    3339              : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.