LCOV - code coverage report
Current view: top level - gcc - gimple-harden-control-flow.cc (source / functions)
Test:        gcc.info
Test Date:   2024-12-21 13:15:12
             Coverage      Total     Hit
Lines:         95.7 %        644     616
Functions:     95.8 %         24      23
Branches:           -          0       0
Legend: Lines: hit / not hit | Branches: + taken, - not taken, # not executed

             Branch data     Line data    Source code
       1                 :             : /* Control flow redundancy hardening.
       2                 :             :    Copyright (C) 2022-2024 Free Software Foundation, Inc.
       3                 :             :    Contributed by Alexandre Oliva <oliva@adacore.com>.
       4                 :             : 
       5                 :             : This file is part of GCC.
       6                 :             : 
       7                 :             : GCC is free software; you can redistribute it and/or modify it under
       8                 :             : the terms of the GNU General Public License as published by the Free
       9                 :             : Software Foundation; either version 3, or (at your option) any later
      10                 :             : version.
      11                 :             : 
      12                 :             : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      13                 :             : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      14                 :             : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      15                 :             : for more details.
      16                 :             : 
      17                 :             : You should have received a copy of the GNU General Public License
      18                 :             : along with GCC; see the file COPYING3.  If not see
      19                 :             : <http://www.gnu.org/licenses/>.  */
      20                 :             : 
      21                 :             : #include "config.h"
      22                 :             : #define INCLUDE_ALGORITHM /* find */
      23                 :             : #include "system.h"
      24                 :             : #include "coretypes.h"
      25                 :             : #include "backend.h"
      26                 :             : #include "memmodel.h"
      27                 :             : #include "tm_p.h"
      28                 :             : #include "tree.h"
      29                 :             : #include "fold-const.h"
      30                 :             : #include "gimple.h"
      31                 :             : #include "gimplify.h"
      32                 :             : #include "tree-pass.h"
      33                 :             : #include "ssa.h"
      34                 :             : #include "gimple-iterator.h"
      35                 :             : #include "gimple-pretty-print.h"
      36                 :             : #include "tree-cfg.h"
      37                 :             : #include "tree-cfgcleanup.h"
      38                 :             : #include "tree-eh.h"
      39                 :             : #include "except.h"
      40                 :             : #include "sbitmap.h"
      41                 :             : #include "basic-block.h"
      42                 :             : #include "cfghooks.h"
      43                 :             : #include "cfgloop.h"
      44                 :             : #include "cgraph.h"
      45                 :             : #include "alias.h"
      46                 :             : #include "varasm.h"
      47                 :             : #include "output.h"
      48                 :             : #include "langhooks.h"
      49                 :             : #include "diagnostic.h"
      50                 :             : #include "intl.h"
      51                 :             : 
      52                 :             : namespace {
      53                 :             : 
      54                 :             : /* This pass introduces verification, at function exits, that booleans
      55                 :             :    set in each basic block during function execution reflect the
      56                 :             :    control flow graph: for each visited block, check that at least one
      57                 :             :    predecessor and at least one successor were also visited.  This
      58                 :             :    sort of hardening may detect various kinds of attacks.  */
      59                 :             : 
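   As a rough sketch, the transformation this pass performs is equivalent
   to hand-writing the following (hypothetical C, for a function with
   NBLOCKS checked blocks and a hypothetical CFG_ENCODING table; the pass
   actually emits GIMPLE, and small functions get the check expanded
   inline instead of calling the runtime checker):

     void f (void)
     {
       unsigned int visited[(NBLOCKS + 31) / 32] = { 0 };
       /* On entry to each basic block B:  */
       visited[B / 32] |= 1u << (B % 32);
       /* ... rest of the function body ...  */
       /* At each exit point, verify that the bits set in VISITED
          describe a consistent path through the CFG:  */
       __builtin___hardcfr_check (NBLOCKS, visited, CFG_ENCODING);
     }
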
      60                 :             : /* Define a pass to harden code through control flow redundancy.  */
      61                 :             : 
      62                 :             : const pass_data pass_data_harden_control_flow_redundancy = {
      63                 :             :   GIMPLE_PASS,
      64                 :             :   "hardcfr",
      65                 :             :   OPTGROUP_NONE,
      66                 :             :   TV_NONE,
      67                 :             :   PROP_cfg | PROP_ssa, // properties_required
      68                 :             :   0,        // properties_provided
      69                 :             :   0,        // properties_destroyed
      70                 :             :   TODO_cleanup_cfg, // properties_start
      71                 :             :   0,        // properties_finish
      72                 :             : };
      73                 :             : 
      74                 :             : class pass_harden_control_flow_redundancy : public gimple_opt_pass
      75                 :             : {
      76                 :             : public:
      77                 :      280114 :   pass_harden_control_flow_redundancy (gcc::context *ctxt)
      78                 :      560228 :     : gimple_opt_pass (pass_data_harden_control_flow_redundancy, ctxt)
      79                 :             :   {}
      80                 :           0 :   opt_pass *clone () { return new pass_harden_control_flow_redundancy (m_ctxt); }
      81                 :     1416359 :   virtual bool gate (function *fun) {
      82                 :             :     /* Return quickly if the pass is disabled, without checking any of
      83                 :             :        the conditions that might give rise to warnings that would only
      84                 :             :        be appropriate if hardening was requested.  */
      85                 :     1416359 :     if (!flag_harden_control_flow_redundancy)
      86                 :             :       return false;
      87                 :             : 
      88                 :             :     /* Functions that return more than once, like setjmp and vfork
      89                 :             :        (that also gets this flag set), will start recording a path
      90                 :             :        after the first return, and then may take another path when
      91                 :             :        they return again.  The unterminated path may then be flagged
      92                 :             :        as an error.  ??? We could save the visited array before the
      93                 :             :        call and restore it if it returns again.  */
      94                 :        1717 :     if (fun->calls_setjmp)
      95                 :             :       {
      96                 :           0 :         warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
      97                 :             :                     "%qD calls %<setjmp%> or similar,"
      98                 :             :                     " %<-fharden-control-flow-redundancy%> is not supported",
      99                 :             :                     fun->decl);
     100                 :           0 :         return false;
     101                 :             :       }
     102                 :             : 
     103                 :             :     /* Some targets bypass the abnormal dispatcher block in nonlocal
     104                 :             :        gotos, and then we'd miss its visited bit.  It might be doable
     105                 :             :        to make it work uniformly, but this feature is not used often
     106                 :             :        enough to make it worthwhile.  */
     107                 :        1717 :     if (fun->has_nonlocal_label)
     108                 :             :       {
     109                 :           0 :         warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
     110                 :             :                     "%qD receives nonlocal gotos,"
     111                 :             :                     " %<-fharden-control-flow-redundancy%> is not supported",
     112                 :             :                     fun->decl);
     113                 :           0 :         return false;
     114                 :             :       }
     115                 :             : 
     116                 :        1717 :     if (fun->cfg && param_hardcfr_max_blocks > 0
     117                 :         330 :         && (n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS
     118                 :             :             > param_hardcfr_max_blocks))
     119                 :             :       {
     120                 :          82 :         warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
     121                 :             :                     "%qD has more than %u blocks, the requested"
     122                 :             :                     " maximum for %<-fharden-control-flow-redundancy%>",
     123                 :             :                     fun->decl, param_hardcfr_max_blocks);
     124                 :          82 :         return false;
     125                 :             :       }
     126                 :             : 
     127                 :             :     return true;
     128                 :             :   }
     129                 :             :   virtual unsigned int execute (function *);
     130                 :             : };
     131                 :             : 
     132                 :             : }
     133                 :             : 
     134                 :             : /* Return TRUE iff CFR checks should be inserted before returning
     135                 :             :    calls.  */
     136                 :             : 
     137                 :             : static bool
     138                 :        1122 : check_returning_calls_p ()
     139                 :             : {
     140                 :        1122 :   return
     141                 :        1122 :     flag_harden_control_flow_redundancy_check_returning_calls > 0
     142                 :        1122 :     || (flag_harden_control_flow_redundancy_check_returning_calls < 0
     143                 :             :         /* Gates pass_tail_calls.  */
     144                 :         286 :         && flag_optimize_sibling_calls
     145                 :             :         /* Gates pass_all_optimizations.  */
     146                 :         242 :         && optimize >= 1 && !optimize_debug);
     147                 :             : }
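
   A "returning call" in this sense is one whose result flows straight to
   the caller's return, as in this hypothetical user-level example:

     int g (int);
     int f (int x) { return g (x); }  /* check inserted before g ()
                                         when this predicate holds  */

   With the flag at its default -1, the predicate follows the gates above,
   so it is effectively on wherever -foptimize-sibling-calls is enabled
   (by default, at -O2 and above) and off at -O0.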
     148                 :             : 
      149                 :             : /* Scan BB from the end, updating *RETPTR, if given, as return stmts and
     150                 :             :    copies are found.  Return a call or a stmt that cannot appear after
     151                 :             :    a tail call, or NULL if the top of the block is reached without
     152                 :             :    finding any.  */
     153                 :             : 
     154                 :             : static gimple *
     155                 :        2697 : hardcfr_scan_block (basic_block bb, tree **retptr)
     156                 :             : {
     157                 :        2697 :   gimple_stmt_iterator gsi;
     158                 :        8600 :   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     159                 :             :     {
     160                 :        3333 :       gimple *stmt = gsi_stmt (gsi);
     161                 :             : 
     162                 :             :       /* Ignore labels, returns, nops, clobbers and debug stmts.  */
     163                 :        3333 :       if (gimple_code (stmt) == GIMPLE_LABEL
     164                 :             :           || gimple_code (stmt) == GIMPLE_NOP
     165                 :             :           || gimple_code (stmt) == GIMPLE_PREDICT
     166                 :        3227 :           || gimple_clobber_p (stmt)
     167                 :        3090 :           || is_gimple_debug (stmt))
     168                 :         281 :         continue;
     169                 :             : 
     170                 :        3052 :       if (gimple_code (stmt) == GIMPLE_RETURN)
     171                 :             :         {
     172                 :        1279 :           greturn *gret = as_a <greturn *> (stmt);
     173                 :        1279 :           if (retptr)
     174                 :             :             {
     175                 :        1279 :               gcc_checking_assert (!*retptr);
     176                 :        1279 :               *retptr = gimple_return_retval_ptr (gret);
     177                 :             :             }
     178                 :        1279 :           continue;
     179                 :        1279 :         }
     180                 :             : 
     181                 :             :       /* Check for a call.  */
     182                 :        1773 :       if (is_gimple_call (stmt))
     183                 :             :         return stmt;
     184                 :             : 
     185                 :             :       /* Allow simple copies to the return value, updating the return
     186                 :             :          value to be found in earlier assignments.  */
     187                 :         535 :       if (retptr && *retptr && gimple_assign_single_p (stmt)
     188                 :         578 :           && **retptr == gimple_assign_lhs (stmt))
     189                 :             :         {
     190                 :          43 :           *retptr = gimple_assign_rhs1_ptr (stmt);
     191                 :          43 :           continue;
     192                 :             :         }
     193                 :             : 
     194                 :             :       return stmt;
     195                 :             :     }
     196                 :             : 
     197                 :             :   /* Any other kind of stmt will prevent a tail call.  */
     198                 :             :   return NULL;
     199                 :             : }
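
   For instance, scanning a block like this from the end (a GIMPLE-like
   sketch with hypothetical SSA names):

     _1 = g ();         <-  a call: returned to the caller
     retval_2 = _1;     <-  simple copy: *RETPTR advances to &_1
     return retval_2;   <-  return stmt: *RETPTR set to &retval_2

   The scan visits these bottom-up, so by the time it reaches the call,
   *RETPTR already points at the call's LHS.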
     200                 :             : 
     201                 :             : /* Return TRUE iff CALL is to be preceded by a CFR checkpoint, i.e.,
     202                 :             :    if it's a returning call (one whose result is ultimately returned
     203                 :             :    without intervening non-copy statements) and we're checking
     204                 :             :    returning calls, a __builtin_return call (noreturn with a path to
     205                 :             :    the exit block), a must-tail call, or a tail call.  */
     206                 :             : 
     207                 :             : static bool
     208                 :         890 : returning_call_p (gcall *call)
     209                 :             : {
     210                 :         890 :   if (!(gimple_call_noreturn_p (call)
     211                 :         476 :         || gimple_call_must_tail_p (call)
     212                 :         476 :         || gimple_call_tail_p (call)
     213                 :         476 :         || check_returning_calls_p ()))
     214                 :             :     return false;
     215                 :             : 
     216                 :             :   /* Quickly check that there's a path to exit compatible with a
     217                 :             :      returning call.  Detect infinite loops by limiting the path
     218                 :             :      length to the basic block count, and by looking for duplicate
     219                 :             :      blocks before allocating more memory for the path, for amortized
     220                 :             :      O(n).  */
     221                 :         703 :   auto_vec<basic_block, 10> path;
     222                 :         703 :   for (basic_block bb = gimple_bb (call);
     223                 :        1406 :        bb != EXIT_BLOCK_PTR_FOR_FN (cfun);
     224                 :         703 :        bb = single_succ (bb))
     225                 :         865 :     if (!single_succ_p (bb)
     226                 :         703 :         || (single_succ_edge (bb)->flags & EDGE_EH) != 0
     227                 :        1406 :         || n_basic_blocks_for_fn (cfun) - path.length () <= NUM_FIXED_BLOCKS
     228                 :        2243 :         || (path.length () == path.allocated ()
     229                 :           0 :             && std::find (path.begin (), path.end (), bb) != path.end ()))
     230                 :         162 :       return false;
     231                 :             :     else
     232                 :         703 :       path.safe_push (bb);
     233                 :             : 
     234                 :             :   /* Check the stmts in the blocks and trace the return value.  */
     235                 :         541 :   tree *retptr = NULL;
     236                 :         703 :   for (;;)
     237                 :             :     {
     238                 :         703 :       gcc_checking_assert (!path.is_empty ());
     239                 :         703 :       basic_block bb = path.pop ();
     240                 :         703 :       gimple *stop = hardcfr_scan_block (bb, &retptr);
     241                 :         703 :       if (stop)
     242                 :             :         {
     243                 :         541 :           if (stop != call)
     244                 :             :             return false;
     245                 :         521 :           gcc_checking_assert (path.is_empty ());
     246                 :         521 :           break;
     247                 :             :         }
     248                 :             : 
     249                 :         162 :       gphi *retphi = NULL;
     250                 :         162 :       if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
     251                 :           3 :           && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
     252                 :           3 :           && SSA_NAME_DEF_STMT (*retptr)
     253                 :           3 :           && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
     254                 :         162 :           && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
     255                 :             :         {
     256                 :           0 :           retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
     257                 :           0 :           gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
     258                 :             :         }
     259                 :             :       else
     260                 :         162 :         continue;
     261                 :             : 
     262                 :           0 :       gcc_checking_assert (!path.is_empty ());
     263                 :           0 :       edge e = single_succ_edge (path.last ());
     264                 :           0 :       int i = EDGE_COUNT (bb->preds);
     265                 :           0 :       while (i--)
     266                 :           0 :         if (EDGE_PRED (bb, i) == e)
     267                 :             :           break;
     268                 :           0 :       gcc_checking_assert (i >= 0);
     269                 :           0 :       retptr = gimple_phi_arg_def_ptr (retphi, i);
     270                 :             :     }
     271                 :             : 
     272                 :         521 :   return (gimple_call_noreturn_p (call)
     273                 :         249 :           || gimple_call_must_tail_p (call)
     274                 :         249 :           || gimple_call_tail_p (call)
     275                 :         770 :           || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
     276                 :         249 :               && check_returning_calls_p ()));
     277                 :         703 : }
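
   The amortized O(n) bound claimed above holds because the duplicate scan
   only runs once the path length has reached the vector's allocated
   capacity: assuming GCC's usual geometric vec growth, starting from the
   inline capacity of 10, the scans happen at sizes of roughly 10, 20, 40,
   ..., for a total well under 2n comparisons over a path of length n.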
     278                 :             : 
     279                 :             : typedef auto_vec<edge, 10> chk_edges_t;
     280                 :             : 
     281                 :             : /* Declare for mutual recursion.  */
     282                 :             : static bool hardcfr_sibcall_search_preds (basic_block bb,
     283                 :             :                                           chk_edges_t &chk_edges,
     284                 :             :                                           int &count_chkcall,
     285                 :             :                                           auto_sbitmap &chkcall_blocks,
     286                 :             :                                           int &count_postchk,
     287                 :             :                                           auto_sbitmap &postchk_blocks,
     288                 :             :                                           tree *retptr);
     289                 :             : 
     290                 :             : /* Search backwards from the end of BB for a mandatory or potential
     291                 :             :    sibcall.  Schedule the block to be handled sort-of like noreturn if
     292                 :             :    so.  Recurse to preds, with updated RETPTR, if the block only
     293                 :             :    contains stmts that may follow such a call, scheduling checking at
     294                 :             :    edges and marking blocks as post-check as needed.  Return true iff,
     295                 :             :    at the end of the block, a check will have already been
     296                 :             :    performed.  */
     297                 :             : 
     298                 :             : static bool
     299                 :        2638 : hardcfr_sibcall_search_block (basic_block bb,
     300                 :             :                               chk_edges_t &chk_edges,
     301                 :             :                               int &count_chkcall,
     302                 :             :                               auto_sbitmap &chkcall_blocks,
     303                 :             :                               int &count_postchk,
     304                 :             :                               auto_sbitmap &postchk_blocks,
     305                 :             :                               tree *retptr)
     306                 :             : {
     307                 :             :   /* Conditionals and internal exceptions rule out tail calls.  */
     308                 :        3991 :   if (!single_succ_p (bb)
     309                 :        2638 :       || (single_succ_edge (bb)->flags & EDGE_EH) != 0)
     310                 :             :     return false;
     311                 :             : 
     312                 :        1994 :   gimple *stmt = hardcfr_scan_block (bb, &retptr);
     313                 :        1994 :   if (!stmt)
     314                 :         805 :     return hardcfr_sibcall_search_preds (bb, chk_edges,
     315                 :             :                                          count_chkcall, chkcall_blocks,
     316                 :             :                                          count_postchk, postchk_blocks,
     317                 :         805 :                                          retptr);
     318                 :             : 
     319                 :        1189 :   if (!is_a <gcall *> (stmt))
     320                 :             :     return false;
     321                 :             : 
     322                 :             :   /* Avoid disrupting mandatory or early-marked tail calls,
     323                 :             :      inserting the check before them.  This works for
     324                 :             :      must-tail calls, but tail calling as an optimization is
     325                 :             :      detected too late for us.
     326                 :             : 
     327                 :             :      Also check for noreturn calls here.  Noreturn calls won't
     328                 :             :      normally have edges to exit, so they won't be found here,
     329                 :             :      but __builtin_return does, and we must check before
     330                 :             :      it, so handle it like a tail call.  */
     331                 :         697 :   gcall *call = as_a <gcall *> (stmt);
     332                 :        1122 :   if (!(gimple_call_noreturn_p (call)
     333                 :         425 :         || gimple_call_must_tail_p (call)
     334                 :         425 :         || gimple_call_tail_p (call)
     335                 :         425 :         || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
     336                 :         397 :             && check_returning_calls_p ())))
     337                 :             :     return false;
     338                 :             : 
     339                 :         480 :   gcc_checking_assert (returning_call_p (call));
     340                 :             : 
     341                 :             :   /* We found a call that is to be preceded by checking.  */
     342                 :         480 :   if (bitmap_set_bit (chkcall_blocks, bb->index))
     343                 :         480 :     ++count_chkcall;
     344                 :             :   else
     345                 :           0 :     gcc_unreachable ();
     346                 :         480 :   return true;
     347                 :             : }
     348                 :             : 
     349                 :             : 
     350                 :             : /* Search preds of BB for a mandatory or potential sibcall or
     351                 :             :    returning call, and arrange for the blocks containing them to have
     352                 :             :    a check inserted before the call, like noreturn calls.  If any
     353                 :             :    preds are found to perform checking, schedule checks at the edges
      354                 :             :    of those that don't, and mark BB as postcheck.  */
     355                 :             : 
     356                 :             : static bool
     357                 :        2325 : hardcfr_sibcall_search_preds (basic_block bb,
     358                 :             :                               chk_edges_t &chk_edges,
     359                 :             :                               int &count_chkcall,
     360                 :             :                               auto_sbitmap &chkcall_blocks,
     361                 :             :                               int &count_postchk,
     362                 :             :                               auto_sbitmap &postchk_blocks,
     363                 :             :                               tree *retptr)
     364                 :             : {
     365                 :             :   /* For the exit block, we wish to force a check at every
     366                 :             :      predecessor, so pretend we've already found a pred that had
     367                 :             :      checking, so that we schedule checking at every one of its pred
     368                 :             :      edges.  */
     369                 :        2325 :   bool first = bb->index >= NUM_FIXED_BLOCKS;
     370                 :        2325 :   bool postchecked = true;
     371                 :             : 
     372                 :        2325 :   gphi *retphi = NULL;
     373                 :         805 :   if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
     374                 :         290 :       && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
     375                 :         220 :       && SSA_NAME_DEF_STMT (*retptr)
     376                 :         220 :       && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
     377                 :        2490 :       && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
     378                 :             :     {
     379                 :         165 :       retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
     380                 :         165 :       gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
     381                 :             :     }
     382                 :             : 
     383                 :        7241 :   for (int i = EDGE_COUNT (bb->preds); i--; first = false)
     384                 :             :     {
     385                 :        2638 :       edge e = EDGE_PRED (bb, i);
     386                 :             : 
     387                 :        2638 :       bool checked
     388                 :        3132 :         = hardcfr_sibcall_search_block (e->src, chk_edges,
     389                 :             :                                         count_chkcall, chkcall_blocks,
     390                 :             :                                         count_postchk, postchk_blocks,
     391                 :             :                                         !retphi ? retptr
     392                 :         494 :                                         : gimple_phi_arg_def_ptr (retphi, i));
     393                 :             : 
     394                 :        2638 :       if (first)
     395                 :             :         {
     396                 :         805 :           postchecked = checked;
     397                 :         805 :           continue;
     398                 :             :         }
     399                 :             : 
     400                 :             :       /* When we first find a checked block, force a check at every
     401                 :             :          other incoming edge we've already visited, and those we
     402                 :             :          visit afterwards that don't have their own check, so that
     403                 :             :          when we reach BB, the check has already been performed.  */
     404                 :        1833 :       if (!postchecked && checked)
     405                 :             :         {
     406                 :           3 :           for (int j = EDGE_COUNT (bb->preds); --j > i; )
     407                 :           1 :             chk_edges.safe_push (EDGE_PRED (bb, j));
     408                 :             :           postchecked = true;
     409                 :             :         }
     410                 :        1833 :       if (postchecked && !checked)
     411                 :         913 :         chk_edges.safe_push (EDGE_PRED (bb, i));
     412                 :             :     }
     413                 :             : 
     414                 :        2325 :   if (postchecked && bb->index >= NUM_FIXED_BLOCKS)
     415                 :             :     {
     416                 :          76 :       if (bitmap_set_bit (postchk_blocks, bb->index))
     417                 :          76 :         count_postchk++;
     418                 :             :       else
     419                 :           0 :         gcc_unreachable ();
     420                 :             :     }
     421                 :             : 
     422                 :        2325 :   return postchecked;
     423                 :             : }
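
   As a concrete picture: if the exit block has two preds, P1 ending in an
   already-checked tail call and P2 ending in a plain return, the loop
   above sees P1 as checked and P2 as not, so only the edge P2->EXIT is
   pushed onto CHK_EDGES; once checks are inserted on those edges, every
   path into the exit block has already been checked.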
     424                 :             : 
     425                 :             : 
     426                 :             : class rt_bb_visited
     427                 :             : {
     428                 :             :   /* Use a sufficiently wide unsigned type to hold basic block numbers.  */
     429                 :             :   typedef size_t blknum;
     430                 :             : 
     431                 :             :   /* Record the original block count of the function.  */
     432                 :             :   blknum nblocks;
     433                 :             :   /* Record the number of bits per VWORD (short for VISITED WORD), an
     434                 :             :      efficient mode to set and test bits for blocks we visited, and to
     435                 :             :      encode the CFG in case out-of-line verification is used.  */
     436                 :             :   unsigned vword_bits;
     437                 :             : 
     438                 :             :   /* Hold the unsigned integral VWORD type.  */
     439                 :             :   tree vword_type;
     440                 :             :   /* Hold a pointer-to-VWORD type.  */
     441                 :             :   tree vword_ptr;
     442                 :             : 
     443                 :             :   /* Hold a growing sequence used to check, inline or out-of-line,
     444                 :             :      that VISITED encodes an expected execution path.  */
     445                 :             :   gimple_seq ckseq;
      446                 :             :   /* If non-NULL, hold a growing representation of the CFG for
     447                 :             :      out-of-line testing.  */
     448                 :             :   tree rtcfg;
     449                 :             : 
     450                 :             :   /* Hold the declaration of an array of VWORDs, used as an array of
     451                 :             :      NBLOCKS-2 bits.  */
     452                 :             :   tree visited;
     453                 :             : 
      454                 :             :   /* If performing inline checking, hold declarations of boolean
     455                 :             :      variables used for inline checking.  CKBLK holds the result of
     456                 :             :      testing whether the VISITED bit corresponding to a predecessor or
     457                 :             :      successor is set, CKINV inverts that bit, CKPART gets cleared if
     458                 :             :      a block was not visited or if CKINV for any of its predecessors
     459                 :             :      or successors is set, and CKFAIL gets set if CKPART remains set
     460                 :             :      at the end of a block's predecessors or successors list.  */
     461                 :             :   tree ckfail, ckpart, ckinv, ckblk;
     462                 :             : 
     463                 :             :   /* If we need to deal with abnormal edges, we insert SSA_NAMEs for
     464                 :             :      boolean true and false.  */
     465                 :             :   tree vfalse, vtrue;
     466                 :             : 
     467                 :             :   /* Convert a block index N to a block vindex, the index used to
     468                 :             :      identify it in the VISITED array.  Check that it's in range:
     469                 :             :      neither ENTRY nor EXIT, but maybe one-past-the-end, to compute
     470                 :             :      the visited array length.  */
     471                 :       18547 :   blknum num2idx (blknum n) {
     472                 :       18547 :     gcc_checking_assert (n >= NUM_FIXED_BLOCKS && n <= nblocks);
     473                 :       18547 :     return (n - NUM_FIXED_BLOCKS);
     474                 :             :   }
     475                 :             :   /* Return the block vindex for BB, that must not be ENTRY or
     476                 :             :      EXIT.  */
     477                 :       15417 :   blknum bb2idx (basic_block bb) {
     478                 :       15417 :     gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
     479                 :             :                          && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
     480                 :       15417 :     gcc_checking_assert (blknum (bb->index) < nblocks);
     481                 :       15417 :     return num2idx (bb->index);
     482                 :             :   }
     483                 :             : 
     484                 :             :   /* Compute the type to be used for the VISITED array.  */
     485                 :        1520 :   tree vtype ()
     486                 :             :   {
     487                 :        1520 :     blknum n = num2idx (nblocks);
     488                 :        3040 :     return build_array_type_nelts (vword_type,
     489                 :        1520 :                                    (n + vword_bits - 1) / vword_bits);
     490                 :             :   }
     491                 :             : 
     492                 :             :   /* Compute and return the index into VISITED for block BB.  If BITP
     493                 :             :      is non-NULL, also compute and store the bit mask corresponding to
     494                 :             :      block BB in *BITP, so that (visited[index] & mask) tells whether
     495                 :             :      BB was visited.  */
     496                 :       15417 :   tree vwordidx (basic_block bb, tree *bitp = NULL)
     497                 :             :   {
     498                 :       15417 :     blknum idx = bb2idx (bb);
     499                 :       15417 :     if (bitp)
     500                 :             :       {
     501                 :       15417 :         unsigned bit = idx % vword_bits;
     502                 :             :         /* We don't need to adjust shifts to follow native bit
     503                 :             :            endianness here, all of our uses of the CFG and visited
     504                 :             :            bitmaps, whether at compile or runtime, are shifted bits on
     505                 :             :            full words.  This adjustment here would require a
     506                 :             :            corresponding adjustment at runtime, which would be nothing
     507                 :             :            but undesirable overhead for us.  */
     508                 :       15417 :         if (0 /* && BITS_BIG_ENDIAN */)
     509                 :             :           bit = vword_bits - bit - 1;
     510                 :       15417 :         wide_int wbit = wi::set_bit_in_zero (bit, vword_bits);
     511                 :       15417 :         *bitp = wide_int_to_tree (vword_type, wbit);
     512                 :       15417 :       }
     513                 :       15417 :     return build_int_cst (vword_ptr, idx / vword_bits);
     514                 :             :   }
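
   A worked example, assuming vword_bits == 32 and NUM_FIXED_BLOCKS == 2:
   for a block with bb->index == 35, bb2idx yields vindex 33, so vwordidx
   returns word index 1 (33 / 32) and stores the mask 1 << (33 % 32) == 2
   in *BITP; (visited[1] & 2) then tells whether the block was visited.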
     515                 :             : 
      516                 :             :   /* Return an expr accessing the visited element that holds
     517                 :             :      information about BB.  If BITP is non-NULL, set it to the mask to
     518                 :             :      tell which bit in that expr refers to BB.  */
     519                 :        7321 :   tree vword (basic_block bb, tree *bitp = NULL)
     520                 :             :   {
     521                 :        7321 :     return build2 (MEM_REF, vword_type,
     522                 :             :                    build1 (ADDR_EXPR, vword_ptr, visited),
     523                 :        7321 :                    int_const_binop (MULT_EXPR, vwordidx (bb, bitp),
     524                 :        7321 :                                     fold_convert (vword_ptr,
     525                 :             :                                                   TYPE_SIZE_UNIT
     526                 :        7321 :                                                   (vword_type))));
     527                 :             :   }
     528                 :             : 
     529                 :             :   /* Return an expr that evaluates to true iff BB was marked as
     530                 :             :      VISITED.  Add any gimple stmts to SEQP.  */
     531                 :        4647 :   tree vindex (basic_block bb, gimple_seq *seqp)
     532                 :             :   {
     533                 :        4647 :     if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
     534                 :        3932 :         || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     535                 :        1866 :       return boolean_true_node;
     536                 :             : 
     537                 :        2781 :     tree bit, setme = vword (bb, &bit);
     538                 :        2781 :     tree temp = create_tmp_var (vword_type, ".cfrtemp");
     539                 :             : 
     540                 :        2781 :     gassign *vload = gimple_build_assign (temp, setme);
     541                 :        2781 :     gimple_seq_add_stmt (seqp, vload);
     542                 :             : 
     543                 :        2781 :     gassign *vmask = gimple_build_assign (temp, BIT_AND_EXPR, temp, bit);
     544                 :        2781 :     gimple_seq_add_stmt (seqp, vmask);
     545                 :             : 
     546                 :        2781 :     return build2 (NE_EXPR, boolean_type_node,
     547                 :        2781 :                    temp, build_int_cst (vword_type, 0));
     548                 :             :   }
     549                 :             : 
     550                 :             :   /* Set the bit corresponding to BB in VISITED.  Add to SEQ any
     551                 :             :      required gimple stmts, and return SEQ, possibly modified.  */
     552                 :        4540 :   gimple_seq vset (basic_block bb, gimple_seq seq = NULL)
     553                 :             :   {
     554                 :        4540 :     tree bit, setme = vword (bb, &bit);
     555                 :        4540 :     tree temp = create_tmp_var (vword_type, ".cfrtemp");
     556                 :             : 
     557                 :        4540 :     gassign *vload = gimple_build_assign (temp, setme);
     558                 :        4540 :     gimple_seq_add_stmt (&seq, vload);
     559                 :             : 
     560                 :        4540 :     gassign *vbitset = gimple_build_assign (temp, BIT_IOR_EXPR, temp, bit);
     561                 :        4540 :     gimple_seq_add_stmt (&seq, vbitset);
     562                 :             : 
     563                 :        4540 :     gassign *vstore = gimple_build_assign (unshare_expr (setme), temp);
     564                 :        4540 :     gimple_seq_add_stmt (&seq, vstore);
     565                 :             : 
     566                 :             :     /* Prevent stores into visited from being deferred, forcing
     567                 :             :        subsequent bitsets to reload the word rather than reusing
     568                 :             :        values already in register.  The purpose is threefold: make the
     569                 :             :        bitset get to memory in this block, so that control flow
     570                 :             :        attacks in functions called in this block don't easily bypass
     571                 :             :        the bitset; prevent the bitset word from being retained in a
     572                 :             :        register across blocks, which could, in an attack scenario,
     573                 :             :        make a later block set more than one bit; and prevent hoisting
     574                 :             :        or sinking loads or stores of bitset words out of loops or even
     575                 :             :        throughout functions, which could significantly weaken the
     576                 :             :        verification.  This is equivalent to making the bitsetting
     577                 :             :        volatile within the function body, but without changing its
     578                 :             :        type; making the bitset volatile would make inline checking far
     579                 :             :        less optimizable for no reason.  */
     580                 :        4540 :     vec<tree, va_gc> *inputs = NULL;
     581                 :        4540 :     vec<tree, va_gc> *outputs = NULL;
     582                 :        9080 :     vec_safe_push (outputs,
     583                 :             :                    build_tree_list
     584                 :        4540 :                    (build_tree_list
     585                 :             :                     (NULL_TREE, build_string (2, "=m")),
     586                 :             :                     visited));
     587                 :        9080 :     vec_safe_push (inputs,
     588                 :             :                    build_tree_list
     589                 :        4540 :                    (build_tree_list
     590                 :             :                     (NULL_TREE, build_string (1, "m")),
     591                 :             :                     visited));
     592                 :        4540 :     gasm *stabilize = gimple_build_asm_vec ("", inputs, outputs,
     593                 :             :                                             NULL, NULL);
     594                 :        4540 :     gimple_seq_add_stmt (&seq, stabilize);
     595                 :             : 
     596                 :        4540 :     return seq;
     597                 :             :   }
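
   The stabilizing asm built above is the gimple equivalent of writing,
   in C:

     __asm__ ("" : "=m" (visited) : "m" (visited));

   an empty asm that names the VISITED array as both memory input and
   memory output, forcing the store to complete before it and subsequent
   uses to reload the word, without acting as a barrier for anything else.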
     598                 :             : 
     599                 :             : public:
     600                 :             :   /* Prepare to add control flow redundancy testing to CFUN.  */
     601                 :        1520 :   rt_bb_visited (int checkpoints)
     602                 :        1520 :     : nblocks (n_basic_blocks_for_fn (cfun)),
     603                 :        1520 :       vword_type (NULL), ckseq (NULL), rtcfg (NULL),
     604                 :        1520 :       vfalse (NULL), vtrue (NULL)
     605                 :             :   {
     606                 :             :     /* If we've already added a declaration for the builtin checker,
     607                 :             :        extract vword_type and vword_bits from its declaration.  */
     608                 :        1520 :     if (tree checkfn = builtin_decl_explicit (BUILT_IN___HARDCFR_CHECK))
     609                 :             :       {
     610                 :        1007 :         tree check_arg_list = TYPE_ARG_TYPES (TREE_TYPE (checkfn));
     611                 :        1007 :         tree vword_const_ptr_type = TREE_VALUE (TREE_CHAIN (check_arg_list));
     612                 :        1007 :         vword_type = TYPE_MAIN_VARIANT (TREE_TYPE (vword_const_ptr_type));
     613                 :        1007 :         vword_bits = tree_to_shwi (TYPE_SIZE (vword_type));
     614                 :             :       }
     615                 :             :     /* Otherwise, select vword_bits, vword_type et al, and use it to
     616                 :             :        declare the builtin checker.  */
     617                 :             :     else
     618                 :             :       {
     619                 :             :         /* This setting needs to be kept in sync with libgcc/hardcfr.c.
     620                 :             :            We aim for at least 28 bits, which enables us to refer to as
     621                 :             :            many as 28 << 28 blocks in a function's CFG.  That's way over
     622                 :             :            4G blocks.  */
     623                 :         513 :         machine_mode VWORDmode;
     624                 :         513 :         if (BITS_PER_UNIT >= 28)
     625                 :             :           {
     626                 :             :             VWORDmode = QImode;
     627                 :             :             vword_bits = BITS_PER_UNIT;
     628                 :             :           }
     629                 :         513 :         else if (BITS_PER_UNIT >= 14)
     630                 :             :           {
     631                 :             :             VWORDmode = HImode;
     632                 :             :             vword_bits = 2 * BITS_PER_UNIT;
     633                 :             :           }
     634                 :             :         else
     635                 :             :           {
     636                 :         513 :             VWORDmode = SImode;
     637                 :         513 :             vword_bits = 4 * BITS_PER_UNIT;
     638                 :             :           }
     639                 :             : 
     640                 :         513 :         vword_type = lang_hooks.types.type_for_mode (VWORDmode, 1);
     641                 :         513 :         gcc_checking_assert (vword_bits == tree_to_shwi (TYPE_SIZE
     642                 :             :                                                          (vword_type)));
     643                 :             : 
     644                 :         513 :         vword_type = build_variant_type_copy (vword_type);
     645                 :         513 :         TYPE_ALIAS_SET (vword_type) = new_alias_set ();
     646                 :             : 
     647                 :         513 :         tree vword_const = build_qualified_type (vword_type, TYPE_QUAL_CONST);
     648                 :         513 :         tree vword_const_ptr = build_pointer_type (vword_const);
     649                 :         513 :         tree type = build_function_type_list (void_type_node, sizetype,
     650                 :             :                                               vword_const_ptr, vword_const_ptr,
     651                 :             :                                               NULL_TREE);
     652                 :         513 :         tree decl = add_builtin_function_ext_scope
     653                 :         513 :           ("__builtin___hardcfr_check",
     654                 :             :            type, BUILT_IN___HARDCFR_CHECK, BUILT_IN_NORMAL,
     655                 :             :            "__hardcfr_check", NULL_TREE);
     656                 :         513 :         TREE_NOTHROW (decl) = true;
     657                 :         513 :         set_builtin_decl (BUILT_IN___HARDCFR_CHECK, decl, true);
     658                 :             :       }
     659                 :             : 
      660                 :             :     /* The checker uses a qualified pointer, so we can't reuse it;
      661                 :             :        build a new one.  */
     662                 :        1520 :     vword_ptr = build_pointer_type (vword_type);
     663                 :             : 
     664                 :        1520 :     tree visited_type = vtype ();
     665                 :        1520 :     visited = create_tmp_var (visited_type, ".cfrvisited");
     666                 :             : 
     667                 :        1520 :     if (nblocks - NUM_FIXED_BLOCKS > blknum (param_hardcfr_max_inline_blocks)
     668                 :        1422 :         || checkpoints > 1)
     669                 :             :       {
     670                 :             :         /* Make sure vword_bits is wide enough for the representation
     671                 :             :            of nblocks in rtcfg.  Compare with vword_bits << vword_bits,
     672                 :             :            but avoiding overflows, shifting nblocks right instead.  If
     673                 :             :            vword_bits is wider than HOST_WIDE_INT, assume it fits, so
     674                 :             :            as to avoid undefined shifts.  */
     675                 :         805 :         gcc_assert (HOST_BITS_PER_WIDE_INT <= vword_bits
     676                 :             :                     || (((unsigned HOST_WIDE_INT)(num2idx (nblocks))
     677                 :             :                          >> vword_bits) < vword_bits));
     678                 :             : 
     679                 :             :         /* Build a terminator for the constructor list.  */
     680                 :         805 :         rtcfg = build_tree_list (NULL_TREE, NULL_TREE);
     681                 :         805 :         return;
     682                 :             :       }
     683                 :             : 
     684                 :         715 :     ckfail = create_tmp_var (boolean_type_node, ".cfrfail");
     685                 :         715 :     ckpart = create_tmp_var (boolean_type_node, ".cfrpart");
     686                 :         715 :     ckinv = create_tmp_var (boolean_type_node, ".cfrinv");
     687                 :         715 :     ckblk = create_tmp_var (boolean_type_node, ".cfrblk");
     688                 :             : 
     689                 :         715 :     gassign *ckfail_init = gimple_build_assign (ckfail, boolean_false_node);
     690                 :         715 :     gimple_seq_add_stmt (&ckseq, ckfail_init);
     691                 :             :   }
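
   On a typical byte-addressed target BITS_PER_UNIT is 8, so the selection
   above falls through to the final else: VWORDmode = SImode and
   vword_bits = 4 * 8 = 32.  The execution counts bear this out: only the
   SImode branch was taken in this coverage run.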
     692                 :             : 
     693                 :             :   /* Insert SEQ before a resx or a call in INSBB.  */
     694                 :        1398 :   void insert_exit_check_in_block (gimple_seq seq, basic_block insbb)
     695                 :             :   {
     696                 :        1398 :     gimple_stmt_iterator gsi = gsi_last_bb (insbb);
     697                 :             : 
     698                 :        1398 :     while (!gsi_end_p (gsi))
     699                 :        1518 :       if (is_a <gresx *> (gsi_stmt (gsi))
     700                 :        1518 :           || is_a <gcall *> (gsi_stmt (gsi)))
     701                 :             :         break;
     702                 :             :       else
     703                 :        1638 :         gsi_prev (&gsi);
     704                 :             : 
     705                 :        1398 :     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
     706                 :        1398 :   }
     707                 :             : 
     708                 :             :   /* Insert SEQ on E.  */
     709                 :         914 :   void insert_exit_check_on_edge (gimple_seq seq, edge e)
     710                 :             :   {
     711                 :         914 :     if (!(e->flags & EDGE_ABNORMAL))
     712                 :             :       {
     713                 :         913 :         gsi_insert_seq_on_edge_immediate (e, seq);
     714                 :         913 :         return;
     715                 :             :       }
     716                 :             : 
     717                 :             :     /* Initialize SSA boolean constants for use in abnormal PHIs.  */
     718                 :           1 :     if (!vfalse)
     719                 :             :       {
     720                 :           1 :         vfalse = make_ssa_name (boolean_type_node);
     721                 :           1 :         vtrue = make_ssa_name (boolean_type_node);
     722                 :             : 
     723                 :           1 :         gimple_seq vft_seq = NULL;
     724                 :           1 :         gassign *vfalse_init = gimple_build_assign (vfalse, boolean_false_node);
     725                 :           1 :         gimple_seq_add_stmt (&vft_seq, vfalse_init);
     726                 :           1 :         gassign *vtrue_init = gimple_build_assign (vtrue, boolean_true_node);
     727                 :           1 :         gimple_seq_add_stmt (&vft_seq, vtrue_init);
     728                 :             : 
     729                 :           1 :         gsi_insert_seq_on_edge_immediate (single_succ_edge
     730                 :           1 :                                           (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
     731                 :             :                                           vft_seq);
     732                 :             :       }
     733                 :             : 
     734                 :             :     /* We can't insert on abnormal edges, but we can arrange for SEQ
     735                 :             :        to execute conditionally at dest.  Add a PHI boolean with TRUE
     736                 :             :        from E and FALSE from other preds, split the whole block, add a
     737                 :             :        test for the PHI to run a new block with SEQ or skip straight
     738                 :             :        to the original block.  If there are multiple incoming abnormal
     739                 :             :        edges, we'll do this multiple times.  ??? Unless there are
     740                 :             :        multiple abnormal edges with different postcheck status, we
     741                 :             :        could split the block and redirect other edges, rearranging the
     742                 :             :        PHI nodes.  Optimizers already know how to do this, so we can
     743                 :             :        keep things simple here.  */
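                         :             :     /* For illustration only, the shape this arranges for a single
                         :             :        abnormal edge E into BB (block names below are made up):
                         :             : 
                         :             :          BB:           c = PHI <vtrue (E), vfalse (other preds)>
                         :             :                        if (c != false) goto bb_check; else goto bb_postcheck;
                         :             :          bb_check:     SEQ, then fall through to bb_postcheck
                         :             :          bb_postcheck: original body of BB  */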
     744                 :           1 :     basic_block bb = e->dest;
     745                 :           1 :     basic_block bb_postcheck = split_block_after_labels (bb)->dest;
     746                 :             : 
     747                 :           1 :     basic_block bb_check = create_empty_bb (e->dest);
     748                 :           1 :     bb_check->count = e->count ();
     749                 :           1 :     if (dom_info_available_p (CDI_DOMINATORS))
     750                 :           1 :       set_immediate_dominator (CDI_DOMINATORS, bb_check, bb);
     751                 :           1 :     if (current_loops)
     752                 :           1 :       add_bb_to_loop (bb_check, current_loops->tree_root);
     753                 :             : 
     754                 :           1 :     gimple_stmt_iterator chkpt = gsi_after_labels (bb_check);
     755                 :           1 :     gsi_insert_seq_before_without_update (&chkpt, seq, GSI_SAME_STMT);
     756                 :           1 :     edge edge_postcheck = make_edge (bb_check, bb_postcheck, EDGE_FALLTHRU);
     757                 :           1 :     edge_postcheck->probability = profile_probability::always ();
     758                 :             : 
     759                 :           1 :     tree cond_var = make_ssa_name (boolean_type_node);
     760                 :           1 :     gcond *cond = gimple_build_cond (NE_EXPR, cond_var, boolean_false_node,
     761                 :             :                                      NULL, NULL);
     762                 :           1 :     gimple_stmt_iterator condpt = gsi_after_labels (bb);
     763                 :           1 :     gsi_insert_before (&condpt, cond, GSI_SAME_STMT);
     764                 :           1 :     edge edge_nocheck = single_succ_edge (bb);
     765                 :           1 :     edge_nocheck->flags &= ~EDGE_FALLTHRU;
     766                 :           1 :     edge_nocheck->flags |= EDGE_FALSE_VALUE;
     767                 :           1 :     edge edge_check = make_edge (bb, bb_check, EDGE_TRUE_VALUE);
     768                 :           1 :     edge_check->probability = e->count ().probability_in (bb->count);
     769                 :           1 :     edge_nocheck->probability = edge_check->probability.invert ();
     770                 :             : 
     771                 :           1 :     gphi *cond_phi = create_phi_node (cond_var, bb);
     772                 :           5 :     for (int i = 0, ei = EDGE_COUNT (bb->preds); i < ei; i++)
     773                 :             :       {
     774                 :           3 :         edge pred = EDGE_PRED (bb, i);
     775                 :           3 :         bool check_edge = pred == e;
     776                 :           3 :         tree val = check_edge ? vtrue : vfalse;
     777                 :           3 :         add_phi_arg (cond_phi, val, pred, UNKNOWN_LOCATION);
     778                 :             :       }
     779                 :             :   }
     780                 :             : 
     781                 :             :   /* Add checking code to CHK_EDGES and CHKCALL_BLOCKS, and
     782                 :             :      initialization code on the entry edge.  Before this point, the
     783                 :             :      CFG has been undisturbed, and all the needed data has been
     784                 :             :      collected and safely stowed.  */
     785                 :        1520 :   void check (chk_edges_t &chk_edges,
     786                 :             :               int count_chkcall, auto_sbitmap const &chkcall_blocks)
     787                 :             :   {
     788                 :             :     /* If we're using out-of-line checking, create and statically
     789                 :             :        initialize the CFG checking representation, generate the
     790                 :             :        checker call for the checking sequence, and insert it in all
     791                 :             :        exit edges, if there's more than one.  If there's only one, we
     792                 :             :        use the same logic as the inline case to insert the check
     793                 :             :        sequence.  */
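                         :             :     /* In rough C terms (a sketch; the builtin's signature is defined
                         :             :        by libgcc's hardcfr runtime, not here), each out-of-line check
                         :             :        boils down to the single call
                         :             : 
                         :             :          __builtin___hardcfr_check (words, &visited, &rtcfg);
                         :             : 
                         :             :        where WORDS is derived from the block count, VISITED is the
                         :             :        per-activation bitmap filled in as blocks execute, and RTCFG is
                         :             :        the static array, built just below, encoding the expected
                         :             :        CFG.  */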
     794                 :        1520 :     if (rtcfg)
     795                 :             :       {
     796                 :             :         /* Unreverse the list, and drop the tail node turned into head.  */
     797                 :         805 :         rtcfg = TREE_CHAIN (nreverse (rtcfg));
     798                 :             : 
     799                 :             :         /* Turn the indices stored in TREE_PURPOSE into separate
     800                 :             :            nodes.  It was useful to keep them together to enable
     801                 :             :            combination of masks and for clear separation of
     802                 :             :            terminators while constructing it, but now we have to turn
     803                 :             :            it into a sequence of words.  */
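                         :             :         /* E.g., with made-up values, a node holding (TREE_PURPOSE =
                         :             :            word index 2, TREE_VALUE = mask 0x5) becomes two consecutive
                         :             :            value-only nodes: mask 0x5 followed by word index 2.  */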
     804                 :       20263 :         for (tree node = rtcfg; node; node = TREE_CHAIN (node))
     805                 :             :           {
     806                 :       19458 :             tree wordidx = TREE_PURPOSE (node);
     807                 :       19458 :             if (!wordidx)
     808                 :       13020 :               continue;
     809                 :             : 
     810                 :        6438 :             TREE_PURPOSE (node) = NULL_TREE;
     811                 :        6438 :             TREE_CHAIN (node) = tree_cons (NULL_TREE,
     812                 :             :                                            fold_convert (vword_type, wordidx),
     813                 :        6438 :                                            TREE_CHAIN (node));
     814                 :             :           }
     815                 :             : 
     816                 :             :         /* Build the static initializer for the array with the CFG
     817                 :             :            representation for out-of-line checking.  */
     818                 :         805 :         tree init = build_constructor_from_list (NULL_TREE, rtcfg);
     819                 :        1610 :         TREE_TYPE (init) = build_array_type_nelts (vword_type,
     820                 :        1610 :                                                    CONSTRUCTOR_NELTS (init));
     821                 :         805 :         char buf[32];
     822                 :         805 :         ASM_GENERATE_INTERNAL_LABEL (buf, "Lhardcfg",
     823                 :             :                                      current_function_funcdef_no);
     824                 :         805 :         rtcfg = build_decl (UNKNOWN_LOCATION, VAR_DECL,
     825                 :             :                             get_identifier (buf),
     826                 :         805 :                             TREE_TYPE (init));
     827                 :         805 :         TREE_READONLY (rtcfg) = 1;
     828                 :         805 :         TREE_STATIC (rtcfg) = 1;
     829                 :         805 :         TREE_ADDRESSABLE (rtcfg) = 1;
     830                 :         805 :         TREE_USED (rtcfg) = 1;
     831                 :         805 :         DECL_ARTIFICIAL (rtcfg) = 1;
     832                 :         805 :         DECL_IGNORED_P (rtcfg) = 1;
     833                 :         805 :         DECL_INITIAL (rtcfg) = init;
     834                 :         805 :         make_decl_rtl (rtcfg);
     835                 :         805 :         varpool_node::finalize_decl (rtcfg);
     836                 :             : 
     837                 :             :         /* Add the checker call to ckseq.  */
     838                 :        1610 :         gcall *call_chk = gimple_build_call (builtin_decl_explicit
     839                 :             :                                              (BUILT_IN___HARDCFR_CHECK), 3,
     840                 :         805 :                                              build_int_cst (sizetype,
     841                 :         805 :                                                             num2idx (nblocks)),
     842                 :             :                                              build1 (ADDR_EXPR, vword_ptr,
     843                 :             :                                                      visited),
     844                 :             :                                              build1 (ADDR_EXPR, vword_ptr,
     845                 :             :                                                      rtcfg));
     846                 :         805 :         gimple_seq_add_stmt (&ckseq, call_chk);
     847                 :             : 
     848                 :         805 :         gimple *clobber = gimple_build_assign (visited,
     849                 :             :                                                build_clobber
     850                 :         805 :                                                (TREE_TYPE (visited)));
     851                 :         805 :         gimple_seq_add_stmt (&ckseq, clobber);
     852                 :             : 
     853                 :             :         /* If we have multiple exit edges, insert (copies of)
     854                 :             :            ckseq in all of them.  */
     855                 :        2272 :         for (int i = chk_edges.length (); i--; )
     856                 :             :           {
     857                 :         662 :             gimple_seq seq = ckseq;
     858                 :             :             /* Copy the sequence, unless we're dealing with the
     859                 :             :                last edge (we're counting down to zero).  */
     860                 :         662 :             if (i || count_chkcall)
     861                 :         580 :               seq = gimple_seq_copy (seq);
     862                 :             : 
     863                 :         662 :             edge e = chk_edges[i];
     864                 :             : 
     865                 :         662 :             if (dump_file)
     866                 :             :               {
     867                 :         661 :                 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
     868                 :         621 :                   fprintf (dump_file,
     869                 :             :                            "Inserting out-of-line check in"
     870                 :             :                            " block %i's edge to exit.\n",
     871                 :         621 :                            e->src->index);
     872                 :             :                 else
     873                 :          40 :                   fprintf (dump_file,
     874                 :             :                            "Inserting out-of-line check in"
     875                 :             :                            " block %i's edge to postcheck block %i.\n",
     876                 :          40 :                            e->src->index, e->dest->index);
     877                 :             :               }
     878                 :             : 
     879                 :         662 :             insert_exit_check_on_edge (seq, e);
     880                 :             : 
     881                 :         662 :             gcc_checking_assert (!bitmap_bit_p (chkcall_blocks, e->src->index));
     882                 :             :           }
     883                 :             : 
     884                 :         805 :         sbitmap_iterator it;
     885                 :         805 :         unsigned i;
     886                 :        2545 :         EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
     887                 :             :           {
     888                 :         935 :             basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
     889                 :             : 
     890                 :         935 :             gimple_seq seq = ckseq;
     891                 :         935 :             gcc_checking_assert (count_chkcall > 0);
     892                 :         935 :             if (--count_chkcall)
     893                 :         212 :               seq = gimple_seq_copy (seq);
     894                 :             : 
     895                 :         935 :             if (dump_file)
     896                 :         934 :               fprintf (dump_file,
     897                 :             :                        "Inserting out-of-line check before stmt in block %i.\n",
     898                 :             :                        bb->index);
     899                 :             : 
     900                 :         935 :             insert_exit_check_in_block (seq, bb);
     901                 :             :           }
     902                 :             : 
     903                 :         805 :         gcc_checking_assert (count_chkcall == 0);
     904                 :             :       }
     905                 :             :     else
     906                 :             :       {
     907                 :             :         /* Inline checking requires a single exit edge.  */
     908                 :         715 :         gimple *last = gimple_build_assign (visited,
     909                 :             :                                             build_clobber
     910                 :         715 :                                             (TREE_TYPE (visited)));
     911                 :         715 :         gimple_seq_add_stmt (&ckseq, last);
     912                 :             : 
     913                 :         715 :         if (!count_chkcall)
     914                 :             :           {
     915                 :         252 :             edge e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
     916                 :             : 
     917                 :         252 :             if (dump_file)
     918                 :             :               {
      919                 :         251 :                 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
      920                 :         251 :                   fprintf (dump_file,
      921                 :             :                            "Inserting inline check in"
      922                 :             :                            " block %i's edge to exit.\n",
      923                 :         251 :                            e->src->index);
      924                 :             :                 else
      925                 :           0 :                   fprintf (dump_file,
      926                 :             :                            "Inserting inline check in"
      927                 :             :                            " block %i's edge to postcheck block %i.\n",
      928                 :           0 :                            e->src->index, e->dest->index);
     929                 :             :               }
     930                 :             : 
     931                 :         252 :             insert_exit_check_on_edge (ckseq, e);
     932                 :             :           }
     933                 :             :         else
     934                 :             :           {
     935                 :         463 :             gcc_checking_assert (count_chkcall == 1);
     936                 :             : 
     937                 :         463 :             sbitmap_iterator it;
     938                 :         463 :             unsigned i;
     939                 :        1389 :             EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
     940                 :             :               {
     941                 :         463 :                 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
     942                 :             : 
     943                 :         463 :                 gimple_seq seq = ckseq;
     944                 :         463 :                 gcc_checking_assert (count_chkcall > 0);
     945                 :         463 :                 if (--count_chkcall)
     946                 :             :                   seq = gimple_seq_copy (seq);
     947                 :             : 
     948                 :         463 :                 if (dump_file)
     949                 :         463 :                   fprintf (dump_file,
     950                 :             :                            "Inserting inline check before stmt in block %i.\n",
     951                 :             :                            bb->index);
     952                 :             : 
     953                 :         463 :                 insert_exit_check_in_block (seq, bb);
     954                 :             :               }
     955                 :             : 
     956                 :         463 :             gcc_checking_assert (count_chkcall == 0);
     957                 :             :           }
     958                 :             : 
     959                 :             :         /* The inserted ckseq computes CKFAIL at LAST.  Now we have to
     960                 :             :            conditionally trap on it.  */
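                         :             :         /* In effect (sketch):  if (ckfail != false) __builtin_trap ();  */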
     961                 :         715 :         basic_block insbb = gimple_bb (last);
     962                 :             : 
     963                 :             :         /* Create a block with the unconditional trap.  */
     964                 :         715 :         basic_block trp = create_empty_bb (insbb);
     965                 :         715 :         gimple_stmt_iterator gsit = gsi_after_labels (trp);
     966                 :             : 
     967                 :         715 :         gcall *trap = gimple_build_call (builtin_decl_explicit
     968                 :             :                                          (BUILT_IN_TRAP), 0);
     969                 :         715 :         gsi_insert_before (&gsit, trap, GSI_SAME_STMT);
     970                 :             : 
     971                 :         715 :         if (BB_PARTITION (insbb))
     972                 :           0 :           BB_SET_PARTITION (trp, BB_COLD_PARTITION);
     973                 :             : 
     974                 :         715 :         if (current_loops)
     975                 :         715 :           add_bb_to_loop (trp, current_loops->tree_root);
     976                 :             : 
     977                 :             :         /* Insert a conditional branch to the trap block.  If the
     978                 :             :            conditional wouldn't be the last stmt, split the block.  */
     979                 :         715 :         gimple_stmt_iterator gsi = gsi_for_stmt (last);
     980                 :         715 :         if (!gsi_one_before_end_p (gsi))
     981                 :         715 :           split_block (gsi_bb (gsi), gsi_stmt (gsi));
     982                 :             : 
     983                 :         715 :         gcond *cond = gimple_build_cond (NE_EXPR, ckfail,
     984                 :         715 :                                          fold_convert (TREE_TYPE (ckfail),
     985                 :             :                                                        boolean_false_node),
     986                 :             :                                          NULL, NULL);
     987                 :         715 :         gsi_insert_after (&gsi, cond, GSI_SAME_STMT);
     988                 :             : 
     989                 :             :         /* Adjust the edges.  */
     990                 :         715 :         single_succ_edge (gsi_bb (gsi))->flags &= ~EDGE_FALLTHRU;
     991                 :         715 :         single_succ_edge (gsi_bb (gsi))->flags |= EDGE_FALSE_VALUE;
     992                 :         715 :         single_succ_edge (gsi_bb (gsi))->probability
     993                 :         715 :           = profile_probability::always ();
     994                 :         715 :         edge e = make_edge (gsi_bb (gsi), trp, EDGE_TRUE_VALUE);
     995                 :         715 :         e->probability = profile_probability::never ();
     996                 :         715 :         gcc_checking_assert (e->dest == trp);
     997                 :         715 :         gcc_checking_assert (!e->dest->count.initialized_p ());
     998                 :         715 :         e->dest->count = e->count ();
     999                 :             : 
    1000                 :             :         /* Set the trap's dominator after splitting.  */
    1001                 :         715 :         if (dom_info_available_p (CDI_DOMINATORS))
    1002                 :         715 :           set_immediate_dominator (CDI_DOMINATORS, trp, gimple_bb (last));
    1003                 :             :       }
    1004                 :             : 
    1005                 :             :     /* Insert initializers for visited at the entry.  Do this after
    1006                 :             :        other insertions, to avoid messing with block numbers.  */
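                         :             :     /* The sequence built below is equivalent to
                         :             :          __builtin_memset (&visited, 0, sizeof visited);
                         :             :        inserted on the edge out of the entry block.  */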
    1007                 :        1520 :     gimple_seq iseq = NULL;
    1008                 :             : 
    1009                 :        1520 :     gcall *vinit = gimple_build_call (builtin_decl_explicit
    1010                 :             :                                       (BUILT_IN_MEMSET), 3,
    1011                 :             :                                       build1 (ADDR_EXPR,
    1012                 :             :                                               build_pointer_type
    1013                 :        1520 :                                               (TREE_TYPE (visited)),
    1014                 :             :                                               visited),
    1015                 :             :                                       integer_zero_node,
    1016                 :        1520 :                                       TYPE_SIZE_UNIT (TREE_TYPE (visited)));
    1017                 :        1520 :     gimple_seq_add_stmt (&iseq, vinit);
    1018                 :             : 
    1019                 :        1520 :     gsi_insert_seq_on_edge_immediate (single_succ_edge
    1020                 :        1520 :                                       (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
    1021                 :             :                                       iseq);
    1022                 :        1520 :   }
    1023                 :             : 
    1024                 :             :   /* Push onto RTCFG a (mask, index) pair to test for IBB when BB is
    1025                 :             :      visited. XSELF is to be the ENTRY or EXIT block (depending on
    1026                 :             :      whether we're looking at preds or succs), to be remapped to BB
    1027                 :             :      because we can't represent them, and there's no point in testing
    1028                 :             :      them anyway.  Return true if no further blocks need to be visited
    1029                 :             :      in the list, because we've already encountered a
    1030                 :             :      self-reference.  */
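                         :             :   /* For instance, assuming 64-bit visited words (the width is
                         :             :      target-dependent) and block I mapping to bit I%64 of word I/64, a
                         :             :      block with index 69 would yield idx = 1 and mask = 1 << 5; blocks
                         :             :      landing in the same word get their masks IORed into a single
                         :             :      node.  */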
    1031                 :             :   bool
    1032                 :        8096 :   push_rtcfg_pair (basic_block ibb, basic_block bb,
    1033                 :             :                    basic_block xself)
    1034                 :             :   {
    1035                 :             :     /* We don't have a bit to test for the entry and exit
    1036                 :             :        blocks, but it is always visited, so we test for the
    1037                 :             :        block itself, which gets us the right result and
    1038                 :             :        enables the self-test optimization below.  */
    1039                 :        8096 :     if (ibb == xself)
    1040                 :        2402 :       ibb = bb;
    1041                 :             : 
    1042                 :        8096 :     tree mask, idx = vwordidx (ibb, &mask);
    1043                 :             :     /* Combine masks with the same idx, but not if we're going
    1044                 :             :        to optimize for self-test.  */
    1045                 :        5694 :     if (ibb != bb && TREE_PURPOSE (rtcfg)
    1046                 :        9754 :         && tree_int_cst_equal (idx, TREE_PURPOSE (rtcfg)))
    1047                 :        1658 :       TREE_VALUE (rtcfg) = int_const_binop (BIT_IOR_EXPR, mask,
    1048                 :        1658 :                                             TREE_VALUE (rtcfg));
    1049                 :             :     else
    1050                 :        6438 :       rtcfg = tree_cons (idx, mask, rtcfg);
    1051                 :             : 
    1052                 :             :     /* For self-tests (i.e., tests that the block itself was
    1053                 :             :        also visited), testing anything else is pointless,
    1054                 :             :        because it's a tautology, so just drop other edges.  */
    1055                 :        8096 :     if (ibb == bb)
    1056                 :             :       {
    1057                 :        2402 :         while (TREE_PURPOSE (TREE_CHAIN (rtcfg)))
    1058                 :           0 :           TREE_CHAIN (rtcfg) = TREE_CHAIN (TREE_CHAIN (rtcfg));
    1059                 :             :         return true;
    1060                 :             :       }
    1061                 :             : 
    1062                 :             :     return false;
    1063                 :             :   }
    1064                 :             : 
    1065                 :             :   /* Add to CKSEQ stmts to clear CKPART if OBB is visited.  */
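                         :             :   /* The stmts appended here amount to (C-like sketch):
                         :             :        ckblk  = <OBB's bit in VISITED>;
                         :             :        ckinv  = (ckblk == false);
                         :             :        ckpart = ckpart & ckinv;
                         :             :      so CKPART is cleared whenever OBB's bit is found set.  */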
    1066                 :             :   void
    1067                 :        3326 :   build_block_check (basic_block obb)
    1068                 :             :   {
    1069                 :        3326 :     tree vobb = fold_convert (TREE_TYPE (ckblk),
    1070                 :             :                               vindex (obb, &ckseq));
    1071                 :        3326 :     gassign *blkrunp = gimple_build_assign (ckblk, vobb);
    1072                 :        3326 :     gimple_seq_add_stmt (&ckseq, blkrunp);
    1073                 :             : 
    1074                 :        3326 :     gassign *blknotrunp = gimple_build_assign (ckinv,
    1075                 :             :                                                EQ_EXPR,
    1076                 :             :                                                ckblk,
    1077                 :        3326 :                                                fold_convert
    1078                 :             :                                                (TREE_TYPE (ckblk),
    1079                 :             :                                                 boolean_false_node));
    1080                 :        3326 :     gimple_seq_add_stmt (&ckseq, blknotrunp);
    1081                 :             : 
    1082                 :        3326 :     gassign *andblk = gimple_build_assign (ckpart,
    1083                 :             :                                            BIT_AND_EXPR,
    1084                 :             :                                            ckpart, ckinv);
    1085                 :        3326 :     gimple_seq_add_stmt (&ckseq, andblk);
    1086                 :        3326 :   }
    1087                 :             : 
    1088                 :             :   /* Add to BB code to set its bit in VISITED, and add to RTCFG or
    1089                 :             :      CKSEQ the data or code needed to check BB's predecessors and
    1090                 :             :      successors.  If CHECKPOINT, assume the block is a checkpoint,
    1091                 :             :      whether or not it has an edge to EXIT.  If POSTCHECK, assume the
    1092                 :             :      block post-dominates checkpoints and therefore no bitmap setting
    1093                 :             :      or checks are to be performed in or for it.  Do NOT change the
    1094                 :             :      CFG.  */
    1095                 :        4616 :   void visit (basic_block bb, bool checkpoint, bool postcheck)
    1096                 :             :   {
    1097                 :             :     /* Set the bit in VISITED when entering the block.  */
    1098                 :        4616 :     gimple_stmt_iterator gsi = gsi_after_labels (bb);
    1099                 :        4616 :     if (!postcheck)
    1100                 :        4540 :       gsi_insert_seq_before (&gsi, vset (bb), GSI_SAME_STMT);
    1101                 :             : 
    1102                 :        4616 :     if (rtcfg)
    1103                 :             :       {
    1104                 :        3291 :         if (!postcheck)
    1105                 :             :           {
    1106                 :             :             /* Build a list of (index, mask) terminated by (NULL, 0).
    1107                 :             :                Consolidate masks with the same index when they're
    1108                 :             :                adjacent.  First, predecessors.  Count backwards, because
    1109                 :             :                we're going to reverse the list.  The order shouldn't
    1110                 :             :                matter, but let's not make it surprising.  */
    1111                 :        9318 :             for (int i = EDGE_COUNT (bb->preds); i--; )
    1112                 :        3685 :               if (push_rtcfg_pair (EDGE_PRED (bb, i)->src, bb,
    1113                 :        3685 :                                    ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    1114                 :             :                 break;
    1115                 :             :           }
    1116                 :        3291 :         rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
    1117                 :             : 
    1118                 :        3291 :         if (!postcheck)
    1119                 :             :           {
    1120                 :             :             /* Then, successors.  */
    1121                 :        3219 :             if (!checkpoint
    1122                 :        4816 :                 || !push_rtcfg_pair (EXIT_BLOCK_PTR_FOR_FN (cfun),
    1123                 :        1597 :                                      bb, EXIT_BLOCK_PTR_FOR_FN (cfun)))
    1124                 :        6058 :               for (int i = EDGE_COUNT (bb->succs); i--; )
    1125                 :        2814 :                 if (push_rtcfg_pair (EDGE_SUCC (bb, i)->dest, bb,
    1126                 :        2814 :                                      EXIT_BLOCK_PTR_FOR_FN (cfun)))
    1127                 :             :                   break;
    1128                 :             :           }
    1129                 :        3291 :         rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
    1130                 :             :       }
    1131                 :        1325 :     else if (!postcheck)
    1132                 :             :       {
    1133                 :             :         /* Schedule test to fail if the block was reached but somehow none
    1134                 :             :            of its predecessors were.  */
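                         :             :         /* Together with build_block_check, the sequence amounts to
                         :             :            (sketch):
                         :             :              ckpart = <BB's bit>;
                         :             :              for each pred P:  ckpart &= !<P's bit>;
                         :             :              ckfail |= ckpart;
                         :             :            raising CKFAIL if BB ran but none of its predecessors
                         :             :            did.  */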
    1135                 :        1321 :         tree bit = fold_convert (TREE_TYPE (ckpart), vindex (bb, &ckseq));
    1136                 :        1321 :         gassign *blkrunp = gimple_build_assign (ckpart, bit);
    1137                 :        1321 :         gimple_seq_add_stmt (&ckseq, blkrunp);
    1138                 :             : 
    1139                 :        4085 :         for (int i = 0, e = EDGE_COUNT (bb->preds); i < e; i++)
    1140                 :        1443 :           build_block_check (EDGE_PRED (bb, i)->src);
    1141                 :        1321 :         gimple *orfailp = gimple_build_assign (ckfail, BIT_IOR_EXPR,
    1142                 :             :                                                ckfail, ckpart);
    1143                 :        1321 :         gimple_seq_add_stmt (&ckseq, orfailp);
    1144                 :             : 
    1145                 :             :         /* Likewise for successors.  */
    1146                 :        1321 :         gassign *blkruns = gimple_build_assign (ckpart, unshare_expr (bit));
    1147                 :        1321 :         gimple_seq_add_stmt (&ckseq, blkruns);
    1148                 :             : 
    1149                 :        1321 :         if (checkpoint)
    1150                 :         715 :           build_block_check (EXIT_BLOCK_PTR_FOR_FN (cfun));
    1151                 :        3676 :         for (int i = 0, e = EDGE_COUNT (bb->succs); i < e; i++)
    1152                 :        1168 :           build_block_check (EDGE_SUCC (bb, i)->dest);
    1153                 :             : 
    1154                 :        1321 :         gimple *orfails = gimple_build_assign (ckfail, BIT_IOR_EXPR,
    1155                 :             :                                                ckfail, ckpart);
    1156                 :        1321 :         gimple_seq_add_stmt (&ckseq, orfails);
    1157                 :             :       }
    1158                 :        4616 :   }
    1159                 :             : };
    1160                 :             : 
    1161                 :             : /* Avoid checking before noreturn calls that are known (expected,
    1162                 :             :    really) to finish by throwing an exception, rather than by ending
    1163                 :             :    the program or looping forever.  Such functions have to be
     1164                 :             :    annotated with an attribute (expected_throw) or flag (ECF_XTHROW),
    1165                 :             :    so that exception-raising functions, such as C++'s __cxa_throw,
    1166                 :             :    __cxa_rethrow, and Ada's gnat_rcheck_*, gnat_reraise*,
    1167                 :             :    ada.exception.raise_exception*, and the language-independent
     1168                 :             :    unwinders can be detected here and handled differently from other
    1169                 :             :    noreturn functions.  */
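                         :             : /* A raising runtime entry point would carry annotations along these
                         :             :    lines (an illustrative declaration, not one from GCC itself):
                         :             : 
                         :             :      extern void my_raise (void)
                         :             :        __attribute__ ((noreturn, expected_throw));  */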
    1170                 :             : static bool
    1171                 :         323 : always_throwing_noreturn_call_p (gimple *stmt)
    1172                 :             : {
    1173                 :         323 :   if (!is_a <gcall *> (stmt))
    1174                 :           0 :     return is_a <gresx *> (stmt);
    1175                 :             : 
    1176                 :         323 :   gcall *call = as_a <gcall *> (stmt);
    1177                 :         323 :   return (gimple_call_noreturn_p (call)
    1178                 :         646 :           && gimple_call_expected_throw_p (call));
    1179                 :             : }
    1180                 :             : 
    1181                 :             : /* Control flow redundancy hardening: record the execution path, and
     1182                 :             :    verify at exit that an expected path was taken.  */
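                         :             : /* The pass is gated by -fharden-control-flow-redundancy; the flag_*
                         :             :    variables tested below correspond to companion options such as
                         :             :    -fhardcfr-check-exceptions, -fhardcfr-check-noreturn-calls= and
                         :             :    -fhardcfr-skip-leaf (an illustrative, not exhaustive, mapping).  */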
    1183                 :             : 
    1184                 :             : unsigned int
    1185                 :        1635 : pass_harden_control_flow_redundancy::execute (function *fun)
    1186                 :             : {
    1187                 :        3270 :   bool const check_at_escaping_exceptions
    1188                 :        1635 :     = (flag_exceptions
    1189                 :        1635 :        && flag_harden_control_flow_redundancy_check_exceptions);
    1190                 :        1635 :   bool const check_before_noreturn_calls
    1191                 :        1635 :     = flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NEVER;
    1192                 :        3270 :   bool const check_before_nothrow_noreturn_calls
    1193                 :             :     = (check_before_noreturn_calls
    1194                 :        1635 :        && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_NOTHROW);
    1195                 :        3270 :   bool const check_before_throwing_noreturn_calls
    1196                 :             :     = (flag_exceptions
    1197                 :         915 :        && check_before_noreturn_calls
    1198                 :        2315 :        && flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NOTHROW);
    1199                 :        3270 :   bool const check_before_always_throwing_noreturn_calls
    1200                 :             :     = (flag_exceptions
    1201                 :         915 :        && check_before_noreturn_calls
    1202                 :        2315 :        && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_ALWAYS);
    1203                 :        1635 :   basic_block bb;
    1204                 :        1635 :   basic_block bb_eh_cleanup = NULL;
    1205                 :             : 
    1206                 :        1635 :   if (flag_harden_control_flow_redundancy_skip_leaf)
    1207                 :             :     {
    1208                 :          66 :       bool found_calls_p = false;
    1209                 :             : 
    1210                 :         424 :       FOR_EACH_BB_FN (bb, fun)
    1211                 :             :         {
    1212                 :         376 :           for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
    1213                 :        1726 :                !gsi_end_p (gsi); gsi_prev (&gsi))
    1214                 :         693 :             if (is_a <gcall *> (gsi_stmt (gsi)))
    1215                 :             :               {
    1216                 :             :                 found_calls_p = true;
    1217                 :             :                 break;
    1218                 :             :               }
    1219                 :         376 :           if (found_calls_p)
    1220                 :             :             break;
    1221                 :             :         }
    1222                 :             : 
    1223                 :          66 :       if (!found_calls_p)
    1224                 :             :         {
    1225                 :          48 :           if (dump_file)
    1226                 :          48 :             fprintf (dump_file,
    1227                 :             :                      "Disabling CFR for leaf function, as requested\n");
    1228                 :             : 
    1229                 :          48 :           return 0;
    1230                 :             :         }
    1231                 :             :     }
    1232                 :             : 
    1233                 :        1587 :   if (check_at_escaping_exceptions)
    1234                 :             :     {
    1235                 :         772 :       int lp_eh_cleanup = -1;
    1236                 :             : 
    1237                 :             :       /* Record the preexisting blocks, to avoid visiting newly-created
    1238                 :             :          blocks.  */
    1239                 :         772 :       auto_sbitmap to_visit (last_basic_block_for_fn (fun));
    1240                 :         772 :       bitmap_clear (to_visit);
    1241                 :             : 
    1242                 :        2963 :       FOR_EACH_BB_FN (bb, fun)
    1243                 :        2191 :         bitmap_set_bit (to_visit, bb->index);
    1244                 :             : 
    1245                 :             :       /* Scan the blocks for stmts with escaping exceptions, that
    1246                 :             :          wouldn't be denoted in the CFG, and associate them with an
    1247                 :             :          empty cleanup handler around the whole function.  Walk
     1248                 :             :          backwards, so that even when we split the block, the stmts
                         :             :          still to be visited remain in the block being walked.  */
    1249                 :         772 :       sbitmap_iterator it;
    1250                 :         772 :       unsigned i;
    1251                 :        3735 :       EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    1252                 :             :         {
    1253                 :        2191 :           bb = BASIC_BLOCK_FOR_FN (fun, i);
    1254                 :             : 
    1255                 :        2191 :           for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
    1256                 :       10319 :                !gsi_end_p (gsi); gsi_prev (&gsi))
    1257                 :             :             {
    1258                 :        4064 :               gimple *stmt = gsi_stmt (gsi);
    1259                 :        4064 :               if (!stmt_could_throw_p (fun, stmt))
    1260                 :        3114 :                 continue;
    1261                 :             : 
    1262                 :             :               /* If it must not throw, or if it already has a handler,
    1263                 :             :                  we need not worry about it.  */
    1264                 :         950 :               if (lookup_stmt_eh_lp (stmt) != 0)
    1265                 :         365 :                 continue;
    1266                 :             : 
    1267                 :             :               /* Don't split blocks at, nor add EH edges to, tail
     1268                 :             :                  calls; we will add verification before the call
    1269                 :             :                  anyway.  */
    1270                 :         585 :               if (is_a <gcall *> (stmt)
    1271                 :         585 :                   && (gimple_call_must_tail_p (as_a <gcall *> (stmt))
    1272                 :         410 :                       || gimple_call_tail_p (as_a <gcall *> (stmt))
    1273                 :         410 :                       || returning_call_p (as_a <gcall *> (stmt))))
    1274                 :          41 :                 continue;
    1275                 :             : 
    1276                 :         544 :               if (!gsi_one_before_end_p (gsi))
    1277                 :         105 :                 split_block (bb, stmt);
     1278                 :             :               /* A resx or noreturn call need not be associated with
    1279                 :             :                  the cleanup handler if we're going to add checking
    1280                 :             :                  before it.  We only test cases that didn't require
    1281                 :             :                  block splitting because noreturn calls would always
    1282                 :             :                  be at the end of blocks, and we test for zero
    1283                 :             :                  successors because if there is an edge, it's not
    1284                 :             :                  noreturn, as any EH edges would have already been
    1285                 :             :                  caught by the lookup_stmt_eh_lp test above.  */
    1286                 :         500 :               else if (check_before_noreturn_calls
    1287                 :         341 :                        && EDGE_COUNT (bb->succs) == 0
    1288                 :         678 :                        && (is_a <gresx *> (stmt)
    1289                 :         107 :                            ? check_before_always_throwing_noreturn_calls
    1290                 :         107 :                            : (!is_a <gcall *> (stmt)
    1291                 :         107 :                               || !gimple_call_noreturn_p (stmt))
    1292                 :         107 :                            ? (gcc_unreachable (), false)
    1293                 :         107 :                            : (!flag_exceptions
    1294                 :         107 :                               || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
    1295                 :         214 :                            ? check_before_nothrow_noreturn_calls
    1296                 :         107 :                            : always_throwing_noreturn_call_p (stmt)
    1297                 :             :                            ? check_before_always_throwing_noreturn_calls
    1298                 :             :                            : check_before_throwing_noreturn_calls))
    1299                 :             :                 {
    1300                 :          61 :                   if (dump_file)
    1301                 :             :                     {
    1302                 :          61 :                       fprintf (dump_file,
    1303                 :             :                                "Bypassing cleanup for noreturn stmt"
    1304                 :             :                                " in block %i:\n",
    1305                 :             :                                bb->index);
    1306                 :          61 :                       print_gimple_stmt (dump_file, stmt, 0);
    1307                 :             :                     }
    1308                 :          61 :                   continue;
    1309                 :             :                 }
    1310                 :             : 
    1311                 :         483 :               if (!bb_eh_cleanup)
    1312                 :             :                 {
    1313                 :         407 :                   bb_eh_cleanup = create_empty_bb (bb);
    1314                 :         407 :                   if (dom_info_available_p (CDI_DOMINATORS))
    1315                 :         407 :                     set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
    1316                 :         407 :                   if (current_loops)
    1317                 :         407 :                     add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);
    1318                 :             : 
    1319                 :             :                   /* Make the new block an EH cleanup for the call.  */
    1320                 :         407 :                   eh_region new_r = gen_eh_region_cleanup (NULL);
    1321                 :         407 :                   eh_landing_pad lp = gen_eh_landing_pad (new_r);
    1322                 :         407 :                   tree label = gimple_block_label (bb_eh_cleanup);
    1323                 :         407 :                   lp->post_landing_pad = label;
    1324                 :         407 :                   EH_LANDING_PAD_NR (label) = lp_eh_cleanup = lp->index;
    1325                 :             : 
    1326                 :             :                   /* Just propagate the exception.
    1327                 :             :                      We will later insert the verifier call.  */
    1328                 :         407 :                   gimple_stmt_iterator ehgsi;
    1329                 :         407 :                   ehgsi = gsi_after_labels (bb_eh_cleanup);
    1330                 :         407 :                   gresx *resx = gimple_build_resx (new_r->index);
    1331                 :         407 :                   gsi_insert_before (&ehgsi, resx, GSI_SAME_STMT);
    1332                 :             : 
    1333                 :         407 :                   if (dump_file)
    1334                 :         406 :                     fprintf (dump_file,
    1335                 :             :                              "Created cleanup block %i:\n",
    1336                 :             :                              bb_eh_cleanup->index);
    1337                 :             :                 }
    1338                 :          76 :               else if (dom_info_available_p (CDI_DOMINATORS))
    1339                 :             :                 {
    1340                 :          76 :                   basic_block immdom;
    1341                 :          76 :                   immdom = get_immediate_dominator (CDI_DOMINATORS,
    1342                 :             :                                                     bb_eh_cleanup);
    1343                 :          76 :                   if (!dominated_by_p (CDI_DOMINATORS, bb, immdom))
    1344                 :             :                     {
    1345                 :          76 :                       immdom = nearest_common_dominator (CDI_DOMINATORS,
    1346                 :             :                                                          immdom, bb);
    1347                 :          76 :                       set_immediate_dominator (CDI_DOMINATORS,
    1348                 :             :                                                bb_eh_cleanup, immdom);
    1349                 :             :                     }
    1350                 :             :                 }
    1351                 :             : 
    1352                 :         483 :               if (dump_file)
    1353                 :             :                 {
    1354                 :         482 :                   fprintf (dump_file,
    1355                 :             :                            "Associated cleanup block with stmt in block %i:\n",
    1356                 :             :                            bb->index);
    1357                 :         482 :                   print_gimple_stmt (dump_file, stmt, 0);
    1358                 :             :                 }
    1359                 :             : 
    1360                 :         483 :               add_stmt_to_eh_lp (stmt, lp_eh_cleanup);
    1361                 :             :               /* Finally, wire the EH cleanup block into the CFG.  */
    1362                 :         483 :               edge neeh = make_eh_edge (stmt);
    1363                 :         483 :               neeh->probability = profile_probability::never ();
    1364                 :         483 :               gcc_checking_assert (neeh->dest == bb_eh_cleanup);
    1365                 :         483 :               if (neeh->dest->count.initialized_p ())
    1366                 :          76 :                 neeh->dest->count += neeh->count ();
    1367                 :             :               else
    1368                 :         407 :                 neeh->dest->count = neeh->count ();
    1369                 :             :             }
    1370                 :             :         }
    1371                 :             : 
    1372                 :         772 :       if (bb_eh_cleanup)
    1373                 :             :         {
    1374                 :             :           /* A cfg_cleanup after bb_eh_cleanup makes for a more compact
     1375                 :             :              rtcfg, and it avoids bb numbering differences when blocks
     1376                 :             :              are split only because of trailing debug insns.  */
    1377                 :         407 :           cleanup_tree_cfg ();
    1378                 :         772 :           gcc_checking_assert (EDGE_COUNT (bb_eh_cleanup->succs) == 0);
    1379                 :             :         }
    1380                 :         772 :     }
    1381                 :             : 
    1382                 :             :   /* These record blocks with calls that are to be preceded by
    1383                 :             :      checkpoints, such as noreturn calls (if so chosen), must-tail
    1384                 :             :      calls, potential early-marked tail calls, and returning calls (if
    1385                 :             :      so chosen).  */
    1386                 :        1587 :   int count_chkcall = 0;
    1387                 :        1587 :   auto_sbitmap chkcall_blocks (last_basic_block_for_fn (fun));
    1388                 :        1587 :   bitmap_clear (chkcall_blocks);
    1389                 :             : 
    1390                 :             :   /* We wish to add verification at blocks without successors, such as
    1391                 :             :      noreturn calls (raising or not) and the reraise at the cleanup
    1392                 :             :      block, but not other reraises: they will go through the cleanup
    1393                 :             :      block.  */
    1394                 :        1587 :   if (check_before_noreturn_calls)
    1395                 :        4810 :     FOR_EACH_BB_FN (bb, fun)
    1396                 :             :       {
    1397                 :        3611 :         gimple_stmt_iterator gsi = gsi_last_bb (bb);
    1398                 :        3611 :         if (gsi_end_p (gsi))
    1399                 :        3611 :           continue;
    1400                 :        3600 :         gimple *stmt = gsi_stmt (gsi);
    1401                 :             : 
    1402                 :        3600 :         if (EDGE_COUNT (bb->succs) == 0)
    1403                 :             :           {
    1404                 :             :             /* A stmt at the end of a block without any successors is
    1405                 :             :                either a resx or a noreturn call without a local
    1406                 :             :                handler.  Check that it's one of the desired
    1407                 :             :                checkpoints.  */
    1408                 :        1346 :             if (flag_exceptions && is_a <gresx *> (stmt)
    1409                 :        1249 :                 ? (check_before_always_throwing_noreturn_calls
    1410                 :         339 :                    || bb == bb_eh_cleanup)
    1411                 :         455 :                 : (!is_a <gcall *> (stmt)
    1412                 :         455 :                    || !gimple_call_noreturn_p (stmt))
    1413                 :         455 :                 ? (stmt_can_make_abnormal_goto (stmt)
    1414                 :             :                    /* ??? Check before indirect nonlocal goto, or
    1415                 :             :                       calls thereof?  */
    1416                 :           0 :                    ? false
    1417                 :             :                    /* Catch cases in which successors would be
    1418                 :             :                       expected.  */
    1419                 :           0 :                    : (gcc_unreachable (), false))
    1420                 :         455 :                 : (!flag_exceptions
    1421                 :         213 :                    || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
    1422                 :         668 :                 ? check_before_nothrow_noreturn_calls
    1423                 :          31 :                 : always_throwing_noreturn_call_p (stmt)
    1424                 :             :                 ? check_before_always_throwing_noreturn_calls
    1425                 :             :                 : check_before_throwing_noreturn_calls)
    1426                 :             :               {
    1427                 :         794 :                 if (dump_file)
    1428                 :             :                   {
    1429                 :         793 :                     fprintf (dump_file,
    1430                 :             :                              "Scheduling check before stmt"
    1431                 :             :                              " in succ-less block %i:\n",
    1432                 :             :                              bb->index);
    1433                 :         793 :                     print_gimple_stmt (dump_file, stmt, 0);
    1434                 :             :                   }
    1435                 :             : 
    1436                 :         794 :                 if (bitmap_set_bit (chkcall_blocks, bb->index))
    1437                 :         794 :                   count_chkcall++;
    1438                 :             :                 else
    1439                 :           0 :                   gcc_unreachable ();
    1440                 :             :               }
    1441                 :         794 :             continue;
    1442                 :             :           }
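                         :             :         /* Editor's sketch: the nested conditionals above, spelled out
                         :             :            as an if/else chain for readability (same logic, not the
                         :             :            actual pass source; `check' is a hypothetical local):
                         :             : 
                         :             :              bool check;
                         :             :              if (flag_exceptions && is_a <gresx *> (stmt))
                         :             :                check = (check_before_always_throwing_noreturn_calls
                         :             :                         || bb == bb_eh_cleanup);
                         :             :              else if (!is_a <gcall *> (stmt)
                         :             :                       || !gimple_call_noreturn_p (stmt))
                         :             :                {
                         :             :                  if (!stmt_can_make_abnormal_goto (stmt))
                         :             :                    gcc_unreachable ();  // successors were expected
                         :             :                  check = false;
                         :             :                }
                         :             :              else if (!flag_exceptions
                         :             :                       || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
                         :             :                check = check_before_nothrow_noreturn_calls;
                         :             :              else if (always_throwing_noreturn_call_p (stmt))
                         :             :                check = check_before_always_throwing_noreturn_calls;
                         :             :              else
                         :             :                check = check_before_throwing_noreturn_calls;
                         :             : 
                         :             :            The analogous conditional further down, for blocks whose
                         :             :            only successor is an EH edge, differs in that the
                         :             :            bb_eh_cleanup and gcc_unreachable cases are gone and nothrow
                         :             :            noreturn calls never get a check.  */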
    1443                 :             : 
    1444                 :             :         /* If there are no exceptions, it would seem like any noreturn
    1445                 :             :            call must have zero successor edges, but __builtin_return
    1446                 :             :            gets successor edges.  We don't want to handle it here; it
    1447                 :             :            will be dealt with in sibcall_search_preds.  Otherwise,
    1448                 :             :            check for blocks without non-EH successors, but skip those
    1449                 :             :            with resx stmts and edges (i.e., those other than that in
    1450                 :             :            bb_eh_cleanup), since those will go through bb_eh_cleanup,
    1451                 :             :            which will have been counted as noreturn above because it
    1452                 :             :            has no successors.  */
    1453                 :        2806 :         gcc_checking_assert (bb != bb_eh_cleanup
    1454                 :             :                              || !check_at_escaping_exceptions);
    1455                 :        1644 :         if (flag_exceptions && is_a <gresx *> (stmt)
    1456                 :        2806 :             ? check_before_always_throwing_noreturn_calls
    1457                 :        2692 :             : (!is_a <gcall *> (stmt)
    1458                 :         789 :                || !gimple_call_noreturn_p (stmt))
    1459                 :        2692 :             ? false
    1460                 :         365 :             : (!flag_exceptions
    1461                 :         213 :                || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
    1462                 :         185 :             ? false /* rather than check_before_nothrow_noreturn_calls */
    1463                 :         185 :             : always_throwing_noreturn_call_p (stmt)
    1464                 :         185 :             ? check_before_always_throwing_noreturn_calls
    1465                 :             :             : check_before_throwing_noreturn_calls)
    1466                 :             :           {
    1467                 :          26 :             gcc_checking_assert (single_succ_p (bb)
    1468                 :             :                                  && (single_succ_edge (bb)->flags & EDGE_EH));
    1469                 :             : 
    1470                 :          26 :             if (dump_file)
    1471                 :             :               {
    1472                 :          26 :                 fprintf (dump_file,
    1473                 :             :                          "Scheduling check before stmt"
    1474                 :             :                          " in EH-succ block %i:\n",
    1475                 :             :                          bb->index);
    1476                 :          26 :                 print_gimple_stmt (dump_file, stmt, 0);
    1477                 :             :               }
    1478                 :             : 
    1479                 :          26 :             if (bitmap_set_bit (chkcall_blocks, bb->index))
    1480                 :          26 :               count_chkcall++;
    1481                 :             :             else
    1482                 :           0 :               gcc_unreachable ();
    1483                 :             :           }
    1484                 :             :       }
    1485                 :         388 :   else if (bb_eh_cleanup)
    1486                 :             :     {
    1487                 :          98 :       if (bitmap_set_bit (chkcall_blocks, bb_eh_cleanup->index))
    1488                 :          98 :         count_chkcall++;
    1489                 :             :       else
    1490                 :           0 :         gcc_unreachable ();
    1491                 :             :     }
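                         :             :   /* Editor's note: even with check_before_noreturn_calls disabled,
                         :             :      the reraise in bb_eh_cleanup, when that block exists, is still
                         :             :      made a checkpoint, so exceptions escaping the function remain
                         :             :      verified; the assert below relies on this.  */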
    1492                 :             : 
    1493                 :        1297 :   gcc_checking_assert (!bb_eh_cleanup
    1494                 :             :                        || bitmap_bit_p (chkcall_blocks, bb_eh_cleanup->index));
    1495                 :             : 
    1496                 :             :   /* If we don't have edges to exit nor noreturn calls (including the
    1497                 :             :      cleanup reraise), then we may skip instrumentation: that would
    1498                 :             :      amount to a function that ends with an infinite loop.  */
    1499                 :        1587 :   if (!count_chkcall
    1500                 :        1587 :       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    1501                 :             :     {
    1502                 :          67 :       if (dump_file)
    1503                 :          67 :         fprintf (dump_file,
    1504                 :             :                  "Disabling CFR, no exit paths to check\n");
    1505                 :             : 
    1506                 :          67 :       return 0;
    1507                 :             :     }
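                         :             :   /* Editor's note: a function such as
                         :             : 
                         :             :        void spin (void) { for (;;) continue; }
                         :             : 
                         :             :      has no edges into the exit block and no noreturn calls, so there
                         :             :      is no point at which the runtime check could ever run;
                         :             :      instrumenting it would only add overhead.  */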
    1508                 :             : 
    1509                 :             :   /* Search for must-tail calls, early-marked potential tail calls,
    1510                 :             :      and, if requested, returning calls.  As we introduce early checks
    1511                 :             :      before such calls, record the affected blocks in postchk_blocks.  */
    1512                 :        1520 :   int count_postchk = 0;
    1513                 :        1520 :   auto_sbitmap postchk_blocks (last_basic_block_for_fn (fun));
    1514                 :        1520 :   bitmap_clear (postchk_blocks);
    1515                 :        1520 :   chk_edges_t chk_edges;
    1516                 :        1520 :   hardcfr_sibcall_search_preds (EXIT_BLOCK_PTR_FOR_FN (fun), chk_edges,
    1517                 :             :                                 count_chkcall, chkcall_blocks,
    1518                 :             :                                 count_postchk, postchk_blocks,
    1519                 :             :                                 NULL);
    1520                 :             : 
    1521                 :        3040 :   rt_bb_visited vstd (chk_edges.length () + count_chkcall);
    1522                 :             : 
    1523                 :        1520 :   auto_sbitmap combined_blocks (last_basic_block_for_fn (fun));
    1524                 :        1520 :   bitmap_copy (combined_blocks, chkcall_blocks);
    1525                 :        1520 :   int i;
    1526                 :        1520 :   edge *e;
    1527                 :        3954 :   FOR_EACH_VEC_ELT (chk_edges, i, e)
    1528                 :         914 :     if (!bitmap_set_bit (combined_blocks, (*e)->src->index))
    1529                 :             :       /* There may be multiple chk_edges with the same src block;
    1530                 :             :          guard against overlaps with chkcall_blocks only.  */
    1531                 :           0 :       gcc_assert (!bitmap_bit_p (chkcall_blocks, (*e)->src->index));
    1532                 :             : 
    1533                 :             :   /* Visit blocks in index order, because building rtcfg depends on
    1534                 :             :      that.  Blocks must be compact, which the cleanup_cfg requirement
    1535                 :             :      ensures.  This would also enable FOR_EACH_BB_FN to be used to
    1536                 :             :      iterate in index order, but bb_eh_cleanup block splits and
    1537                 :             :      insertions change that.  */
    1538                 :        1520 :   gcc_checking_assert (n_basic_blocks_for_fn (fun)
    1539                 :             :                        == last_basic_block_for_fn (fun));
    1540                 :        6136 :   for (int i = NUM_FIXED_BLOCKS; i < n_basic_blocks_for_fn (fun); i++)
    1541                 :             :     {
    1542                 :        4616 :       bb = BASIC_BLOCK_FOR_FN (fun, i);
    1543                 :        4616 :       gcc_checking_assert (bb->index == i);
    1544                 :        4616 :       vstd.visit (bb, bitmap_bit_p (combined_blocks, i),
    1545                 :        4616 :                   bitmap_bit_p (postchk_blocks, i));
    1546                 :             :     }
    1547                 :             : 
    1548                 :        1520 :   vstd.check (chk_edges, count_chkcall, chkcall_blocks);
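                         :             :   /* Editor's note, in outline: each visit above arranged for its
                         :             :      block to flip a bit in an on-stack visited-blocks bitmap at run
                         :             :      time, and vstd.check planted the verification (a call into
                         :             :      libgcc's __hardcfr_check, matching the bitmap against the
                         :             :      compile-time rtcfg) at the checked edges and before the checked
                         :             :      noreturn calls collected above.  */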
    1549                 :             : 
    1550                 :        1520 :   return
    1551                 :             :     TODO_update_ssa
    1552                 :             :     | TODO_cleanup_cfg
    1553                 :        1520 :     | TODO_verify_il;
    1554                 :        1587 : }
    1555                 :             : 
    1556                 :             : /* Instantiate a hardcfr pass.  */
    1557                 :             : 
    1558                 :             : gimple_opt_pass *
    1559                 :      280114 : make_pass_harden_control_flow_redundancy (gcc::context *ctxt)
    1560                 :             : {
    1561                 :      280114 :   return new pass_harden_control_flow_redundancy (ctxt);
    1562                 :             : }
        

Generated by: LCOV version 2.1-beta

The LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.