LCOV - code coverage report
Current view: top level - gcc - gimple-harden-control-flow.cc (source / functions)
Test:      gcc.info
Test Date: 2024-04-27 14:03:13
Coverage:  Lines: 95.7 % (616 of 644)   Functions: 95.8 % (23 of 24)   Branches: - (0 of 0)
Legend: Lines: hit  not hit | Branches: + taken  - not taken  # not executed

             Branch data     Line data    Source code
       1                 :             : /* Control flow redundancy hardening.
       2                 :             :    Copyright (C) 2022-2024 Free Software Foundation, Inc.
       3                 :             :    Contributed by Alexandre Oliva <oliva@adacore.com>.
       4                 :             : 
       5                 :             : This file is part of GCC.
       6                 :             : 
       7                 :             : GCC is free software; you can redistribute it and/or modify it under
       8                 :             : the terms of the GNU General Public License as published by the Free
       9                 :             : Software Foundation; either version 3, or (at your option) any later
      10                 :             : version.
      11                 :             : 
      12                 :             : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      13                 :             : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      14                 :             : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      15                 :             : for more details.
      16                 :             : 
      17                 :             : You should have received a copy of the GNU General Public License
      18                 :             : along with GCC; see the file COPYING3.  If not see
      19                 :             : <http://www.gnu.org/licenses/>.  */
      20                 :             : 
      21                 :             : #include "config.h"
      22                 :             : #define INCLUDE_ALGORITHM /* find */
      23                 :             : #include "system.h"
      24                 :             : #include "coretypes.h"
      25                 :             : #include "backend.h"
      26                 :             : #include "memmodel.h"
      27                 :             : #include "tm_p.h"
      28                 :             : #include "tree.h"
      29                 :             : #include "fold-const.h"
      30                 :             : #include "gimple.h"
      31                 :             : #include "gimplify.h"
      32                 :             : #include "tree-pass.h"
      33                 :             : #include "ssa.h"
      34                 :             : #include "gimple-iterator.h"
      35                 :             : #include "gimple-pretty-print.h"
      36                 :             : #include "tree-cfg.h"
      37                 :             : #include "tree-cfgcleanup.h"
      38                 :             : #include "tree-eh.h"
      39                 :             : #include "except.h"
      40                 :             : #include "sbitmap.h"
      41                 :             : #include "basic-block.h"
      42                 :             : #include "cfghooks.h"
      43                 :             : #include "cfgloop.h"
      44                 :             : #include "cgraph.h"
      45                 :             : #include "alias.h"
      46                 :             : #include "varasm.h"
      47                 :             : #include "output.h"
      48                 :             : #include "langhooks.h"
      49                 :             : #include "diagnostic.h"
      50                 :             : #include "intl.h"
      51                 :             : 
      52                 :             : namespace {
      53                 :             : 
      54                 :             : /* This pass introduces verification, at function exits, that booleans
      55                 :             :    set in each basic block during function execution reflect the
      56                 :             :    control flow graph: for each visited block, check that at least one
      57                 :             :    predecessor and at least one successor were also visited.  This
      58                 :             :    sort of hardening may detect various kinds of attacks.  */
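
    As a rough illustration only: the net effect of the instrumentation can
    be sketched by hand as below.  This is not the emitted GIMPLE; N_BLOCKS,
    VWORD_BITS, N_WORDS and cfg_encoding are placeholders, and only the
    three-argument checker corresponds to the __builtin___hardcfr_check
    declaration further down in this file.

        #include <stddef.h>

        enum { N_BLOCKS = 8, VWORD_BITS = 32,
               N_WORDS = (N_BLOCKS + VWORD_BITS - 1) / VWORD_BITS };
        extern void __hardcfr_check (size_t, const unsigned *,
                                     const unsigned *);
        extern const unsigned cfg_encoding[];

        void f (void)
        {
          unsigned visited[N_WORDS] = { 0 };  /* one bit per basic block */

          /* On entry to each basic block BB that executes... */
          unsigned bb = 0;
          visited[bb / VWORD_BITS] |= 1u << (bb % VWORD_BITS);

          /* ...and at every exit point, verify that the visited bits form
             a connected path through the CFG.  */
          __hardcfr_check (N_BLOCKS, visited, cfg_encoding);
        }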
      59                 :             : 
      60                 :             : /* Define a pass to harden code through control flow redundancy.  */
      61                 :             : 
      62                 :             : const pass_data pass_data_harden_control_flow_redundancy = {
      63                 :             :   GIMPLE_PASS,
      64                 :             :   "hardcfr",
      65                 :             :   OPTGROUP_NONE,
      66                 :             :   TV_NONE,
      67                 :             :   PROP_cfg | PROP_ssa, // properties_required
      68                 :             :   0,        // properties_provided
      69                 :             :   0,        // properties_destroyed
       70                 :             :   TODO_cleanup_cfg, // todo_flags_start
       71                 :             :   0,        // todo_flags_finish
      72                 :             : };
      73                 :             : 
      74                 :             : class pass_harden_control_flow_redundancy : public gimple_opt_pass
      75                 :             : {
      76                 :             : public:
      77                 :      285189 :   pass_harden_control_flow_redundancy (gcc::context *ctxt)
      78                 :      570378 :     : gimple_opt_pass (pass_data_harden_control_flow_redundancy, ctxt)
      79                 :             :   {}
      80                 :           0 :   opt_pass *clone () { return new pass_harden_control_flow_redundancy (m_ctxt); }
      81                 :     1419229 :   virtual bool gate (function *fun) {
      82                 :             :     /* Return quickly if the pass is disabled, without checking any of
      83                 :             :        the conditions that might give rise to warnings that would only
      84                 :             :        be appropriate if hardening was requested.  */
      85                 :     1419229 :     if (!flag_harden_control_flow_redundancy)
      86                 :             :       return false;
      87                 :             : 
      88                 :             :     /* Functions that return more than once, like setjmp and vfork
      89                 :             :        (that also gets this flag set), will start recording a path
      90                 :             :        after the first return, and then may take another path when
      91                 :             :        they return again.  The unterminated path may then be flagged
      92                 :             :        as an error.  ??? We could save the visited array before the
      93                 :             :        call and restore it if it returns again.  */
      94                 :        1740 :     if (fun->calls_setjmp)
      95                 :             :       {
      96                 :           0 :         warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
      97                 :             :                     "%qD calls %<setjmp%> or similar,"
      98                 :             :                     " %<-fharden-control-flow-redundancy%> is not supported",
      99                 :             :                     fun->decl);
     100                 :           0 :         return false;
     101                 :             :       }
     102                 :             : 
     103                 :             :     /* Some targets bypass the abnormal dispatcher block in nonlocal
     104                 :             :        gotos, and then we'd miss its visited bit.  It might be doable
     105                 :             :        to make it work uniformly, but this feature is not used often
     106                 :             :        enough to make it worthwhile.  */
     107                 :        1740 :     if (fun->has_nonlocal_label)
     108                 :             :       {
     109                 :           0 :         warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
     110                 :             :                     "%qD receives nonlocal gotos,"
     111                 :             :                     " %<-fharden-control-flow-redundancy%> is not supported",
     112                 :             :                     fun->decl);
     113                 :           0 :         return false;
     114                 :             :       }
     115                 :             : 
     116                 :        1740 :     if (fun->cfg && param_hardcfr_max_blocks > 0
     117                 :         330 :         && (n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS
     118                 :             :             > param_hardcfr_max_blocks))
     119                 :             :       {
     120                 :          82 :         warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
     121                 :             :                     "%qD has more than %u blocks, the requested"
     122                 :             :                     " maximum for %<-fharden-control-flow-redundancy%>",
     123                 :             :                     fun->decl, param_hardcfr_max_blocks);
     124                 :          82 :         return false;
     125                 :             :       }
     126                 :             : 
     127                 :             :     return true;
     128                 :             :   }
     129                 :             :   virtual unsigned int execute (function *);
     130                 :             : };
     131                 :             : 
     132                 :             : }
     133                 :             : 
     134                 :             : /* Return TRUE iff CFR checks should be inserted before returning
     135                 :             :    calls.  */
     136                 :             : 
     137                 :             : static bool
     138                 :        1161 : check_returning_calls_p ()
     139                 :             : {
     140                 :        1161 :   return
     141                 :        1161 :     flag_harden_control_flow_redundancy_check_returning_calls > 0
     142                 :        1161 :     || (flag_harden_control_flow_redundancy_check_returning_calls < 0
     143                 :             :         /* Gates pass_tail_calls.  */
     144                 :         299 :         && flag_optimize_sibling_calls
     145                 :             :         /* Gates pass_all_optimizations.  */
     146                 :         248 :         && optimize >= 1 && !optimize_debug);
     147                 :             : }
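
    The tri-state above can be summarized as follows (a sketch, assuming the
    usual GCC convention that the underlying flag starts at -1 and is set to
    1 or 0 by -fhardcfr-check-returning-calls / its -fno- form):

        /* flag > 0   explicitly enabled   -> check
           flag == 0  explicitly disabled  -> don't check
           flag < 0   default: check only if the tail-call machinery could
                      run, i.e. -foptimize-sibling-calls at -O1 or higher
                      and not -Og.  */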
     148                 :             : 
      149                 :             : /* Scan BB from the end, updating *RETPTR, if given, as return stmts and
     150                 :             :    copies are found.  Return a call or a stmt that cannot appear after
     151                 :             :    a tail call, or NULL if the top of the block is reached without
     152                 :             :    finding any.  */
     153                 :             : 
     154                 :             : static gimple *
     155                 :        2734 : hardcfr_scan_block (basic_block bb, tree **retptr)
     156                 :             : {
     157                 :        2734 :   gimple_stmt_iterator gsi;
     158                 :        8748 :   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     159                 :             :     {
     160                 :        3391 :       gimple *stmt = gsi_stmt (gsi);
     161                 :             : 
      162                 :             :       /* Ignore labels, returns, nops, predicts, clobbers and debug stmts.  */
     163                 :        3391 :       if (gimple_code (stmt) == GIMPLE_LABEL
     164                 :             :           || gimple_code (stmt) == GIMPLE_NOP
     165                 :             :           || gimple_code (stmt) == GIMPLE_PREDICT
     166                 :        3285 :           || gimple_clobber_p (stmt)
     167                 :        3140 :           || is_gimple_debug (stmt))
     168                 :         289 :         continue;
     169                 :             : 
     170                 :        3102 :       if (gimple_code (stmt) == GIMPLE_RETURN)
     171                 :             :         {
     172                 :        1308 :           greturn *gret = as_a <greturn *> (stmt);
     173                 :        1308 :           if (retptr)
     174                 :             :             {
     175                 :        1308 :               gcc_checking_assert (!*retptr);
     176                 :        1308 :               *retptr = gimple_return_retval_ptr (gret);
     177                 :             :             }
     178                 :        1308 :           continue;
     179                 :        1308 :         }
     180                 :             : 
     181                 :             :       /* Check for a call.  */
     182                 :        1794 :       if (is_gimple_call (stmt))
     183                 :        1259 :         return stmt;
     184                 :             : 
     185                 :             :       /* Allow simple copies to the return value, updating the return
     186                 :             :          value to be found in earlier assignments.  */
     187                 :         535 :       if (retptr && *retptr && gimple_assign_single_p (stmt)
     188                 :         578 :           && **retptr == gimple_assign_lhs (stmt))
     189                 :             :         {
     190                 :          43 :           *retptr = gimple_assign_rhs1_ptr (stmt);
     191                 :          43 :           continue;
     192                 :             :         }
     193                 :             : 
     194                 :             :       return stmt;
     195                 :             :     }
     196                 :             : 
     197                 :             :   /* Any other kind of stmt will prevent a tail call.  */
     198                 :             :   return NULL;
     199                 :             : }
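
    For example, a block whose call result reaches the return through one
    copy would be traced bottom-up as in this sketch (SSA names assumed):

        _1 = bar ();        /* scan stops here and returns the call */
        retval_2 = _1;      /* simple copy: *RETPTR moves to &_1 */
        return retval_2;    /* sets *RETPTR to &retval_2, then skipped */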
     200                 :             : 
     201                 :             : /* Return TRUE iff CALL is to be preceded by a CFR checkpoint, i.e.,
     202                 :             :    if it's a returning call (one whose result is ultimately returned
     203                 :             :    without intervening non-copy statements) and we're checking
     204                 :             :    returning calls, a __builtin_return call (noreturn with a path to
     205                 :             :    the exit block), a must-tail call, or a tail call.  */
     206                 :             : 
     207                 :             : static bool
     208                 :         911 : returning_call_p (gcall *call)
     209                 :             : {
     210                 :         911 :   if (!(gimple_call_noreturn_p (call)
     211                 :         494 :         || gimple_call_must_tail_p (call)
     212                 :         494 :         || gimple_call_tail_p (call)
     213                 :         494 :         || check_returning_calls_p ()))
     214                 :             :     return false;
     215                 :             : 
     216                 :             :   /* Quickly check that there's a path to exit compatible with a
     217                 :             :      returning call.  Detect infinite loops by limiting the path
     218                 :             :      length to the basic block count, and by looking for duplicate
     219                 :             :      blocks before allocating more memory for the path, for amortized
     220                 :             :      O(n).  */
     221                 :         716 :   auto_vec<basic_block, 10> path;
     222                 :         716 :   for (basic_block bb = gimple_bb (call);
     223                 :        1435 :        bb != EXIT_BLOCK_PTR_FOR_FN (cfun);
     224                 :         719 :        bb = single_succ (bb))
     225                 :         884 :     if (!single_succ_p (bb)
     226                 :         719 :         || (single_succ_edge (bb)->flags & EDGE_EH) != 0
     227                 :        1438 :         || n_basic_blocks_for_fn (cfun) - path.length () <= NUM_FIXED_BLOCKS
     228                 :        2322 :         || (path.length () == path.allocated ()
     229                 :           0 :             && std::find (path.begin (), path.end (), bb) != path.end ()))
     230                 :         165 :       return false;
     231                 :             :     else
     232                 :         719 :       path.safe_push (bb);
     233                 :             : 
     234                 :             :   /* Check the stmts in the blocks and trace the return value.  */
     235                 :         551 :   tree *retptr = NULL;
     236                 :         719 :   for (;;)
     237                 :             :     {
     238                 :         719 :       gcc_checking_assert (!path.is_empty ());
     239                 :         719 :       basic_block bb = path.pop ();
     240                 :         719 :       gimple *stop = hardcfr_scan_block (bb, &retptr);
     241                 :         719 :       if (stop)
     242                 :             :         {
     243                 :         551 :           if (stop != call)
     244                 :             :             return false;
     245                 :         531 :           gcc_checking_assert (path.is_empty ());
     246                 :         531 :           break;
     247                 :             :         }
     248                 :             : 
     249                 :         168 :       gphi *retphi = NULL;
     250                 :         168 :       if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
     251                 :           3 :           && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
     252                 :           3 :           && SSA_NAME_DEF_STMT (*retptr)
     253                 :           3 :           && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
     254                 :         168 :           && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
     255                 :             :         {
     256                 :           0 :           retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
     257                 :           0 :           gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
     258                 :             :         }
     259                 :             :       else
     260                 :         168 :         continue;
     261                 :             : 
     262                 :           0 :       gcc_checking_assert (!path.is_empty ());
     263                 :           0 :       edge e = single_succ_edge (path.last ());
     264                 :           0 :       int i = EDGE_COUNT (bb->preds);
     265                 :           0 :       while (i--)
     266                 :           0 :         if (EDGE_PRED (bb, i) == e)
     267                 :             :           break;
     268                 :           0 :       gcc_checking_assert (i >= 0);
     269                 :           0 :       retptr = gimple_phi_arg_def_ptr (retphi, i);
     270                 :             :     }
     271                 :             : 
     272                 :         531 :   return (gimple_call_noreturn_p (call)
     273                 :         259 :           || gimple_call_must_tail_p (call)
     274                 :         259 :           || gimple_call_tail_p (call)
     275                 :         790 :           || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
     276                 :         259 :               && check_returning_calls_p ()));
     277                 :         716 : }
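
    The path-tracing idiom above (cap the length, and scan for duplicates
    only when the vector is about to grow) can be isolated as in this
    self-contained sketch, with std::vector standing in for auto_vec and
    all names assumed:

        #include <algorithm>
        #include <cstddef>
        #include <vector>

        /* Follow a single-successor chain from START until SENTINEL,
           returning false on a cycle.  The duplicate scan runs only when
           the vector is full, so the total scanning cost across all growth
           points stays O(n), keeping the walk amortized linear.  */
        template <typename Node, typename NextFn>
        bool acyclic_chain_p (Node start, Node sentinel, NextFn next,
                              size_t max_len)
        {
          std::vector<Node> path;
          for (Node n = start; n != sentinel; n = next (n))
            {
              if (path.size () >= max_len
                  || (path.size () == path.capacity ()
                      && (std::find (path.begin (), path.end (), n)
                          != path.end ())))
                return false;
              path.push_back (n);
            }
          return true;
        }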
     278                 :             : 
     279                 :             : typedef auto_vec<edge, 10> chk_edges_t;
     280                 :             : 
     281                 :             : /* Declare for mutual recursion.  */
     282                 :             : static bool hardcfr_sibcall_search_preds (basic_block bb,
     283                 :             :                                           chk_edges_t &chk_edges,
     284                 :             :                                           int &count_chkcall,
     285                 :             :                                           auto_sbitmap &chkcall_blocks,
     286                 :             :                                           int &count_postchk,
     287                 :             :                                           auto_sbitmap &postchk_blocks,
     288                 :             :                                           tree *retptr);
     289                 :             : 
     290                 :             : /* Search backwards from the end of BB for a mandatory or potential
     291                 :             :    sibcall.  Schedule the block to be handled sort-of like noreturn if
      292                 :             :    sibcall, scheduling the block to be handled like noreturn if one is
      293                 :             :    found.  Recurse to preds, with updated RETPTR, if the block only
     294                 :             :    edges and marking blocks as post-check as needed.  Return true iff,
     295                 :             :    at the end of the block, a check will have already been
     296                 :             :    performed.  */
     297                 :             : 
     298                 :             : static bool
     299                 :        2672 : hardcfr_sibcall_search_block (basic_block bb,
     300                 :             :                               chk_edges_t &chk_edges,
     301                 :             :                               int &count_chkcall,
     302                 :             :                               auto_sbitmap &chkcall_blocks,
     303                 :             :                               int &count_postchk,
     304                 :             :                               auto_sbitmap &postchk_blocks,
     305                 :             :                               tree *retptr)
     306                 :             : {
     307                 :             :   /* Conditionals and internal exceptions rule out tail calls.  */
     308                 :        2672 :   if (!single_succ_p (bb)
     309                 :        2672 :       || (single_succ_edge (bb)->flags & EDGE_EH) != 0)
     310                 :             :     return false;
     311                 :             : 
     312                 :        2015 :   gimple *stmt = hardcfr_scan_block (bb, &retptr);
     313                 :        2015 :   if (!stmt)
     314                 :         815 :     return hardcfr_sibcall_search_preds (bb, chk_edges,
     315                 :             :                                          count_chkcall, chkcall_blocks,
     316                 :             :                                          count_postchk, postchk_blocks,
     317                 :         815 :                                          retptr);
     318                 :             : 
     319                 :        1200 :   if (!is_a <gcall *> (stmt))
     320                 :             :     return false;
     321                 :             : 
     322                 :             :   /* Avoid disrupting mandatory or early-marked tail calls,
     323                 :             :      inserting the check before them.  This works for
     324                 :             :      must-tail calls, but tail calling as an optimization is
     325                 :             :      detected too late for us.
     326                 :             : 
     327                 :             :      Also check for noreturn calls here.  Noreturn calls won't
     328                 :             :      normally have edges to exit, so they won't be found here,
     329                 :             :      but __builtin_return does, and we must check before
     330                 :             :      it, so handle it like a tail call.  */
     331                 :         708 :   gcall *call = as_a <gcall *> (stmt);
     332                 :        1144 :   if (!(gimple_call_noreturn_p (call)
     333                 :         436 :         || gimple_call_must_tail_p (call)
     334                 :         436 :         || gimple_call_tail_p (call)
     335                 :         436 :         || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
     336                 :         408 :             && check_returning_calls_p ())))
     337                 :             :     return false;
     338                 :             : 
     339                 :         486 :   gcc_checking_assert (returning_call_p (call));
     340                 :             : 
     341                 :             :   /* We found a call that is to be preceded by checking.  */
     342                 :         486 :   if (bitmap_set_bit (chkcall_blocks, bb->index))
     343                 :         486 :     ++count_chkcall;
     344                 :             :   else
     345                 :           0 :     gcc_unreachable ();
     346                 :         486 :   return true;
     347                 :             : }
     348                 :             : 
     349                 :             : 
     350                 :             : /* Search preds of BB for a mandatory or potential sibcall or
     351                 :             :    returning call, and arrange for the blocks containing them to have
     352                 :             :    a check inserted before the call, like noreturn calls.  If any
     353                 :             :    preds are found to perform checking, schedule checks at the edges
      354                 :             :    of those that don't, and mark BB as postcheck.  */
     355                 :             : 
     356                 :             : static bool
     357                 :        2357 : hardcfr_sibcall_search_preds (basic_block bb,
     358                 :             :                               chk_edges_t &chk_edges,
     359                 :             :                               int &count_chkcall,
     360                 :             :                               auto_sbitmap &chkcall_blocks,
     361                 :             :                               int &count_postchk,
     362                 :             :                               auto_sbitmap &postchk_blocks,
     363                 :             :                               tree *retptr)
     364                 :             : {
     365                 :             :   /* For the exit block, we wish to force a check at every
     366                 :             :      predecessor, so pretend we've already found a pred that had
     367                 :             :      checking, so that we schedule checking at every one of its pred
     368                 :             :      edges.  */
     369                 :        2357 :   bool first = bb->index >= NUM_FIXED_BLOCKS;
     370                 :        2357 :   bool postchecked = true;
     371                 :             : 
     372                 :        2357 :   gphi *retphi = NULL;
     373                 :         815 :   if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
     374                 :         289 :       && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
     375                 :         219 :       && SSA_NAME_DEF_STMT (*retptr)
     376                 :         219 :       && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
     377                 :        2521 :       && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
     378                 :             :     {
     379                 :         164 :       retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
     380                 :         164 :       gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
     381                 :             :     }
     382                 :             : 
     383                 :        7336 :   for (int i = EDGE_COUNT (bb->preds); i--; first = false)
     384                 :             :     {
     385                 :        2672 :       edge e = EDGE_PRED (bb, i);
     386                 :             : 
     387                 :        2672 :       bool checked
     388                 :        3164 :         = hardcfr_sibcall_search_block (e->src, chk_edges,
     389                 :             :                                         count_chkcall, chkcall_blocks,
     390                 :             :                                         count_postchk, postchk_blocks,
     391                 :             :                                         !retphi ? retptr
     392                 :         492 :                                         : gimple_phi_arg_def_ptr (retphi, i));
     393                 :             : 
     394                 :        2672 :       if (first)
     395                 :             :         {
     396                 :         815 :           postchecked = checked;
     397                 :         815 :           continue;
     398                 :             :         }
     399                 :             : 
     400                 :             :       /* When we first find a checked block, force a check at every
     401                 :             :          other incoming edge we've already visited, and those we
     402                 :             :          visit afterwards that don't have their own check, so that
     403                 :             :          when we reach BB, the check has already been performed.  */
     404                 :        1857 :       if (!postchecked && checked)
     405                 :             :         {
     406                 :           3 :           for (int j = EDGE_COUNT (bb->preds); --j > i; )
     407                 :           1 :             chk_edges.safe_push (EDGE_PRED (bb, j));
     408                 :             :           postchecked = true;
     409                 :             :         }
     410                 :        1857 :       if (postchecked && !checked)
     411                 :         928 :         chk_edges.safe_push (EDGE_PRED (bb, i));
     412                 :             :     }
     413                 :             : 
     414                 :        2357 :   if (postchecked && bb->index >= NUM_FIXED_BLOCKS)
     415                 :             :     {
     416                 :          78 :       if (bitmap_set_bit (postchk_blocks, bb->index))
     417                 :          78 :         count_postchk++;
     418                 :             :       else
     419                 :           0 :         gcc_unreachable ();
     420                 :             :     }
     421                 :             : 
     422                 :        2357 :   return postchecked;
     423                 :             : }
     424                 :             : 
     425                 :             : 
     426                 :             : class rt_bb_visited
     427                 :             : {
     428                 :             :   /* Use a sufficiently wide unsigned type to hold basic block numbers.  */
     429                 :             :   typedef size_t blknum;
     430                 :             : 
     431                 :             :   /* Record the original block count of the function.  */
     432                 :             :   blknum nblocks;
     433                 :             :   /* Record the number of bits per VWORD (short for VISITED WORD), an
     434                 :             :      efficient mode to set and test bits for blocks we visited, and to
     435                 :             :      encode the CFG in case out-of-line verification is used.  */
     436                 :             :   unsigned vword_bits;
     437                 :             : 
     438                 :             :   /* Hold the unsigned integral VWORD type.  */
     439                 :             :   tree vword_type;
     440                 :             :   /* Hold a pointer-to-VWORD type.  */
     441                 :             :   tree vword_ptr;
     442                 :             : 
     443                 :             :   /* Hold a growing sequence used to check, inline or out-of-line,
     444                 :             :      that VISITED encodes an expected execution path.  */
     445                 :             :   gimple_seq ckseq;
      446                 :             :   /* If non-NULL, hold a growing representation of the CFG for
     447                 :             :      out-of-line testing.  */
     448                 :             :   tree rtcfg;
     449                 :             : 
     450                 :             :   /* Hold the declaration of an array of VWORDs, used as an array of
     451                 :             :      NBLOCKS-2 bits.  */
     452                 :             :   tree visited;
     453                 :             : 
      454                 :             :   /* If performing inline checking, hold declarations of the boolean
     455                 :             :      variables used for inline checking.  CKBLK holds the result of
     456                 :             :      testing whether the VISITED bit corresponding to a predecessor or
     457                 :             :      successor is set, CKINV inverts that bit, CKPART gets cleared if
     458                 :             :      a block was not visited or if CKINV for any of its predecessors
     459                 :             :      or successors is set, and CKFAIL gets set if CKPART remains set
     460                 :             :      at the end of a block's predecessors or successors list.  */
     461                 :             :   tree ckfail, ckpart, ckinv, ckblk;
     462                 :             : 
     463                 :             :   /* If we need to deal with abnormal edges, we insert SSA_NAMEs for
     464                 :             :      boolean true and false.  */
     465                 :             :   tree vfalse, vtrue;
     466                 :             : 
     467                 :             :   /* Convert a block index N to a block vindex, the index used to
     468                 :             :      identify it in the VISITED array.  Check that it's in range:
     469                 :             :      neither ENTRY nor EXIT, but maybe one-past-the-end, to compute
     470                 :             :      the visited array length.  */
     471                 :       18820 :   blknum num2idx (blknum n) {
     472                 :       18820 :     gcc_checking_assert (n >= NUM_FIXED_BLOCKS && n <= nblocks);
     473                 :       18820 :     return (n - NUM_FIXED_BLOCKS);
     474                 :             :   }
     475                 :             :   /* Return the block vindex for BB, that must not be ENTRY or
     476                 :             :      EXIT.  */
     477                 :       15634 :   blknum bb2idx (basic_block bb) {
     478                 :       15634 :     gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
     479                 :             :                          && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
     480                 :       15634 :     gcc_checking_assert (blknum (bb->index) < nblocks);
     481                 :       15634 :     return num2idx (bb->index);
     482                 :             :   }
     483                 :             : 
     484                 :             :   /* Compute the type to be used for the VISITED array.  */
     485                 :        1542 :   tree vtype ()
     486                 :             :   {
     487                 :        1542 :     blknum n = num2idx (nblocks);
     488                 :        3084 :     return build_array_type_nelts (vword_type,
     489                 :        1542 :                                    (n + vword_bits - 1) / vword_bits);
     490                 :             :   }
     491                 :             : 
     492                 :             :   /* Compute and return the index into VISITED for block BB.  If BITP
     493                 :             :      is non-NULL, also compute and store the bit mask corresponding to
     494                 :             :      block BB in *BITP, so that (visited[index] & mask) tells whether
     495                 :             :      BB was visited.  */
     496                 :       15634 :   tree vwordidx (basic_block bb, tree *bitp = NULL)
     497                 :             :   {
     498                 :       15634 :     blknum idx = bb2idx (bb);
     499                 :       15634 :     if (bitp)
     500                 :             :       {
     501                 :       15634 :         unsigned bit = idx % vword_bits;
     502                 :             :         /* We don't need to adjust shifts to follow native bit
     503                 :             :            endianness here, all of our uses of the CFG and visited
     504                 :             :            bitmaps, whether at compile or runtime, are shifted bits on
     505                 :             :            full words.  This adjustment here would require a
     506                 :             :            corresponding adjustment at runtime, which would be nothing
     507                 :             :            but undesirable overhead for us.  */
     508                 :       15634 :         if (0 /* && BITS_BIG_ENDIAN */)
     509                 :             :           bit = vword_bits - bit - 1;
     510                 :       15634 :         wide_int wbit = wi::set_bit_in_zero (bit, vword_bits);
     511                 :       15634 :         *bitp = wide_int_to_tree (vword_type, wbit);
     512                 :       15634 :       }
     513                 :       15634 :     return build_int_cst (vword_ptr, idx / vword_bits);
     514                 :             :   }
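
    A worked example of the mapping, assuming vword_bits == 32: block
    vindex 37 yields word index 37 / 32 == 1 and mask 1 << (37 % 32) ==
    0x20, so (visited[1] & 0x20) != 0 tests whether that block ran.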
     515                 :             : 
      516                 :             :   /* Return an expr accessing the visited element that holds
     517                 :             :      information about BB.  If BITP is non-NULL, set it to the mask to
     518                 :             :      tell which bit in that expr refers to BB.  */
     519                 :        7368 :   tree vword (basic_block bb, tree *bitp = NULL)
     520                 :             :   {
     521                 :        7368 :     return build2 (MEM_REF, vword_type,
     522                 :             :                    build1 (ADDR_EXPR, vword_ptr, visited),
     523                 :        7368 :                    int_const_binop (MULT_EXPR, vwordidx (bb, bitp),
     524                 :        7368 :                                     fold_convert (vword_ptr,
     525                 :             :                                                   TYPE_SIZE_UNIT
     526                 :        7368 :                                                   (vword_type))));
     527                 :             :   }
     528                 :             : 
     529                 :             :   /* Return an expr that evaluates to true iff BB was marked as
     530                 :             :      VISITED.  Add any gimple stmts to SEQP.  */
     531                 :        4636 :   tree vindex (basic_block bb, gimple_seq *seqp)
     532                 :             :   {
     533                 :        4636 :     if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
     534                 :        3916 :         || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     535                 :        1878 :       return boolean_true_node;
     536                 :             : 
     537                 :        2758 :     tree bit, setme = vword (bb, &bit);
     538                 :        2758 :     tree temp = create_tmp_var (vword_type, ".cfrtemp");
     539                 :             : 
     540                 :        2758 :     gassign *vload = gimple_build_assign (temp, setme);
     541                 :        2758 :     gimple_seq_add_stmt (seqp, vload);
     542                 :             : 
     543                 :        2758 :     gassign *vmask = gimple_build_assign (temp, BIT_AND_EXPR, temp, bit);
     544                 :        2758 :     gimple_seq_add_stmt (seqp, vmask);
     545                 :             : 
     546                 :        2758 :     return build2 (NE_EXPR, boolean_type_node,
     547                 :        2758 :                    temp, build_int_cst (vword_type, 0));
     548                 :             :   }
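
    The sequence built here amounts to roughly the following GIMPLE (a
    sketch; the temporary reuse is as above, while WORD_OFFSET and
    BIT_MASK are illustrative):

        .cfrtemp = MEM[(vword *)&.cfrvisited + WORD_OFFSET];  /* vload */
        .cfrtemp = .cfrtemp & BIT_MASK;                       /* vmask */
        /* ...returned predicate: .cfrtemp != 0 */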
     549                 :             : 
     550                 :             :   /* Set the bit corresponding to BB in VISITED.  Add to SEQ any
     551                 :             :      required gimple stmts, and return SEQ, possibly modified.  */
     552                 :        4610 :   gimple_seq vset (basic_block bb, gimple_seq seq = NULL)
     553                 :             :   {
     554                 :        4610 :     tree bit, setme = vword (bb, &bit);
     555                 :        4610 :     tree temp = create_tmp_var (vword_type, ".cfrtemp");
     556                 :             : 
     557                 :        4610 :     gassign *vload = gimple_build_assign (temp, setme);
     558                 :        4610 :     gimple_seq_add_stmt (&seq, vload);
     559                 :             : 
     560                 :        4610 :     gassign *vbitset = gimple_build_assign (temp, BIT_IOR_EXPR, temp, bit);
     561                 :        4610 :     gimple_seq_add_stmt (&seq, vbitset);
     562                 :             : 
     563                 :        4610 :     gassign *vstore = gimple_build_assign (unshare_expr (setme), temp);
     564                 :        4610 :     gimple_seq_add_stmt (&seq, vstore);
     565                 :             : 
     566                 :             :     /* Prevent stores into visited from being deferred, forcing
     567                 :             :        subsequent bitsets to reload the word rather than reusing
     568                 :             :        values already in register.  The purpose is threefold: make the
     569                 :             :        bitset get to memory in this block, so that control flow
     570                 :             :        attacks in functions called in this block don't easily bypass
     571                 :             :        the bitset; prevent the bitset word from being retained in a
     572                 :             :        register across blocks, which could, in an attack scenario,
     573                 :             :        make a later block set more than one bit; and prevent hoisting
     574                 :             :        or sinking loads or stores of bitset words out of loops or even
     575                 :             :        throughout functions, which could significantly weaken the
     576                 :             :        verification.  This is equivalent to making the bitsetting
     577                 :             :        volatile within the function body, but without changing its
     578                 :             :        type; making the bitset volatile would make inline checking far
     579                 :             :        less optimizable for no reason.  */
     580                 :        4610 :     vec<tree, va_gc> *inputs = NULL;
     581                 :        4610 :     vec<tree, va_gc> *outputs = NULL;
     582                 :        9220 :     vec_safe_push (outputs,
     583                 :             :                    build_tree_list
     584                 :        4610 :                    (build_tree_list
     585                 :             :                     (NULL_TREE, build_string (2, "=m")),
     586                 :             :                     visited));
     587                 :        9220 :     vec_safe_push (inputs,
     588                 :             :                    build_tree_list
     589                 :        4610 :                    (build_tree_list
     590                 :             :                     (NULL_TREE, build_string (1, "m")),
     591                 :             :                     visited));
     592                 :        4610 :     gasm *stabilize = gimple_build_asm_vec ("", inputs, outputs,
     593                 :             :                                             NULL, NULL);
     594                 :        4610 :     gimple_seq_add_stmt (&seq, stabilize);
     595                 :             : 
     596                 :        4610 :     return seq;
     597                 :             :   }
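
    Likewise, vset emits roughly the following (a sketch with illustrative
    names; the trailing empty asm is the stabilizer described above, with
    .cfrvisited as both "=m" output and "m" input):

        .cfrtemp = MEM[(vword *)&.cfrvisited + WORD_OFFSET];
        .cfrtemp = .cfrtemp | BIT_MASK;
        MEM[(vword *)&.cfrvisited + WORD_OFFSET] = .cfrtemp;
        __asm__ ("" : "=m" .cfrvisited : "m" .cfrvisited);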
     598                 :             : 
     599                 :             : public:
     600                 :             :   /* Prepare to add control flow redundancy testing to CFUN.  */
     601                 :        1542 :   rt_bb_visited (int checkpoints)
     602                 :        1542 :     : nblocks (n_basic_blocks_for_fn (cfun)),
     603                 :        1542 :       vword_type (NULL), ckseq (NULL), rtcfg (NULL),
     604                 :        1542 :       vfalse (NULL), vtrue (NULL)
     605                 :             :   {
     606                 :             :     /* If we've already added a declaration for the builtin checker,
     607                 :             :        extract vword_type and vword_bits from its declaration.  */
     608                 :        1542 :     if (tree checkfn = builtin_decl_explicit (BUILT_IN___HARDCFR_CHECK))
     609                 :             :       {
     610                 :        1026 :         tree check_arg_list = TYPE_ARG_TYPES (TREE_TYPE (checkfn));
     611                 :        1026 :         tree vword_const_ptr_type = TREE_VALUE (TREE_CHAIN (check_arg_list));
     612                 :        1026 :         vword_type = TYPE_MAIN_VARIANT (TREE_TYPE (vword_const_ptr_type));
     613                 :        1026 :         vword_bits = tree_to_shwi (TYPE_SIZE (vword_type));
     614                 :             :       }
     615                 :             :     /* Otherwise, select vword_bits, vword_type et al, and use it to
     616                 :             :        declare the builtin checker.  */
     617                 :             :     else
     618                 :             :       {
     619                 :             :         /* This setting needs to be kept in sync with libgcc/hardcfr.c.
     620                 :             :            We aim for at least 28 bits, which enables us to refer to as
     621                 :             :            many as 28 << 28 blocks in a function's CFG.  That's way over
     622                 :             :            4G blocks.  */
     623                 :         516 :         machine_mode VWORDmode;
     624                 :         516 :         if (BITS_PER_UNIT >= 28)
     625                 :             :           {
     626                 :             :             VWORDmode = QImode;
     627                 :             :             vword_bits = BITS_PER_UNIT;
     628                 :             :           }
     629                 :         516 :         else if (BITS_PER_UNIT >= 14)
     630                 :             :           {
     631                 :             :             VWORDmode = HImode;
     632                 :             :             vword_bits = 2 * BITS_PER_UNIT;
     633                 :             :           }
     634                 :             :         else
     635                 :             :           {
     636                 :         516 :             VWORDmode = SImode;
     637                 :         516 :             vword_bits = 4 * BITS_PER_UNIT;
     638                 :             :           }
     639                 :             : 
     640                 :         516 :         vword_type = lang_hooks.types.type_for_mode (VWORDmode, 1);
     641                 :         516 :         gcc_checking_assert (vword_bits == tree_to_shwi (TYPE_SIZE
     642                 :             :                                                          (vword_type)));
     643                 :             : 
     644                 :         516 :         vword_type = build_variant_type_copy (vword_type);
     645                 :         516 :         TYPE_ALIAS_SET (vword_type) = new_alias_set ();
     646                 :             : 
     647                 :         516 :         tree vword_const = build_qualified_type (vword_type, TYPE_QUAL_CONST);
     648                 :         516 :         tree vword_const_ptr = build_pointer_type (vword_const);
     649                 :         516 :         tree type = build_function_type_list (void_type_node, sizetype,
     650                 :             :                                               vword_const_ptr, vword_const_ptr,
     651                 :             :                                               NULL_TREE);
     652                 :         516 :         tree decl = add_builtin_function_ext_scope
     653                 :         516 :           ("__builtin___hardcfr_check",
     654                 :             :            type, BUILT_IN___HARDCFR_CHECK, BUILT_IN_NORMAL,
     655                 :             :            "__hardcfr_check", NULL_TREE);
     656                 :         516 :         TREE_NOTHROW (decl) = true;
     657                 :         516 :         set_builtin_decl (BUILT_IN___HARDCFR_CHECK, decl, true);
     658                 :             :       }
     659                 :             : 
      660                 :             :     /* The checker takes a const-qualified pointer, which we can't
      661                 :             :        reuse, so build a new one.  */
     662                 :        1542 :     vword_ptr = build_pointer_type (vword_type);
     663                 :             : 
     664                 :        1542 :     tree visited_type = vtype ();
     665                 :        1542 :     visited = create_tmp_var (visited_type, ".cfrvisited");
     666                 :             : 
     667                 :        1542 :     if (nblocks - NUM_FIXED_BLOCKS > blknum (param_hardcfr_max_inline_blocks)
     668                 :        1444 :         || checkpoints > 1)
     669                 :             :       {
     670                 :             :         /* Make sure vword_bits is wide enough for the representation
     671                 :             :            of nblocks in rtcfg.  Compare with vword_bits << vword_bits,
     672                 :             :            but avoiding overflows, shifting nblocks right instead.  If
     673                 :             :            vword_bits is wider than HOST_WIDE_INT, assume it fits, so
     674                 :             :            as to avoid undefined shifts.  */
     675                 :         822 :         gcc_assert (HOST_BITS_PER_WIDE_INT <= vword_bits
     676                 :             :                     || (((unsigned HOST_WIDE_INT)(num2idx (nblocks))
     677                 :             :                          >> vword_bits) < vword_bits));
     678                 :             : 
     679                 :             :         /* Build a terminator for the constructor list.  */
     680                 :         822 :         rtcfg = build_tree_list (NULL_TREE, NULL_TREE);
     681                 :         822 :         return;
     682                 :             :       }
     683                 :             : 
     684                 :         720 :     ckfail = create_tmp_var (boolean_type_node, ".cfrfail");
     685                 :         720 :     ckpart = create_tmp_var (boolean_type_node, ".cfrpart");
     686                 :         720 :     ckinv = create_tmp_var (boolean_type_node, ".cfrinv");
     687                 :         720 :     ckblk = create_tmp_var (boolean_type_node, ".cfrblk");
     688                 :             : 
     689                 :         720 :     gassign *ckfail_init = gimple_build_assign (ckfail, boolean_false_node);
     690                 :         720 :     gimple_seq_add_stmt (&ckseq, ckfail_init);
     691                 :             :   }
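
    In isolation, the word-width selection in the constructor above reduces
    to this sketch; on the common BITS_PER_UNIT == 8 targets it takes the
    last branch, giving SImode and vword_bits == 32:

        /* Pick the visited-word width: at least 28 bits, using the
           fewest whole units.  */
        static unsigned pick_vword_bits (unsigned bits_per_unit)
        {
          if (bits_per_unit >= 28)
            return bits_per_unit;      /* one unit (QImode) suffices */
          if (bits_per_unit >= 14)
            return 2 * bits_per_unit;  /* two units (HImode) */
          return 4 * bits_per_unit;    /* four units (SImode): 4 * 8 == 32 */
        }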
     692                 :             : 
     693                 :             :   /* Insert SEQ before a resx or a call in INSBB.  */
     694                 :        1426 :   void insert_exit_check_in_block (gimple_seq seq, basic_block insbb)
     695                 :             :   {
     696                 :        1426 :     gimple_stmt_iterator gsi = gsi_last_bb (insbb);
     697                 :             : 
     698                 :        1426 :     while (!gsi_end_p (gsi))
     699                 :        1551 :       if (is_a <gresx *> (gsi_stmt (gsi))
     700                 :        1551 :           || is_a <gcall *> (gsi_stmt (gsi)))
     701                 :             :         break;
     702                 :             :       else
     703                 :        1676 :         gsi_prev (&gsi);
     704                 :             : 
     705                 :        1426 :     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
     706                 :        1426 :   }
     707                 :             : 
     708                 :             :   /* Insert SEQ on E.  */
     709                 :         929 :   void insert_exit_check_on_edge (gimple_seq seq, edge e)
     710                 :             :   {
     711                 :         929 :     if (!(e->flags & EDGE_ABNORMAL))
     712                 :             :       {
     713                 :         928 :         gsi_insert_seq_on_edge_immediate (e, seq);
     714                 :         928 :         return;
     715                 :             :       }
     716                 :             : 
     717                 :             :     /* Initialize SSA boolean constants for use in abnormal PHIs.  */
     718                 :           1 :     if (!vfalse)
     719                 :             :       {
     720                 :           1 :         vfalse = make_ssa_name (boolean_type_node);
     721                 :           1 :         vtrue = make_ssa_name (boolean_type_node);
     722                 :             : 
     723                 :           1 :         gimple_seq vft_seq = NULL;
     724                 :           1 :         gassign *vfalse_init = gimple_build_assign (vfalse, boolean_false_node);
     725                 :           1 :         gimple_seq_add_stmt (&vft_seq, vfalse_init);
     726                 :           1 :         gassign *vtrue_init = gimple_build_assign (vtrue, boolean_true_node);
     727                 :           1 :         gimple_seq_add_stmt (&vft_seq, vtrue_init);
     728                 :             : 
     729                 :           1 :         gsi_insert_seq_on_edge_immediate (single_succ_edge
     730                 :           1 :                                           (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
     731                 :             :                                           vft_seq);
     732                 :             :       }
     733                 :             : 
     734                 :             :     /* We can't insert on abnormal edges, but we can arrange for SEQ
     735                 :             :        to execute conditionally at dest.  Add a PHI boolean with TRUE
     736                 :             :        from E and FALSE from other preds, split the whole block, add a
     737                 :             :        test for the PHI to run a new block with SEQ or skip straight
     738                 :             :        to the original block.  If there are multiple incoming abnormal
     739                 :             :        edges, we'll do this multiple times.  ??? Unless there are
     740                 :             :        multiple abnormal edges with different postcheck status, we
     741                 :             :        could split the block and redirect other edges, rearranging the
     742                 :             :        PHI nodes.  Optimizers already know how to do this, so we can
     743                 :             :        keep things simple here.  */
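                          :             :     /* A sketch of the resulting CFG, with names as in the code
                          :             :        below:
                          :             : 
                          :             :          BB:           cond_var = PHI <vtrue (E), vfalse (others)>;
                          :             :                        if (cond_var != false) goto BB_CHECK;
                          :             :                        else goto BB_POSTCHECK;
                          :             :          BB_CHECK:     SEQ;
                          :             :                        goto BB_POSTCHECK;
                          :             :          BB_POSTCHECK: original contents of BB;
                          :             : 
                          :             :        so SEQ runs only when BB is entered through E.  */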
     744                 :           1 :     basic_block bb = e->dest;
     745                 :           1 :     basic_block bb_postcheck = split_block_after_labels (bb)->dest;
     746                 :             : 
     747                 :           1 :     basic_block bb_check = create_empty_bb (e->dest);
     748                 :           1 :     bb_check->count = e->count ();
     749                 :           1 :     if (dom_info_available_p (CDI_DOMINATORS))
     750                 :           1 :       set_immediate_dominator (CDI_DOMINATORS, bb_check, bb);
     751                 :           1 :     if (current_loops)
     752                 :           1 :       add_bb_to_loop (bb_check, current_loops->tree_root);
     753                 :             : 
     754                 :           1 :     gimple_stmt_iterator chkpt = gsi_after_labels (bb_check);
     755                 :           1 :     gsi_insert_seq_before_without_update (&chkpt, seq, GSI_SAME_STMT);
     756                 :           1 :     edge edge_postcheck = make_edge (bb_check, bb_postcheck, EDGE_FALLTHRU);
     757                 :           1 :     edge_postcheck->probability = profile_probability::always ();
     758                 :             : 
     759                 :           1 :     tree cond_var = make_ssa_name (boolean_type_node);
     760                 :           1 :     gcond *cond = gimple_build_cond (NE_EXPR, cond_var, boolean_false_node,
     761                 :             :                                      NULL, NULL);
     762                 :           1 :     gimple_stmt_iterator condpt = gsi_after_labels (bb);
     763                 :           1 :     gsi_insert_before (&condpt, cond, GSI_SAME_STMT);
     764                 :           1 :     edge edge_nocheck = single_succ_edge (bb);
     765                 :           1 :     edge_nocheck->flags &= ~EDGE_FALLTHRU;
     766                 :           1 :     edge_nocheck->flags |= EDGE_FALSE_VALUE;
     767                 :           1 :     edge edge_check = make_edge (bb, bb_check, EDGE_TRUE_VALUE);
     768                 :           1 :     edge_check->probability = e->count ().probability_in (bb->count);
     769                 :           1 :     edge_nocheck->probability = edge_check->probability.invert ();
     770                 :             : 
     771                 :           1 :     gphi *cond_phi = create_phi_node (cond_var, bb);
     772                 :           5 :     for (int i = 0, ei = EDGE_COUNT (bb->preds); i < ei; i++)
     773                 :             :       {
     774                 :           3 :         edge pred = EDGE_PRED (bb, i);
     775                 :           3 :         bool check_edge = pred == e;
     776                 :           3 :         tree val = check_edge ? vtrue : vfalse;
     777                 :           3 :         add_phi_arg (cond_phi, val, pred, UNKNOWN_LOCATION);
     778                 :             :       }
     779                 :             :   }
     780                 :             : 
      781                 :             :   /* Add checking code on the edges in CHK_EDGES and in the blocks
      782                 :             :      in CHKCALL_BLOCKS, and initialization code on the entry edge.
      783                 :             :      Before this point, the CFG has been undisturbed, and all the
      784                 :             :      needed data has been collected and safely stowed.  */
     785                 :        1542 :   void check (chk_edges_t &chk_edges,
     786                 :             :               int count_chkcall, auto_sbitmap const &chkcall_blocks)
     787                 :             :   {
     788                 :             :     /* If we're using out-of-line checking, create and statically
     789                 :             :        initialize the CFG checking representation, generate the
     790                 :             :        checker call for the checking sequence, and insert it in all
     791                 :             :        exit edges, if there's more than one.  If there's only one, we
     792                 :             :        use the same logic as the inline case to insert the check
     793                 :             :        sequence.  */
     794                 :        1542 :     if (rtcfg)
     795                 :             :       {
      796                 :             :         /* Reverse the list into push order, and drop the terminator node, now turned into the head.  */
     797                 :         822 :         rtcfg = TREE_CHAIN (nreverse (rtcfg));
     798                 :             : 
     799                 :             :         /* Turn the indices stored in TREE_PURPOSE into separate
     800                 :             :            nodes.  It was useful to keep them together to enable
     801                 :             :            combination of masks and for clear separation of
     802                 :             :            terminators while constructing it, but now we have to turn
     803                 :             :            it into a sequence of words.  */
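                          :             :         /* E.g., a pair node followed by a terminator,
                          :             : 
                          :             :              ... -> (IDX . MASK) -> (NULL . 0) -> ...
                          :             : 
                          :             :            becomes the flattened word sequence
                          :             : 
                          :             :              ... -> (NULL . MASK) -> (NULL . IDX) -> (NULL . 0) -> ...
                          :             : 
                          :             :            with each mask word preceding its index word.  */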
     804                 :       20710 :         for (tree node = rtcfg; node; node = TREE_CHAIN (node))
     805                 :             :           {
     806                 :       19888 :             tree wordidx = TREE_PURPOSE (node);
     807                 :       19888 :             if (!wordidx)
     808                 :       13308 :               continue;
     809                 :             : 
     810                 :        6580 :             TREE_PURPOSE (node) = NULL_TREE;
     811                 :        6580 :             TREE_CHAIN (node) = tree_cons (NULL_TREE,
     812                 :             :                                            fold_convert (vword_type, wordidx),
     813                 :        6580 :                                            TREE_CHAIN (node));
     814                 :             :           }
     815                 :             : 
     816                 :             :         /* Build the static initializer for the array with the CFG
     817                 :             :            representation for out-of-line checking.  */
     818                 :         822 :         tree init = build_constructor_from_list (NULL_TREE, rtcfg);
     819                 :        1644 :         TREE_TYPE (init) = build_array_type_nelts (vword_type,
     820                 :        1644 :                                                    CONSTRUCTOR_NELTS (init));
     821                 :         822 :         char buf[32];
     822                 :         822 :         ASM_GENERATE_INTERNAL_LABEL (buf, "Lhardcfg",
     823                 :             :                                      current_function_funcdef_no);
     824                 :         822 :         rtcfg = build_decl (UNKNOWN_LOCATION, VAR_DECL,
     825                 :             :                             get_identifier (buf),
     826                 :         822 :                             TREE_TYPE (init));
     827                 :         822 :         TREE_READONLY (rtcfg) = 1;
     828                 :         822 :         TREE_STATIC (rtcfg) = 1;
     829                 :         822 :         TREE_ADDRESSABLE (rtcfg) = 1;
     830                 :         822 :         TREE_USED (rtcfg) = 1;
     831                 :         822 :         DECL_ARTIFICIAL (rtcfg) = 1;
     832                 :         822 :         DECL_IGNORED_P (rtcfg) = 1;
     833                 :         822 :         DECL_INITIAL (rtcfg) = init;
     834                 :         822 :         make_decl_rtl (rtcfg);
     835                 :         822 :         varpool_node::finalize_decl (rtcfg);
     836                 :             : 
     837                 :             :         /* Add the checker call to ckseq.  */
     838                 :        1644 :         gcall *call_chk = gimple_build_call (builtin_decl_explicit
     839                 :             :                                              (BUILT_IN___HARDCFR_CHECK), 3,
     840                 :             :                                              build_int_cst (sizetype,
     841                 :         822 :                                                             num2idx (nblocks)),
     842                 :             :                                              build1 (ADDR_EXPR, vword_ptr,
     843                 :             :                                                      visited),
     844                 :             :                                              build1 (ADDR_EXPR, vword_ptr,
     845                 :             :                                                      rtcfg));
     846                 :         822 :         gimple_seq_add_stmt (&ckseq, call_chk);
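                          :             : 
                          :             :         /* Conceptually (the runtime side lives in libgcc's
                          :             :            hardcfr.c), __hardcfr_check (SIZE, VISITED, CFG) scans the
                          :             :            CFG words and, for each block whose bit is set in VISITED,
                          :             :            verifies that at least one of its recorded predecessors
                          :             :            and at least one of its recorded successors was visited
                          :             :            too, trapping otherwise.  */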
     847                 :             : 
     848                 :         822 :         gimple *clobber = gimple_build_assign (visited,
     849                 :             :                                                build_clobber
     850                 :         822 :                                                (TREE_TYPE (visited)));
     851                 :         822 :         gimple_seq_add_stmt (&ckseq, clobber);
     852                 :             : 
     853                 :             :         /* If we have multiple exit edges, insert (copies of)
     854                 :             :            ckseq in all of them.  */
     855                 :        2320 :         for (int i = chk_edges.length (); i--; )
     856                 :             :           {
     857                 :         676 :             gimple_seq seq = ckseq;
      858                 :             :             /* Copy the sequence, unless this is the last edge (we count
      859                 :             :                down to zero) and no chkcall block remains to take the original.  */
     860                 :         676 :             if (i || count_chkcall)
     861                 :         594 :               seq = gimple_seq_copy (seq);
     862                 :             : 
     863                 :         676 :             edge e = chk_edges[i];
     864                 :             : 
     865                 :         676 :             if (dump_file)
     866                 :             :               {
     867                 :         675 :                 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
     868                 :         634 :                   fprintf (dump_file,
     869                 :             :                            "Inserting out-of-line check in"
     870                 :             :                            " block %i's edge to exit.\n",
     871                 :         634 :                            e->src->index);
     872                 :             :                 else
     873                 :          41 :                   fprintf (dump_file,
     874                 :             :                            "Inserting out-of-line check in"
     875                 :             :                            " block %i's edge to postcheck block %i.\n",
     876                 :          41 :                            e->src->index, e->dest->index);
     877                 :             :               }
     878                 :             : 
     879                 :         676 :             insert_exit_check_on_edge (seq, e);
     880                 :             : 
     881                 :         676 :             gcc_checking_assert (!bitmap_bit_p (chkcall_blocks, e->src->index));
     882                 :             :           }
     883                 :             : 
     884                 :         822 :         sbitmap_iterator it;
     885                 :         822 :         unsigned i;
     886                 :        2603 :         EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
     887                 :             :           {
     888                 :         959 :             basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
     889                 :             : 
     890                 :         959 :             gimple_seq seq = ckseq;
     891                 :         959 :             gcc_checking_assert (count_chkcall > 0);
     892                 :         959 :             if (--count_chkcall)
     893                 :         219 :               seq = gimple_seq_copy (seq);
     894                 :             : 
     895                 :         959 :             if (dump_file)
     896                 :         958 :               fprintf (dump_file,
     897                 :             :                        "Inserting out-of-line check before stmt in block %i.\n",
     898                 :             :                        bb->index);
     899                 :             : 
     900                 :         959 :             insert_exit_check_in_block (seq, bb);
     901                 :             :           }
     902                 :             : 
     903                 :         822 :         gcc_checking_assert (count_chkcall == 0);
     904                 :             :       }
     905                 :             :     else
     906                 :             :       {
     907                 :             :         /* Inline checking requires a single exit edge.  */
     908                 :         720 :         gimple *last = gimple_build_assign (visited,
     909                 :             :                                             build_clobber
     910                 :         720 :                                             (TREE_TYPE (visited)));
     911                 :         720 :         gimple_seq_add_stmt (&ckseq, last);
     912                 :             : 
     913                 :         720 :         if (!count_chkcall)
     914                 :             :           {
     915                 :         253 :             edge e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
     916                 :             : 
     917                 :         253 :             if (dump_file)
     918                 :             :               {
     919                 :         253 :                 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
      920                 :         253 :                   fprintf (dump_file,
      921                 :             :                            "Inserting inline check in"
      922                 :             :                            " block %i's edge to exit.\n",
      923                 :         253 :                            e->src->index);
      924                 :             :                 else
      925                 :           0 :                   fprintf (dump_file,
      926                 :             :                            "Inserting inline check in"
      927                 :             :                            " block %i's edge to postcheck block %i.\n",
      928                 :           0 :                            e->src->index, e->dest->index);
     929                 :             :               }
     930                 :             : 
     931                 :         253 :             insert_exit_check_on_edge (ckseq, e);
     932                 :             :           }
     933                 :             :         else
     934                 :             :           {
     935                 :         467 :             gcc_checking_assert (count_chkcall == 1);
     936                 :             : 
     937                 :         467 :             sbitmap_iterator it;
     938                 :         467 :             unsigned i;
     939                 :        1401 :             EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
     940                 :             :               {
     941                 :         467 :                 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
     942                 :             : 
     943                 :         467 :                 gimple_seq seq = ckseq;
     944                 :         467 :                 gcc_checking_assert (count_chkcall > 0);
     945                 :         467 :                 if (--count_chkcall)
     946                 :             :                   seq = gimple_seq_copy (seq);
     947                 :             : 
     948                 :         467 :                 if (dump_file)
     949                 :         467 :                   fprintf (dump_file,
     950                 :             :                            "Inserting inline check before stmt in block %i.\n",
     951                 :             :                            bb->index);
     952                 :             : 
     953                 :         467 :                 insert_exit_check_in_block (seq, bb);
     954                 :             :               }
     955                 :             : 
     956                 :         467 :             gcc_checking_assert (count_chkcall == 0);
     957                 :             :           }
     958                 :             : 
     959                 :             :         /* The inserted ckseq computes CKFAIL at LAST.  Now we have to
     960                 :             :            conditionally trap on it.  */
     961                 :         720 :         basic_block insbb = gimple_bb (last);
     962                 :             : 
     963                 :             :         /* Create a block with the unconditional trap.  */
     964                 :         720 :         basic_block trp = create_empty_bb (insbb);
     965                 :         720 :         gimple_stmt_iterator gsit = gsi_after_labels (trp);
     966                 :             : 
     967                 :         720 :         gcall *trap = gimple_build_call (builtin_decl_explicit
     968                 :             :                                          (BUILT_IN_TRAP), 0);
     969                 :         720 :         gsi_insert_before (&gsit, trap, GSI_SAME_STMT);
     970                 :             : 
     971                 :         720 :         if (BB_PARTITION (insbb))
     972                 :           0 :           BB_SET_PARTITION (trp, BB_COLD_PARTITION);
     973                 :             : 
     974                 :         720 :         if (current_loops)
     975                 :         720 :           add_bb_to_loop (trp, current_loops->tree_root);
     976                 :             : 
     977                 :             :         /* Insert a conditional branch to the trap block.  If the
     978                 :             :            conditional wouldn't be the last stmt, split the block.  */
     979                 :         720 :         gimple_stmt_iterator gsi = gsi_for_stmt (last);
     980                 :         720 :         if (!gsi_one_before_end_p (gsi))
     981                 :         720 :           split_block (gsi_bb (gsi), gsi_stmt (gsi));
     982                 :             : 
     983                 :         720 :         gcond *cond = gimple_build_cond (NE_EXPR, ckfail,
     984                 :         720 :                                          fold_convert (TREE_TYPE (ckfail),
     985                 :             :                                                        boolean_false_node),
     986                 :             :                                          NULL, NULL);
     987                 :         720 :         gsi_insert_after (&gsi, cond, GSI_SAME_STMT);
     988                 :             : 
     989                 :             :         /* Adjust the edges.  */
     990                 :         720 :         single_succ_edge (gsi_bb (gsi))->flags &= ~EDGE_FALLTHRU;
     991                 :         720 :         single_succ_edge (gsi_bb (gsi))->flags |= EDGE_FALSE_VALUE;
     992                 :         720 :         single_succ_edge (gsi_bb (gsi))->probability
     993                 :         720 :           = profile_probability::always ();
     994                 :         720 :         edge e = make_edge (gsi_bb (gsi), trp, EDGE_TRUE_VALUE);
     995                 :         720 :         e->probability = profile_probability::never ();
     996                 :         720 :         gcc_checking_assert (e->dest == trp);
     997                 :         720 :         gcc_checking_assert (!e->dest->count.initialized_p ());
     998                 :         720 :         e->dest->count = e->count ();
     999                 :             : 
    1000                 :             :         /* Set the trap's dominator after splitting.  */
    1001                 :         720 :         if (dom_info_available_p (CDI_DOMINATORS))
    1002                 :         720 :           set_immediate_dominator (CDI_DOMINATORS, trp, gimple_bb (last));
    1003                 :             :       }
    1004                 :             : 
    1005                 :             :     /* Insert initializers for visited at the entry.  Do this after
    1006                 :             :        other insertions, to avoid messing with block numbers.  */
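                          :             :     /* In effect: memset (&visited, 0, sizeof visited).  */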
    1007                 :        1542 :     gimple_seq iseq = NULL;
    1008                 :             : 
    1009                 :        1542 :     gcall *vinit = gimple_build_call (builtin_decl_explicit
    1010                 :             :                                       (BUILT_IN_MEMSET), 3,
    1011                 :             :                                       build1 (ADDR_EXPR,
    1012                 :             :                                               build_pointer_type
    1013                 :        1542 :                                               (TREE_TYPE (visited)),
    1014                 :             :                                               visited),
    1015                 :             :                                       integer_zero_node,
    1016                 :        1542 :                                       TYPE_SIZE_UNIT (TREE_TYPE (visited)));
    1017                 :        1542 :     gimple_seq_add_stmt (&iseq, vinit);
    1018                 :             : 
    1019                 :        1542 :     gsi_insert_seq_on_edge_immediate (single_succ_edge
    1020                 :        1542 :                                       (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
    1021                 :             :                                       iseq);
    1022                 :        1542 :   }
    1023                 :             : 
     1024                 :             :   /* Push onto RTCFG an (index, mask) pair to test for IBB when BB is
     1025                 :             :      visited.  XSELF is the ENTRY or EXIT block (depending on
    1026                 :             :      whether we're looking at preds or succs), to be remapped to BB
    1027                 :             :      because we can't represent them, and there's no point in testing
    1028                 :             :      them anyway.  Return true if no further blocks need to be visited
    1029                 :             :      in the list, because we've already encountered a
    1030                 :             :      self-reference.  */
    1031                 :             :   bool
    1032                 :        8266 :   push_rtcfg_pair (basic_block ibb, basic_block bb,
    1033                 :             :                    basic_block xself)
    1034                 :             :   {
    1035                 :             :     /* We don't have a bit to test for the entry and exit
     1036                 :             :        blocks, but they are always visited, so we test for the
    1037                 :             :        block itself, which gets us the right result and
    1038                 :             :        enables the self-test optimization below.  */
    1039                 :        8266 :     if (ibb == xself)
    1040                 :        2457 :       ibb = bb;
    1041                 :             : 
    1042                 :        8266 :     tree mask, idx = vwordidx (ibb, &mask);
    1043                 :             :     /* Combine masks with the same idx, but not if we're going
    1044                 :             :        to optimize for self-test.  */
    1045                 :        5809 :     if (ibb != bb && TREE_PURPOSE (rtcfg)
    1046                 :        9952 :         && tree_int_cst_equal (idx, TREE_PURPOSE (rtcfg)))
    1047                 :        1686 :       TREE_VALUE (rtcfg) = int_const_binop (BIT_IOR_EXPR, mask,
    1048                 :        1686 :                                             TREE_VALUE (rtcfg));
    1049                 :             :     else
    1050                 :        6580 :       rtcfg = tree_cons (idx, mask, rtcfg);
    1051                 :             : 
    1052                 :             :     /* For self-tests (i.e., tests that the block itself was
    1053                 :             :        also visited), testing anything else is pointless,
    1054                 :             :        because it's a tautology, so just drop other edges.  */
    1055                 :        8266 :     if (ibb == bb)
    1056                 :             :       {
    1057                 :        2457 :         while (TREE_PURPOSE (TREE_CHAIN (rtcfg)))
    1058                 :           0 :           TREE_CHAIN (rtcfg) = TREE_CHAIN (TREE_CHAIN (rtcfg));
    1059                 :             :         return true;
    1060                 :             :       }
    1061                 :             : 
    1062                 :             :     return false;
    1063                 :             :   }
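                          :             : 
                          :             :   /* For example, when two predecessors' bits land in the same
                          :             :      VWORD, the second push just ORs its mask into the node built
                          :             :      for the first, yielding a single (IDX . MASK0|MASK1) pair; a
                          :             :      sketch assuming the word/bit split that vwordidx performs.  */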
    1064                 :             : 
    1065                 :             :   /* Add to CKSEQ stmts to clear CKPART if OBB is visited.  */
    1066                 :             :   void
    1067                 :        3316 :   build_block_check (basic_block obb)
    1068                 :             :   {
    1069                 :        3316 :     tree vobb = fold_convert (TREE_TYPE (ckblk),
    1070                 :             :                               vindex (obb, &ckseq));
    1071                 :        3316 :     gassign *blkrunp = gimple_build_assign (ckblk, vobb);
    1072                 :        3316 :     gimple_seq_add_stmt (&ckseq, blkrunp);
    1073                 :             : 
    1074                 :        3316 :     gassign *blknotrunp = gimple_build_assign (ckinv,
    1075                 :             :                                                EQ_EXPR,
    1076                 :             :                                                ckblk,
    1077                 :        3316 :                                                fold_convert
    1078                 :             :                                                (TREE_TYPE (ckblk),
    1079                 :             :                                                 boolean_false_node));
    1080                 :        3316 :     gimple_seq_add_stmt (&ckseq, blknotrunp);
    1081                 :             : 
    1082                 :        3316 :     gassign *andblk = gimple_build_assign (ckpart,
    1083                 :             :                                            BIT_AND_EXPR,
    1084                 :             :                                            ckpart, ckinv);
    1085                 :        3316 :     gimple_seq_add_stmt (&ckseq, andblk);
    1086                 :        3316 :   }
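                          :             : 
                          :             :   /* I.e., the three stmts appended above for OBB amount to
                          :             : 
                          :             :        ckblk = OBB's visited bit;
                          :             :        ckinv = (ckblk == false);
                          :             :        ckpart = ckpart & ckinv;
                          :             : 
                          :             :      so CKPART remains set only while none of the tested blocks was
                          :             :      visited.  */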
    1087                 :             : 
    1088                 :             :   /* Add to BB code to set its bit in VISITED, and add to RTCFG or
    1089                 :             :      CKSEQ the data or code needed to check BB's predecessors and
    1090                 :             :      successors.  If CHECKPOINT, assume the block is a checkpoint,
    1091                 :             :      whether or not it has an edge to EXIT.  If POSTCHECK, assume the
    1092                 :             :      block post-dominates checkpoints and therefore no bitmap setting
    1093                 :             :      or checks are to be performed in or for it.  Do NOT change the
    1094                 :             :      CFG.  */
    1095                 :        4688 :   void visit (basic_block bb, bool checkpoint, bool postcheck)
    1096                 :             :   {
    1097                 :             :     /* Set the bit in VISITED when entering the block.  */
    1098                 :        4688 :     gimple_stmt_iterator gsi = gsi_after_labels (bb);
    1099                 :        4688 :     if (!postcheck)
    1100                 :        4610 :       gsi_insert_seq_before (&gsi, vset (bb), GSI_SAME_STMT);
    1101                 :             : 
    1102                 :        4688 :     if (rtcfg)
    1103                 :             :       {
    1104                 :        3364 :         if (!postcheck)
    1105                 :             :           {
    1106                 :             :             /* Build a list of (index, mask) terminated by (NULL, 0).
    1107                 :             :                Consolidate masks with the same index when they're
    1108                 :             :                adjacent.  First, predecessors.  Count backwards, because
    1109                 :             :                we're going to reverse the list.  The order shouldn't
    1110                 :             :                matter, but let's not make it surprising.  */
    1111                 :        9520 :             for (int i = EDGE_COUNT (bb->preds); i--; )
    1112                 :        3762 :               if (push_rtcfg_pair (EDGE_PRED (bb, i)->src, bb,
    1113                 :        3762 :                                    ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    1114                 :             :                 break;
    1115                 :             :           }
    1116                 :        3364 :         rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
    1117                 :             : 
    1118                 :        3364 :         if (!postcheck)
    1119                 :             :           {
    1120                 :             :             /* Then, successors.  */
    1121                 :        3290 :             if (!checkpoint
    1122                 :        4925 :                 || !push_rtcfg_pair (EXIT_BLOCK_PTR_FOR_FN (cfun),
    1123                 :        1635 :                                      bb, EXIT_BLOCK_PTR_FOR_FN (cfun)))
    1124                 :        6179 :               for (int i = EDGE_COUNT (bb->succs); i--; )
    1125                 :        2869 :                 if (push_rtcfg_pair (EDGE_SUCC (bb, i)->dest, bb,
    1126                 :        2869 :                                      EXIT_BLOCK_PTR_FOR_FN (cfun)))
    1127                 :             :                   break;
    1128                 :             :           }
    1129                 :        3364 :         rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
    1130                 :             :       }
    1131                 :        1324 :     else if (!postcheck)
    1132                 :             :       {
    1133                 :             :         /* Schedule test to fail if the block was reached but somehow none
    1134                 :             :            of its predecessors were.  */
    1135                 :        1320 :         tree bit = fold_convert (TREE_TYPE (ckpart), vindex (bb, &ckseq));
    1136                 :        1320 :         gassign *blkrunp = gimple_build_assign (ckpart, bit);
    1137                 :        1320 :         gimple_seq_add_stmt (&ckseq, blkrunp);
    1138                 :             : 
    1139                 :        4077 :         for (int i = 0, e = EDGE_COUNT (bb->preds); i < e; i++)
    1140                 :        1437 :           build_block_check (EDGE_PRED (bb, i)->src);
    1141                 :        1320 :         gimple *orfailp = gimple_build_assign (ckfail, BIT_IOR_EXPR,
    1142                 :             :                                                ckfail, ckpart);
    1143                 :        1320 :         gimple_seq_add_stmt (&ckseq, orfailp);
    1144                 :             : 
    1145                 :             :         /* Likewise for successors.  */
    1146                 :        1320 :         gassign *blkruns = gimple_build_assign (ckpart, unshare_expr (bit));
    1147                 :        1320 :         gimple_seq_add_stmt (&ckseq, blkruns);
    1148                 :             : 
    1149                 :        1320 :         if (checkpoint)
    1150                 :         720 :           build_block_check (EXIT_BLOCK_PTR_FOR_FN (cfun));
    1151                 :        3661 :         for (int i = 0, e = EDGE_COUNT (bb->succs); i < e; i++)
    1152                 :        1159 :           build_block_check (EDGE_SUCC (bb, i)->dest);
    1153                 :             : 
    1154                 :        1320 :         gimple *orfails = gimple_build_assign (ckfail, BIT_IOR_EXPR,
    1155                 :             :                                                ckfail, ckpart);
    1156                 :        1320 :         gimple_seq_add_stmt (&ckseq, orfails);
    1157                 :             :       }
    1158                 :        4688 :   }
    1159                 :             : };
    1160                 :             : 
    1161                 :             : /* Avoid checking before noreturn calls that are known (expected,
    1162                 :             :    really) to finish by throwing an exception, rather than by ending
    1163                 :             :    the program or looping forever.  Such functions have to be
    1164                 :             :    annotated, with an attribute (expected_throw) or flag (ECF_XTHROW),
    1165                 :             :    so that exception-raising functions, such as C++'s __cxa_throw,
    1166                 :             :    __cxa_rethrow, and Ada's gnat_rcheck_*, gnat_reraise*,
    1167                 :             :    ada.exception.raise_exception*, and the language-independent
     1168                 :             :    unwinders can be detected here and handled differently from other
    1169                 :             :    noreturn functions.  */
    1170                 :             : static bool
    1171                 :         335 : always_throwing_noreturn_call_p (gimple *stmt)
    1172                 :             : {
    1173                 :         335 :   if (!is_a <gcall *> (stmt))
    1174                 :           0 :     return is_a <gresx *> (stmt);
    1175                 :             : 
    1176                 :         335 :   gcall *call = as_a <gcall *> (stmt);
    1177                 :         335 :   return (gimple_call_noreturn_p (call)
    1178                 :         670 :           && gimple_call_expected_throw_p (call));
    1179                 :             : }
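                          :             : 
                          :             : /* E.g., a hypothetical raiser would be declared along these lines
                          :             :    for this predicate to hold for calls to it:
                          :             : 
                          :             :      extern void my_raise (void)
                          :             :        __attribute__ ((noreturn, expected_throw));
                          :             : 
                          :             :    (my_raise is only an illustration, not part of GCC).  */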
    1180                 :             : 
    1181                 :             : /* Control flow redundancy hardening: record the execution path, and
     1182                 :             :    verify at exit that an expected path was taken.  */
    1183                 :             : 
    1184                 :             : unsigned int
    1185                 :        1658 : pass_harden_control_flow_redundancy::execute (function *fun)
    1186                 :             : {
    1187                 :        3316 :   bool const check_at_escaping_exceptions
    1188                 :        1658 :     = (flag_exceptions
    1189                 :        1658 :        && flag_harden_control_flow_redundancy_check_exceptions);
    1190                 :        1658 :   bool const check_before_noreturn_calls
    1191                 :        1658 :     = flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NEVER;
    1192                 :        3316 :   bool const check_before_nothrow_noreturn_calls
    1193                 :             :     = (check_before_noreturn_calls
    1194                 :        1658 :        && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_NOTHROW);
    1195                 :        3316 :   bool const check_before_throwing_noreturn_calls
    1196                 :             :     = (flag_exceptions
    1197                 :         939 :        && check_before_noreturn_calls
    1198                 :        2359 :        && flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NOTHROW);
    1199                 :        3316 :   bool const check_before_always_throwing_noreturn_calls
    1200                 :             :     = (flag_exceptions
    1201                 :         939 :        && check_before_noreturn_calls
    1202                 :        2359 :        && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_ALWAYS);
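                          :             :   /* A note on the flags above: they appear to correspond to the
                          :             :      -fhardcfr-check-exceptions and -fhardcfr-check-noreturn-calls=
                          :             :      options, and flag_harden_control_flow_redundancy_skip_leaf
                          :             :      below to -fhardcfr-skip-leaf.  */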
    1203                 :        1658 :   basic_block bb;
    1204                 :        1658 :   basic_block bb_eh_cleanup = NULL;
    1205                 :             : 
    1206                 :        1658 :   if (flag_harden_control_flow_redundancy_skip_leaf)
    1207                 :             :     {
    1208                 :          66 :       bool found_calls_p = false;
    1209                 :             : 
    1210                 :         424 :       FOR_EACH_BB_FN (bb, fun)
    1211                 :             :         {
    1212                 :         376 :           for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
    1213                 :        1726 :                !gsi_end_p (gsi); gsi_prev (&gsi))
    1214                 :         693 :             if (is_a <gcall *> (gsi_stmt (gsi)))
    1215                 :             :               {
    1216                 :             :                 found_calls_p = true;
    1217                 :             :                 break;
    1218                 :             :               }
    1219                 :         376 :           if (found_calls_p)
    1220                 :             :             break;
    1221                 :             :         }
    1222                 :             : 
    1223                 :          66 :       if (!found_calls_p)
    1224                 :             :         {
    1225                 :          48 :           if (dump_file)
    1226                 :          48 :             fprintf (dump_file,
    1227                 :             :                      "Disabling CFR for leaf function, as requested\n");
    1228                 :             : 
    1229                 :          48 :           return 0;
    1230                 :             :         }
    1231                 :             :     }
    1232                 :             : 
    1233                 :        1610 :   if (check_at_escaping_exceptions)
    1234                 :             :     {
    1235                 :         796 :       int lp_eh_cleanup = -1;
    1236                 :             : 
    1237                 :             :       /* Record the preexisting blocks, to avoid visiting newly-created
    1238                 :             :          blocks.  */
    1239                 :         796 :       auto_sbitmap to_visit (last_basic_block_for_fn (fun));
    1240                 :         796 :       bitmap_clear (to_visit);
    1241                 :             : 
    1242                 :        3056 :       FOR_EACH_BB_FN (bb, fun)
    1243                 :        2260 :         bitmap_set_bit (to_visit, bb->index);
    1244                 :             : 
     1245                 :             :       /* Scan the blocks for stmts with escaping exceptions that
     1246                 :             :          wouldn't be denoted in the CFG, and associate them with an
     1247                 :             :          empty cleanup handler around the whole function.  Walk
     1248                 :             :          backwards, so that even when we split the block, the stmts we have yet to visit stay in the block we're iterating over.  */
    1249                 :         796 :       sbitmap_iterator it;
    1250                 :         796 :       unsigned i;
    1251                 :        3852 :       EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    1252                 :             :         {
    1253                 :        2260 :           bb = BASIC_BLOCK_FOR_FN (fun, i);
    1254                 :             : 
    1255                 :        2260 :           for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
    1256                 :       10662 :                !gsi_end_p (gsi); gsi_prev (&gsi))
    1257                 :             :             {
    1258                 :        4201 :               gimple *stmt = gsi_stmt (gsi);
    1259                 :        4201 :               if (!stmt_could_throw_p (fun, stmt))
    1260                 :        3205 :                 continue;
    1261                 :             : 
    1262                 :             :               /* If it must not throw, or if it already has a handler,
    1263                 :             :                  we need not worry about it.  */
    1264                 :         996 :               if (lookup_stmt_eh_lp (stmt) != 0)
    1265                 :         387 :                 continue;
    1266                 :             : 
    1267                 :             :               /* Don't split blocks at, nor add EH edges to, tail
     1268                 :             :                  calls; we will add verification before the call
    1269                 :             :                  anyway.  */
    1270                 :         609 :               if (is_a <gcall *> (stmt)
    1271                 :         609 :                   && (gimple_call_must_tail_p (as_a <gcall *> (stmt))
    1272                 :         425 :                       || gimple_call_tail_p (as_a <gcall *> (stmt))
    1273                 :         425 :                       || returning_call_p (as_a <gcall *> (stmt))))
    1274                 :          45 :                 continue;
    1275                 :             : 
    1276                 :         564 :               if (!gsi_one_before_end_p (gsi))
    1277                 :         107 :                 split_block (bb, stmt);
     1278                 :             :               /* A resx or noreturn call need not be associated with
    1279                 :             :                  the cleanup handler if we're going to add checking
    1280                 :             :                  before it.  We only test cases that didn't require
    1281                 :             :                  block splitting because noreturn calls would always
    1282                 :             :                  be at the end of blocks, and we test for zero
    1283                 :             :                  successors because if there is an edge, it's not
    1284                 :             :                  noreturn, as any EH edges would have already been
    1285                 :             :                  caught by the lookup_stmt_eh_lp test above.  */
    1286                 :         522 :               else if (check_before_noreturn_calls
    1287                 :         359 :                        && EDGE_COUNT (bb->succs) == 0
    1288                 :         708 :                        && (is_a <gresx *> (stmt)
    1289                 :         110 :                            ? check_before_always_throwing_noreturn_calls
    1290                 :         110 :                            : (!is_a <gcall *> (stmt)
    1291                 :         110 :                               || !gimple_call_noreturn_p (stmt))
    1292                 :         110 :                            ? (gcc_unreachable (), false)
    1293                 :         110 :                            : (!flag_exceptions
    1294                 :         110 :                               || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
    1295                 :         220 :                            ? check_before_nothrow_noreturn_calls
    1296                 :         110 :                            : always_throwing_noreturn_call_p (stmt)
    1297                 :             :                            ? check_before_always_throwing_noreturn_calls
    1298                 :             :                            : check_before_throwing_noreturn_calls))
    1299                 :             :                 {
    1300                 :          65 :                   if (dump_file)
    1301                 :             :                     {
    1302                 :          65 :                       fprintf (dump_file,
    1303                 :             :                                "Bypassing cleanup for noreturn stmt"
    1304                 :             :                                " in block %i:\n",
    1305                 :             :                                bb->index);
    1306                 :          65 :                       print_gimple_stmt (dump_file, stmt, 0);
    1307                 :             :                     }
    1308                 :          65 :                   continue;
    1309                 :             :                 }
    1310                 :             : 
    1311                 :         499 :               if (!bb_eh_cleanup)
    1312                 :             :                 {
    1313                 :         421 :                   bb_eh_cleanup = create_empty_bb (bb);
    1314                 :         421 :                   if (dom_info_available_p (CDI_DOMINATORS))
    1315                 :         421 :                     set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
    1316                 :         421 :                   if (current_loops)
    1317                 :         421 :                     add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);
    1318                 :             : 
    1319                 :             :                   /* Make the new block an EH cleanup for the call.  */
    1320                 :         421 :                   eh_region new_r = gen_eh_region_cleanup (NULL);
    1321                 :         421 :                   eh_landing_pad lp = gen_eh_landing_pad (new_r);
    1322                 :         421 :                   tree label = gimple_block_label (bb_eh_cleanup);
    1323                 :         421 :                   lp->post_landing_pad = label;
    1324                 :         421 :                   EH_LANDING_PAD_NR (label) = lp_eh_cleanup = lp->index;
    1325                 :             : 
    1326                 :             :                   /* Just propagate the exception.
    1327                 :             :                      We will later insert the verifier call.  */
    1328                 :         421 :                   gimple_stmt_iterator ehgsi;
    1329                 :         421 :                   ehgsi = gsi_after_labels (bb_eh_cleanup);
    1330                 :         421 :                   gresx *resx = gimple_build_resx (new_r->index);
    1331                 :         421 :                   gsi_insert_before (&ehgsi, resx, GSI_SAME_STMT);
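                          :             : 
                          :             :                   /* The cleanup block is now just a resx of the new
                          :             :                      region; the verifier call will be inserted before
                          :             :                      it later, when this block is handled as one of
                          :             :                      the chkcall_blocks by insert_exit_check_in_block.  */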
    1332                 :             : 
    1333                 :         421 :                   if (dump_file)
    1334                 :         420 :                     fprintf (dump_file,
    1335                 :             :                              "Created cleanup block %i:\n",
    1336                 :             :                              bb_eh_cleanup->index);
    1337                 :             :                 }
    1338                 :          78 :               else if (dom_info_available_p (CDI_DOMINATORS))
    1339                 :             :                 {
    1340                 :          78 :                   basic_block immdom;
    1341                 :          78 :                   immdom = get_immediate_dominator (CDI_DOMINATORS,
    1342                 :             :                                                     bb_eh_cleanup);
    1343                 :          78 :                   if (!dominated_by_p (CDI_DOMINATORS, bb, immdom))
    1344                 :             :                     {
    1345                 :          78 :                       immdom = nearest_common_dominator (CDI_DOMINATORS,
    1346                 :             :                                                          immdom, bb);
    1347                 :          78 :                       set_immediate_dominator (CDI_DOMINATORS,
    1348                 :             :                                                bb_eh_cleanup, immdom);
    1349                 :             :                     }
    1350                 :             :                 }
    1351                 :             : 
    1352                 :         499 :               if (dump_file)
    1353                 :             :                 {
    1354                 :         498 :                   fprintf (dump_file,
    1355                 :             :                            "Associated cleanup block with stmt in block %i:\n",
    1356                 :             :                            bb->index);
    1357                 :         498 :                   print_gimple_stmt (dump_file, stmt, 0);
    1358                 :             :                 }
    1359                 :             : 
    1360                 :         499 :               add_stmt_to_eh_lp (stmt, lp_eh_cleanup);
    1361                 :             :               /* Finally, wire the EH cleanup block into the CFG.  */
    1362                 :         499 :               edge neeh = make_eh_edge (stmt);
    1363                 :         499 :               neeh->probability = profile_probability::never ();
    1364                 :         499 :               gcc_checking_assert (neeh->dest == bb_eh_cleanup);
    1365                 :         499 :               if (neeh->dest->count.initialized_p ())
    1366                 :          78 :                 neeh->dest->count += neeh->count ();
    1367                 :             :               else
    1368                 :         421 :                 neeh->dest->count = neeh->count ();
    1369                 :             :             }
    1370                 :             :         }
    1371                 :             : 
    1372                 :         796 :       if (bb_eh_cleanup)
    1373                 :             :         {
    1374                 :             :           /* A cfg_cleanup after bb_eh_cleanup makes for a more compact
    1375                 :             :              rtcfg, and it avoids bb numbering differences when we split
    1376                 :             :              blocks because of trailing debug insns only.  */
    1377                 :         421 :           cleanup_tree_cfg ();
    1378                 :         796 :           gcc_checking_assert (EDGE_COUNT (bb_eh_cleanup->succs) == 0);
    1379                 :             :         }
    1380                 :         796 :     }
    1381                 :             : 
    1382                 :             :   /* These record blocks with calls that are to be preceded by
    1383                 :             :      checkpoints, such as noreturn calls (if so chosen), must-tail
    1384                 :             :      calls, potential early-marked tail calls, and returning calls (if
    1385                 :             :      so chosen).  */
    1386                 :        1610 :   int count_chkcall = 0;
    1387                 :        1610 :   auto_sbitmap chkcall_blocks (last_basic_block_for_fn (fun));
    1388                 :        1610 :   bitmap_clear (chkcall_blocks);
    1389                 :             : 
    1390                 :             :   /* We wish to add verification at blocks without successors, such as
    1391                 :             :      noreturn calls (raising or not) and the reraise at the cleanup
    1392                 :             :      block, but not other reraises: they will go through the cleanup
    1393                 :             :      block.  */
    1394                 :        1610 :   if (check_before_noreturn_calls)
    1395                 :        4898 :     FOR_EACH_BB_FN (bb, fun)
    1396                 :             :       {
    1397                 :        3679 :         gimple_stmt_iterator gsi = gsi_last_bb (bb);
    1398                 :        3679 :         if (gsi_end_p (gsi))
    1399                 :        3679 :           continue;
    1400                 :        3670 :         gimple *stmt = gsi_stmt (gsi);
    1401                 :             : 
    1402                 :        3670 :         if (EDGE_COUNT (bb->succs) == 0)
    1403                 :             :           {
    1404                 :             :             /* A stmt at the end of a block without any successors is
    1405                 :             :                either a resx or a noreturn call without a local
    1406                 :             :                handler.  Check that it's one of the desired
    1407                 :             :                checkpoints.  */
    1408                 :        1382 :             if (flag_exceptions && is_a <gresx *> (stmt)
    1409                 :        1268 :                 ? (check_before_always_throwing_noreturn_calls
    1410                 :         356 :                    || bb == bb_eh_cleanup)
    1411                 :         456 :                 : (!is_a <gcall *> (stmt)
    1412                 :         456 :                    || !gimple_call_noreturn_p (stmt))
    1413                 :         456 :                 ? (stmt_can_make_abnormal_goto (stmt)
    1414                 :             :                    /* ??? Check before indirect nonlocal goto, or
    1415                 :             :                       calls thereof?  */
    1416                 :           0 :                    ? false
    1417                 :             :                    /* Catch cases in which successors would be
    1418                 :             :                       expected.  */
    1419                 :           0 :                    : (gcc_unreachable (), false))
    1420                 :         456 :                 : (!flag_exceptions
    1421                 :         214 :                    || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
    1422                 :         670 :                 ? check_before_nothrow_noreturn_calls
    1423                 :          32 :                 : always_throwing_noreturn_call_p (stmt)
    1424                 :             :                 ? check_before_always_throwing_noreturn_calls
    1425                 :             :                 : check_before_throwing_noreturn_calls)
    1426                 :             :               {
    1427                 :         812 :                 if (dump_file)
    1428                 :             :                   {
    1429                 :         811 :                     fprintf (dump_file,
    1430                 :             :                              "Scheduling check before stmt"
    1431                 :             :                              " in succ-less block %i:\n",
    1432                 :             :                              bb->index);
    1433                 :         811 :                     print_gimple_stmt (dump_file, stmt, 0);
    1434                 :             :                   }
    1435                 :             : 
    1436                 :         812 :                 if (bitmap_set_bit (chkcall_blocks, bb->index))
    1437                 :         812 :                   count_chkcall++;
    1438                 :             :                 else
    1439                 :           0 :                   gcc_unreachable ();
    1440                 :             :               }
    1441                 :         812 :             continue;
    1442                 :             :           }
    1443                 :             : 
    1444                 :             :         /* If there are no exceptions, it would seem like any noreturn
    1445                 :             :            call must have zero successor edges, but __builtin_return
    1446                 :             :            gets successor edges.  We don't want to handle it here, it
    1447                 :             :            will be dealt with in sibcall_search_preds.  Otherwise,
    1448                 :             :            check for blocks without non-EH successors, but skip those
    1449                 :             :            with resx stmts and edges (i.e., those other than that in
    1450                 :             :            bb_eh_cleanup), since those will go through bb_eh_cleanup,
    1451                 :             :      which will have been counted as noreturn above because it
    1452                 :             :            has no successors.  */
    1453                 :        2858 :         gcc_checking_assert (bb != bb_eh_cleanup
    1454                 :             :                              || !check_at_escaping_exceptions);
    1455                 :        1706 :         if (flag_exceptions && is_a <gresx *> (stmt)
    1456                 :        2858 :             ? check_before_always_throwing_noreturn_calls
    1457                 :        2734 :             : (!is_a <gcall *> (stmt)
    1458                 :         815 :                || !gimple_call_noreturn_p (stmt))
    1459                 :        2734 :             ? false
    1460                 :         373 :             : (!flag_exceptions
    1461                 :         221 :                || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
    1462                 :         193 :             ? false /* rather than check_before_nothrow_noreturn_calls */
    1463                 :         193 :             : always_throwing_noreturn_call_p (stmt)
    1464                 :         193 :             ? check_before_always_throwing_noreturn_calls
    1465                 :             :             : check_before_throwing_noreturn_calls)
    1466                 :             :           {
    1467                 :          60 :             gcc_checking_assert (single_succ_p (bb)
    1468                 :             :                                  && (single_succ_edge (bb)->flags & EDGE_EH));
    1469                 :             : 
    1470                 :          30 :             if (dump_file)
    1471                 :             :               {
    1472                 :          30 :                 fprintf (dump_file,
    1473                 :             :                          "Scheduling check before stmt"
    1474                 :             :                          " in EH-succ block %i:\n",
    1475                 :             :                          bb->index);
    1476                 :          30 :                 print_gimple_stmt (dump_file, stmt, 0);
    1477                 :             :               }
    1478                 :             : 
    1479                 :          30 :             if (bitmap_set_bit (chkcall_blocks, bb->index))
    1480                 :          30 :               count_chkcall++;
    1481                 :             :             else
    1482                 :           0 :               gcc_unreachable ();
    1483                 :             :           }
    1484                 :             :       }
    1485                 :         391 :   else if (bb_eh_cleanup)
    1486                 :             :     {
    1487                 :          98 :       if (bitmap_set_bit (chkcall_blocks, bb_eh_cleanup->index))
    1488                 :          98 :         count_chkcall++;
    1489                 :             :       else
    1490                 :           0 :         gcc_unreachable ();
    1491                 :             :     }
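
To make the checkpoint selection above concrete, here is a minimal sketch
(the function is hypothetical; the flag is from the GCC manual): with
-fhardcfr-check-noreturn-calls=nothrow, a block ending in a nothrow
noreturn call such as abort () has no successor edges, so it is recorded
in chkcall_blocks and the path verification is scheduled immediately
before the call.

    void f (int i)
    {
      if (i < 0)
        abort ();  /* succ-less block: the check runs before abort.  */
    }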
    1492                 :             : 
    1493                 :        1317 :   gcc_checking_assert (!bb_eh_cleanup
    1494                 :             :                        || bitmap_bit_p (chkcall_blocks, bb_eh_cleanup->index));
    1495                 :             : 
    1496                 :             :   /* If we have neither edges to exit nor noreturn calls (including
    1497                 :             :      the cleanup reraise), then we may skip instrumentation: such a
    1498                 :             :      function amounts to one that ends in an infinite loop.  */
    1499                 :        1610 :   if (!count_chkcall
    1500                 :        1610 :       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    1501                 :             :     {
    1502                 :          68 :       if (dump_file)
    1503                 :          68 :         fprintf (dump_file,
    1504                 :             :                  "Disabling CFR, no exit paths to check\n");
    1505                 :             : 
    1506                 :          68 :       return 0;
    1507                 :             :     }
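
The skipped case corresponds to a function like the following sketch (a
hypothetical example): with no edges into the exit block and no noreturn
calls, execution can never leave the function, so there is no point at
which the recorded path could be verified.

    void spin (void)
    {
      for (;;)
        ;  /* no exit edges, no noreturn calls: nothing to check.  */
    }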
    1508                 :             : 
    1509                 :             :   /* Search for must-tail calls, early-marked potential tail calls,
    1510                 :             :      and, if requested, returning calls.  As we introduce early
    1511                 :             :      checks, record the affected blocks in postchk_blocks.  */
    1512                 :        1542 :   int count_postchk = 0;
    1513                 :        1542 :   auto_sbitmap postchk_blocks (last_basic_block_for_fn (fun));
    1514                 :        1542 :   bitmap_clear (postchk_blocks);
    1515                 :        1542 :   chk_edges_t chk_edges;
    1516                 :        1542 :   hardcfr_sibcall_search_preds (EXIT_BLOCK_PTR_FOR_FN (fun), chk_edges,
    1517                 :             :                                 count_chkcall, chkcall_blocks,
    1518                 :             :                                 count_postchk, postchk_blocks,
    1519                 :             :                                 NULL);
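
A returning call, in the sense used here, is a call whose result is
immediately returned.  A sketch (function names are hypothetical): with
-fhardcfr-check-returning-calls, the verification is scheduled before
such a call rather than at the return, which keeps the call eligible for
tail-call optimization.

    int g (void)
    {
      return h ();  /* check before the call; tail-calling stays viable.  */
    }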
    1520                 :             : 
    1521                 :        3084 :   rt_bb_visited vstd (chk_edges.length () + count_chkcall);
    1522                 :             : 
    1523                 :        1542 :   auto_sbitmap combined_blocks (last_basic_block_for_fn (fun));
    1524                 :        1542 :   bitmap_copy (combined_blocks, chkcall_blocks);
    1525                 :        1542 :   int i;
    1526                 :        1542 :   edge *e;
    1527                 :        4013 :   FOR_EACH_VEC_ELT (chk_edges, i, e)
    1528                 :         929 :     if (!bitmap_set_bit (combined_blocks, (*e)->src->index))
    1529                 :             :       /* There may be multiple chk_edges with the same src block;
    1530                 :             :          guard against overlaps with chkcall_blocks only.  */
    1531                 :           0 :       gcc_assert (!bitmap_bit_p (chkcall_blocks, (*e)->src->index));
    1532                 :             : 
    1533                 :             :   /* Visit blocks in index order, because building rtcfg depends on
    1534                 :             :      that.  Blocks must be compact, which the cleanup_cfg requirement
    1535                 :             :      ensures.  This would also enable FOR_EACH_BB_FN to be used to
    1536                 :             :      iterate in index order, but bb_eh_cleanup block splits and
    1537                 :             :      insertions change that.  */
    1538                 :        1542 :   gcc_checking_assert (n_basic_blocks_for_fn (fun)
    1539                 :             :                        == last_basic_block_for_fn (fun));
    1540                 :        6230 :   for (int i = NUM_FIXED_BLOCKS; i < n_basic_blocks_for_fn (fun); i++)
    1541                 :             :     {
    1542                 :        4688 :       bb = BASIC_BLOCK_FOR_FN (fun, i);
    1543                 :        4688 :       gcc_checking_assert (bb->index == i);
    1544                 :        4688 :       vstd.visit (bb, bitmap_bit_p (combined_blocks, i),
    1545                 :        4688 :                   bitmap_bit_p (postchk_blocks, i));
    1546                 :             :     }
    1547                 :             : 
    1548                 :        1542 :   vstd.check (chk_edges, count_chkcall, chkcall_blocks);
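
The effect of visit and check can be inspected in the pass's dump file:
since the pass is named hardcfr, a command along the lines of
gcc -O2 -fharden-control-flow-redundancy -fdump-tree-hardcfr f.c (the
input file name is a placeholder) writes the instrumented GIMPLE,
including the "Scheduling check" notes printed above, to the dump.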
    1549                 :             : 
    1550                 :        1542 :   return
    1551                 :             :     TODO_update_ssa
    1552                 :             :     | TODO_cleanup_cfg
    1553                 :        1542 :     | TODO_verify_il;
    1554                 :        1610 : }
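
At run time, the checks emitted by this pass call the verifier in libgcc.
A sketch of that interface, assuming the declaration in libgcc's hardcfr
code (vword is a libgcc-internal word type; the exact prototype may
differ across versions):

    /* Verify that the bits set in VISITED describe a valid execution
       path through the BLOCKS-node control-flow graph encoded in CFG;
       trap otherwise.  */
    extern void __hardcfr_check (size_t blocks,
                                 vword const *visited,
                                 vword const *cfg);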
    1555                 :             : 
    1556                 :             : /* Instantiate a hardcfr pass.  */
    1557                 :             : 
    1558                 :             : gimple_opt_pass *
    1559                 :      285189 : make_pass_harden_control_flow_redundancy (gcc::context *ctxt)
    1560                 :             : {
    1561                 :      285189 :   return new pass_harden_control_flow_redundancy (ctxt);
    1562                 :             : }
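
This factory follows the usual GCC pass-registration pattern: the pass is
placed in the pipeline by an entry in passes.def, along the lines of:

    NEXT_PASS (pass_harden_control_flow_redundancy);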
        

Generated by: LCOV version 2.1-beta

The LCOV profile was generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite was run with the built compiler.