LCOV - code coverage report
Current view: top level - gcc/analyzer - infinite-recursion.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 92.5 % 214 198
Test Date: 2026-02-28 14:20:25 Functions: 94.7 % 19 18
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Detection of infinite recursion.
       2              :    Copyright (C) 2022-2026 Free Software Foundation, Inc.
       3              :    Contributed by David Malcolm <dmalcolm@redhat.com>.
       4              : 
       5              : This file is part of GCC.
       6              : 
       7              : GCC is free software; you can redistribute it and/or modify it
       8              : under the terms of the GNU General Public License as published by
       9              : the Free Software Foundation; either version 3, or (at your option)
      10              : any later version.
      11              : 
      12              : GCC is distributed in the hope that it will be useful, but
      13              : WITHOUT ANY WARRANTY; without even the implied warranty of
      14              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      15              : General Public License for more details.
      16              : 
      17              : You should have received a copy of the GNU General Public License
      18              : along with GCC; see the file COPYING3.  If not see
      19              : <http://www.gnu.org/licenses/>.  */
      20              : 
      21              : #include "analyzer/common.h"
      22              : 
      23              : #include "cfg.h"
      24              : #include "gimple-iterator.h"
      25              : #include "gimple-pretty-print.h"
      26              : #include "cgraph.h"
      27              : #include "digraph.h"
      28              : #include "diagnostics/sarif-sink.h"
      29              : 
      30              : #include "analyzer/analyzer-logging.h"
      31              : #include "analyzer/call-string.h"
      32              : #include "analyzer/program-point.h"
      33              : #include "analyzer/store.h"
      34              : #include "analyzer/region-model.h"
      35              : #include "analyzer/constraint-manager.h"
      36              : #include "analyzer/sm.h"
      37              : #include "analyzer/pending-diagnostic.h"
      38              : #include "analyzer/diagnostic-manager.h"
      39              : #include "analyzer/supergraph.h"
      40              : #include "analyzer/program-state.h"
      41              : #include "analyzer/exploded-graph.h"
      42              : #include "analyzer/checker-path.h"
      43              : #include "analyzer/feasible-graph.h"
      44              : 
/* A subclass of pending_diagnostic for complaining about suspected
   infinite recursion.

   The diagnostic is parameterized by two entrypoint enodes for the same
   function: a previous entry (PREV_ENTRY_ENODE) and a later, recursive
   entry (NEW_ENTRY_ENODE) between which the analyzer saw no meaningful
   change of state.  */

class infinite_recursion_diagnostic
: public pending_diagnostic_subclass<infinite_recursion_diagnostic>
{
public:
  /* CALLEE_FNDECL is the function being recursively entered;
     PREV_ENTRY_ENODE and NEW_ENTRY_ENODE are the two entrypoint enodes
     being compared.  */
  infinite_recursion_diagnostic (const exploded_node *prev_entry_enode,
                                 const exploded_node *new_entry_enode,
                                 tree callee_fndecl)
  : m_prev_entry_enode (prev_entry_enode),
    m_new_entry_enode (new_entry_enode),
    m_callee_fndecl (callee_fndecl),
    m_prev_entry_event (nullptr)
  {}

  const char *get_kind () const final override
  {
    return "infinite_recursion_diagnostic";
  }

  /* Two instances of this diagnostic are considered equal iff they
     complain about the same callee (used for deduplication).  */
  bool operator== (const infinite_recursion_diagnostic &other) const
  {
    return m_callee_fndecl == other.m_callee_fndecl;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_infinite_recursion;
  }

  /* Emit the top-level warning.  */
  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* "CWE-674: Uncontrolled Recursion".  */
    ctxt.add_cwe (674);
    return ctxt.warn ("infinite recursion");
  }

  /* Describe the final event in the path, mentioning the number of stack
     frames consumed per iteration when more than one frame is involved
     (i.e. mutual recursion).  */
  bool
  describe_final_event (pretty_printer &pp,
                        const evdesc::final_event &) final override
  {
    const int frames_consumed = (m_new_entry_enode->get_stack_depth ()
                                 - m_prev_entry_enode->get_stack_depth ());
    if (frames_consumed > 1)
      pp_printf (&pp,
                 "apparently infinite chain of mutually-recursive function"
                 " calls, consuming %i stack frames per recursion",
                 frames_consumed);
    else
      pp_string (&pp, "apparently infinite recursion");
    return true;
  }

  /* Add a custom event when the path enters the function of interest,
     so that the initial and recursive entries can cross-reference each
     other; fall back to the base implementation for other functions.  */
  void
  add_function_entry_event (const exploded_edge &eedge,
                            checker_path *emission_path) final override
  {
    /* Subclass of function_entry_event for use when reporting both
       the initial and subsequent entries to the function of interest,
       allowing for cross-referencing the first event in the description
       of the second.  */
    class recursive_function_entry_event : public function_entry_event
    {
    public:
      /* TOPMOST is true for the later, recursive entry, false for the
         initial entry.  */
      recursive_function_entry_event (const program_point &dst_point,
                                      const program_state &dst_state,
                                      const infinite_recursion_diagnostic &pd,
                                      bool topmost)
      : function_entry_event (dst_point, dst_state),
        m_pd (pd),
        m_topmost (topmost)
      {
      }

      /* Print a description, cross-referencing the event for the
         previous entry (via %@) when its id is known.  */
      void
      print_desc (pretty_printer &pp) const final override
      {
        if (m_topmost)
          {
            if (m_pd.m_prev_entry_event
                && m_pd.m_prev_entry_event->get_id_ptr ()->known_p ())
              pp_printf (&pp,
                         "recursive entry to %qE; previously entered at %@",
                         m_effective_fndecl,
                         m_pd.m_prev_entry_event->get_id_ptr ());
            else
              pp_printf (&pp,
                         "recursive entry to %qE",
                         m_effective_fndecl);
          }
        else
          pp_printf (&pp,
                     "initial entry to %qE",
                     m_effective_fndecl);
      }

    private:
      const infinite_recursion_diagnostic &m_pd;
      bool m_topmost;
    };
    const exploded_node *dst_node = eedge.m_dest;
    const program_point &dst_point = dst_node->get_point ();
    if (eedge.m_dest == m_prev_entry_enode)
      {
        gcc_assert (m_prev_entry_event == nullptr);
        /* Stash a borrowed pointer to the event so that the later
           "recursive entry" event can refer back to it; the
           emission_path owns the event itself.  */
        std::unique_ptr<checker_event> prev_entry_event
          = std::make_unique <recursive_function_entry_event>
              (dst_point,
               dst_node->get_state (),
               *this, false);
        m_prev_entry_event = prev_entry_event.get ();
        emission_path->add_event (std::move (prev_entry_event));
      }
    else if (eedge.m_dest == m_new_entry_enode)
      emission_path->add_event
        (std::make_unique<recursive_function_entry_event>
         (dst_point, dst_node->get_state (), *this, true));
    else
      pending_diagnostic::add_function_entry_event (eedge, emission_path);
  }

  /* Customize the location where the warning_event appears, putting
     it at the topmost entrypoint to the function.  */
  void add_final_event (const state_machine *,
                        const exploded_node *enode,
                        const event_loc_info &,
                        tree,
                        state_machine::state_t,
                        checker_path *emission_path) final override
  {
    gcc_assert (m_new_entry_enode);
    emission_path->add_event
      (std::make_unique<warning_event>
       (event_loc_info (m_new_entry_enode->get_supernode
                          ()->get_location (),
                        m_callee_fndecl,
                        m_new_entry_enode->get_stack_depth ()),
        enode,
        nullptr, nullptr, nullptr));
  }

  /* Reject paths in which conjured svalues have affected control flow
     since m_prev_entry_enode.  */

  bool check_valid_fpath_p (const feasible_node &final_fnode)
    const final override
  {
    /* Reject paths in which calls with unknown side effects have occurred
       since m_prev_entry_enode: walk backwards from FINAL_FNODE towards
       the origin, rejecting the diagnostic if any traversed edge's
       control flow was affected by a conjured svalue, and accepting it
       once we reach m_prev_entry_enode.  */
    gcc_assert (final_fnode.get_inner_node () == m_new_entry_enode);

    /* FG is actually a tree, so each node has exactly one predecessor.
       Walk backwards from FINAL_FNODE until we reach the
       prev_entry_enode (or the origin, whose enode has index 0).  */
    const feasible_node *iter_fnode = &final_fnode;
    while (iter_fnode->get_inner_node ()->m_index != 0)
      {
        gcc_assert (iter_fnode->m_preds.length () == 1);

        feasible_edge *pred_fedge
          = static_cast <feasible_edge *> (iter_fnode->m_preds[0]);

        /* Determine if conjured svalues have affected control flow
           since the prev entry node.  */
        if (fedge_uses_conjured_svalue_p (pred_fedge))
          /* If so, then reject this diagnostic.  */
          return false;
        iter_fnode = static_cast <feasible_node *> (pred_fedge->m_src);
        if (iter_fnode->get_inner_node () == m_prev_entry_enode)
          /* Accept this diagnostic.  */
          return true;
    }

    /* We shouldn't get here; if we do, reject the diagnostic.  */
    gcc_unreachable ();
    return false;
  }

  /* Record the indices of the two entry enodes as SARIF properties,
     for debugging.  */
  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/infinite_recursion_diagnostic/"
    props.set_integer (PROPERTY_PREFIX "prev_entry_enode",
                       m_prev_entry_enode->m_index);
    props.set_integer (PROPERTY_PREFIX "new_entry_enode",
                       m_new_entry_enode->m_index);
#undef PROPERTY_PREFIX
  }

private:
  /* Return true iff control flow along FEDGE was affected by
     a conjured_svalue.  */
  static bool fedge_uses_conjured_svalue_p (feasible_edge *fedge)
  {
    const exploded_edge *eedge = fedge->get_inner_edge ();
    const superedge *sedge = eedge->m_sedge;
    if (!sedge)
      return false;
    auto op = sedge->get_op ();
    if (!op)
      return false;
    const control_flow_op *ctrlflow_op = op->dyn_cast_control_flow_op ();
    if (!ctrlflow_op)
      return false;

    const gimple &last_stmt = ctrlflow_op->get_ctrlflow_stmt ();

    /* Evaluate the controlling expression(s) of the statement in the
       model of the edge's destination node.  */
    const feasible_node *dst_fnode
      = static_cast<const feasible_node *> (fedge->m_dest);
    const region_model &model = dst_fnode->get_state ().get_model ();

    if (const gcond *cond_stmt = dyn_cast <const gcond *> (&last_stmt))
      {
        if (expr_uses_conjured_svalue_p (model, gimple_cond_lhs (cond_stmt)))
          return true;
        if (expr_uses_conjured_svalue_p (model, gimple_cond_rhs (cond_stmt)))
          return true;
      }
    else if (const gswitch *switch_stmt
               = dyn_cast <const gswitch *> (&last_stmt))
      {
        if (expr_uses_conjured_svalue_p (model,
                                         gimple_switch_index (switch_stmt)))
          return true;
      }
    return false;
  }

  /* Return true iff EXPR is affected by a conjured_svalue.  */
  static bool expr_uses_conjured_svalue_p (const region_model &model,
                                           tree expr)
  {
    /* Visitor that records whether any conjured_svalue was seen.  */
    class conjured_svalue_finder : public visitor
    {
    public:
      conjured_svalue_finder () : m_found_conjured_svalues (false)
      {
      }
      void
      visit_conjured_svalue (const conjured_svalue *) final override
      {
        m_found_conjured_svalues = true;
      }

      bool m_found_conjured_svalues;
    };

    const svalue *sval = model.get_rvalue (expr, nullptr);
    conjured_svalue_finder v;
    sval->accept (&v);
    return v.m_found_conjured_svalues;
  }

  /* The enode of the previous entrypoint to the recursive function.  */
  const exploded_node *m_prev_entry_enode;
  /* The enode of the later entrypoint, at which the diagnostic fires.  */
  const exploded_node *m_new_entry_enode;
  /* The recursively-entered function.  */
  tree m_callee_fndecl;
  /* Borrowed pointer to the event for the previous entry, once added to
     the emission path; used for cross-referencing from the description
     of the recursive entry.  */
  const checker_event *m_prev_entry_event;
};
     307              : 
     308              : /* Return true iff ENODE is at a function entrypoint.  */
     309              : 
     310              : static bool
     311       310500 : is_entrypoint_p (exploded_node *enode)
     312              : {
     313              :   /* Look for an entrypoint to a function...  */
     314            0 :   const supernode *snode = enode->get_supernode ();
     315       310500 :   if (!snode)
     316              :     return false;
     317       310500 :   return snode->entry_p ();
     318              : }
     319              : 
     320              : /* Walk backwards through the eg, looking for the first
     321              :    enode we find that's also the entrypoint of the same function.  */
     322              : 
     323              : exploded_node *
     324         1216 : exploded_graph::find_previous_entry_to (function *top_of_stack_fun,
     325              :                                         exploded_node *enode) const
     326              : {
     327         1216 :   auto_vec<exploded_node *> worklist;
     328         1216 :   hash_set<exploded_node *> visited;
     329              : 
     330         1216 :   visited.add (enode);
     331         4864 :   for (auto in_edge : enode->m_preds)
     332         1216 :     worklist.safe_push (in_edge->m_src);
     333              : 
     334        14488 :   while (worklist.length () > 0)
     335              :     {
     336        13272 :       exploded_node *iter = worklist.pop ();
     337              : 
     338        13272 :       if (is_entrypoint_p (iter)
     339        13272 :           && iter->get_function () == top_of_stack_fun)
     340         1216 :         return iter;
     341              : 
     342        12056 :       if (visited.contains (iter))
     343           28 :         continue;
     344        12028 :       visited.add (iter);
     345        48329 :       for (auto in_edge : iter->m_preds)
     346        12245 :         worklist.safe_push (in_edge->m_src);
     347              :     }
     348              : 
     349              :   /* Not found.  */
     350              :   return nullptr;
     351         1216 : }
     352              : 
     353              : /* Given BASE_REG within ENCLOSING_FRAME (such as a function parameter),
     354              :    remap it to the equivalent region within EQUIV_PREV_FRAME.
     355              : 
     356              :    For example, given param "n" within frame "foo@3", and equiv prev frame
     357              :    "foo@1", remap it to param "n" within frame "foo@1".  */
     358              : 
     359              : static const region *
     360          983 : remap_enclosing_frame (const region *base_reg,
     361              :                        const frame_region *enclosing_frame,
     362              :                        const frame_region *equiv_prev_frame,
     363              :                        region_model_manager *mgr)
     364              : {
     365          983 :   gcc_assert (base_reg->get_parent_region () == enclosing_frame);
     366          983 :   switch (base_reg->get_kind ())
     367              :     {
     368            0 :     default:
     369              :       /* We should only encounter params and varargs at the topmost
     370              :          entrypoint.  */
     371            0 :       gcc_unreachable ();
     372              : 
     373           20 :     case RK_VAR_ARG:
     374           20 :       {
     375           20 :         const var_arg_region *var_arg_reg = (const var_arg_region *)base_reg;
     376           20 :         return mgr->get_var_arg_region (equiv_prev_frame,
     377           20 :                                         var_arg_reg->get_index ());
     378              :       }
     379          963 :     case RK_DECL:
     380          963 :       {
     381          963 :         const decl_region *decl_reg = (const decl_region *)base_reg;
     382          963 :         return equiv_prev_frame->get_region_for_local (mgr,
     383              :                                                        decl_reg->get_decl (),
     384          963 :                                                        nullptr);
     385              :       }
     386              :     }
     387              : }
     388              : 
     389              : /* Return true iff SVAL is unknown, or contains an unknown svalue.  */
     390              : 
     391              : static bool
     392         6458 : contains_unknown_p (const svalue *sval)
     393              : {
     394         6458 :   if (sval->get_kind () == SK_UNKNOWN)
     395              :     return true;
     396        12662 :   if (const compound_svalue *compound_sval
     397         6331 :         = sval->dyn_cast_compound_svalue ())
     398           33 :     for (auto iter : *compound_sval)
     399           30 :       if (iter.m_sval->get_kind () == SK_UNKNOWN)
     400           18 :         return true;
     401              :   return false;
     402              : }
     403              : 
     404              : /* Subroutine of sufficiently_different_p.  Compare the store bindings
     405              :    for BASE_REG within NEW_ENTRY_ENODE and PREV_ENTRY_ENODE.
     406              : 
     407              :    Return true if the state of NEW_ENTRY_ENODE is sufficiently different
     408              :    from PREV_ENTRY_ENODE within BASE_REG to suggest that some variant is
     409              :    being modified, and thus the recursion isn't infinite.
     410              : 
     411              :    Return false if the states for BASE_REG are effectively the same.  */
     412              : 
     413              : static bool
     414         3983 : sufficiently_different_region_binding_p (exploded_node *new_entry_enode,
     415              :                                          exploded_node *prev_entry_enode,
     416              :                                          const region *base_reg)
     417              : {
     418              :   /* Compare the stores of the two enodes.  */
     419         3983 :   const region_model &new_model
     420         3983 :     = *new_entry_enode->get_state ().m_region_model;
     421         3983 :   const region_model &prev_model
     422         3983 :     = *prev_entry_enode->get_state ().m_region_model;
     423              : 
     424              :   /* Get the value within the new frame.  */
     425         3983 :   const svalue *new_sval
     426         3983 :     = new_model.get_store_value (base_reg, nullptr);
     427              : 
     428              :   /* If any part of the value is UNKNOWN (e.g. due to hitting
     429              :      complexity limits) assume that it differs from the previous
     430              :      value.  */
     431         3983 :   if (contains_unknown_p (new_sval))
     432              :     return true;
     433              : 
     434              :   /* Get the equivalent value within the old enode.  */
     435         3839 :   const svalue *prev_sval;
     436              : 
     437         7678 :   if (const frame_region *enclosing_frame
     438         3839 :       = base_reg->maybe_get_frame_region ())
     439              :     {
     440              :       /* We have a binding within a frame in the new entry enode.  */
     441              : 
     442              :       /* Consider changes in bindings below the original entry
     443              :          to the recursion.  */
     444         3395 :       const int old_stack_depth = prev_entry_enode->get_stack_depth ();
     445         3395 :       if (enclosing_frame->get_stack_depth () < old_stack_depth)
     446         1048 :         prev_sval = prev_model.get_store_value (base_reg, nullptr);
     447              :       else
     448              :         {
     449              :           /* Ignore bindings within frames below the new entry node.  */
     450         2347 :           const int new_stack_depth = new_entry_enode->get_stack_depth ();
     451         2347 :           if (enclosing_frame->get_stack_depth () < new_stack_depth)
     452              :             return false;
     453              : 
     454              :           /* We have a binding within the frame of the new entry node,
     455              :              presumably a parameter.  */
     456              : 
     457              :           /* Get the value within the equivalent frame of
     458              :              the old entrypoint; typically will be the initial_svalue
     459              :              of the parameter.  */
     460          983 :           const frame_region *equiv_prev_frame
     461          983 :             = prev_model.get_current_frame ();
     462          983 :           const region *equiv_prev_base_reg
     463          983 :             = remap_enclosing_frame (base_reg,
     464              :                                      enclosing_frame,
     465              :                                      equiv_prev_frame,
     466              :                                      new_model.get_manager ());
     467          983 :           prev_sval
     468          983 :             = prev_model.get_store_value (equiv_prev_base_reg, nullptr);
     469              :         }
     470              :     }
     471              :   else
     472          444 :     prev_sval = prev_model.get_store_value (base_reg, nullptr);
     473              : 
     474              :   /* If the prev_sval contains UNKNOWN (e.g. due to hitting complexity limits)
     475              :      assume that it will differ from any new value.  */
     476         2475 :   if (contains_unknown_p (prev_sval))
     477              :     return true;
     478              : 
     479         2474 :   if (new_sval != prev_sval)
     480              :     return true;
     481              : 
     482              :   return false;
     483              : }
     484              : 
     485              : /* Compare the state of memory at NEW_ENTRY_ENODE and PREV_ENTRY_ENODE,
     486              :    both of which are entrypoints to the same function, where recursion has
     487              :    occurred.
     488              : 
     489              :    Return true if the state of NEW_ENTRY_ENODE is sufficiently different
     490              :    from PREV_ENTRY_ENODE to suggest that some variant is being modified,
     491              :    and thus the recursion isn't infinite.
     492              : 
     493              :    Return false if the states are effectively the same, suggesting that
     494              :    the recursion is infinite.
     495              : 
     496              :    For example, consider mutually recursive functions "foo" and "bar".
     497              :    At the entrypoint to a "foo" frame where we've detected recursion,
     498              :    we might have three frames on the stack: the new 'foo'@3, an inner
     499              :    'bar'@2, and the innermost 'foo'@1.
     500              : 
     501              :      (gdb) call enode->dump(m_ext_state)
     502              :      EN: 16
     503              :      callstring: [(SN: 9 -> SN: 3 in foo), (SN: 5 -> SN: 8 in bar)]
     504              :      before SN: 0 (NULL from-edge)
     505              : 
     506              :      rmodel:
     507              :      stack depth: 3
     508              :        frame (index 2): frame: ‘foo’@3
     509              :        frame (index 1): frame: ‘bar’@2
     510              :        frame (index 0): frame: ‘foo’@1
     511              :      clusters within root region
     512              :        cluster for: (*INIT_VAL(f_4(D)))
     513              :      clusters within frame: ‘bar’@2
     514              :        cluster for: b_2(D): INIT_VAL(f_4(D))
     515              :      clusters within frame: ‘foo’@3
     516              :        cluster for: f_4(D): INIT_VAL(f_4(D))
     517              :      m_called_unknown_fn: FALSE
     518              : 
     519              :    whereas for the previous entry node we'd have just the innermost
     520              :    'foo'@1
     521              : 
     522              :      (gdb) call prev_entry_enode->dump(m_ext_state)
     523              :      EN: 1
     524              :      callstring: []
     525              :      before SN: 0 (NULL from-edge)
     526              : 
     527              :      rmodel:
     528              :      stack depth: 1
     529              :        frame (index 0): frame: ‘foo’@1
     530              :      clusters within root region
     531              :        cluster for: (*INIT_VAL(f_4(D)))
     532              :      m_called_unknown_fn: FALSE
     533              : 
     534              :    We want to abstract away frames 1 and 2 in the new entry enode,
     535              :    and compare its frame 3 with the frame 1 in the previous entry
     536              :    enode, and determine if enough state changes between them to
     537              :    rule out infinite recursion.  */
     538              : 
     539              : static bool
     540         1216 : sufficiently_different_p (exploded_node *new_entry_enode,
     541              :                           exploded_node *prev_entry_enode,
     542              :                           logger *logger)
     543              : {
     544         1216 :   LOG_SCOPE (logger);
     545         1216 :   gcc_assert (new_entry_enode);
     546         1216 :   gcc_assert (prev_entry_enode);
     547         1216 :   gcc_assert (is_entrypoint_p (new_entry_enode));
     548         1216 :   gcc_assert (is_entrypoint_p (prev_entry_enode));
     549              : 
     550              :   /* Compare the stores of the two enodes.  */
     551         1216 :   const region_model &new_model
     552         1216 :     = *new_entry_enode->get_state ().m_region_model;
     553         1216 :   const store &new_store = *new_model.get_store ();
     554              : 
     555         7714 :   for (auto kv : new_store)
     556              :     {
     557         3983 :       const region *base_reg = kv.first;
     558         3983 :       if (sufficiently_different_region_binding_p (new_entry_enode,
     559              :                                                    prev_entry_enode,
     560              :                                                    base_reg))
     561          734 :         return true;
     562              :     }
     563              : 
     564              :   /* No significant differences found.  */
     565          482 :   return false;
     566         1216 : }
     567              : 
     568              : /* Implementation of -Wanalyzer-infinite-recursion.
     569              : 
     570              :    Called when adding ENODE to the graph, after adding its first in-edge.
     571              : 
     572              :    For function entrypoints, see if recursion has occurred, and, if so,
     573              :    check if the state of memory changed between the recursion levels,
     574              :    which would suggest some kind of decreasing variant that leads to
     575              :    termination.
     576              : 
     577              :    For recursive calls where the state of memory is effectively unchanged
     578              :    between recursion levels, warn with -Wanalyzer-infinite-recursion.  */
     579              : 
     580              : void
     581       294796 : exploded_graph::detect_infinite_recursion (exploded_node *enode)
     582              : {
     583       294796 :   if (!is_entrypoint_p (enode))
     584       294314 :     return;
     585         6193 :   function *top_of_stack_fun = enode->get_function ();
     586         6193 :   gcc_assert (top_of_stack_fun);
     587              : 
     588              :   /* ....where a call to that function is already in the call string.  */
     589         6193 :   const call_string &call_string = enode->get_point ().get_call_string ();
     590              : 
     591         6193 :   if (call_string.count_occurrences_of_function (top_of_stack_fun) < 2)
     592              :     return;
     593              : 
     594         1216 :   tree fndecl = top_of_stack_fun->decl;
     595              : 
     596         1216 :   log_scope s (get_logger (),
     597              :                "checking for infinite recursion",
     598              :                "considering recursion at EN: %i entering %qE",
     599         1216 :                enode->m_index, fndecl);
     600              : 
     601              :   /* Find enode that's the entrypoint for the previous frame for fndecl
     602              :      in the recursion.  */
     603         1216 :   exploded_node *prev_entry_enode
     604         1216 :     = find_previous_entry_to (top_of_stack_fun, enode);
     605         1216 :   gcc_assert (prev_entry_enode);
     606         1216 :   if (get_logger ())
     607            0 :     get_logger ()->log ("previous entrypoint to %qE is EN: %i",
     608            0 :                         fndecl, prev_entry_enode->m_index);
     609              : 
     610              :   /* Look for changes to the state of memory between the recursion levels.  */
     611         1216 :   if (sufficiently_different_p (enode, prev_entry_enode, get_logger ()))
     612          734 :     return;
     613              : 
     614              :   /* Otherwise, the state of memory is effectively the same between the two
     615              :      recursion levels; warn.  */
     616          482 :   pending_location ploc (enode,
     617          482 :                          call_string.get_top_of_stack ().get_call_stmt ().location);
     618          482 :   get_diagnostic_manager ().add_diagnostic
     619          482 :     (std::move (ploc),
     620          482 :      std::make_unique<infinite_recursion_diagnostic> (prev_entry_enode,
     621              :                                                       enode,
     622              :                                                       fndecl));
     623         1216 : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.