LCOV - code coverage report
Current view: top level - gcc/analyzer - infinite-recursion.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 92.6 % 215 199
Test Date: 2026-05-11 19:44:49 Functions: 95.0 % 20 19
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Detection of infinite recursion.
       2              :    Copyright (C) 2022-2026 Free Software Foundation, Inc.
       3              :    Contributed by David Malcolm <dmalcolm@redhat.com>.
       4              : 
       5              : This file is part of GCC.
       6              : 
       7              : GCC is free software; you can redistribute it and/or modify it
       8              : under the terms of the GNU General Public License as published by
       9              : the Free Software Foundation; either version 3, or (at your option)
      10              : any later version.
      11              : 
      12              : GCC is distributed in the hope that it will be useful, but
      13              : WITHOUT ANY WARRANTY; without even the implied warranty of
      14              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      15              : General Public License for more details.
      16              : 
      17              : You should have received a copy of the GNU General Public License
      18              : along with GCC; see the file COPYING3.  If not see
      19              : <http://www.gnu.org/licenses/>.  */
      20              : 
      21              : #include "analyzer/common.h"
      22              : 
      23              : #include "cfg.h"
      24              : #include "gimple-iterator.h"
      25              : #include "gimple-pretty-print.h"
      26              : #include "cgraph.h"
      27              : #include "digraph.h"
      28              : #include "diagnostics/sarif-sink.h"
      29              : 
      30              : #include "analyzer/analyzer-logging.h"
      31              : #include "analyzer/call-string.h"
      32              : #include "analyzer/program-point.h"
      33              : #include "analyzer/store.h"
      34              : #include "analyzer/region-model.h"
      35              : #include "analyzer/constraint-manager.h"
      36              : #include "analyzer/sm.h"
      37              : #include "analyzer/pending-diagnostic.h"
      38              : #include "analyzer/diagnostic-manager.h"
      39              : #include "analyzer/supergraph.h"
      40              : #include "analyzer/program-state.h"
      41              : #include "analyzer/exploded-graph.h"
      42              : #include "analyzer/checker-path.h"
      43              : #include "analyzer/feasible-graph.h"
      44              : 
      45              : /* A subclass of pending_diagnostic for complaining about suspected
      46              :    infinite recursion.  */
      47              : 
      48              : class infinite_recursion_diagnostic
      49              : : public pending_diagnostic_subclass<infinite_recursion_diagnostic>
      50              : {
      51              : public:
      52          482 :   infinite_recursion_diagnostic (const exploded_node *prev_entry_enode,
      53              :                                  const exploded_node *new_entry_enode,
      54              :                                  tree callee_fndecl)
      55          482 :   : m_prev_entry_enode (prev_entry_enode),
      56          482 :     m_new_entry_enode (new_entry_enode),
      57          482 :     m_callee_fndecl (callee_fndecl),
      58          482 :     m_prev_entry_event (nullptr)
      59              :   {}
      60              : 
      61         2771 :   const char *get_kind () const final override
      62              :   {
      63         2771 :     return "infinite_recursion_diagnostic";
      64              :   }
      65              : 
      66          412 :   bool operator== (const infinite_recursion_diagnostic &other) const
      67              :   {
      68          412 :     return m_callee_fndecl == other.m_callee_fndecl;
      69              :   }
      70              : 
      71          629 :   int get_controlling_option () const final override
      72              :   {
      73          629 :     return OPT_Wanalyzer_infinite_recursion;
      74              :   }
      75              : 
      76          147 :   bool emit (diagnostic_emission_context &ctxt) final override
      77              :   {
      78              :     /* "CWE-674: Uncontrolled Recursion".  */
      79          147 :     ctxt.add_cwe (674);
      80          147 :     return ctxt.warn ("infinite recursion");
      81              :   }
      82              : 
      83              :   bool
      84          294 :   describe_final_event (pretty_printer &pp,
      85              :                         const evdesc::final_event &) final override
      86              :   {
      87          294 :     const int frames_consumed = (m_new_entry_enode->get_stack_depth ()
      88          294 :                                  - m_prev_entry_enode->get_stack_depth ());
      89          294 :     if (frames_consumed > 1)
      90           64 :       pp_printf (&pp,
      91              :                  "apparently infinite chain of mutually-recursive function"
      92              :                  " calls, consuming %i stack frames per recursion",
      93              :                  frames_consumed);
      94              :     else
      95          230 :       pp_string (&pp, "apparently infinite recursion");
      96          294 :     return true;
      97              :   }
      98              : 
      99              :   void
     100          385 :   add_function_entry_event (const exploded_edge &eedge,
     101              :                             checker_path *emission_path,
     102              :                             const state_transition_at_call *state_trans) final override
     103              :   {
     104              :     /* Subclass of function_entry_event for use when reporting both
     105              :        the initial and subsequent entries to the function of interest,
     106              :        allowing for cross-referencing the first event in the description
     107              :        of the second.  */
     108            0 :     class recursive_function_entry_event : public function_entry_event
     109              :     {
     110              :     public:
     111          294 :       recursive_function_entry_event (const program_point &dst_point,
     112              :                                       const program_state &dst_state,
     113              :                                       const infinite_recursion_diagnostic &pd,
     114              :                                       bool topmost)
     115          294 :       : function_entry_event (event_loc_info (dst_point),
     116              :                               dst_state,
     117              :                               nullptr),
     118          294 :         m_pd (pd),
     119          294 :         m_topmost (topmost)
     120              :       {
     121          294 :       }
     122              : 
     123              :       void
     124          588 :       print_desc (pretty_printer &pp) const final override
     125              :       {
     126          588 :         if (m_topmost)
     127              :           {
     128          294 :             if (m_pd.m_prev_entry_event
     129          294 :                 && m_pd.m_prev_entry_event->get_id_ptr ()->known_p ())
     130          294 :               pp_printf (&pp,
     131              :                          "recursive entry to %qE; previously entered at %@",
     132          294 :                          m_effective_fndecl,
     133          294 :                          m_pd.m_prev_entry_event->get_id_ptr ());
     134              :             else
     135            0 :               pp_printf (&pp,
     136              :                          "recursive entry to %qE",
     137            0 :                          m_effective_fndecl);
     138              :           }
     139              :         else
     140          294 :           pp_printf (&pp,
     141              :                      "initial entry to %qE",
     142          294 :                      m_effective_fndecl);
     143          588 :       }
     144              : 
     145              :     private:
     146              :       const infinite_recursion_diagnostic &m_pd;
     147              :       bool m_topmost;
     148              :     };
     149          385 :     const exploded_node *dst_node = eedge.m_dest;
     150          385 :     const program_point &dst_point = dst_node->get_point ();
     151          385 :     if (eedge.m_dest == m_prev_entry_enode)
     152              :       {
     153          147 :         gcc_assert (m_prev_entry_event == nullptr);
     154          147 :         std::unique_ptr<checker_event> prev_entry_event
     155              :           = std::make_unique <recursive_function_entry_event>
     156          147 :               (dst_point,
     157              :                dst_node->get_state (),
     158          147 :                *this, false);
     159          147 :         m_prev_entry_event = prev_entry_event.get ();
     160          147 :         emission_path->add_event (std::move (prev_entry_event));
     161          147 :       }
     162          238 :     else if (eedge.m_dest == m_new_entry_enode)
     163          147 :       emission_path->add_event
     164          147 :         (std::make_unique<recursive_function_entry_event>
     165          147 :          (dst_point, dst_node->get_state (), *this, true));
     166              :     else
     167           91 :       pending_diagnostic::add_function_entry_event (eedge, emission_path,
     168              :                                                     state_trans);
     169          385 :   }
     170              : 
     171              :   /* Customize the location where the warning_event appears, putting
     172              :      it at the topmost entrypoint to the function.  */
     173          147 :   void add_final_event (const state_machine *,
     174              :                         const exploded_node *enode,
     175              :                         const event_loc_info &,
     176              :                         tree,
     177              :                         state_machine::state_t,
     178              :                         checker_path *emission_path) final override
     179              :   {
     180          147 :     gcc_assert (m_new_entry_enode);
     181          147 :     emission_path->add_event
     182          147 :       (std::make_unique<warning_event>
     183          147 :        (event_loc_info (m_new_entry_enode->get_supernode
     184              :                           ()->get_location (),
     185              :                         m_callee_fndecl,
     186          147 :                         m_new_entry_enode->get_stack_depth ()),
     187              :         enode,
     188          147 :         nullptr, nullptr, nullptr));
     189          147 :   }
     190              : 
     191              :   /* Reject paths in which conjured svalues have affected control flow
     192              :      since m_prev_entry_enode.  */
     193              : 
     194          474 :   bool check_valid_fpath_p (const feasible_node &final_fnode)
     195              :     const final override
     196              :   {
     197              :     /* Reject paths in which calls with unknown side effects have occurred
     198              :        since m_prev_entry_enode.
       199              :        Find num calls with side effects.  Walk backward until we reach the
       200              :        prev_entry_enode.  */
     201          474 :     gcc_assert (final_fnode.get_inner_node () == m_new_entry_enode);
     202              : 
     203              :     /* FG is actually a tree.  Walk backwards from FINAL_FNODE until we
     204              :        reach the prev_entry_enode (or the origin).  */
     205              :     const feasible_node *iter_fnode = &final_fnode;
     206         2682 :     while (iter_fnode->get_inner_node ()->m_index != 0)
     207              :       {
     208         2682 :         gcc_assert (iter_fnode->m_preds.length () == 1);
     209              : 
     210         2682 :         feasible_edge *pred_fedge
     211         2682 :           = static_cast <feasible_edge *> (iter_fnode->m_preds[0]);
     212              : 
     213              :         /* Determine if conjured svalues have affected control flow
     214              :            since the prev entry node.  */
     215         2682 :         if (fedge_uses_conjured_svalue_p (pred_fedge))
     216              :           /* If so, then reject this diagnostic.  */
     217              :           return false;
     218         2620 :         iter_fnode = static_cast <feasible_node *> (pred_fedge->m_src);
     219         2620 :         if (iter_fnode->get_inner_node () == m_prev_entry_enode)
     220              :           /* Accept this diagnostic.  */
     221              :           return true;
     222              :     }
     223              : 
     224              :     /* We shouldn't get here; if we do, reject the diagnostic.  */
     225            0 :     gcc_unreachable ();
     226              :     return false;
     227              :   }
     228              : 
     229              :   void
     230            0 :   maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
     231              :     const final override
     232              :   {
     233            0 :     auto &props = result_obj.get_or_create_properties ();
     234              : #define PROPERTY_PREFIX "gcc/analyzer/infinite_recursion_diagnostic/"
     235            0 :     props.set_integer (PROPERTY_PREFIX "prev_entry_enode",
     236            0 :                        m_prev_entry_enode->m_index);
     237            0 :     props.set_integer (PROPERTY_PREFIX "new_entry_enode",
     238            0 :                        m_new_entry_enode->m_index);
     239              : #undef PROPERTY_PREFIX
     240            0 :   }
     241              : 
     242              : private:
     243              :   /* Return true iff control flow along FEDGE was affected by
     244              :      a conjured_svalue.  */
     245         2682 :   static bool fedge_uses_conjured_svalue_p (feasible_edge *fedge)
     246              :   {
     247         2682 :     const exploded_edge *eedge = fedge->get_inner_edge ();
     248         2682 :     const superedge *sedge = eedge->m_sedge;
     249         2682 :     if (!sedge)
     250              :       return false;
     251         2682 :     auto op = sedge->get_op ();
     252         2682 :     if (!op)
     253              :       return false;
     254         1879 :     const control_flow_op *ctrlflow_op = op->dyn_cast_control_flow_op ();
     255         1879 :     if (!ctrlflow_op)
     256              :       return false;
     257              : 
     258          487 :     const gimple &last_stmt = ctrlflow_op->get_ctrlflow_stmt ();
     259              : 
     260          487 :     const feasible_node *dst_fnode
     261              :       = static_cast<const feasible_node *> (fedge->m_dest);
     262          487 :     const region_model &model = dst_fnode->get_state ().get_model ();
     263              : 
     264          487 :     if (const gcond *cond_stmt = dyn_cast <const gcond *> (&last_stmt))
     265              :       {
     266          445 :         if (expr_uses_conjured_svalue_p (model, gimple_cond_lhs (cond_stmt)))
     267              :           return true;
     268          413 :         if (expr_uses_conjured_svalue_p (model, gimple_cond_rhs (cond_stmt)))
     269              :           return true;
     270              :       }
     271           42 :     else if (const gswitch *switch_stmt
     272         2724 :                = dyn_cast <const gswitch *> (&last_stmt))
     273              :       {
     274           42 :         if (expr_uses_conjured_svalue_p (model,
     275              :                                          gimple_switch_index (switch_stmt)))
     276              :           return true;
     277              :       }
     278              :     return false;
     279              :   }
     280              : 
     281              :   /* Return true iff EXPR is affected by a conjured_svalue.  */
     282          900 :   static bool expr_uses_conjured_svalue_p (const region_model &model,
     283              :                                            tree expr)
     284              :   {
     285          900 :     class conjured_svalue_finder : public visitor
     286              :     {
     287              :     public:
     288          900 :       conjured_svalue_finder () : m_found_conjured_svalues (false)
     289              :       {
     290              :       }
     291              :       void
     292           66 :       visit_conjured_svalue (const conjured_svalue *) final override
     293              :       {
     294           66 :         m_found_conjured_svalues = true;
     295           66 :       }
     296              : 
     297              :       bool m_found_conjured_svalues;
     298              :     };
     299              : 
     300          900 :     const svalue *sval = model.get_rvalue (expr, nullptr);
     301          900 :     conjured_svalue_finder v;
     302          900 :     sval->accept (&v);
     303          900 :     return v.m_found_conjured_svalues;
     304              :   }
     305              : 
     306              :   const exploded_node *m_prev_entry_enode;
     307              :   const exploded_node *m_new_entry_enode;
     308              :   tree m_callee_fndecl;
     309              :   const checker_event *m_prev_entry_event;
     310              : };
     311              : 
     312              : /* Return true iff ENODE is at a function entrypoint.  */
     313              : 
     314              : static bool
     315       311630 : is_entrypoint_p (exploded_node *enode)
     316              : {
     317              :   /* Look for an entrypoint to a function...  */
     318            0 :   const supernode *snode = enode->get_supernode ();
     319       311630 :   if (!snode)
     320              :     return false;
     321       311630 :   return snode->entry_p ();
     322              : }
     323              : 
     324              : /* Walk backwards through the eg, looking for the first
     325              :    enode we find that's also the entrypoint of the same function.  */
     326              : 
     327              : exploded_node *
     328         1216 : exploded_graph::find_previous_entry_to (function *top_of_stack_fun,
     329              :                                         exploded_node *enode) const
     330              : {
     331         1216 :   auto_vec<exploded_node *> worklist;
     332         1216 :   hash_set<exploded_node *> visited;
     333              : 
     334         1216 :   visited.add (enode);
     335         4864 :   for (auto in_edge : enode->m_preds)
     336         1216 :     worklist.safe_push (in_edge->m_src);
     337              : 
     338        14488 :   while (worklist.length () > 0)
     339              :     {
     340        13272 :       exploded_node *iter = worklist.pop ();
     341              : 
     342        13272 :       if (is_entrypoint_p (iter)
     343        13272 :           && iter->get_function () == top_of_stack_fun)
     344         1216 :         return iter;
     345              : 
     346        12056 :       if (visited.contains (iter))
     347           28 :         continue;
     348        12028 :       visited.add (iter);
     349        48329 :       for (auto in_edge : iter->m_preds)
     350        12245 :         worklist.safe_push (in_edge->m_src);
     351              :     }
     352              : 
     353              :   /* Not found.  */
     354              :   return nullptr;
     355         1216 : }
     356              : 
     357              : /* Given BASE_REG within ENCLOSING_FRAME (such as a function parameter),
     358              :    remap it to the equivalent region within EQUIV_PREV_FRAME.
     359              : 
     360              :    For example, given param "n" within frame "foo@3", and equiv prev frame
     361              :    "foo@1", remap it to param "n" within frame "foo@1".  */
     362              : 
     363              : static const region *
     364          982 : remap_enclosing_frame (const region *base_reg,
     365              :                        const frame_region *enclosing_frame,
     366              :                        const frame_region *equiv_prev_frame,
     367              :                        region_model_manager *mgr)
     368              : {
     369          982 :   gcc_assert (base_reg->get_parent_region () == enclosing_frame);
     370          982 :   switch (base_reg->get_kind ())
     371              :     {
     372            0 :     default:
     373              :       /* We should only encounter params and varargs at the topmost
     374              :          entrypoint.  */
     375            0 :       gcc_unreachable ();
     376              : 
     377           19 :     case RK_VAR_ARG:
     378           19 :       {
     379           19 :         const var_arg_region *var_arg_reg = (const var_arg_region *)base_reg;
     380           19 :         return mgr->get_var_arg_region (equiv_prev_frame,
     381           19 :                                         var_arg_reg->get_index ());
     382              :       }
     383          963 :     case RK_DECL:
     384          963 :       {
     385          963 :         const decl_region *decl_reg = (const decl_region *)base_reg;
     386          963 :         return equiv_prev_frame->get_region_for_local (mgr,
     387              :                                                        decl_reg->get_decl (),
     388          963 :                                                        nullptr);
     389              :       }
     390              :     }
     391              : }
     392              : 
     393              : /* Return true iff SVAL is unknown, or contains an unknown svalue.  */
     394              : 
     395              : static bool
     396         6406 : contains_unknown_p (const svalue *sval)
     397              : {
     398         6406 :   if (sval->get_kind () == SK_UNKNOWN)
     399              :     return true;
     400        12572 :   if (const compound_svalue *compound_sval
     401         6286 :         = sval->dyn_cast_compound_svalue ())
     402           39 :     for (auto iter : *compound_sval)
     403           35 :       if (iter.second->get_kind () == SK_UNKNOWN)
     404          139 :         return true;
     405              :   return false;
     406              : }
     407              : 
     408              : /* Subroutine of sufficiently_different_p.  Compare the store bindings
     409              :    for BASE_REG within NEW_ENTRY_ENODE and PREV_ENTRY_ENODE.
     410              : 
     411              :    Return true if the state of NEW_ENTRY_ENODE is sufficiently different
     412              :    from PREV_ENTRY_ENODE within BASE_REG to suggest that some variant is
     413              :    being modified, and thus the recursion isn't infinite.
     414              : 
     415              :    Return false if the states for BASE_REG are effectively the same.  */
     416              : 
     417              : static bool
     418         3955 : sufficiently_different_region_binding_p (exploded_node *new_entry_enode,
     419              :                                          exploded_node *prev_entry_enode,
     420              :                                          const region *base_reg)
     421              : {
     422              :   /* Compare the stores of the two enodes.  */
     423         3955 :   const region_model &new_model
     424         3955 :     = *new_entry_enode->get_state ().m_region_model;
     425         3955 :   const region_model &prev_model
     426         3955 :     = *prev_entry_enode->get_state ().m_region_model;
     427              : 
     428              :   /* Get the value within the new frame.  */
     429         3955 :   const svalue *new_sval
     430         3955 :     = new_model.get_store_value (base_reg, nullptr);
     431              : 
     432              :   /* If any part of the value is UNKNOWN (e.g. due to hitting
     433              :      complexity limits) assume that it differs from the previous
     434              :      value.  */
     435         3955 :   if (contains_unknown_p (new_sval))
     436              :     return true;
     437              : 
     438              :   /* Get the equivalent value within the old enode.  */
     439         3817 :   const svalue *prev_sval;
     440              : 
     441         7634 :   if (const frame_region *enclosing_frame
     442         3817 :       = base_reg->maybe_get_frame_region ())
     443              :     {
     444              :       /* We have a binding within a frame in the new entry enode.  */
     445              : 
     446              :       /* Consider changes in bindings below the original entry
     447              :          to the recursion.  */
     448         3356 :       const int old_stack_depth = prev_entry_enode->get_stack_depth ();
     449         3356 :       if (enclosing_frame->get_stack_depth () < old_stack_depth)
     450         1008 :         prev_sval = prev_model.get_store_value (base_reg, nullptr);
     451              :       else
     452              :         {
     453              :           /* Ignore bindings within frames below the new entry node.  */
     454         2348 :           const int new_stack_depth = new_entry_enode->get_stack_depth ();
     455         2348 :           if (enclosing_frame->get_stack_depth () < new_stack_depth)
     456              :             return false;
     457              : 
     458              :           /* We have a binding within the frame of the new entry node,
     459              :              presumably a parameter.  */
     460              : 
     461              :           /* Get the value within the equivalent frame of
     462              :              the old entrypoint; typically will be the initial_svalue
     463              :              of the parameter.  */
     464          982 :           const frame_region *equiv_prev_frame
     465          982 :             = prev_model.get_current_frame ();
     466          982 :           const region *equiv_prev_base_reg
     467          982 :             = remap_enclosing_frame (base_reg,
     468              :                                      enclosing_frame,
     469              :                                      equiv_prev_frame,
     470              :                                      new_model.get_manager ());
     471          982 :           prev_sval
     472          982 :             = prev_model.get_store_value (equiv_prev_base_reg, nullptr);
     473              :         }
     474              :     }
     475              :   else
     476          461 :     prev_sval = prev_model.get_store_value (base_reg, nullptr);
     477              : 
     478              :   /* If the prev_sval contains UNKNOWN (e.g. due to hitting complexity limits)
     479              :      assume that it will differ from any new value.  */
     480         2451 :   if (contains_unknown_p (prev_sval))
     481              :     return true;
     482              : 
     483         2450 :   if (new_sval != prev_sval)
     484              :     return true;
     485              : 
     486              :   return false;
     487              : }
     488              : 
     489              : /* Compare the state of memory at NEW_ENTRY_ENODE and PREV_ENTRY_ENODE,
     490              :    both of which are entrypoints to the same function, where recursion has
     491              :    occurred.
     492              : 
     493              :    Return true if the state of NEW_ENTRY_ENODE is sufficiently different
     494              :    from PREV_ENTRY_ENODE to suggest that some variant is being modified,
     495              :    and thus the recursion isn't infinite.
     496              : 
     497              :    Return false if the states are effectively the same, suggesting that
     498              :    the recursion is infinite.
     499              : 
     500              :    For example, consider mutually recursive functions "foo" and "bar".
     501              :    At the entrypoint to a "foo" frame where we've detected recursion,
     502              :    we might have three frames on the stack: the new 'foo'@3, an inner
     503              :    'bar'@2, and the innermost 'foo'@1.
     504              : 
     505              :      (gdb) call enode->dump(m_ext_state)
     506              :      EN: 16
     507              :      callstring: [(SN: 9 -> SN: 3 in foo), (SN: 5 -> SN: 8 in bar)]
     508              :      before SN: 0 (NULL from-edge)
     509              : 
     510              :      rmodel:
     511              :      stack depth: 3
     512              :        frame (index 2): frame: ‘foo’@3
     513              :        frame (index 1): frame: ‘bar’@2
     514              :        frame (index 0): frame: ‘foo’@1
     515              :      clusters within root region
     516              :        cluster for: (*INIT_VAL(f_4(D)))
     517              :      clusters within frame: ‘bar’@2
     518              :        cluster for: b_2(D): INIT_VAL(f_4(D))
     519              :      clusters within frame: ‘foo’@3
     520              :        cluster for: f_4(D): INIT_VAL(f_4(D))
     521              :      m_called_unknown_fn: FALSE
     522              : 
     523              :    whereas for the previous entry node we'd have just the innermost
     524              :    'foo'@1
     525              : 
     526              :      (gdb) call prev_entry_enode->dump(m_ext_state)
     527              :      EN: 1
     528              :      callstring: []
     529              :      before SN: 0 (NULL from-edge)
     530              : 
     531              :      rmodel:
     532              :      stack depth: 1
     533              :        frame (index 0): frame: ‘foo’@1
     534              :      clusters within root region
     535              :        cluster for: (*INIT_VAL(f_4(D)))
     536              :      m_called_unknown_fn: FALSE
     537              : 
     538              :    We want to abstract away frames 1 and 2 in the new entry enode,
     539              :    and compare its frame 3 with the frame 1 in the previous entry
     540              :    enode, and determine if enough state changes between them to
     541              :    rule out infinite recursion.  */
     542              : 
     543              : static bool
     544         1216 : sufficiently_different_p (exploded_node *new_entry_enode,
     545              :                           exploded_node *prev_entry_enode,
     546              :                           logger *logger)
     547              : {
     548         1216 :   LOG_SCOPE (logger);
     549         1216 :   gcc_assert (new_entry_enode);
     550         1216 :   gcc_assert (prev_entry_enode);
     551         1216 :   gcc_assert (is_entrypoint_p (new_entry_enode));
     552         1216 :   gcc_assert (is_entrypoint_p (prev_entry_enode));
     553              : 
     554              :   /* Compare the stores of the two enodes.  */
     555         1216 :   const region_model &new_model
     556         1216 :     = *new_entry_enode->get_state ().m_region_model;
     557         1216 :   const store &new_store = *new_model.get_store ();
     558              : 
     559         7658 :   for (auto kv : new_store)
     560              :     {
     561         3955 :       const region *base_reg = kv.first;
     562         3955 :       if (sufficiently_different_region_binding_p (new_entry_enode,
     563              :                                                    prev_entry_enode,
     564              :                                                    base_reg))
     565          734 :         return true;
     566              :     }
     567              : 
     568              :   /* No significant differences found.  */
     569          482 :   return false;
     570         1216 : }
     571              : 
     572              : /* Implementation of -Wanalyzer-infinite-recursion.
     573              : 
     574              :    Called when adding ENODE to the graph, after adding its first in-edge.
     575              : 
     576              :    For function entrypoints, see if recursion has occurred, and, if so,
     577              :    check if the state of memory changed between the recursion levels,
     578              :    which would suggest some kind of decreasing variant that leads to
     579              :    termination.
     580              : 
     581              :    For recursive calls where the state of memory is effectively unchanged
     582              :    between recursion levels, warn with -Wanalyzer-infinite-recursion.  */
     583              : 
     584              : void
     585       295926 : exploded_graph::detect_infinite_recursion (exploded_node *enode)
     586              : {
     587       295926 :   if (!is_entrypoint_p (enode))
     588       295444 :     return;
     589         6227 :   function *top_of_stack_fun = enode->get_function ();
     590         6227 :   gcc_assert (top_of_stack_fun);
     591              : 
     592              :   /* ....where a call to that function is already in the call string.  */
     593         6227 :   const call_string &call_string = enode->get_point ().get_call_string ();
     594              : 
     595         6227 :   if (call_string.count_occurrences_of_function (top_of_stack_fun) < 2)
     596              :     return;
     597              : 
     598         1216 :   tree fndecl = top_of_stack_fun->decl;
     599              : 
     600         1216 :   log_scope s (get_logger (),
     601              :                "checking for infinite recursion",
     602              :                "considering recursion at EN: %i entering %qE",
     603         1216 :                enode->m_index, fndecl);
     604              : 
     605              :   /* Find enode that's the entrypoint for the previous frame for fndecl
     606              :      in the recursion.  */
     607         1216 :   exploded_node *prev_entry_enode
     608         1216 :     = find_previous_entry_to (top_of_stack_fun, enode);
     609         1216 :   gcc_assert (prev_entry_enode);
     610         1216 :   if (get_logger ())
     611            0 :     get_logger ()->log ("previous entrypoint to %qE is EN: %i",
     612            0 :                         fndecl, prev_entry_enode->m_index);
     613              : 
     614              :   /* Look for changes to the state of memory between the recursion levels.  */
     615         1216 :   if (sufficiently_different_p (enode, prev_entry_enode, get_logger ()))
     616          734 :     return;
     617              : 
     618              :   /* Otherwise, the state of memory is effectively the same between the two
     619              :      recursion levels; warn.  */
     620          482 :   pending_location ploc (enode,
     621          482 :                          call_string.get_top_of_stack ().get_call_stmt ().location);
     622          482 :   get_diagnostic_manager ().add_diagnostic
     623          482 :     (std::move (ploc),
     624          482 :      std::make_unique<infinite_recursion_diagnostic> (prev_entry_enode,
     625              :                                                       enode,
     626              :                                                       fndecl));
     627         1216 : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.