LCOV - code coverage report
Current view: top level - gcc - resource.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 0.0 % 430 0
Test Date: 2026-02-28 14:20:25 Functions: 0.0 % 13 0
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Definitions for computing resource usage of specific insns.
       2              :    Copyright (C) 1999-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "target.h"
      25              : #include "rtl.h"
      26              : #include "df.h"
      27              : #include "memmodel.h"
      28              : #include "tm_p.h"
      29              : #include "regs.h"
      30              : #include "emit-rtl.h"
      31              : #include "resource.h"
      32              : #include "insn-attr.h"
      33              : #include "function-abi.h"
      34              : 
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;                      /* INSN_UID of target.  */
  struct target_info *next;     /* Next info for same hash bucket
                                   (buckets are singly linked chains).  */
  HARD_REG_SET live_regs;       /* Registers live at target.  */
  int block;                    /* Basic block number containing target.  */
  int bb_tick;                  /* Generation count of basic block info;
                                   NOTE(review): presumably compared against
                                   bb_ticks[block] to detect stale entries --
                                   confirm in mark_target_live_regs.  */
};
      48              : 
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  NOTE(review): presumably indexed by
   INSN_UID modulo TARGET_HASH_PRIME -- confirm in mark_target_live_regs.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Forward declarations for the static helpers defined below.  */
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
      80              : 
      81              : /* Utility function called from mark_target_live_regs via note_stores.
      82              :    It deadens any CLOBBERed registers and livens any SET registers.  */
      83              : 
      84              : static void
      85            0 : update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
      86              : {
      87            0 :   int first_regno, last_regno;
      88            0 :   int i;
      89              : 
      90            0 :   if (!REG_P (dest)
      91            0 :       && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
      92              :     return;
      93              : 
      94            0 :   if (GET_CODE (dest) == SUBREG)
      95              :     {
      96            0 :       first_regno = subreg_regno (dest);
      97            0 :       last_regno = first_regno + subreg_nregs (dest);
      98              : 
      99              :     }
     100              :   else
     101              :     {
     102            0 :       first_regno = REGNO (dest);
     103            0 :       last_regno = END_REGNO (dest);
     104              :     }
     105              : 
     106            0 :   if (GET_CODE (x) == CLOBBER)
     107            0 :     for (i = first_regno; i < last_regno; i++)
     108            0 :       CLEAR_HARD_REG_BIT (current_live_regs, i);
     109              :   else
     110            0 :     for (i = first_regno; i < last_regno; i++)
     111              :       {
     112            0 :         SET_HARD_REG_BIT (current_live_regs, i);
     113            0 :         CLEAR_HARD_REG_BIT (pending_dead_regs, i);
     114              :       }
     115              : }
     116              : 
     117              : /* Find the number of the basic block with correct live register
     118              :    information that starts closest to INSN.  Return -1 if we couldn't
     119              :    find such a basic block or the beginning is more than
     120              :    SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
     121              :    an unlimited search.
     122              : 
     123              :    The delay slot filling code destroys the control-flow graph so,
     124              :    instead of finding the basic block containing INSN, we search
     125              :    backwards toward a BARRIER where the live register information is
     126              :    correct.  */
     127              : 
     128              : static int
     129            0 : find_basic_block (rtx_insn *insn, int search_limit)
     130              : {
     131              :   /* Scan backwards to the previous BARRIER.  Then see if we can find a
     132              :      label that starts a basic block.  Return the basic block number.  */
     133            0 :   for (insn = prev_nonnote_insn (insn);
     134            0 :        insn && !BARRIER_P (insn) && search_limit != 0;
     135            0 :        insn = prev_nonnote_insn (insn), --search_limit)
     136              :     ;
     137              : 
     138              :   /* The closest BARRIER is too far away.  */
     139            0 :   if (search_limit == 0)
     140              :     return -1;
     141              : 
     142              :   /* The start of the function.  */
     143            0 :   else if (insn == 0)
     144            0 :     return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;
     145              : 
     146              :   /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     147              :      anything other than a CODE_LABEL or note, we can't find this code.  */
     148            0 :   for (insn = next_nonnote_insn (insn);
     149            0 :        insn && LABEL_P (insn);
     150            0 :        insn = next_nonnote_insn (insn))
     151            0 :     if (BLOCK_FOR_INSN (insn))
     152            0 :       return BLOCK_FOR_INSN (insn)->index;
     153              : 
     154              :   return -1;
     155              : }
     156              : 
     157              : /* Similar to next_insn, but ignores insns in the delay slots of
     158              :    an annulled branch.  */
     159              : 
     160              : static rtx_insn *
     161            0 : next_insn_no_annul (rtx_insn *insn)
     162              : {
     163            0 :   if (insn)
     164              :     {
     165              :       /* If INSN is an annulled branch, skip any insns from the target
     166              :          of the branch.  */
     167            0 :       if (JUMP_P (insn)
     168            0 :           && INSN_ANNULLED_BRANCH_P (insn)
     169            0 :           && NEXT_INSN (PREV_INSN (insn)) != insn)
     170              :         {
     171            0 :           rtx_insn *next = NEXT_INSN (insn);
     172              : 
     173            0 :           while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
     174            0 :                  && INSN_FROM_TARGET_P (next))
     175              :             {
     176            0 :               insn = next;
     177            0 :               next = NEXT_INSN (insn);
     178              :             }
     179              :         }
     180              : 
     181            0 :       insn = NEXT_INSN (insn);
     182            0 :       if (insn && NONJUMP_INSN_P (insn)
     183            0 :           && GET_CODE (PATTERN (insn)) == SEQUENCE)
     184            0 :         insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
     185              :     }
     186              : 
     187            0 :   return insn;
     188              : }
     189              : 
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  Recurses over X's sub-expressions; res->regs, res->memory
   and res->volatil are only ever accumulated (never cleared) here.  */

void
mark_referenced_resources (rtx x, struct resources *res,
                           bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      /* Constants, labels and debug insns reference no resources.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
        mark_referenced_resources (SUBREG_REG (x), res, false);
      else
        {
          /* A SUBREG of a hard register references exactly the hard regs
             it covers.  */
          unsigned int regno = subreg_regno (x);
          unsigned int last_regno = regno + subreg_nregs (x);

          gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
          for (r = regno; r < last_regno; r++)
            SET_HARD_REG_BIT (res->regs, r);
        }
      return;

    case REG:
      /* Only hard registers are expected at this point in compilation.  */
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
         memory.  */
      if (! MEM_READONLY_P (x))
        res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  Break (not return) so the
         operands are still traversed by the generic loop below.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
         We cannot just fall through here since then we would be confused
         by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
         traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
        mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
         memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
         registers used to access memory are referenced.  SET_DEST is
         also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
          || GET_CODE (x) == STRICT_LOW_PART)
        mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
        x = SUBREG_REG (x);
      if (MEM_P (x))
        mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      /* A CLOBBER sets, rather than references, its operand.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
        {
          /* A CALL references memory, the frame pointer if it exists, the
             stack pointer, any global registers and any registers given in
             USE insns immediately in front of the CALL.

             However, we may have moved some of the parameter loading insns
             into the delay slot of this CALL.  If so, the USE's for them
             don't count and should be skipped.  */
          rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
          rtx_sequence *sequence = 0;
          int seq_size = 0;
          int i;

          /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
          if (NEXT_INSN (insn) != x)
            {
              sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
              seq_size = sequence->len ();
              gcc_assert (GET_CODE (sequence) == SEQUENCE);
            }

          res->memory = 1;
          SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            {
              SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
              if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
                SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
            }

          /* Global registers are implicitly used by every call.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            if (global_regs[i])
              SET_HARD_REG_BIT (res->regs, i);

          /* Check for a REG_SETJMP.  If it exists, then we must
             assume that this call can need any register.

             This is done to be more conservative about how we handle setjmp.
             We assume that they both use and set all registers.  Using all
             registers ensures that a register will not be considered dead
             just because it crosses a setjmp call.  A register should be
             considered dead only if the setjmp call returns nonzero.  */
          if (find_reg_note (x, REG_SETJMP, NULL))
            SET_HARD_REG_SET (res->regs);

          {
            rtx link;

            /* Mark each USE'd operand, unless a delay-slot insn (elements
               1..seq_size-1 of the SEQUENCE) is the one that sets it, in
               which case the USE does not count.  */
            for (link = CALL_INSN_FUNCTION_USAGE (x);
                 link;
                 link = XEXP (link, 1))
              if (GET_CODE (XEXP (link, 0)) == USE)
                {
                  for (i = 1; i < seq_size; i++)
                    {
                      rtx slot_pat = PATTERN (sequence->element (i));
                      if (GET_CODE (slot_pat) == SET
                          && rtx_equal_p (SET_DEST (slot_pat),
                                          XEXP (XEXP (link, 0), 0)))
                        break;
                    }
                  if (i >= seq_size)
                    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
                                               res, false);
                }
          }
        }

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
      /* In addition to the usual references, also consider all outputs
         as referenced, to compensate for mark_set_resources treating
         them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
         handling, except that we got a partial incidence instead of a partial
         width.  */
      mark_set_resources (x, res, 0,
                          include_delayed_effects
                          ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
          && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
        return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
        break;

      case 'E':
        for (j = 0; j < XVECLEN (x, i); j++)
          mark_referenced_resources (XVECEXP (x, i, j), res,
                                     include_delayed_effects);
        break;
      }
}
     406              : 
     407              : /* Given X, a part of an insn, and a pointer to a `struct resource',
     408              :    RES, indicate which resources are modified by the insn. If
     409              :    MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
     410              :    set by the called routine.
     411              : 
     412              :    If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
     413              :    objects are being referenced instead of set.  */
     414              : 
     415              : void
     416            0 : mark_set_resources (rtx x, struct resources *res, int in_dest,
     417              :                     enum mark_resource_type mark_type)
     418              : {
     419            0 :   enum rtx_code code;
     420            0 :   int i, j;
     421            0 :   unsigned int r;
     422            0 :   const char *format_ptr;
     423              : 
     424            0 :  restart:
     425              : 
     426            0 :   code = GET_CODE (x);
     427              : 
     428            0 :   switch (code)
     429              :     {
     430              :     case NOTE:
     431              :     case BARRIER:
     432              :     case CODE_LABEL:
     433              :     case USE:
     434              :     CASE_CONST_ANY:
     435              :     case LABEL_REF:
     436              :     case SYMBOL_REF:
     437              :     case CONST:
     438              :     case PC:
     439              :     case DEBUG_INSN:
     440              :       /* These don't set any resources.  */
     441              :       return;
     442              : 
     443            0 :     case CALL_INSN:
     444              :       /* Called routine modifies the condition code, memory, any registers
     445              :          that aren't saved across calls, global registers and anything
     446              :          explicitly CLOBBERed immediately after the CALL_INSN.  */
     447              : 
     448            0 :       if (mark_type == MARK_SRC_DEST_CALL)
     449              :         {
     450            0 :           rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
     451            0 :           rtx link;
     452              : 
     453            0 :           res->cc = res->memory = 1;
     454              : 
     455            0 :           res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();
     456              : 
     457            0 :           for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
     458            0 :                link; link = XEXP (link, 1))
     459            0 :             if (GET_CODE (XEXP (link, 0)) == CLOBBER)
     460            0 :               mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
     461              :                                   MARK_SRC_DEST);
     462              : 
     463              :           /* Check for a REG_SETJMP.  If it exists, then we must
     464              :              assume that this call can clobber any register.  */
     465            0 :           if (find_reg_note (call_insn, REG_SETJMP, NULL))
     466            0 :             SET_HARD_REG_SET (res->regs);
     467              :         }
     468              : 
     469              :       /* ... and also what its RTL says it modifies, if anything.  */
     470            0 :       gcc_fallthrough ();
     471              : 
     472            0 :     case JUMP_INSN:
     473            0 :     case INSN:
     474              : 
     475              :         /* An insn consisting of just a CLOBBER (or USE) is just for flow
     476              :            and doesn't actually do anything, so we ignore it.  */
     477              : 
     478            0 :       if (mark_type != MARK_SRC_DEST_CALL
     479              :           && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
     480              :         return;
     481              : 
     482            0 :       x = PATTERN (x);
     483            0 :       if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
     484            0 :         goto restart;
     485              :       return;
     486              : 
     487            0 :     case SET:
     488              :       /* If the source of a SET is a CALL, this is actually done by
     489              :          the called routine.  So only include it if we are to include the
     490              :          effects of the calling routine.  */
     491              : 
     492            0 :       mark_set_resources (SET_DEST (x), res,
     493              :                           (mark_type == MARK_SRC_DEST_CALL
     494            0 :                            || GET_CODE (SET_SRC (x)) != CALL),
     495              :                           mark_type);
     496              : 
     497            0 :       mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
     498            0 :       return;
     499              : 
     500            0 :     case CLOBBER:
     501            0 :       mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
     502            0 :       return;
     503              : 
     504            0 :     case SEQUENCE:
     505            0 :       {
     506            0 :         rtx_sequence *seq = as_a <rtx_sequence *> (x);
     507            0 :         rtx control = seq->element (0);
     508            0 :         bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);
     509              : 
     510            0 :         mark_set_resources (control, res, 0, mark_type);
     511            0 :         for (i = seq->len () - 1; i >= 0; --i)
     512              :           {
     513            0 :             rtx elt = seq->element (i);
     514            0 :             if (!annul_p && INSN_FROM_TARGET_P (elt))
     515            0 :               mark_set_resources (elt, res, 0, mark_type);
     516              :           }
     517              :       }
     518              :       return;
     519              : 
     520            0 :     case POST_INC:
     521            0 :     case PRE_INC:
     522            0 :     case POST_DEC:
     523            0 :     case PRE_DEC:
     524            0 :       mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
     525            0 :       return;
     526              : 
     527            0 :     case PRE_MODIFY:
     528            0 :     case POST_MODIFY:
     529            0 :       mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
     530            0 :       mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
     531            0 :       mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
     532            0 :       return;
     533              : 
     534            0 :     case SIGN_EXTRACT:
     535            0 :     case ZERO_EXTRACT:
     536            0 :       mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
     537            0 :       mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
     538            0 :       mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
     539            0 :       return;
     540              : 
     541            0 :     case MEM:
     542            0 :       if (in_dest)
     543              :         {
     544            0 :           res->memory = 1;
     545            0 :           res->volatil |= MEM_VOLATILE_P (x);
     546              :         }
     547              : 
     548            0 :       mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
     549            0 :       return;
     550              : 
     551            0 :     case SUBREG:
     552            0 :       if (in_dest)
     553              :         {
     554            0 :           if (!REG_P (SUBREG_REG (x)))
     555              :             mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
     556              :           else
     557              :             {
     558            0 :               unsigned int regno = subreg_regno (x);
     559            0 :               unsigned int last_regno = regno + subreg_nregs (x);
     560              : 
     561            0 :               gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
     562            0 :               for (r = regno; r < last_regno; r++)
     563            0 :                 SET_HARD_REG_BIT (res->regs, r);
     564              :             }
     565              :         }
     566              :       return;
     567              : 
     568            0 :     case REG:
     569            0 :       if (in_dest)
     570              :         {
     571            0 :           gcc_assert (HARD_REGISTER_P (x));
     572            0 :           add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
     573              :         }
     574              :       return;
     575              : 
     576            0 :     case UNSPEC_VOLATILE:
     577            0 :     case ASM_INPUT:
     578              :       /* Traditional asm's are always volatile.  */
     579            0 :       res->volatil = 1;
     580            0 :       return;
     581              : 
     582            0 :     case TRAP_IF:
     583            0 :       res->volatil = 1;
     584            0 :       break;
     585              : 
     586            0 :     case ASM_OPERANDS:
     587            0 :       res->volatil |= MEM_VOLATILE_P (x);
     588              : 
     589              :       /* For all ASM_OPERANDS, we must traverse the vector of input operands.
     590              :          We cannot just fall through here since then we would be confused
     591              :          by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
     592              :          traditional asms unlike their normal usage.  */
     593              : 
     594            0 :       for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
     595            0 :         mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
     596              :                             MARK_SRC_DEST);
     597              :       return;
     598              : 
     599              :     default:
     600              :       break;
     601              :     }
     602              : 
     603              :   /* Process each sub-expression and flag what it needs.  */
     604            0 :   format_ptr = GET_RTX_FORMAT (code);
     605            0 :   for (i = 0; i < GET_RTX_LENGTH (code); i++)
     606            0 :     switch (*format_ptr++)
     607              :       {
     608            0 :       case 'e':
     609            0 :         mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
     610            0 :         break;
     611              : 
     612              :       case 'E':
     613            0 :         for (j = 0; j < XVECLEN (x, i); j++)
     614            0 :           mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
     615              :         break;
     616              :       }
     617              : }
     618              : 
     619              : /* Return TRUE if INSN is a return, possibly with a filled delay slot.  */
     620              : 
     621              : static bool
     622            0 : return_insn_p (const_rtx insn)
     623              : {
     624            0 :   if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
     625              :     return true;
     626              : 
     627            0 :   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
     628            0 :     return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
     629              : 
     630              :   return false;
     631              : }
     632              : 
     633              : /* Set the resources that are live at TARGET.
     634              : 
     635              :    If TARGET is zero, we refer to the end of the current function and can
     636              :    return our precomputed value.
     637              : 
     638              :    Otherwise, we try to find out what is live by consulting the basic block
     639              :    information.  This is tricky, because we must consider the actions of
     640              :    reload and jump optimization, which occur after the basic block information
     641              :    has been computed.
     642              : 
   Accordingly, we proceed as follows:
     644              : 
     645              :    We find the previous BARRIER and look at all immediately following labels
     646              :    (with no intervening active insns) to see if any of them start a basic
     647              :    block.  If we hit the start of the function first, we use block 0.
     648              : 
     649              :    Once we have found a basic block and a corresponding first insn, we can
     650              :    accurately compute the live status (by starting at a label following a
     651              :    BARRIER, we are immune to actions taken by reload and jump.)  Then we
     652              :    scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.
     655              : 
     656              :    We have to be careful when using REG_DEAD notes because they are not
     657              :    updated by such things as find_equiv_reg.  So keep track of registers
     658              :    marked as dead that haven't been assigned to, and mark them dead at the
     659              :    next CODE_LABEL since reload and jump won't propagate values across labels.
     660              : 
     661              :    If we cannot find the start of a basic block (should be a very rare
     662              :    case, if it can happen at all), mark everything as potentially live.
     663              : 
     664              :    Because we can be called many times on the same target, save our results
     665              :    in a hash table indexed by INSN_UID.  This is only done if the function
     666              :    init_resource_info () was invoked before we are called.  */
     667              : 
void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  The cache is keyed by
     INSN_UID; a hit is only usable if its basic block info is current.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
           tinfo; tinfo = tinfo->next)
        if (tinfo->uid == INSN_UID (target))
          break;

      /* Start by getting the basic block number.  If we have saved
         information, we can get it from there unless the insn at the
         start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
          && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
        b = tinfo->block;
    }

  /* No usable cached block: search backward for the enclosing basic block,
     bounded by param_max_delay_slot_live_search.  */
  if (b == -1)
    b = find_basic_block (target, param_max_delay_slot_live_search);

  if (target_hash_table != NULL)
    {
      if (tinfo)
        {
          /* If the information is up-to-date, use it.  Otherwise, we will
             update it below.  */
          if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
            {
              res->regs = tinfo->live_regs;
              return;
            }
        }
      else
        {
          /* Allocate a place to put our results and chain it into the
             hash table.  */
          tinfo = XNEW (struct target_info);
          tinfo->uid = INSN_UID (target);
          tinfo->block = b;
          tinfo->next
            = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
        }
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;
      df_ref def;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
      FOR_EACH_ARTIFICIAL_DEF (def, b)
        if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
          SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));

      /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
                    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
        start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
        stop_insn = next_insn (PREV_INSN (stop_insn));

      /* Walk from the block start up to (but not including) TARGET,
         tracking register births and deaths as we go.  */
      for (insn = start_insn; insn != stop_insn;
           insn = next_insn_no_annul (insn))
        {
          rtx link;
          rtx_insn *real_insn = insn;
          enum rtx_code code = GET_CODE (insn);

          /* Debug insns never affect liveness.  */
          if (DEBUG_INSN_P (insn))
            continue;

          /* If this insn is from the target of a branch, it isn't going to
             be used in the sequel.  If it is used in both cases, this
             test will not be true.  */
          if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
              && INSN_FROM_TARGET_P (insn))
            continue;

          /* If this insn is a USE made by update_block, we care about the
             underlying insn.  */
          if (code == INSN
              && GET_CODE (PATTERN (insn)) == USE
              && INSN_P (XEXP (PATTERN (insn), 0)))
            real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

          if (CALL_P (real_insn))
            {
              /* Values in call-clobbered registers survive a COND_EXEC CALL
                 if that is not executed; this matters for resource use because
                 they may be used by a complementarily (or more strictly)
                 predicated instruction, or if the CALL is NORETURN.  */
              if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
                {
                  HARD_REG_SET regs_invalidated_by_this_call
                    = insn_callee_abi (real_insn).full_reg_clobbers ();
                  /* CALL clobbers all call-used regs that aren't fixed except
                     sp, ap, and fp.  Do this before setting the result of the
                     call live.  */
                  current_live_regs &= ~regs_invalidated_by_this_call;
                }

              /* A CALL_INSN sets any global register live, since it may
                 have been modified by the call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  SET_HARD_REG_BIT (current_live_regs, i);
            }

          /* Mark anything killed in an insn to be deadened at the next
             label.  Ignore USE insns; the only REG_DEAD notes will be for
             parameters.  But they might be early.  A CALL_INSN will usually
             clobber registers used for parameters.  It isn't worth bothering
             with the unlikely case when it won't.  */
          if ((NONJUMP_INSN_P (real_insn)
               && GET_CODE (PATTERN (real_insn)) != USE
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
              || JUMP_P (real_insn)
              || CALL_P (real_insn))
            {
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  add_to_hard_reg_set (&pending_dead_regs,
                                      GET_MODE (XEXP (link, 0)),
                                      REGNO (XEXP (link, 0)));

              note_stores (real_insn, update_live_status, NULL);

              /* If any registers were unused after this insn, kill them.
                 These notes will always be accurate.  */
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && REG_P (XEXP (link, 0))
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  remove_from_hard_reg_set (&current_live_regs,
                                           GET_MODE (XEXP (link, 0)),
                                           REGNO (XEXP (link, 0)));
            }

          else if (LABEL_P (real_insn))
            {
              basic_block bb;

              /* A label clobbers the pending dead registers since neither
                 reload nor jump will propagate a value across a label.  */
              current_live_regs &= ~pending_dead_regs;
              CLEAR_HARD_REG_SET (pending_dead_regs);

              /* We must conservatively assume that all registers that used
                 to be live here still are.  The fallthrough edge may have
                 left a live register uninitialized.  */
              bb = BLOCK_FOR_INSN (real_insn);
              if (bb)
                {
                  HARD_REG_SET extra_live;

                  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
                  current_live_regs |= extra_live;
                }
            }

          /* The beginning of the epilogue corresponds to the end of the
             RTL chain when there are no epilogue insns.  Certain resources
             are implicitly required at that point.  */
          else if (NOTE_P (real_insn)
                   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
            current_live_regs |= start_of_epilogue_needs.regs;
        }

      /* Publish the result and refresh the cache-consistency stamp.  */
      res->regs = current_live_regs;
      if (tinfo != NULL)
        {
          tinfo->block = b;
          tinfo->bb_tick = bb_ticks[b];
        }
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  /* Remember the computed live set for subsequent queries on TARGET.  */
  if (tinfo != NULL)
    tinfo->live_regs = res->regs;
}
     900              : 
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.
   EPILOGUE_INSN, when non-null, is the insn at which the epilogue begins;
   the insns following it are scanned to collect epilogue resource uses.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      /* On targets where the hard frame pointer is a distinct register,
         it is needed as well.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
        SET_HARD_REG_BIT (end_of_function_needs.regs,
                          HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
        && EXIT_IGNORE_STACK
        && epilogue_insn
        && !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* Registers that carry the return value must stay live to the end.  */
  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
                               &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || df_epilogue_uses_p (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  /* Accumulate everything the epilogue itself sets, up to and including
     the return insn.  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
                          MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
        break;
    }

  /* Filter out the flags register from those additionally required
     registers.  */
  if (targetm.flags_regnum != INVALID_REGNUM)
    CLEAR_HARD_REG_BIT (end_of_function_needs.regs, targetm.flags_regnum);

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
     983              : 
     984              : /* Free up the resources allocated to mark_target_live_regs ().  This
     985              :    should be invoked after the last call to mark_target_live_regs ().  */
     986              : 
     987              : void
     988            0 : free_resource_info (void)
     989              : {
     990            0 :   basic_block bb;
     991              : 
     992            0 :   if (target_hash_table != NULL)
     993              :     {
     994              :       int i;
     995              : 
     996            0 :       for (i = 0; i < TARGET_HASH_PRIME; ++i)
     997              :         {
     998            0 :           struct target_info *ti = target_hash_table[i];
     999              : 
    1000            0 :           while (ti)
    1001              :             {
    1002            0 :               struct target_info *next = ti->next;
    1003            0 :               free (ti);
    1004            0 :               ti = next;
    1005              :             }
    1006              :         }
    1007              : 
    1008            0 :       free (target_hash_table);
    1009            0 :       target_hash_table = NULL;
    1010              :     }
    1011              : 
    1012            0 :   if (bb_ticks != NULL)
    1013              :     {
    1014            0 :       free (bb_ticks);
    1015            0 :       bb_ticks = NULL;
    1016              :     }
    1017              : 
    1018            0 :   FOR_EACH_BB_FN (bb, cfun)
    1019            0 :     if (LABEL_P (BB_HEAD (bb)))
    1020            0 :       BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
    1021            0 : }
    1022              : 
    1023              : /* Clear any hashed information that we have stored for INSN.  */
    1024              : 
    1025              : void
    1026            0 : clear_hashed_info_for_insn (rtx_insn *insn)
    1027              : {
    1028            0 :   struct target_info *tinfo;
    1029              : 
    1030            0 :   if (target_hash_table != NULL)
    1031              :     {
    1032            0 :       for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
    1033            0 :            tinfo; tinfo = tinfo->next)
    1034            0 :         if (tinfo->uid == INSN_UID (insn))
    1035              :           break;
    1036              : 
    1037            0 :       if (tinfo)
    1038            0 :         tinfo->block = -1;
    1039              :     }
    1040            0 : }
    1041              : 
    1042              : /* Clear any hashed information that we have stored for instructions
    1043              :    between INSN and the next BARRIER that follow a JUMP or a LABEL.  */
    1044              : 
    1045              : void
    1046            0 : clear_hashed_info_until_next_barrier (rtx_insn *insn)
    1047              : {
    1048            0 :   while (insn && !BARRIER_P (insn))
    1049              :     {
    1050            0 :       if (JUMP_P (insn) || LABEL_P (insn))
    1051              :         {
    1052            0 :           rtx_insn *next = next_active_insn (insn);
    1053            0 :           if (next)
    1054            0 :             clear_hashed_info_for_insn (next);
    1055              :         }
    1056              : 
    1057            0 :       insn = next_nonnote_insn (insn);
    1058              :     }
    1059            0 : }
    1060              : 
    1061              : /* Increment the tick count for the basic block that contains INSN.  */
    1062              : 
    1063              : void
    1064            0 : incr_ticks_for_insn (rtx_insn *insn)
    1065              : {
    1066            0 :   int b = find_basic_block (insn, param_max_delay_slot_live_search);
    1067              : 
    1068            0 :   if (b != -1)
    1069            0 :     bb_ticks[b]++;
    1070            0 : }
    1071              : 
/* Add TRIAL to the set of resources used at the end of the current
   function.  INCLUDE_DELAYED_EFFECTS is forwarded unchanged to
   mark_referenced_resources.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
                             include_delayed_effects);
}
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.