LCOV - code coverage report
Current view: top level - gcc - rtlanal.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 85.9 % 2804 2410
Test Date: 2026-03-28 14:25:54 Functions: 88.4 % 172 152
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Analyze RTL for GNU compiler.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "target.h"
      26              : #include "rtl.h"
      27              : #include "rtlanal.h"
      28              : #include "tree.h"
      29              : #include "predict.h"
      30              : #include "df.h"
      31              : #include "memmodel.h"
      32              : #include "tm_p.h"
      33              : #include "insn-config.h"
      34              : #include "regs.h"
      35              : #include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
      36              : #include "recog.h"
      37              : #include "addresses.h"
      38              : #include "rtl-iter.h"
      39              : #include "hard-reg-set.h"
      40              : #include "function-abi.h"
      41              : 
      42              : /* Forward declarations */
      43              : static void set_of_1 (rtx, const_rtx, void *);
      44              : static bool covers_regno_p (const_rtx, unsigned int);
      45              : static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
      46              : static bool computed_jump_p_1 (const_rtx);
      47              : static void parms_set (rtx, const_rtx, void *);
      48              : 
      49              : static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode,
      50              :                                                    const_rtx, machine_mode,
      51              :                                                    unsigned HOST_WIDE_INT);
      52              : static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode,
      53              :                                              const_rtx, machine_mode,
      54              :                                              unsigned HOST_WIDE_INT);
      55              : static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode,
      56              :                                                 const_rtx, machine_mode,
      57              :                                                 unsigned int);
      58              : static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode,
      59              :                                           const_rtx, machine_mode,
      60              :                                           unsigned int);
      61              : 
      62              : rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
      63              : rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];
      64              : 
      65              : /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
      66              :    If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
      67              :    SIGN_EXTEND then while narrowing we also have to enforce the
      68              :    representation and sign-extend the value to mode DESTINATION_REP.
      69              : 
      70              :    If the value is already sign-extended to DESTINATION_REP mode we
      71              :    can just switch to DESTINATION mode on it.  For each pair of
      72              :    integral modes SOURCE and DESTINATION, when truncating from SOURCE
      73              :    to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
      74              :    contains the number of high-order bits in SOURCE that have to be
      75              :    copies of the sign-bit so that we can do this mode-switch to
      76              :    DESTINATION.  */
      77              : 
      78              : static unsigned int
      79              : num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
      80              : 
      81              : /* Store X into index I of ARRAY.  ARRAY is known to have at least I
      82              :    elements.  Return the new base of ARRAY.  */
      83              : 
      84              : template <typename T>
      85              : typename T::value_type *
      86      9178098 : generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
      87              :                                                   value_type *base,
      88              :                                                   size_t i, value_type x)
      89              : {
      90      9178098 :   if (base == array.stack)
      91              :     {
      92      4709767 :       if (i < LOCAL_ELEMS)
      93              :         {
      94      4406341 :           base[i] = x;
      95      4406341 :           return base;
      96              :         }
      97       303426 :       gcc_checking_assert (i == LOCAL_ELEMS);
      98              :       /* A previous iteration might also have moved from the stack to the
      99              :          heap, in which case the heap array will already be big enough.  */
     100       303426 :       if (vec_safe_length (array.heap) <= i)
     101       303426 :         vec_safe_grow (array.heap, i + 1, true);
     102       303426 :       base = array.heap->address ();
     103       303426 :       memcpy (base, array.stack, sizeof (array.stack));
     104       303426 :       base[LOCAL_ELEMS] = x;
     105       303426 :       return base;
     106              :     }
     107      4468331 :   unsigned int length = array.heap->length ();
     108      4468331 :   if (length > i)
     109              :     {
     110      1164437 :       gcc_checking_assert (base == array.heap->address ());
     111      1164437 :       base[i] = x;
     112      1164437 :       return base;
     113              :     }
     114              :   else
     115              :     {
     116      3303894 :       gcc_checking_assert (i == length);
     117      3303894 :       vec_safe_push (array.heap, x);
     118      3303894 :       return array.heap->address ();
     119              :     }
     120              : }
     121              : 
     122              : /* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
     123              :    number of elements added to the worklist.  */
     124              : 
     125              : template <typename T>
     126              : size_t
     127    327000418 : generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
     128              :                                                     value_type *base,
     129              :                                                     size_t end, rtx_type x)
     130              : {
     131    327000418 :   enum rtx_code code = GET_CODE (x);
     132    327000418 :   const char *format = GET_RTX_FORMAT (code);
     133    327000418 :   size_t orig_end = end;
     134    327000418 :   if (UNLIKELY (INSN_P (x)))
     135              :     {
     136              :       /* Put the pattern at the top of the queue, since that's what
     137              :          we're likely to want most.  It also allows for the SEQUENCE
     138              :          code below.  */
     139       137241 :       for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
     140       121226 :         if (format[i] == 'e')
     141              :           {
     142        32746 :             value_type subx = T::get_value (x->u.fld[i].rt_rtx);
     143        32746 :             if (LIKELY (end < LOCAL_ELEMS))
     144        32746 :               base[end++] = subx;
     145              :             else
     146            0 :               base = add_single_to_queue (array, base, end++, subx);
     147              :           }
     148              :     }
     149              :   else
     150    713895310 :     for (int i = 0; format[i]; ++i)
     151    386910907 :       if (format[i] == 'e')
     152              :         {
     153       518457 :           value_type subx = T::get_value (x->u.fld[i].rt_rtx);
     154       518457 :           if (LIKELY (end < LOCAL_ELEMS))
     155           16 :             base[end++] = subx;
     156              :           else
     157       518441 :             base = add_single_to_queue (array, base, end++, subx);
     158              :         }
     159    386392450 :       else if (format[i] == 'E')
     160              :         {
     161    333341205 :           unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
     162    333341205 :           rtx *vec = x->u.fld[i].rt_rtvec->elem;
     163    333341205 :           if (LIKELY (end + length <= LOCAL_ELEMS))
     164   1019679986 :             for (unsigned int j = 0; j < length; j++)
     165    686724170 :               base[end++] = T::get_value (vec[j]);
     166              :           else
     167      9045046 :             for (unsigned int j = 0; j < length; j++)
     168      8659657 :               base = add_single_to_queue (array, base, end++,
     169      8659657 :                                           T::get_value (vec[j]));
     170    333341205 :           if (code == SEQUENCE && end == length)
     171              :             /* If the subrtxes of the sequence fill the entire array then
     172              :                we know that no other parts of a containing insn are queued.
     173              :                The caller is therefore iterating over the sequence as a
     174              :                PATTERN (...), so we also want the patterns of the
     175              :                subinstructions.  */
     176            0 :             for (unsigned int j = 0; j < length; j++)
     177              :               {
     178            0 :                 typename T::rtx_type x = T::get_rtx (base[j]);
     179            0 :                 if (INSN_P (x))
     180            0 :                   base[j] = T::get_value (PATTERN (x));
     181              :               }
     182              :         }
     183    327000418 :   return end - orig_end;
     184              : }
     185              : 
     186              : template <typename T>
     187              : void
     188       303426 : generic_subrtx_iterator <T>::free_array (array_type &array)
     189              : {
     190       303426 :   vec_free (array.heap);
     191       303426 : }
     192              : 
     193              : template <typename T>
     194              : const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;
     195              : 
     196              : template class generic_subrtx_iterator <const_rtx_accessor>;
     197              : template class generic_subrtx_iterator <rtx_var_accessor>;
     198              : template class generic_subrtx_iterator <rtx_ptr_accessor>;
     199              : 
     200              : /* Return true if the value of X is unstable
     201              :    (would be different at a different point in the program).
     202              :    The frame pointer, arg pointer, etc. are considered stable
     203              :    (within one function) and so is anything marked `unchanging'.  */
     204              : 
     205              : bool
     206            0 : rtx_unstable_p (const_rtx x)
     207              : {
     208            0 :   const RTX_CODE code = GET_CODE (x);
     209            0 :   int i;
     210            0 :   const char *fmt;
     211              : 
     212            0 :   switch (code)
     213              :     {
     214            0 :     case MEM:
     215            0 :       return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
     216              : 
     217              :     case CONST:
     218              :     CASE_CONST_ANY:
     219              :     case SYMBOL_REF:
     220              :     case LABEL_REF:
     221              :       return false;
     222              : 
     223            0 :     case REG:
     224              :       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
     225            0 :       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
     226              :           /* The arg pointer varies if it is not a fixed register.  */
     227            0 :           || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
     228              :         return false;
     229              :       /* ??? When call-clobbered, the value is stable modulo the restore
     230              :          that must happen after a call.  This currently screws up local-alloc
     231              :          into believing that the restore is not needed.  */
     232            0 :       if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
     233              :         return false;
     234              :       return true;
     235              : 
     236            0 :     case ASM_OPERANDS:
     237            0 :       if (MEM_VOLATILE_P (x))
     238              :         return true;
     239              : 
     240              :       /* Fall through.  */
     241              : 
     242            0 :     default:
     243            0 :       break;
     244              :     }
     245              : 
     246            0 :   fmt = GET_RTX_FORMAT (code);
     247            0 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
     248            0 :     if (fmt[i] == 'e')
     249              :       {
     250            0 :         if (rtx_unstable_p (XEXP (x, i)))
     251              :           return true;
     252              :       }
     253            0 :     else if (fmt[i] == 'E')
     254              :       {
     255              :         int j;
     256            0 :         for (j = 0; j < XVECLEN (x, i); j++)
     257            0 :           if (rtx_unstable_p (XVECEXP (x, i, j)))
     258              :             return true;
     259              :       }
     260              : 
     261              :   return false;
     262              : }
     263              : 
     264              : /* Return true if X has a value that can vary even between two
     265              :    executions of the program.  false means X can be compared reliably
     266              :    against certain constants or near-constants.
     267              :    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
     268              :    zero, we are slightly more conservative.
     269              :    The frame pointer and the arg pointer are considered constant.  */
     270              : 
     271              : bool
     272    497603431 : rtx_varies_p (const_rtx x, bool for_alias)
     273              : {
     274    497603431 :   RTX_CODE code;
     275    497603431 :   int i;
     276    497603431 :   const char *fmt;
     277              : 
     278    497603431 :   if (!x)
     279              :     return false;
     280              : 
     281    497603431 :   code = GET_CODE (x);
     282    497603431 :   switch (code)
     283              :     {
     284     92188072 :     case MEM:
     285     92188072 :       return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
     286              : 
     287              :     case CONST:
     288              :     CASE_CONST_ANY:
     289              :     case SYMBOL_REF:
     290              :     case LABEL_REF:
     291              :       return false;
     292              : 
     293    160359337 :     case REG:
     294              :       /* Note that we have to test for the actual rtx used for the frame
     295              :          and arg pointers and not just the register number in case we have
     296              :          eliminated the frame and/or arg pointer and are using it
     297              :          for pseudos.  */
     298    160359337 :       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
     299              :           /* The arg pointer varies if it is not a fixed register.  */
     300    142286754 :           || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
     301              :         return false;
     302    142055731 :       if (x == pic_offset_table_rtx
     303              :           /* ??? When call-clobbered, the value is stable modulo the restore
     304              :              that must happen after a call.  This currently screws up
     305              :              local-alloc into believing that the restore is not needed, so we
     306              :              must return 0 only if we are called from alias analysis.  */
     307              :           && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
     308              :         return false;
     309              :       return true;
     310              : 
     311            0 :     case LO_SUM:
     312              :       /* The operand 0 of a LO_SUM is considered constant
     313              :          (in fact it is related specifically to operand 1)
     314              :          during alias analysis.  */
     315            0 :       return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
     316            0 :              || rtx_varies_p (XEXP (x, 1), for_alias);
     317              : 
     318        91949 :     case ASM_OPERANDS:
     319        91949 :       if (MEM_VOLATILE_P (x))
     320              :         return true;
     321              : 
     322              :       /* Fall through.  */
     323              : 
     324    138365469 :     default:
     325    138365469 :       break;
     326              :     }
     327              : 
     328    138365469 :   fmt = GET_RTX_FORMAT (code);
     329    242269208 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
     330    219576624 :     if (fmt[i] == 'e')
     331              :       {
     332    201024737 :         if (rtx_varies_p (XEXP (x, i), for_alias))
     333              :           return true;
     334              :       }
     335     18551887 :     else if (fmt[i] == 'E')
     336              :       {
     337              :         int j;
     338     19902536 :         for (j = 0; j < XVECLEN (x, i); j++)
     339     15686668 :           if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
     340              :             return true;
     341              :       }
     342              : 
     343              :   return false;
     344              : }
     345              : 
     346              : /* Compute an approximation for the offset between the register
     347              :    FROM and TO for the current function, as it was at the start
     348              :    of the routine.  */
     349              : 
     350              : static poly_int64
     351    239013060 : get_initial_register_offset (int from, int to)
     352              : {
     353    239013060 :   static const struct elim_table_t
     354              :   {
     355              :     const int from;
     356              :     const int to;
     357              :   } table[] = ELIMINABLE_REGS;
     358    239013060 :   poly_int64 offset1, offset2;
     359    239013060 :   unsigned int i, j;
     360              : 
     361    239013060 :   if (to == from)
     362            0 :     return 0;
     363              : 
     364              :   /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue
     365              :      is completed, but we need to give at least an estimate for the stack
     366              :      pointer based on the frame size.  */
     367    239013060 :   if (!epilogue_completed)
     368              :     {
     369    131411239 :       offset1 = crtl->outgoing_args_size + get_frame_size ();
     370              : #if !STACK_GROWS_DOWNWARD
     371              :       offset1 = - offset1;
     372              : #endif
     373    131411239 :       if (to == STACK_POINTER_REGNUM)
     374    130873963 :         return offset1;
     375       537276 :       else if (from == STACK_POINTER_REGNUM)
     376       268638 :         return - offset1;
     377              :       else
     378       268638 :         return 0;
     379              :      }
     380              : 
     381    108745663 :   for (i = 0; i < ARRAY_SIZE (table); i++)
     382    108745663 :       if (table[i].from == from)
     383              :         {
     384    107601821 :           if (table[i].to == to)
     385              :             {
     386    106457979 :               INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
     387              :                                           offset1);
     388    106457979 :               return offset1;
     389              :             }
     390      5719210 :           for (j = 0; j < ARRAY_SIZE (table); j++)
     391              :             {
     392      4575368 :               if (table[j].to == to
     393      2287684 :                   && table[j].from == table[i].to)
     394              :                 {
     395            0 :                   INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
     396              :                                               offset1);
     397            0 :                   INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
     398              :                                               offset2);
     399            0 :                   return offset1 + offset2;
     400              :                 }
     401      4575368 :               if (table[j].from == to
     402            0 :                   && table[j].to == table[i].to)
     403              :                 {
     404            0 :                   INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
     405              :                                               offset1);
     406            0 :                   INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
     407              :                                               offset2);
     408            0 :                   return offset1 - offset2;
     409              :                 }
     410              :             }
     411              :         }
     412      1143842 :       else if (table[i].to == from)
     413              :         {
     414      1143842 :           if (table[i].from == to)
     415              :             {
     416            0 :               INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
     417              :                                           offset1);
     418            0 :               return - offset1;
     419              :             }
     420      2287684 :           for (j = 0; j < ARRAY_SIZE (table); j++)
     421              :             {
     422      2287684 :               if (table[j].to == to
     423      1143842 :                   && table[j].from == table[i].from)
     424              :                 {
     425      1143842 :                   INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
     426              :                                               offset1);
     427      1143842 :                   INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
     428              :                                               offset2);
     429      1143842 :                   return - offset1 + offset2;
     430              :                 }
     431      1143842 :               if (table[j].from == to
     432            0 :                   && table[j].to == table[i].from)
     433              :                 {
     434            0 :                   INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to,
     435              :                                               offset1);
     436            0 :                   INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to,
     437              :                                               offset2);
     438            0 :                   return - offset1 - offset2;
     439              :                 }
     440              :             }
     441              :         }
     442              : 
     443              :   /* If the requested register combination was not found,
     444              :      try a different more simple combination.  */
     445            0 :   if (from == ARG_POINTER_REGNUM)
     446              :     return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to);
     447            0 :   else if (to == ARG_POINTER_REGNUM)
     448              :     return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM);
     449            0 :   else if (from == HARD_FRAME_POINTER_REGNUM)
     450              :     return get_initial_register_offset (FRAME_POINTER_REGNUM, to);
     451            0 :   else if (to == HARD_FRAME_POINTER_REGNUM)
     452              :     return get_initial_register_offset (from, FRAME_POINTER_REGNUM);
     453              :   else
     454            0 :     return 0;
     455              : }
     456              : 
     457              : /* Return true if the use of X+OFFSET as an address in a MEM with SIZE
     458              :    bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
     459              :    UNALIGNED_MEMS controls whether true is returned for unaligned memory
     460              :    references on strict alignment machines.  */
     461              : 
     462              : static bool
     463    810641874 : rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size,
     464              :                        machine_mode mode, bool unaligned_mems)
     465              : {
     466    810641874 :   enum rtx_code code = GET_CODE (x);
     467    810641874 :   gcc_checking_assert (mode == BLKmode
     468              :                        || mode == VOIDmode
     469              :                        || known_size_p (size));
     470    810641874 :   poly_int64 const_x1;
     471              : 
     472              :   /* The offset must be a multiple of the mode size if we are considering
     473              :      unaligned memory references on strict alignment machines.  */
     474    810641874 :   if (STRICT_ALIGNMENT
     475              :       && unaligned_mems
     476              :       && mode != BLKmode
     477              :       && mode != VOIDmode)
     478              :     {
     479              :       poly_int64 actual_offset = offset;
     480              : 
     481              : #ifdef SPARC_STACK_BOUNDARY_HACK
     482              :       /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     483              :              the real alignment of %sp.  However, when it does this, the
     484              :              alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
     485              :       if (SPARC_STACK_BOUNDARY_HACK
     486              :           && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
     487              :         actual_offset -= STACK_POINTER_OFFSET;
     488              : #endif
     489              : 
     490              :       if (!multiple_p (actual_offset, GET_MODE_SIZE (mode)))
     491              :         return true;
     492              :     }
     493              : 
     494    810641874 :   switch (code)
     495              :     {
     496      6023564 :     case SYMBOL_REF:
     497      6023564 :       if (SYMBOL_REF_WEAK (x))
     498              :         return true;
     499      5693702 :       if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x))
     500              :         {
     501       699240 :           tree decl;
     502       699240 :           poly_int64 decl_size;
     503              : 
     504       699240 :           if (maybe_lt (offset, 0))
     505              :             return true;
     506       698260 :           if (!known_size_p (size))
     507          621 :             return maybe_ne (offset, 0);
     508              : 
     509              :           /* If the size of the access or of the symbol is unknown,
     510              :              assume the worst.  */
     511       697639 :           decl = SYMBOL_REF_DECL (x);
     512              : 
     513              :           /* Else check that the access is in bounds.  TODO: restructure
     514              :              expr_size/tree_expr_size/int_expr_size and just use the latter.  */
     515       697639 :           if (!decl)
     516       239259 :             decl_size = -1;
     517       458380 :           else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
     518              :             {
     519       450426 :               if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size))
     520            0 :                 decl_size = -1;
     521              :             }
     522         7954 :           else if (TREE_CODE (decl) == STRING_CST)
     523            0 :             decl_size = TREE_STRING_LENGTH (decl);
     524         7954 :           else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
     525            0 :             decl_size = int_size_in_bytes (TREE_TYPE (decl));
     526              :           else
     527         7954 :             decl_size = -1;
     528              : 
     529       697639 :           return (!known_size_p (decl_size) || known_eq (decl_size, 0)
     530       697639 :                   ? maybe_ne (offset, 0)
     531       697639 :                   : !known_subrange_p (offset, size, 0, decl_size));
     532              :         }
     533              : 
     534              :       return false;
     535              : 
     536              :     case LABEL_REF:
     537              :       return false;
     538              : 
     539    404934248 :     case REG:
     540              :       /* Stack references are assumed not to trap, but we need to deal with
     541              :          nonsensical offsets.  */
     542    404934248 :       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
     543    396974950 :          || x == stack_pointer_rtx
     544              :          /* The arg pointer varies if it is not a fixed register.  */
     545    160784349 :          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
     546              :         {
     547              : #ifdef RED_ZONE_SIZE
     548    244237028 :           poly_int64 red_zone_size = RED_ZONE_SIZE;
     549              : #else
     550              :           poly_int64 red_zone_size = 0;
     551              : #endif
     552    244237028 :           poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
     553    244237028 :           poly_int64 low_bound, high_bound;
     554              : 
     555    244237028 :           if (!known_size_p (size))
     556              :             return true;
     557              : 
     558    244233488 :           if (x == frame_pointer_rtx)
     559              :             {
     560      6545784 :               if (FRAME_GROWS_DOWNWARD)
     561              :                 {
     562      6545784 :                   high_bound = targetm.starting_frame_offset ();
     563      6545784 :                   low_bound  = high_bound - get_frame_size ();
     564              :                 }
     565              :               else
     566              :                 {
     567              :                   low_bound  = targetm.starting_frame_offset ();
     568              :                   high_bound = low_bound + get_frame_size ();
     569              :                 }
     570              :             }
     571    237687704 :           else if (x == hard_frame_pointer_rtx)
     572              :             {
     573      1412480 :               poly_int64 sp_offset
     574      1412480 :                 = get_initial_register_offset (STACK_POINTER_REGNUM,
     575              :                                                HARD_FRAME_POINTER_REGNUM);
     576      1412480 :               poly_int64 ap_offset
     577      1412480 :                 = get_initial_register_offset (ARG_POINTER_REGNUM,
     578              :                                                HARD_FRAME_POINTER_REGNUM);
     579              : 
     580              : #if STACK_GROWS_DOWNWARD
     581      1412480 :               low_bound  = sp_offset - red_zone_size - stack_boundary;
     582      1412480 :               high_bound = ap_offset
     583      1412480 :                            + FIRST_PARM_OFFSET (current_function_decl)
     584              : #if !ARGS_GROW_DOWNWARD
     585      1412480 :                            + crtl->args.size
     586              : #endif
     587      1412480 :                            + stack_boundary;
     588              : #else
     589              :               high_bound = sp_offset + red_zone_size + stack_boundary;
     590              :               low_bound  = ap_offset
     591              :                            + FIRST_PARM_OFFSET (current_function_decl)
     592              : #if ARGS_GROW_DOWNWARD
     593              :                            - crtl->args.size
     594              : #endif
     595              :                            - stack_boundary;
     596              : #endif
     597              :             }
     598    236275224 :           else if (x == stack_pointer_rtx)
     599              :             {
     600    236188100 :               poly_int64 ap_offset
     601    236188100 :                 = get_initial_register_offset (ARG_POINTER_REGNUM,
     602              :                                                STACK_POINTER_REGNUM);
     603              : 
     604              : #if STACK_GROWS_DOWNWARD
     605    236188100 :               low_bound  = - red_zone_size - stack_boundary;
     606    236188100 :               high_bound = ap_offset
     607    236188100 :                            + FIRST_PARM_OFFSET (current_function_decl)
     608              : #if !ARGS_GROW_DOWNWARD
     609    236188100 :                            + crtl->args.size
     610              : #endif
     611    236188100 :                            + stack_boundary;
     612              : #else
     613              :               high_bound = red_zone_size + stack_boundary;
     614              :               low_bound  = ap_offset
     615              :                            + FIRST_PARM_OFFSET (current_function_decl)
     616              : #if ARGS_GROW_DOWNWARD
     617              :                            - crtl->args.size
     618              : #endif
     619              :                            - stack_boundary;
     620              : #endif
     621              :             }
     622              :           else
     623              :             {
     624              :               /* We assume that accesses are safe to at least the
     625              :                  next stack boundary.
     626              :                  Examples are varargs and __builtin_return_address.  */
     627              : #if ARGS_GROW_DOWNWARD
     628              :               high_bound = FIRST_PARM_OFFSET (current_function_decl)
     629              :                            + stack_boundary;
     630              :               low_bound  = FIRST_PARM_OFFSET (current_function_decl)
     631              :                            - crtl->args.size - stack_boundary;
     632              : #else
     633        87124 :               low_bound  = FIRST_PARM_OFFSET (current_function_decl)
     634        87124 :                            - stack_boundary;
     635        87124 :               high_bound = FIRST_PARM_OFFSET (current_function_decl)
     636        87124 :                            + crtl->args.size + stack_boundary;
     637              : #endif
     638              :             }
     639              : 
     640    244233488 :           if (known_ge (offset, low_bound)
     641    244233488 :               && known_le (offset, high_bound - size))
     642              :             return false;
     643              :           return true;
     644              :         }
     645              :       /* All of the virtual frame registers are stack references.  */
     646    160697220 :       if (VIRTUAL_REGISTER_P (x))
     647              :         return false;
     648              :       return true;
     649              : 
     650       294739 :     case CONST:
     651       294739 :       return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
     652       294739 :                                     mode, unaligned_mems);
     653              : 
     654    176314853 :     case PLUS:
     655              :       /* An address is assumed not to trap if:
     656              :          - it is the pic register plus a const unspec without offset.  */
     657    176314853 :       if (XEXP (x, 0) == pic_offset_table_rtx
     658        38935 :           && GET_CODE (XEXP (x, 1)) == CONST
     659        38901 :           && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
     660    176350461 :           && known_eq (offset, 0))
     661              :         return false;
     662              : 
     663              :       /* - or it is an address that can't trap plus a constant integer.  */
     664    176279245 :       if (poly_int_rtx_p (XEXP (x, 1), &const_x1)
     665    147994738 :           && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + const_x1,
     666              :                                      size, mode, unaligned_mems))
     667              :         return false;
     668              : 
     669              :       return true;
     670              : 
     671       413360 :     case LO_SUM:
     672       413360 :     case PRE_MODIFY:
     673       413360 :       return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
     674       413360 :                                     mode, unaligned_mems);
     675              : 
     676    196383486 :     case PRE_DEC:
     677    196383486 :     case PRE_INC:
     678    196383486 :     case POST_DEC:
     679    196383486 :     case POST_INC:
     680    196383486 :     case POST_MODIFY:
     681    196383486 :       return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
     682    196383486 :                                     mode, unaligned_mems);
     683              : 
     684              :     default:
     685              :       break;
     686              :     }
     687              : 
     688              :   /* If it isn't one of the case above, it can cause a trap.  */
     689              :   return true;
     690              : }
     691              : 
     692              : /* Return true if the use of X as an address in a MEM can cause a trap.  */
     693              : 
     694              : bool
     695     13929824 : rtx_addr_can_trap_p (const_rtx x)
     696              : {
     697     13929824 :   return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false);
     698              : }
     699              : 
     700              : /* Return true if X contains a MEM subrtx.  */
     701              : 
     702              : bool
     703     22127321 : contains_mem_rtx_p (rtx x)
     704              : {
     705     22127321 :   subrtx_iterator::array_type array;
     706     69121974 :   FOR_EACH_SUBRTX (iter, array, x, ALL)
     707     54168623 :     if (MEM_P (*iter))
     708      7173970 :       return true;
     709              : 
     710     14953351 :   return false;
     711     22127321 : }
     712              : 
     713              : /* Return true if X is an address that is known to not be zero.  */
     714              : 
     715              : bool
     716     56784183 : nonzero_address_p (const_rtx x)
     717              : {
     718     56786754 :   const enum rtx_code code = GET_CODE (x);
     719              : 
     720     56786754 :   switch (code)
     721              :     {
     722         3789 :     case SYMBOL_REF:
     723         3789 :       return flag_delete_null_pointer_checks && !SYMBOL_REF_WEAK (x);
     724              : 
     725              :     case LABEL_REF:
     726              :       return true;
     727              : 
     728     26524008 :     case REG:
     729              :       /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
     730     26524008 :       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
     731     26523954 :           || x == stack_pointer_rtx
     732     26523954 :           || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
     733              :         return true;
     734              :       /* All of the virtual frame registers are stack references.  */
     735     26523954 :       if (VIRTUAL_REGISTER_P (x))
     736              :         return true;
     737              :       return false;
     738              : 
     739         2571 :     case CONST:
     740         2571 :       return nonzero_address_p (XEXP (x, 0));
     741              : 
     742     12602642 :     case PLUS:
     743              :       /* Handle PIC references.  */
     744     12602642 :       if (XEXP (x, 0) == pic_offset_table_rtx
     745            0 :                && CONSTANT_P (XEXP (x, 1)))
     746              :         return true;
     747              :       return false;
     748              : 
     749            0 :     case PRE_MODIFY:
     750              :       /* Similar to the above; allow positive offsets.  Further, since
     751              :          auto-inc is only allowed in memories, the register must be a
     752              :          pointer.  */
     753            0 :       if (CONST_INT_P (XEXP (x, 1))
     754            0 :           && INTVAL (XEXP (x, 1)) > 0)
     755              :         return true;
     756            0 :       return nonzero_address_p (XEXP (x, 0));
     757              : 
     758              :     case PRE_INC:
     759              :       /* Similarly.  Further, the offset is always positive.  */
     760              :       return true;
     761              : 
     762            0 :     case PRE_DEC:
     763            0 :     case POST_DEC:
     764            0 :     case POST_INC:
     765            0 :     case POST_MODIFY:
     766            0 :       return nonzero_address_p (XEXP (x, 0));
     767              : 
     768            0 :     case LO_SUM:
     769            0 :       return nonzero_address_p (XEXP (x, 1));
     770              : 
     771              :     default:
     772              :       break;
     773              :     }
     774              : 
     775              :   /* If it isn't one of the case above, might be zero.  */
     776              :   return false;
     777              : }
     778              : 
     779              : /* Return true if X refers to a memory location whose address
     780              :    cannot be compared reliably with constant addresses,
     781              :    or if X refers to a BLKmode memory object.
     782              :    FOR_ALIAS is nonzero if we are called from alias analysis; if it is
     783              :    zero, we are slightly more conservative.  */
     784              : 
     785              : bool
     786            0 : rtx_addr_varies_p (const_rtx x, bool for_alias)
     787              : {
     788            0 :   enum rtx_code code;
     789            0 :   int i;
     790            0 :   const char *fmt;
     791              : 
     792            0 :   if (x == 0)
     793              :     return false;
     794              : 
     795            0 :   code = GET_CODE (x);
     796            0 :   if (code == MEM)
     797            0 :     return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
     798              : 
     799            0 :   fmt = GET_RTX_FORMAT (code);
     800            0 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
     801            0 :     if (fmt[i] == 'e')
     802              :       {
     803            0 :         if (rtx_addr_varies_p (XEXP (x, i), for_alias))
     804              :           return true;
     805              :       }
     806            0 :     else if (fmt[i] == 'E')
     807              :       {
     808              :         int j;
     809            0 :         for (j = 0; j < XVECLEN (x, i); j++)
     810            0 :           if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
     811              :             return true;
     812              :       }
     813              :   return false;
     814              : }
     815              : 
     816              : /* Get the declaration of the function called by INSN.  */
     817              : 
     818              : tree
     819    307614177 : get_call_fndecl (const rtx_insn *insn)
     820              : {
     821    307614177 :   rtx note, datum;
     822              : 
     823    307614177 :   note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
     824    307614177 :   if (note == NULL_RTX)
     825              :     return NULL_TREE;
     826              : 
     827    304344377 :   datum = XEXP (note, 0);
     828    304344377 :   if (datum != NULL_RTX)
     829    293269108 :     return SYMBOL_REF_DECL (datum);
     830              : 
     831              :   return NULL_TREE;
     832              : }
     833              : 
     834              : /* Return the value of the integer term in X, if one is apparent;
     835              :    otherwise return 0.
     836              :    Only obvious integer terms are detected.
     837              :    This is used in cse.cc with the `related_value' field.  */
     838              : 
     839              : HOST_WIDE_INT
     840       461944 : get_integer_term (const_rtx x)
     841              : {
     842       461944 :   if (GET_CODE (x) == CONST)
     843       259467 :     x = XEXP (x, 0);
     844              : 
     845       461944 :   if (GET_CODE (x) == MINUS
     846            0 :       && CONST_INT_P (XEXP (x, 1)))
     847            0 :     return - INTVAL (XEXP (x, 1));
     848       461944 :   if (GET_CODE (x) == PLUS
     849       259467 :       && CONST_INT_P (XEXP (x, 1)))
     850       259467 :     return INTVAL (XEXP (x, 1));
     851              :   return 0;
     852              : }
     853              : 
     854              : /* If X is a constant, return the value sans apparent integer term;
     855              :    otherwise return 0.
     856              :    Only obvious integer terms are detected.  */
     857              : 
     858              : rtx
     859      1367452 : get_related_value (const_rtx x)
     860              : {
     861      1367452 :   if (GET_CODE (x) != CONST)
     862              :     return 0;
     863      1367452 :   x = XEXP (x, 0);
     864      1367452 :   if (GET_CODE (x) == PLUS
     865      1339999 :       && CONST_INT_P (XEXP (x, 1)))
     866      1339999 :     return XEXP (x, 0);
     867        27453 :   else if (GET_CODE (x) == MINUS
     868            0 :            && CONST_INT_P (XEXP (x, 1)))
     869            0 :     return XEXP (x, 0);
     870              :   return 0;
     871              : }
     872              : 
     873              : /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
     874              :    to somewhere in the same object or object_block as SYMBOL.  */
     875              : 
     876              : bool
     877            0 : offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
     878              : {
     879            0 :   tree decl;
     880              : 
     881            0 :   if (GET_CODE (symbol) != SYMBOL_REF)
     882              :     return false;
     883              : 
     884            0 :   if (offset == 0)
     885              :     return true;
     886              : 
     887            0 :   if (offset > 0)
     888              :     {
     889            0 :       if (CONSTANT_POOL_ADDRESS_P (symbol)
     890            0 :           && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
     891            0 :         return true;
     892              : 
     893            0 :       decl = SYMBOL_REF_DECL (symbol);
     894            0 :       if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
     895              :         return true;
     896              :     }
     897              : 
     898            0 :   if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
     899            0 :       && SYMBOL_REF_BLOCK (symbol)
     900            0 :       && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
     901            0 :       && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
     902            0 :           < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
     903              :     return true;
     904              : 
     905              :   return false;
     906              : }
     907              : 
     908              : /* Split X into a base and a constant offset, storing them in *BASE_OUT
     909              :    and *OFFSET_OUT respectively.  */
     910              : 
     911              : void
     912            0 : split_const (rtx x, rtx *base_out, rtx *offset_out)
     913              : {
     914            0 :   if (GET_CODE (x) == CONST)
     915              :     {
     916            0 :       x = XEXP (x, 0);
     917            0 :       if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
     918              :         {
     919            0 :           *base_out = XEXP (x, 0);
     920            0 :           *offset_out = XEXP (x, 1);
     921            0 :           return;
     922              :         }
     923              :     }
     924            0 :   *base_out = x;
     925            0 :   *offset_out = const0_rtx;
     926              : }
     927              : 
     928              : /* Express integer value X as some value Y plus a polynomial offset,
     929              :    where Y is either const0_rtx, X or something within X (as opposed
     930              :    to a new rtx).  Return the Y and store the offset in *OFFSET_OUT.  */
     931              : 
     932              : rtx
     933    414209798 : strip_offset (rtx x, poly_int64 *offset_out)
     934              : {
     935    414209798 :   rtx base = const0_rtx;
     936    414209798 :   rtx test = x;
     937    414209798 :   if (GET_CODE (test) == CONST)
     938      8894993 :     test = XEXP (test, 0);
     939    414209798 :   if (GET_CODE (test) == PLUS)
     940              :     {
     941    311929148 :       base = XEXP (test, 0);
     942    311929148 :       test = XEXP (test, 1);
     943              :     }
     944    414209798 :   if (poly_int_rtx_p (test, offset_out))
     945    291959626 :     return base;
     946    122250172 :   *offset_out = 0;
     947    122250172 :   return x;
     948              : }
     949              : 
     950              : /* Return the argument size in REG_ARGS_SIZE note X.  */
     951              : 
     952              : poly_int64
     953      5295275 : get_args_size (const_rtx x)
     954              : {
     955      5295275 :   gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE);
     956      5295275 :   return rtx_to_poly_int64 (XEXP (x, 0));
     957              : }
     958              : 
     959              : /* Return the number of places FIND appears within X.  If COUNT_DEST is
     960              :    zero, we do not count occurrences inside the destination of a SET.  */
     961              : 
     962              : int
     963      8741673 : count_occurrences (const_rtx x, const_rtx find, int count_dest)
     964              : {
     965      8741673 :   int i, j;
     966      8741673 :   enum rtx_code code;
     967      8741673 :   const char *format_ptr;
     968      8741673 :   int count;
     969              : 
     970      8741673 :   if (x == find)
     971              :     return 1;
     972              : 
     973      6367076 :   code = GET_CODE (x);
     974              : 
     975      6367076 :   switch (code)
     976              :     {
     977              :     case REG:
     978              :     CASE_CONST_ANY:
     979              :     case SYMBOL_REF:
     980              :     case CODE_LABEL:
     981              :     case PC:
     982              :       return 0;
     983              : 
     984            0 :     case EXPR_LIST:
     985            0 :       count = count_occurrences (XEXP (x, 0), find, count_dest);
     986            0 :       if (XEXP (x, 1))
     987            0 :         count += count_occurrences (XEXP (x, 1), find, count_dest);
     988              :       return count;
     989              : 
     990        73855 :     case MEM:
     991        73855 :       if (MEM_P (find) && rtx_equal_p (x, find))
     992              :         return 1;
     993              :       break;
     994              : 
     995            0 :     case SET:
     996            0 :       if (SET_DEST (x) == find && ! count_dest)
     997            0 :         return count_occurrences (SET_SRC (x), find, count_dest);
     998              :       break;
     999              : 
    1000              :     default:
    1001              :       break;
    1002              :     }
    1003              : 
    1004      3228157 :   format_ptr = GET_RTX_FORMAT (code);
    1005      3228157 :   count = 0;
    1006              : 
    1007      9556354 :   for (i = 0; i < GET_RTX_LENGTH (code); i++)
    1008              :     {
    1009      6328197 :       switch (*format_ptr++)
    1010              :         {
    1011      6167169 :         case 'e':
    1012      6167169 :           count += count_occurrences (XEXP (x, i), find, count_dest);
    1013      6167169 :           break;
    1014              : 
    1015              :         case 'E':
    1016       136808 :           for (j = 0; j < XVECLEN (x, i); j++)
    1017       113372 :             count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
    1018              :           break;
    1019              :         }
    1020              :     }
    1021              :   return count;
    1022              : }
    1023              : 
    1024              : 
    1025              : /* Return TRUE if OP is a register or subreg of a register that
    1026              :    holds an unsigned quantity.  Otherwise, return FALSE.  */
    1027              : 
    1028              : bool
    1029            0 : unsigned_reg_p (rtx op)
    1030              : {
    1031            0 :   if (REG_P (op)
    1032            0 :       && REG_EXPR (op)
    1033            0 :       && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    1034              :     return true;
    1035              : 
    1036            0 :   if (GET_CODE (op) == SUBREG
    1037            0 :       && SUBREG_PROMOTED_SIGN (op))
    1038            0 :     return true;
    1039              : 
    1040              :   return false;
    1041              : }
    1042              : 
    1043              : 
    1044              : /* Return true if register REG appears somewhere within IN.
    1045              :    Also works if REG is not a register; in this case it checks
    1046              :    for a subexpression of IN that is Lisp "equal" to REG.  */
    1047              : 
    1048              : bool
    1049    441785197 : reg_mentioned_p (const_rtx reg, const_rtx in)
    1050              : {
    1051    441785197 :   const char *fmt;
    1052    441785197 :   int i;
    1053    441785197 :   enum rtx_code code;
    1054              : 
    1055    441785197 :   if (in == 0)
    1056              :     return false;
    1057              : 
    1058    436580346 :   if (reg == in)
    1059              :     return true;
    1060              : 
    1061    424310054 :   if (GET_CODE (in) == LABEL_REF)
    1062      6307366 :     return reg == label_ref_label (in);
    1063              : 
    1064    418002688 :   code = GET_CODE (in);
    1065              : 
    1066    418002688 :   switch (code)
    1067              :     {
    1068              :       /* Compare registers by number.  */
    1069    154434737 :     case REG:
    1070    154434737 :       return REG_P (reg) && REGNO (in) == REGNO (reg);
    1071              : 
    1072              :       /* These codes have no constituent expressions
    1073              :          and are unique.  */
    1074              :     case SCRATCH:
    1075              :     case PC:
    1076              :       return false;
    1077              : 
    1078              :     CASE_CONST_ANY:
    1079              :       /* These are kept unique for a given value.  */
    1080              :       return false;
    1081              : 
    1082    158614825 :     default:
    1083    158614825 :       break;
    1084              :     }
    1085              : 
    1086    158614825 :   if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    1087              :     return true;
    1088              : 
    1089    158448812 :   fmt = GET_RTX_FORMAT (code);
    1090              : 
    1091    421938987 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    1092              :     {
    1093    302564230 :       if (fmt[i] == 'E')
    1094              :         {
    1095      3234209 :           int j;
    1096     11389176 :           for (j = XVECLEN (in, i) - 1; j >= 0; j--)
    1097      8385372 :             if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
    1098              :               return true;
    1099              :         }
    1100    299330021 :       else if (fmt[i] == 'e'
    1101    299330021 :                && reg_mentioned_p (reg, XEXP (in, i)))
    1102              :         return true;
    1103              :     }
    1104              :   return false;
    1105              : }
    1106              : 
    1107              : /* Return true if in between BEG and END, exclusive of BEG and END, there is
    1108              :    no CODE_LABEL insn.  */
    1109              : 
    1110              : bool
    1111            0 : no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
    1112              : {
    1113            0 :   rtx_insn *p;
    1114            0 :   if (beg == end)
    1115              :     return false;
    1116            0 :   for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    1117            0 :     if (LABEL_P (p))
    1118              :       return false;
    1119              :   return true;
    1120              : }
    1121              : 
    1122              : /* Return true if register REG is used in an insn between
    1123              :    FROM_INSN and TO_INSN (exclusive of those two).  */
    1124              : 
    1125              : bool
    1126     23997880 : reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
    1127              :                     const rtx_insn *to_insn)
    1128              : {
    1129     23997880 :   rtx_insn *insn;
    1130              : 
    1131     23997880 :   if (from_insn == to_insn)
    1132              :     return false;
    1133              : 
    1134    173246368 :   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    1135    125642688 :     if (NONDEBUG_INSN_P (insn)
    1136    125642688 :         && (reg_overlap_mentioned_p (reg, PATTERN (insn))
    1137     69599349 :            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
    1138       392080 :       return true;
    1139              :   return false;
    1140              : }
    1141              : 
    1142              : /* Return true if the old value of X, a register, is referenced in BODY.  If X
    1143              :    is entirely replaced by a new value and the only use is as a SET_DEST,
    1144              :    we do not consider it a reference.  */
    1145              : 
    1146              : bool
    1147    131790708 : reg_referenced_p (const_rtx x, const_rtx body)
    1148              : {
    1149    131790708 :   int i;
    1150              : 
    1151    131790708 :   switch (GET_CODE (body))
    1152              :     {
    1153     97895101 :     case SET:
    1154     97895101 :       if (reg_overlap_mentioned_p (x, SET_SRC (body)))
    1155              :         return true;
    1156              : 
    1157              :       /* If the destination is anything other than PC, a REG or a SUBREG
    1158              :          of a REG that occupies all of the REG, the insn references X if
    1159              :          it is mentioned in the destination.  */
    1160     62444380 :       if (GET_CODE (SET_DEST (body)) != PC
    1161     62444380 :           && !REG_P (SET_DEST (body))
    1162      2762401 :           && ! (GET_CODE (SET_DEST (body)) == SUBREG
    1163       516862 :                 && REG_P (SUBREG_REG (SET_DEST (body)))
    1164       516862 :                 && !read_modify_subreg_p (SET_DEST (body)))
    1165     64353256 :           && reg_overlap_mentioned_p (x, SET_DEST (body)))
    1166              :         return true;
    1167              :       return false;
    1168              : 
    1169         6957 :     case ASM_OPERANDS:
    1170        16214 :       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
    1171        14869 :         if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
    1172              :           return true;
    1173              :       return false;
    1174              : 
    1175       399405 :     case CALL:
    1176       399405 :     case USE:
    1177       399405 :     case IF_THEN_ELSE:
    1178       399405 :       return reg_overlap_mentioned_p (x, body);
    1179              : 
    1180            0 :     case TRAP_IF:
    1181            0 :       return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
    1182              : 
    1183         2086 :     case PREFETCH:
    1184         2086 :       return reg_overlap_mentioned_p (x, XEXP (body, 0));
    1185              : 
    1186        28535 :     case UNSPEC:
    1187        28535 :     case UNSPEC_VOLATILE:
    1188        57044 :       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
    1189        31160 :         if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
    1190              :           return true;
    1191              :       return false;
    1192              : 
    1193     17161521 :     case PARALLEL:
    1194     49224911 :       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
    1195     35311459 :         if (reg_referenced_p (x, XVECEXP (body, 0, i)))
    1196              :           return true;
    1197              :       return false;
    1198              : 
    1199     16291122 :     case CLOBBER:
    1200     16291122 :       if (MEM_P (XEXP (body, 0)))
    1201        11909 :         if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
    1202              :           return true;
    1203              :       return false;
    1204              : 
    1205            0 :     case COND_EXEC:
    1206            0 :       if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
    1207              :         return true;
    1208            0 :       return reg_referenced_p (x, COND_EXEC_CODE (body));
    1209              : 
    1210              :     default:
    1211              :       return false;
    1212              :     }
    1213              : }
    1214              : 
    1215              : /* Return true if register REG is set or clobbered in an insn between
    1216              :    FROM_INSN and TO_INSN (exclusive of those two).  */
    1217              : 
    1218              : bool
    1219     65254362 : reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
    1220              :                    const rtx_insn *to_insn)
    1221              : {
    1222     65254362 :   const rtx_insn *insn;
    1223              : 
    1224     65254362 :   if (from_insn == to_insn)
    1225              :     return false;
    1226              : 
    1227    320782342 :   for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    1228    193785949 :     if (INSN_P (insn) && reg_set_p (reg, insn))
    1229              :       return true;
    1230              :   return false;
    1231              : }
    1232              : 
    1233              : /* Return true if REG is set or clobbered inside INSN.  */
    1234              : 
    1235              : bool
    1236   1163650128 : reg_set_p (const_rtx reg, const_rtx insn)
    1237              : {
    1238              :   /* After delay slot handling, call and branch insns might be in a
    1239              :      sequence.  Check all the elements there.  */
    1240   1163650128 :   if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    1241              :     {
    1242            0 :       for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
    1243            0 :         if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
    1244              :           return true;
    1245              : 
    1246              :       return false;
    1247              :     }
    1248              : 
    1249              :   /* We can be passed an insn or part of one.  If we are passed an insn,
    1250              :      check if a side-effect of the insn clobbers REG.  */
    1251   1163650128 :   if (INSN_P (insn)
    1252   1163650128 :       && (FIND_REG_INC_NOTE (insn, reg)
    1253              :           || (CALL_P (insn)
    1254     66400408 :               && ((REG_P (reg)
    1255     66400408 :                    && REGNO (reg) < FIRST_PSEUDO_REGISTER
    1256     62125372 :                    && (insn_callee_abi (as_a<const rtx_insn *> (insn))
    1257     62125372 :                        .clobbers_reg_p (GET_MODE (reg), REGNO (reg))))
    1258     65389579 :                   || MEM_P (reg)
    1259     65389579 :                   || find_reg_fusage (insn, CLOBBER, reg)))))
    1260      1010829 :     return true;
    1261              : 
    1262              :   /* There are no REG_INC notes for SP autoinc.  */
    1263   1162639299 :   if (reg == stack_pointer_rtx && INSN_P (insn))
    1264              :     {
    1265      5868145 :       subrtx_var_iterator::array_type array;
    1266     46906309 :       FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
    1267              :         {
    1268     41729453 :           rtx mem = *iter;
    1269     41729453 :           if (mem
    1270     41729453 :               && MEM_P (mem)
    1271      4616670 :               && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
    1272              :             {
    1273       691289 :               if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx)
    1274       691289 :                 return true;
    1275            0 :               iter.skip_subrtxes ();
    1276              :             }
    1277              :         }
    1278      5868145 :     }
    1279              : 
    1280   1161948010 :   return set_of (reg, insn) != NULL_RTX;
    1281              : }
    1282              : 
    1283              : /* Similar to reg_set_between_p, but check all registers in X.  Return false
    1284              :    only if none of them are modified between START and END.  Return true if
    1285              :    X contains a MEM; this routine does use memory aliasing.  */
    1286              : 
    1287              : bool
    1288    160665271 : modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
    1289              : {
    1290    160665271 :   const enum rtx_code code = GET_CODE (x);
    1291    160665271 :   const char *fmt;
    1292    160665271 :   int i, j;
    1293    160665271 :   rtx_insn *insn;
    1294              : 
    1295    160665271 :   if (start == end)
    1296              :     return false;
    1297              : 
    1298    160665271 :   switch (code)
    1299              :     {
    1300              :     CASE_CONST_ANY:
    1301              :     case CONST:
    1302              :     case SYMBOL_REF:
    1303              :     case LABEL_REF:
    1304              :       return false;
    1305              : 
    1306              :     case PC:
    1307              :       return true;
    1308              : 
    1309     10730348 :     case MEM:
    1310     10730348 :       if (modified_between_p (XEXP (x, 0), start, end))
    1311              :         return true;
    1312     10721268 :       if (MEM_READONLY_P (x))
    1313              :         return false;
    1314     57629642 :       for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
    1315     37188263 :         if (memory_modified_in_insn_p (x, insn))
    1316              :           return true;
    1317              :       return false;
    1318              : 
    1319     62028755 :     case REG:
    1320     62028755 :       return reg_set_between_p (x, start, end);
    1321              : 
    1322     50541983 :     default:
    1323     50541983 :       break;
    1324              :     }
    1325              : 
    1326     50541983 :   fmt = GET_RTX_FORMAT (code);
    1327    148392482 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    1328              :     {
    1329     99190492 :       if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
    1330              :         return true;
    1331              : 
    1332     97851195 :       else if (fmt[i] == 'E')
    1333      3910285 :         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    1334      2833605 :           if (modified_between_p (XVECEXP (x, i, j), start, end))
    1335              :             return true;
    1336              :     }
    1337              : 
    1338              :   return false;
    1339              : }
    1340              : 
    1341              : /* Similar to reg_set_p, but check all registers in X.  Return false only if
    1342              :    none of them are modified in INSN.  Return true if X contains a MEM; this
    1343              :    routine does use memory aliasing.  */
    1344              : 
    1345              : bool
    1346   1068810212 : modified_in_p (const_rtx x, const_rtx insn)
    1347              : {
    1348   1068810212 :   const enum rtx_code code = GET_CODE (x);
    1349   1068810212 :   const char *fmt;
    1350   1068810212 :   int i, j;
    1351              : 
    1352   1068810212 :   switch (code)
    1353              :     {
    1354              :     CASE_CONST_ANY:
    1355              :     case CONST:
    1356              :     case SYMBOL_REF:
    1357              :     case LABEL_REF:
    1358              :       return false;
    1359              : 
    1360              :     case PC:
    1361              :       return true;
    1362              : 
    1363      7974922 :     case MEM:
    1364      7974922 :       if (modified_in_p (XEXP (x, 0), insn))
    1365              :         return true;
    1366      7931558 :       if (MEM_READONLY_P (x))
    1367              :         return false;
    1368      7718435 :       if (memory_modified_in_insn_p (x, insn))
    1369              :         return true;
    1370              :       return false;
    1371              : 
    1372    933618870 :     case REG:
    1373    933618870 :       return reg_set_p (x, insn);
    1374              : 
    1375     71488945 :     default:
    1376     71488945 :       break;
    1377              :     }
    1378              : 
    1379     71488945 :   fmt = GET_RTX_FORMAT (code);
    1380    206006813 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    1381              :     {
    1382    140456047 :       if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
    1383              :         return true;
    1384              : 
    1385    134526434 :       else if (fmt[i] == 'E')
    1386      1310074 :         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    1387       675162 :           if (modified_in_p (XVECEXP (x, i, j), insn))
    1388              :             return true;
    1389              :     }
    1390              : 
    1391              :   return false;
    1392              : }
    1393              : 
    1394              : /* Return true if X is a SUBREG and if storing a value to X would
    1395              :    preserve some of its SUBREG_REG.  For example, on a normal 32-bit
    1396              :    target, using a SUBREG to store to one half of a DImode REG would
    1397              :    preserve the other half.  */
    1398              : 
    1399              : bool
    1400    143224310 : read_modify_subreg_p (const_rtx x)
    1401              : {
    1402    143224310 :   if (GET_CODE (x) != SUBREG)
    1403              :     return false;
    1404     48623008 :   poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
    1405     48623008 :   poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x));
    1406     24311504 :   poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
    1407              :   /* The inner and outer modes of a subreg must be ordered, so that we
    1408              :      can tell whether they're paradoxical or partial.  */
    1409     24311504 :   gcc_checking_assert (ordered_p (isize, osize));
    1410     24311504 :   return (maybe_gt (isize, osize) && maybe_gt (isize, regsize));
    1411              : }
    1412              : 
    1413              : /* Helper function for set_of.  */
    1414              : struct set_of_data
    1415              :   {
    1416              :     const_rtx found;
    1417              :     const_rtx pat;
    1418              :   };
    1419              : 
    1420              : static void
    1421   1232411358 : set_of_1 (rtx x, const_rtx pat, void *data1)
    1422              : {
    1423   1232411358 :   struct set_of_data *const data = (struct set_of_data *) (data1);
    1424   1232411358 :   if (rtx_equal_p (x, data->pat)
    1425   1232411358 :       || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    1426     70636634 :     data->found = pat;
    1427   1232411358 : }
    1428              : 
    1429              : /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
    1430              :    (either directly or via STRICT_LOW_PART and similar modifiers).  */
    1431              : const_rtx
    1432   1212691058 : set_of (const_rtx pat, const_rtx insn)
    1433              : {
    1434   1212691058 :   struct set_of_data data;
    1435   1212691058 :   data.found = NULL_RTX;
    1436   1212691058 :   data.pat = pat;
    1437   1212691058 :   note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
    1438   1212691058 :   return data.found;
    1439              : }
    1440              : 
    1441              : /* Check whether instruction pattern PAT contains a SET with the following
    1442              :    properties:
    1443              : 
    1444              :    - the SET is executed unconditionally; and
    1445              :    - either:
    1446              :      - the destination of the SET is a REG that contains REGNO; or
    1447              :      - both:
    1448              :        - the destination of the SET is a SUBREG of such a REG; and
    1449              :        - writing to the subreg clobbers all of the SUBREG_REG
    1450              :          (in other words, read_modify_subreg_p is false).
    1451              : 
    1452              :    If PAT does have a SET like that, return the set, otherwise return null.
    1453              : 
    1454              :    This is intended to be an alternative to single_set for passes that
    1455              :    can handle patterns with multiple_sets.  */
    1456              : rtx
    1457    132696617 : simple_regno_set (rtx pat, unsigned int regno)
    1458              : {
    1459    132696617 :   if (GET_CODE (pat) == PARALLEL)
    1460              :     {
    1461     22316531 :       int last = XVECLEN (pat, 0) - 1;
    1462     22316645 :       for (int i = 0; i < last; ++i)
    1463     22316531 :         if (rtx set = simple_regno_set (XVECEXP (pat, 0, i), regno))
    1464              :           return set;
    1465              : 
    1466          114 :       pat = XVECEXP (pat, 0, last);
    1467              :     }
    1468              : 
    1469    110380200 :   if (GET_CODE (pat) == SET
    1470    110380200 :       && covers_regno_no_parallel_p (SET_DEST (pat), regno))
    1471              :     return pat;
    1472              : 
    1473              :   return nullptr;
    1474              : }
    1475              : 
    1476              : /* Add all hard register in X to *PSET.  */
    1477              : void
    1478      3883846 : find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
    1479              : {
    1480      3883846 :   subrtx_iterator::array_type array;
    1481     10539078 :   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    1482              :     {
    1483      6655232 :       const_rtx x = *iter;
    1484      6655232 :       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    1485      3292098 :         add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    1486              :     }
    1487      3883846 : }
    1488              : 
    1489              : /* This function, called through note_stores, collects sets and
    1490              :    clobbers of hard registers in a HARD_REG_SET, which is pointed to
    1491              :    by DATA.  */
    1492              : void
    1493     22396120 : record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
    1494              : {
    1495     22396120 :   HARD_REG_SET *pset = (HARD_REG_SET *)data;
    1496     22396120 :   if (REG_P (x) && HARD_REGISTER_P (x))
    1497     14836248 :     add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    1498     22396120 : }
    1499              : 
    1500              : /* Examine INSN, and compute the set of hard registers written by it.
    1501              :    Store it in *PSET.  Should only be called after reload.
    1502              : 
    1503              :    IMPLICIT is true if we should include registers that are fully-clobbered
    1504              :    by calls.  This should be used with caution, since it doesn't include
    1505              :    partially-clobbered registers.  */
    1506              : void
    1507     17254543 : find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
    1508              : {
    1509     17254543 :   rtx link;
    1510              : 
    1511     17254543 :   CLEAR_HARD_REG_SET (*pset);
    1512     17254543 :   note_stores (insn, record_hard_reg_sets, pset);
    1513     17254543 :   if (CALL_P (insn) && implicit)
    1514            0 :     *pset |= insn_callee_abi (insn).full_reg_clobbers ();
    1515     33733146 :   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    1516     16478603 :     if (REG_NOTE_KIND (link) == REG_INC)
    1517            0 :       record_hard_reg_sets (XEXP (link, 0), NULL, pset);
    1518     17254543 : }
    1519              : 
    1520              : /* Like record_hard_reg_sets, but called through note_uses.  */
    1521              : void
    1522      3883846 : record_hard_reg_uses (rtx *px, void *data)
    1523              : {
    1524      3883846 :   find_all_hard_regs (*px, (HARD_REG_SET *) data);
    1525      3883846 : }
    1526              : 
    1527              : /* Given an INSN, return a SET expression if this insn has only a single SET.
    1528              :    It may also have CLOBBERs, USEs, or SET whose output
    1529              :    will not be used, which we ignore.  */
    1530              : 
    1531              : rtx
    1532   1908740019 : single_set_2 (const rtx_insn *insn, const_rtx pat)
    1533              : {
    1534   1908740019 :   rtx set = NULL;
    1535   1908740019 :   int set_verified = 1;
    1536   1908740019 :   int i;
    1537              : 
    1538   1908740019 :   if (GET_CODE (pat) == PARALLEL)
    1539              :     {
    1540   2160090355 :       for (i = 0; i < XVECLEN (pat, 0); i++)
    1541              :         {
    1542   1469556138 :           rtx sub = XVECEXP (pat, 0, i);
    1543   1469556138 :           switch (GET_CODE (sub))
    1544              :             {
    1545              :             case USE:
    1546              :             case CLOBBER:
    1547   1433247014 :               break;
    1548              : 
    1549              :             default:
    1550     36309124 :               return NULL_RTX;
    1551              : 
    1552    740627877 :             case SET:
    1553              :               /* We can consider insns having multiple sets, where all
    1554              :                  but one are dead as single set insns.  In common case
    1555              :                  only single set is present in the pattern so we want
    1556              :                  to avoid checking for REG_UNUSED notes unless necessary.
    1557              : 
    1558              :                  When we reach set first time, we just expect this is
    1559              :                  the single set we are looking for and only when more
    1560              :                  sets are found in the insn, we check them.  */
    1561    794347320 :               auto unused = [] (const rtx_insn *insn, rtx dest) {
    1562     53719443 :                 if (!df)
    1563              :                   return false;
    1564     52888711 :                 if (df_note)
    1565     30889381 :                   return !!find_reg_note (insn, REG_UNUSED, dest);
    1566     21999330 :                 return (REG_P (dest)
    1567     20087535 :                         && !HARD_REGISTER_P (dest)
    1568      5740851 :                         && REGNO (dest) < df->regs_inited
    1569     27739523 :                         && DF_REG_USE_COUNT (REGNO (dest)) == 0);
    1570              :               };
    1571    740627877 :               if (!set_verified)
    1572              :                 {
    1573     28545638 :                   if (unused (insn, SET_DEST (set)) && !side_effects_p (set))
    1574              :                     set = NULL;
    1575              :                   else
    1576              :                     set_verified = 1;
    1577              :                 }
    1578    740627877 :               if (!set)
    1579              :                 set = sub, set_verified = 0;
    1580     25173805 :               else if (!unused (insn, SET_DEST (sub)) || side_effects_p (sub))
    1581     20540199 :                 return NULL_RTX;
    1582              :               break;
    1583              :             }
    1584              :         }
    1585              :     }
    1586              :   return set;
    1587              : }
    1588              : 
    1589              : /* Given an INSN, return true if it has more than one SET, else return
    1590              :    false.  */
    1591              : 
    1592              : bool
    1593    285125895 : multiple_sets (const_rtx insn)
    1594              : {
    1595    285125895 :   bool found;
    1596    285125895 :   int i;
    1597              : 
    1598              :   /* INSN must be an insn.  */
    1599    285125895 :   if (! INSN_P (insn))
    1600              :     return false;
    1601              : 
    1602              :   /* Only a PARALLEL can have multiple SETs.  */
    1603    285125895 :   if (GET_CODE (PATTERN (insn)) == PARALLEL)
    1604              :     {
    1605    257979373 :       for (i = 0, found = false; i < XVECLEN (PATTERN (insn), 0); i++)
    1606    173823477 :         if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
    1607              :           {
    1608              :             /* If we have already found a SET, then return now.  */
    1609     87772177 :             if (found)
    1610              :               return true;
    1611              :             else
    1612              :               found = true;
    1613              :           }
    1614              :     }
    1615              : 
    1616              :   /* Either zero or one SET.  */
    1617              :   return false;
    1618              : }
    1619              : 
    1620              : /* Return true if the destination of SET equals the source
    1621              :    and there are no side effects.  */
    1622              : 
    1623              : bool
    1624   1677898139 : set_noop_p (const_rtx set)
    1625              : {
    1626   1677898139 :   rtx src = SET_SRC (set);
    1627   1677898139 :   rtx dst = SET_DEST (set);
    1628              : 
    1629   1677898139 :   if (dst == pc_rtx && src == pc_rtx)
    1630              :     return true;
    1631              : 
    1632   1677890636 :   if (MEM_P (dst) && MEM_P (src))
    1633      9011676 :     return (rtx_equal_p (dst, src)
    1634         4263 :             && !side_effects_p (dst)
    1635      9015800 :             && !side_effects_p (src));
    1636              : 
    1637   1668878960 :   if (GET_CODE (dst) == ZERO_EXTRACT)
    1638       108878 :     return (rtx_equal_p (XEXP (dst, 0), src)
    1639         6407 :             && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
    1640            0 :             && !side_effects_p (src)
    1641       108878 :             && !side_effects_p (XEXP (dst, 0)));
    1642              : 
    1643   1668770082 :   if (GET_CODE (dst) == STRICT_LOW_PART)
    1644       266457 :     dst = XEXP (dst, 0);
    1645              : 
    1646   1668770082 :   if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    1647              :     {
    1648       831345 :       if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst)))
    1649              :         return false;
    1650       786202 :       src = SUBREG_REG (src);
    1651       786202 :       dst = SUBREG_REG (dst);
    1652       786202 :       if (GET_MODE (src) != GET_MODE (dst))
    1653              :         /* It is hard to tell whether subregs refer to the same bits, so act
    1654              :            conservatively and return false.  */
    1655              :         return false;
    1656              :     }
    1657              : 
    1658              :   /* It is a NOOP if destination overlaps with selected src vector
    1659              :      elements.  */
    1660   1668654516 :   if (GET_CODE (src) == VEC_SELECT
    1661      7970591 :       && REG_P (XEXP (src, 0)) && REG_P (dst)
    1662      2741122 :       && HARD_REGISTER_P (XEXP (src, 0))
    1663   1669231811 :       && HARD_REGISTER_P (dst))
    1664              :     {
    1665       577059 :       int i;
    1666       577059 :       rtx par = XEXP (src, 1);
    1667       577059 :       rtx src0 = XEXP (src, 0);
    1668       577059 :       poly_int64 c0;
    1669       577059 :       if (!poly_int_rtx_p (XVECEXP (par, 0, 0), &c0))
    1670              :         return false;
    1671      1154118 :       poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;
    1672              : 
    1673       961830 :       for (i = 1; i < XVECLEN (par, 0); i++)
    1674              :         {
    1675       627159 :           poly_int64 c0i;
    1676       627159 :           if (!poly_int_rtx_p (XVECEXP (par, 0, i), &c0i)
    1677       627159 :               || maybe_ne (c0i, c0 + i))
    1678   1677263006 :             return false;
    1679              :         }
    1680       334671 :       return
    1681       334671 :         REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst))
    1682       334671 :         && validate_subreg (GET_MODE (dst), GET_MODE (src0), src0, offset)
    1683       778148 :         && simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
    1684       123069 :                                   offset, GET_MODE (dst)) == (int) REGNO (dst);
    1685              :     }
    1686              : 
    1687    421576067 :   return (REG_P (src) && REG_P (dst)
    1688   1899996843 :           && REGNO (src) == REGNO (dst));
    1689              : }
    1690              : 
    1691              : /* Return true if an insn consists only of SETs, each of which only sets a
    1692              :    value to itself.  */
    1693              : 
    1694              : bool
    1695   1144270458 : noop_move_p (const rtx_insn *insn)
    1696              : {
    1697   1144270458 :   rtx pat = PATTERN (insn);
    1698              : 
    1699   1144270458 :   if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    1700              :     return true;
    1701              : 
    1702              :   /* Check the code to be executed for COND_EXEC.  */
    1703   1144263863 :   if (GET_CODE (pat) == COND_EXEC)
    1704            0 :     pat = COND_EXEC_CODE (pat);
    1705              : 
    1706   1144263863 :   if (GET_CODE (pat) == SET && set_noop_p (pat))
    1707              :     return true;
    1708              : 
    1709   1144233129 :   if (GET_CODE (pat) == PARALLEL)
    1710              :     {
    1711              :       int i;
    1712              :       /* If nothing but SETs of registers to themselves,
    1713              :          this insn can also be deleted.  */
    1714    159344026 :       for (i = 0; i < XVECLEN (pat, 0); i++)
    1715              :         {
    1716    159343920 :           rtx tem = XVECEXP (pat, 0, i);
    1717              : 
    1718    159343920 :           if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
    1719        21413 :             continue;
    1720              : 
    1721    159322507 :           if (GET_CODE (tem) != SET || ! set_noop_p (tem))
    1722    159322401 :             return false;
    1723              :         }
    1724              : 
    1725              :       return true;
    1726              :     }
    1727              :   return false;
    1728              : }
    1729              : 
    1730              : 
    1731              : /* Return true if register in range [REGNO, ENDREGNO)
    1732              :    appears either explicitly or implicitly in X
    1733              :    other than being stored into.
    1734              : 
    1735              :    References contained within the substructure at LOC do not count.
    1736              :    LOC may be zero, meaning don't ignore anything.  */
    1737              : 
    1738              : bool
    1739   2217175052 : refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
    1740              :                    rtx *loc)
    1741              : {
    1742   2947564488 :   int i;
    1743   2947564488 :   unsigned int x_regno;
    1744   2947564488 :   RTX_CODE code;
    1745   2947564488 :   const char *fmt;
    1746              : 
    1747   2947564488 :  repeat:
    1748              :   /* The contents of a REG_NONNEG note is always zero, so we must come here
    1749              :      upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
    1750   2947564488 :   if (x == 0)
    1751              :     return false;
    1752              : 
    1753   2947564488 :   code = GET_CODE (x);
    1754              : 
    1755   2947564488 :   switch (code)
    1756              :     {
    1757   1604564160 :     case REG:
    1758   1604564160 :       x_regno = REGNO (x);
    1759              : 
    1760              :       /* If we modifying the stack, frame, or argument pointer, it will
    1761              :          clobber a virtual register.  In fact, we could be more precise,
    1762              :          but it isn't worth it.  */
    1763   1604564160 :       if ((x_regno == STACK_POINTER_REGNUM
    1764   1604564160 :            || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
    1765   1604564160 :                && x_regno == ARG_POINTER_REGNUM)
    1766              :            || x_regno == FRAME_POINTER_REGNUM)
    1767    125919897 :           && VIRTUAL_REGISTER_NUM_P (regno))
    1768              :         return true;
    1769              : 
    1770   1604564158 :       return endregno > x_regno && regno < END_REGNO (x);
    1771              : 
    1772     33613016 :     case SUBREG:
    1773              :       /* If this is a SUBREG of a hard reg, we can see exactly which
    1774              :          registers are being modified.  Otherwise, handle normally.  */
    1775     33613016 :       if (REG_P (SUBREG_REG (x))
    1776     33613016 :           && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
    1777              :         {
    1778         1856 :           unsigned int inner_regno = subreg_regno (x);
    1779         1856 :           unsigned int inner_endregno
    1780              :             = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
    1781         1856 :                              ? subreg_nregs (x) : 1);
    1782              : 
    1783         1856 :           return endregno > inner_regno && regno < inner_endregno;
    1784              :         }
    1785              :       break;
    1786              : 
    1787     98853912 :     case CLOBBER:
    1788     98853912 :     case SET:
    1789     98853912 :       if (&SET_DEST (x) != loc
    1790              :           /* Note setting a SUBREG counts as referring to the REG it is in for
    1791              :              a pseudo but not for hard registers since we can
    1792              :              treat each word individually.  */
    1793     98853912 :           && ((GET_CODE (SET_DEST (x)) == SUBREG
    1794       668402 :                && loc != &SUBREG_REG (SET_DEST (x))
    1795       668402 :                && REG_P (SUBREG_REG (SET_DEST (x)))
    1796       668402 :                && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
    1797       668402 :                && refers_to_regno_p (regno, endregno,
    1798              :                                      SUBREG_REG (SET_DEST (x)), loc))
    1799     98832458 :               || (!REG_P (SET_DEST (x))
    1800     10425305 :                   && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
    1801       134162 :         return true;
    1802              : 
    1803     98719750 :       if (code == CLOBBER || loc == &SET_SRC (x))
    1804              :         return false;
    1805     82310198 :       x = SET_SRC (x);
    1806     82310198 :       goto repeat;
    1807              : 
    1808              :     default:
    1809              :       break;
    1810              :     }
    1811              : 
    1812              :   /* X does not match, so try its subexpressions.  */
    1813              : 
    1814   1244144560 :   fmt = GET_RTX_FORMAT (code);
    1815   2426915228 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    1816              :     {
    1817   1838055660 :       if (fmt[i] == 'e' && loc != &XEXP (x, i))
    1818              :         {
    1819   1131233609 :           if (i == 0)
    1820              :             {
    1821    648079238 :               x = XEXP (x, 0);
    1822    648079238 :               goto repeat;
    1823              :             }
    1824              :           else
    1825    483154371 :             if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
    1826              :               return true;
    1827              :         }
    1828    706822051 :       else if (fmt[i] == 'E')
    1829              :         {
    1830     45205443 :           int j;
    1831    166286284 :           for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    1832    122764112 :             if (loc != &XVECEXP (x, i, j)
    1833    122764112 :                 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
    1834              :               return true;
    1835              :         }
    1836              :     }
    1837              :   return false;
    1838              : }
    1839              : 
    1840              : /* Rreturn true if modifying X will affect IN.  If X is a register or a SUBREG,
    1841              :    we check if any register number in X conflicts with the relevant register
    1842              :    numbers.  If X is a constant, return false.  If X is a MEM, return true iff
    1843              :    IN contains a MEM (we don't bother checking for memory addresses that can't
    1844              :    conflict because we expect this to be a rare case.  */
    1845              : 
    1846              : bool
    1847   1492449346 : reg_overlap_mentioned_p (const_rtx x, const_rtx in)
    1848              : {
    1849   1492449346 :   unsigned int regno, endregno;
    1850              : 
    1851              :   /* If either argument is a constant, then modifying X cannot
    1852              :      affect IN.  Here we look at IN, we can profitably combine
    1853              :      CONSTANT_P (x) with the switch statement below.  */
    1854   1492449346 :   if (CONSTANT_P (in))
    1855              :     return false;
    1856              : 
    1857   1463503097 :  recurse:
    1858   1463507107 :   switch (GET_CODE (x))
    1859              :     {
    1860         4010 :     case CLOBBER:
    1861         4010 :     case STRICT_LOW_PART:
    1862         4010 :     case ZERO_EXTRACT:
    1863         4010 :     case SIGN_EXTRACT:
    1864              :       /* Overly conservative.  */
    1865         4010 :       x = XEXP (x, 0);
    1866         4010 :       goto recurse;
    1867              : 
    1868      1329229 :     case SUBREG:
    1869      1329229 :       regno = REGNO (SUBREG_REG (x));
    1870      1329229 :       if (regno < FIRST_PSEUDO_REGISTER)
    1871            0 :         regno = subreg_regno (x);
    1872      1329229 :       endregno = regno + (regno < FIRST_PSEUDO_REGISTER
    1873            0 :                           ? subreg_nregs (x) : 1);
    1874      1329229 :       goto do_reg;
    1875              : 
    1876   1458343852 :     case REG:
    1877   1458343852 :       regno = REGNO (x);
    1878   1458343852 :       endregno = END_REGNO (x);
    1879   1459673081 :     do_reg:
    1880   1459673081 :       return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
    1881              : 
    1882      1826377 :     case MEM:
    1883      1826377 :       {
    1884      1826377 :         const char *fmt;
    1885      1826377 :         int i;
    1886              : 
    1887      1826377 :         if (MEM_P (in))
    1888              :           return true;
    1889              : 
    1890      1687893 :         fmt = GET_RTX_FORMAT (GET_CODE (in));
    1891      3598805 :         for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
    1892      1958826 :           if (fmt[i] == 'e')
    1893              :             {
    1894       532703 :               if (reg_overlap_mentioned_p (x, XEXP (in, i)))
    1895              :                 return true;
    1896              :             }
    1897      1426123 :           else if (fmt[i] == 'E')
    1898              :             {
    1899         8676 :               int j;
    1900        38819 :               for (j = XVECLEN (in, i) - 1; j >= 0; --j)
    1901        31178 :                 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
    1902              :                   return true;
    1903              :             }
    1904              : 
    1905              :         return false;
    1906              :       }
    1907              : 
    1908      1886464 :     case SCRATCH:
    1909      1886464 :     case PC:
    1910      1886464 :       return reg_mentioned_p (x, in);
    1911              : 
    1912          676 :     case PARALLEL:
    1913          676 :       {
    1914          676 :         int i;
    1915              : 
    1916              :         /* If any register in here refers to it we return true.  */
    1917         1218 :         for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
    1918         1084 :           if (XEXP (XVECEXP (x, 0, i), 0) != 0
    1919         1084 :               && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
    1920              :             return true;
    1921              :         return false;
    1922              :       }
    1923              : 
    1924       116499 :     default:
    1925       116499 :       gcc_assert (CONSTANT_P (x));
    1926              :       return false;
    1927              :     }
    1928              : }
    1929              : 
    1930              : /* Call FUN on each register or MEM that is stored into or clobbered by X.
    1931              :    (X would be the pattern of an insn).  DATA is an arbitrary pointer,
    1932              :    ignored by note_stores, but passed to FUN.
    1933              : 
    1934              :    FUN receives three arguments:
    1935              :    1. the REG, MEM or PC being stored in or clobbered,
    1936              :    2. the SET or CLOBBER rtx that does the store,
    1937              :    3. the pointer DATA provided to note_stores.
    1938              : 
    1939              :   If the item being stored in or clobbered is a SUBREG of a hard register,
    1940              :   the SUBREG will be passed.  */
    1941              : 
    1942              : void
    1943   7084918165 : note_pattern_stores (const_rtx x,
    1944              :                      void (*fun) (rtx, const_rtx, void *), void *data)
    1945              : {
    1946   7084918165 :   int i;
    1947              : 
    1948   7084918165 :   if (GET_CODE (x) == COND_EXEC)
    1949            0 :     x = COND_EXEC_CODE (x);
    1950              : 
    1951   7084918165 :   if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    1952              :     {
    1953   5225770378 :       rtx dest = SET_DEST (x);
    1954              : 
    1955   5225770378 :       while ((GET_CODE (dest) == SUBREG
    1956     20177730 :               && (!REG_P (SUBREG_REG (dest))
    1957     20177730 :                   || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
    1958   5226730228 :              || GET_CODE (dest) == ZERO_EXTRACT
    1959  10473337277 :              || GET_CODE (dest) == STRICT_LOW_PART)
    1960     21107322 :         dest = XEXP (dest, 0);
    1961              : 
    1962              :       /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
    1963              :          each of whose first operand is a register.  */
    1964   5225770378 :       if (GET_CODE (dest) == PARALLEL)
    1965              :         {
    1966      1466858 :           for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
    1967       930529 :             if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
    1968       930529 :               (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
    1969              :         }
    1970              :       else
    1971   5225234049 :         (*fun) (dest, x, data);
    1972              :     }
    1973              : 
    1974   1859147787 :   else if (GET_CODE (x) == PARALLEL)
    1975   2139344866 :     for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
    1976   1440974101 :       note_pattern_stores (XVECEXP (x, 0, i), fun, data);
    1977   7084918165 : }
    1978              : 
    1979              : /* Same, but for an instruction.  If the instruction is a call, include
    1980              :    any CLOBBERs in its CALL_INSN_FUNCTION_USAGE.  */
    1981              : 
    1982              : void
    1983   3434650222 : note_stores (const rtx_insn *insn,
    1984              :              void (*fun) (rtx, const_rtx, void *), void *data)
    1985              : {
    1986   3434650222 :   if (CALL_P (insn))
    1987    164026570 :     for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
    1988    500888644 :          link; link = XEXP (link, 1))
    1989    336862074 :       if (GET_CODE (XEXP (link, 0)) == CLOBBER)
    1990     12056865 :         note_pattern_stores (XEXP (link, 0), fun, data);
    1991   3434650222 :   note_pattern_stores (PATTERN (insn), fun, data);
    1992   3434650222 : }
    1993              : 
    1994              : /* Like notes_stores, but call FUN for each expression that is being
    1995              :    referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
    1996              :    FUN for each expression, not any interior subexpressions.  FUN receives a
    1997              :    pointer to the expression and the DATA passed to this function.
    1998              : 
    1999              :    Note that this is not quite the same test as that done in reg_referenced_p
    2000              :    since that considers something as being referenced if it is being
    2001              :    partially set, while we do not.  */
    2002              : 
    2003              : void
    2004   1107685249 : note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
    2005              : {
    2006   1107685249 :   rtx body = *pbody;
    2007   1107685249 :   int i;
    2008              : 
    2009   1107685249 :   switch (GET_CODE (body))
    2010              :     {
    2011            0 :     case COND_EXEC:
    2012            0 :       (*fun) (&COND_EXEC_TEST (body), data);
    2013            0 :       note_uses (&COND_EXEC_CODE (body), fun, data);
    2014            0 :       return;
    2015              : 
    2016     86035913 :     case PARALLEL:
    2017    265942287 :       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
    2018    179906374 :         note_uses (&XVECEXP (body, 0, i), fun, data);
    2019              :       return;
    2020              : 
    2021            0 :     case SEQUENCE:
    2022            0 :       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
    2023            0 :         note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
    2024              :       return;
    2025              : 
    2026      4877119 :     case USE:
    2027      4877119 :       (*fun) (&XEXP (body, 0), data);
    2028      4877119 :       return;
    2029              : 
    2030       267086 :     case ASM_OPERANDS:
    2031       374034 :       for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
    2032       106948 :         (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
    2033              :       return;
    2034              : 
    2035       138127 :     case TRAP_IF:
    2036       138127 :       (*fun) (&TRAP_CONDITION (body), data);
    2037       138127 :       return;
    2038              : 
    2039        10914 :     case PREFETCH:
    2040        10914 :       (*fun) (&XEXP (body, 0), data);
    2041        10914 :       return;
    2042              : 
    2043      3237881 :     case UNSPEC:
    2044      3237881 :     case UNSPEC_VOLATILE:
    2045      6496515 :       for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
    2046      3258634 :         (*fun) (&XVECEXP (body, 0, i), data);
    2047              :       return;
    2048              : 
    2049     84001859 :     case CLOBBER:
    2050     84001859 :       if (MEM_P (XEXP (body, 0)))
    2051      3458523 :         (*fun) (&XEXP (XEXP (body, 0), 0), data);
    2052              :       return;
    2053              : 
    2054    549763749 :     case SET:
    2055    549763749 :       {
    2056    549763749 :         rtx dest = SET_DEST (body);
    2057              : 
    2058              :         /* For sets we replace everything in source plus registers in memory
    2059              :            expression in store and operands of a ZERO_EXTRACT.  */
    2060    549763749 :         (*fun) (&SET_SRC (body), data);
    2061              : 
    2062    549763749 :         if (GET_CODE (dest) == ZERO_EXTRACT)
    2063              :           {
    2064        33047 :             (*fun) (&XEXP (dest, 1), data);
    2065        33047 :             (*fun) (&XEXP (dest, 2), data);
    2066              :           }
    2067              : 
    2068    551914573 :         while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
    2069      2150824 :           dest = XEXP (dest, 0);
    2070              : 
    2071    549763749 :         if (MEM_P (dest))
    2072     94841667 :           (*fun) (&XEXP (dest, 0), data);
    2073              :       }
    2074              :       return;
    2075              : 
    2076    379352601 :     default:
    2077              :       /* All the other possibilities never store.  */
    2078    379352601 :       (*fun) (pbody, data);
    2079    379352601 :       return;
    2080              :     }
    2081              : }
    2082              : 
    2083              : /* Try to add a description of REG X to this object, stopping once
    2084              :    the REF_END limit has been reached.  FLAGS is a bitmask of
    2085              :    rtx_obj_reference flags that describe the context.  */
    2086              : 
    2087              : void
    2088    834533903 : rtx_properties::try_to_add_reg (const_rtx x, unsigned int flags)
    2089              : {
    2090    834533903 :   if (REG_NREGS (x) != 1)
    2091      2650079 :     flags |= rtx_obj_flags::IS_MULTIREG;
    2092    834533903 :   machine_mode mode = GET_MODE (x);
    2093    834533903 :   unsigned int start_regno = REGNO (x);
    2094    834533903 :   unsigned int end_regno = END_REGNO (x);
    2095   1671717885 :   for (unsigned int regno = start_regno; regno < end_regno; ++regno)
    2096    837183982 :     if (ref_iter != ref_end)
    2097    837029314 :       *ref_iter++ = rtx_obj_reference (regno, flags, mode,
    2098    837029314 :                                        regno - start_regno);
    2099    834533903 : }
    2100              : 
    2101              : /* Add a description of destination X to this object.  FLAGS is a bitmask
    2102              :    of rtx_obj_reference flags that describe the context.
    2103              : 
    2104              :    This routine accepts all rtxes that can legitimately appear in a
    2105              :    SET_DEST.  */
    2106              : 
    2107              : void
    2108    429165267 : rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
    2109              : {
    2110              :   /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
    2111              :      each of whose first operand is a register.  */
    2112    429165267 :   if (UNLIKELY (GET_CODE (x) == PARALLEL))
    2113              :     {
    2114       132356 :       for (int i = XVECLEN (x, 0) - 1; i >= 0; --i)
    2115        83507 :         if (rtx dest = XEXP (XVECEXP (x, 0, i), 0))
    2116        83507 :           try_to_add_dest (dest, flags);
    2117              :       return;
    2118              :     }
    2119              : 
    2120    429116418 :   unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
    2121    429116418 :   flags |= rtx_obj_flags::IS_WRITE;
    2122    430749630 :   for (;;)
    2123    430749630 :     if (GET_CODE (x) == ZERO_EXTRACT)
    2124              :       {
    2125        20559 :         try_to_add_src (XEXP (x, 1), base_flags);
    2126        20559 :         try_to_add_src (XEXP (x, 2), base_flags);
    2127        20559 :         flags |= rtx_obj_flags::IS_READ;
    2128        20559 :         x = XEXP (x, 0);
    2129              :       }
    2130    430729071 :     else if (GET_CODE (x) == STRICT_LOW_PART)
    2131              :       {
    2132        56489 :         flags |= rtx_obj_flags::IS_READ;
    2133        56489 :         x = XEXP (x, 0);
    2134              :       }
    2135    430672582 :     else if (GET_CODE (x) == SUBREG)
    2136              :       {
    2137      1556164 :         flags |= rtx_obj_flags::IN_SUBREG;
    2138      1556164 :         if (read_modify_subreg_p (x))
    2139       900175 :           flags |= rtx_obj_flags::IS_READ;
    2140      1556164 :         x = SUBREG_REG (x);
    2141              :       }
    2142              :     else
    2143              :       break;
    2144              : 
    2145    429116418 :   if (MEM_P (x))
    2146              :     {
    2147     59808202 :       if (ref_iter != ref_end)
    2148     59797546 :         *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, GET_MODE (x));
    2149              : 
    2150     59808202 :       unsigned int addr_flags = base_flags | rtx_obj_flags::IN_MEM_STORE;
    2151     59808202 :       if (flags & rtx_obj_flags::IS_READ)
    2152         3870 :         addr_flags |= rtx_obj_flags::IN_MEM_LOAD;
    2153     59808202 :       try_to_add_src (XEXP (x, 0), addr_flags);
    2154     59808202 :       return;
    2155              :     }
    2156              : 
    2157    369308216 :   if (LIKELY (REG_P (x)))
    2158              :     {
    2159    326499646 :       if (REGNO (x) == STACK_POINTER_REGNUM)
    2160              :         {
    2161              :           /* Stack accesses are dependent on previous allocations and
    2162              :              anti-dependent on later deallocations, so both types of
    2163              :              stack operation are akin to a memory write.  */
    2164     23527088 :           if (ref_iter != ref_end)
    2165     23527088 :             *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, BLKmode);
    2166              : 
    2167              :           /* We want to keep sp alive everywhere - by making all
    2168              :              writes to sp also use sp.  */
    2169     23527088 :           flags |= rtx_obj_flags::IS_READ;
    2170              :         }
    2171    326499646 :       try_to_add_reg (x, flags);
    2172    326499646 :       return;
    2173              :     }
    2174              : }
    2175              : 
    2176              : /* Try to add a description of source X to this object, stopping once
    2177              :    the REF_END limit has been reached.  FLAGS is a bitmask of
    2178              :    rtx_obj_reference flags that describe the context.
    2179              : 
    2180              :    This routine accepts all rtxes that can legitimately appear in a SET_SRC.  */
    2181              : 
    2182              : void
    2183    926596682 : rtx_properties::try_to_add_src (const_rtx x, unsigned int flags)
    2184              : {
    2185    926596682 :   unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
    2186    926596682 :   subrtx_iterator::array_type array;
    2187   2934313638 :   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    2188              :     {
    2189   2007716956 :       const_rtx x = *iter;
    2190   2007716956 :       rtx_code code = GET_CODE (x);
    2191   2007716956 :       if (code == REG)
    2192    508034257 :         try_to_add_reg (x, flags | rtx_obj_flags::IS_READ);
    2193              :       else if (code == MEM)
    2194              :         {
    2195    108313507 :           if (MEM_VOLATILE_P (x))
    2196      3183440 :             has_volatile_refs = true;
    2197              : 
    2198    108313507 :           if (!MEM_READONLY_P (x) && ref_iter != ref_end)
    2199              :             {
    2200    103435748 :               auto mem_flags = flags | rtx_obj_flags::IS_READ;
    2201    103435748 :               *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags,
    2202    103435748 :                                                GET_MODE (x));
    2203              :             }
    2204              : 
    2205    108313507 :           try_to_add_src (XEXP (x, 0),
    2206              :                           base_flags | rtx_obj_flags::IN_MEM_LOAD);
    2207    108313507 :           iter.skip_subrtxes ();
    2208              :         }
    2209              :       else if (code == SUBREG)
    2210              :         {
    2211      8267982 :           try_to_add_src (SUBREG_REG (x), flags | rtx_obj_flags::IN_SUBREG);
    2212      8267982 :           iter.skip_subrtxes ();
    2213              :         }
    2214              :       else if (code == UNSPEC_VOLATILE)
    2215      2646009 :         has_volatile_refs = true;
    2216              :       else if (code == ASM_INPUT || code == ASM_OPERANDS)
    2217              :         {
    2218      1082451 :           has_asm = true;
    2219      1082451 :           if (MEM_VOLATILE_P (x))
    2220       346684 :             has_volatile_refs = true;
    2221              :         }
    2222              :       else if (code == PRE_INC
    2223              :                || code == PRE_DEC
    2224              :                || code == POST_INC
    2225              :                || code == POST_DEC
    2226              :                || code == PRE_MODIFY
    2227              :                || code == POST_MODIFY)
    2228              :         {
    2229     12232512 :           has_pre_post_modify = true;
    2230              : 
    2231     12232512 :           unsigned int addr_flags = (flags
    2232              :                                      | rtx_obj_flags::IS_PRE_POST_MODIFY
    2233              :                                      | rtx_obj_flags::IS_READ);
    2234     12232512 :           try_to_add_dest (XEXP (x, 0), addr_flags);
    2235     12232512 :           if (code == PRE_MODIFY || code == POST_MODIFY)
    2236       357421 :             iter.substitute (XEXP (XEXP (x, 1), 1));
    2237              :           else
    2238     11875091 :             iter.skip_subrtxes ();
    2239              :         }
    2240              :       else if (code == CALL)
    2241     26736198 :         has_call = true;
    2242              :     }
    2243    926596682 : }
    2244              : 
    2245              : /* Try to add a description of instruction pattern PAT to this object,
    2246              :    stopping once the REF_END limit has been reached.  */
    2247              : 
    2248              : void
    2249    727500702 : rtx_properties::try_to_add_pattern (const_rtx pat)
    2250              : {
    2251    779968130 :   switch (GET_CODE (pat))
    2252              :     {
    2253            0 :     case COND_EXEC:
    2254            0 :       try_to_add_src (COND_EXEC_TEST (pat));
    2255            0 :       try_to_add_pattern (COND_EXEC_CODE (pat));
    2256            0 :       break;
    2257              : 
    2258     52467428 :     case PARALLEL:
    2259     52467428 :       {
    2260     52467428 :         int last = XVECLEN (pat, 0) - 1;
    2261    108742544 :         for (int i = 0; i < last; ++i)
    2262     56275116 :           try_to_add_pattern (XVECEXP (pat, 0, i));
    2263     52467428 :         try_to_add_pattern (XVECEXP (pat, 0, last));
    2264     52467428 :         break;
    2265              :       }
    2266              : 
    2267       228718 :     case ASM_OPERANDS:
    2268       299078 :       for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (pat); i < len; ++i)
    2269        70360 :         try_to_add_src (ASM_OPERANDS_INPUT (pat, i));
    2270              :       break;
    2271              : 
    2272     52452280 :     case CLOBBER:
    2273     52452280 :       try_to_add_dest (XEXP (pat, 0), rtx_obj_flags::IS_CLOBBER);
    2274     52452280 :       break;
    2275              : 
    2276    361648772 :     case SET:
    2277    361648772 :       try_to_add_dest (SET_DEST (pat));
    2278    361648772 :       try_to_add_src (SET_SRC (pat));
    2279    361648772 :       break;
    2280              : 
    2281    313170932 :     default:
    2282              :       /* All the other possibilities never store and can use a normal
    2283              :          rtx walk.  This includes:
    2284              : 
    2285              :          - USE
    2286              :          - TRAP_IF
    2287              :          - PREFETCH
    2288              :          - UNSPEC
    2289              :          - UNSPEC_VOLATILE.  */
    2290    313170932 :       try_to_add_src (pat);
    2291    313170932 :       break;
    2292              :     }
    2293    727500702 : }
    2294              : 
    2295              : /* Try to add a description of INSN to this object, stopping once
    2296              :    the REF_END limit has been reached.  INCLUDE_NOTES is true if the
    2297              :    description should include REG_EQUAL and REG_EQUIV notes; all such
    2298              :    references will then be marked with rtx_obj_flags::IN_NOTE.
    2299              : 
    2300              :    For calls, this description includes all accesses in
    2301              :    CALL_INSN_FUNCTION_USAGE.  It also include all implicit accesses
    2302              :    to global registers by the target function.  However, it does not
    2303              :    include clobbers performed by the target function; callers that want
    2304              :    this information should instead use the function_abi interface.  */
    2305              : 
    2306              : void
    2307    653956391 : rtx_properties::try_to_add_insn (const rtx_insn *insn, bool include_notes)
    2308              : {
    2309    653956391 :   if (CALL_P (insn))
    2310              :     {
    2311              :       /* Non-const functions can read from global registers.  Impure
    2312              :          functions can also set them.
    2313              : 
    2314              :          Adding the global registers first removes a situation in which
    2315              :          a fixed-form clobber of register R could come before a real set
    2316              :          of register R.  */
    2317     26719433 :       if (!hard_reg_set_empty_p (global_reg_set)
    2318     26719433 :           && !RTL_CONST_CALL_P (insn))
    2319              :         {
    2320          514 :           unsigned int flags = rtx_obj_flags::IS_READ;
    2321          514 :           if (!RTL_PURE_CALL_P (insn))
    2322          470 :             flags |= rtx_obj_flags::IS_WRITE;
    2323        47802 :           for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
    2324              :             /* As a special case, the stack pointer is invariant across calls
    2325              :                even if it has been marked global; see the corresponding
    2326              :                handling in df_get_call_refs.  */
    2327        47288 :             if (regno != STACK_POINTER_REGNUM
    2328        46774 :                 && global_regs[regno]
    2329          436 :                 && ref_iter != ref_end)
    2330          436 :               *ref_iter++ = rtx_obj_reference (regno, flags,
    2331          436 :                                                reg_raw_mode[regno], 0);
    2332              :         }
    2333              :       /* Untyped calls implicitly set all function value registers.
    2334              :          Again, we add them first in case the main pattern contains
    2335              :          a fixed-form clobber.  */
    2336     26719433 :       if (find_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX))
    2337       209994 :         for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
    2338       207736 :           if (targetm.calls.function_value_regno_p (regno)
    2339       207736 :               && ref_iter != ref_end)
    2340        18064 :             *ref_iter++ = rtx_obj_reference (regno, rtx_obj_flags::IS_WRITE,
    2341        18064 :                                              reg_raw_mode[regno], 0);
    2342     26719433 :       if (ref_iter != ref_end && !RTL_CONST_CALL_P (insn))
    2343              :         {
    2344     25827369 :           auto mem_flags = rtx_obj_flags::IS_READ;
    2345     25827369 :           if (!RTL_PURE_CALL_P (insn))
    2346     24166001 :             mem_flags |= rtx_obj_flags::IS_WRITE;
    2347     25827369 :           *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, BLKmode);
    2348              :         }
    2349     26719433 :       try_to_add_pattern (PATTERN (insn));
    2350     82740720 :       for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); link;
    2351     56021287 :            link = XEXP (link, 1))
    2352              :         {
    2353     56021287 :           rtx x = XEXP (link, 0);
    2354     56021287 :           if (GET_CODE (x) == CLOBBER)
    2355      2748196 :             try_to_add_dest (XEXP (x, 0), rtx_obj_flags::IS_CLOBBER);
    2356     53273091 :           else if (GET_CODE (x) == USE)
    2357     52880829 :             try_to_add_src (XEXP (x, 0));
    2358              :         }
    2359              :     }
    2360              :   else
    2361    627236958 :     try_to_add_pattern (PATTERN (insn));
    2362              : 
    2363    653956391 :   if (include_notes)
    2364    991908359 :     for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
    2365    337952392 :       if (REG_NOTE_KIND (note) == REG_EQUAL
    2366    337952392 :           || REG_NOTE_KIND (note) == REG_EQUIV)
    2367     22394980 :         try_to_add_note (XEXP (note, 0));
    2368    653956391 : }
    2369              : 
    2370              : /* Grow the storage by a bit while keeping the contents of the first
    2371              :    START elements.  */
    2372              : 
    2373              : void
    2374        28520 : vec_rtx_properties_base::grow (ptrdiff_t start)
    2375              : {
    2376              :   /* The same heuristic that vec uses.  */
    2377        28520 :   ptrdiff_t new_elems = (ref_end - ref_begin) * 3 / 2;
    2378        28520 :   if (ref_begin == m_storage)
    2379              :     {
    2380        24416 :       ref_begin = XNEWVEC (rtx_obj_reference, new_elems);
    2381        24416 :       if (start)
    2382            0 :         memcpy (ref_begin, m_storage, start * sizeof (rtx_obj_reference));
    2383              :     }
    2384              :   else
    2385         4104 :     ref_begin = reinterpret_cast<rtx_obj_reference *>
    2386         4104 :       (xrealloc (ref_begin, new_elems * sizeof (rtx_obj_reference)));
    2387        28520 :   ref_iter = ref_begin + start;
    2388        28520 :   ref_end = ref_begin + new_elems;
    2389        28520 : }
    2390              : 
    2391              : /* Return true if X's old contents don't survive after INSN.
    2392              :    This will be true if X is a register and X dies in INSN or because
    2393              :    INSN entirely sets X.
    2394              : 
    2395              :    "Entirely set" means set directly and not through a SUBREG, or
    2396              :    ZERO_EXTRACT, so no trace of the old contents remains.
    2397              :    Likewise, REG_INC does not count.
    2398              : 
    2399              :    REG may be a hard or pseudo reg.  Renumbering is not taken into account,
    2400              :    but for this use that makes no difference, since regs don't overlap
    2401              :    during their lifetimes.  Therefore, this function may be used
    2402              :    at any time after deaths have been computed.
    2403              : 
    2404              :    If REG is a hard reg that occupies multiple machine registers, this
    2405              :    function will only return true if each of those registers will be replaced
    2406              :    by INSN.  */
    2407              : 
    2408              : bool
    2409    116724439 : dead_or_set_p (const rtx_insn *insn, const_rtx x)
    2410              : {
    2411    116724439 :   unsigned int regno, end_regno;
    2412    116724439 :   unsigned int i;
    2413              : 
    2414    116724439 :   gcc_assert (REG_P (x));
    2415              : 
    2416    116724439 :   regno = REGNO (x);
    2417    116724439 :   end_regno = END_REGNO (x);
    2418    206688436 :   for (i = regno; i < end_regno; i++)
    2419    116725481 :     if (! dead_or_set_regno_p (insn, i))
    2420              :       return false;
    2421              : 
    2422              :   return true;
    2423              : }
    2424              : 
    2425              : /* Return TRUE iff DEST is a register or subreg of a register, is a
    2426              :    complete rather than read-modify-write destination, and contains
    2427              :    register TEST_REGNO.  */
    2428              : 
    2429              : static bool
    2430    207448551 : covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
    2431              : {
    2432    207448551 :   unsigned int regno, endregno;
    2433              : 
    2434    207448551 :   if (GET_CODE (dest) == SUBREG && !read_modify_subreg_p (dest))
    2435       502776 :     dest = SUBREG_REG (dest);
    2436              : 
    2437    207448551 :   if (!REG_P (dest))
    2438              :     return false;
    2439              : 
    2440    198661495 :   regno = REGNO (dest);
    2441    198661495 :   endregno = END_REGNO (dest);
    2442    198661495 :   return (test_regno >= regno && test_regno < endregno);
    2443              : }
    2444              : 
    2445              : /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
    2446              :    any member matches the covers_regno_no_parallel_p criteria.  */
    2447              : 
    2448              : static bool
    2449     97068251 : covers_regno_p (const_rtx dest, unsigned int test_regno)
    2450              : {
    2451     97068251 :   if (GET_CODE (dest) == PARALLEL)
    2452              :     {
    2453              :       /* Some targets place small structures in registers for return
    2454              :          values of functions, and those registers are wrapped in
    2455              :          PARALLELs that we may see as the destination of a SET.  */
    2456          304 :       int i;
    2457              : 
    2458          822 :       for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
    2459              :         {
    2460          518 :           rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
    2461          518 :           if (inner != NULL_RTX
    2462          518 :               && covers_regno_no_parallel_p (inner, test_regno))
    2463              :             return true;
    2464              :         }
    2465              : 
    2466              :       return false;
    2467              :     }
    2468              :   else
    2469     97067947 :     return covers_regno_no_parallel_p (dest, test_regno);
    2470              : }
    2471              : 
    2472              : /* Utility function for dead_or_set_p to check an individual register. */
    2473              : 
    2474              : bool
    2475    118929713 : dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno)
    2476              : {
    2477    118929713 :   const_rtx pattern;
    2478              : 
    2479              :   /* See if there is a death note for something that includes TEST_REGNO.  */
    2480    118929713 :   if (find_regno_note (insn, REG_DEAD, test_regno))
    2481              :     return true;
    2482              : 
    2483     76898802 :   if (CALL_P (insn)
    2484     76898802 :       && find_regno_fusage (insn, CLOBBER, test_regno))
    2485              :     return true;
    2486              : 
    2487     76884714 :   pattern = PATTERN (insn);
    2488              : 
    2489              :   /* If a COND_EXEC is not executed, the value survives.  */
    2490     76884714 :   if (GET_CODE (pattern) == COND_EXEC)
    2491              :     return false;
    2492              : 
    2493     76884714 :   if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER)
    2494     56344819 :     return covers_regno_p (SET_DEST (pattern), test_regno);
    2495     20539895 :   else if (GET_CODE (pattern) == PARALLEL)
    2496              :     {
    2497     20329692 :       int i;
    2498              : 
    2499     47154355 :       for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
    2500              :         {
    2501     40971409 :           rtx body = XVECEXP (pattern, 0, i);
    2502              : 
    2503     40971409 :           if (GET_CODE (body) == COND_EXEC)
    2504            0 :             body = COND_EXEC_CODE (body);
    2505              : 
    2506     20586726 :           if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
    2507     61310158 :               && covers_regno_p (SET_DEST (body), test_regno))
    2508              :             return true;
    2509              :         }
    2510              :     }
    2511              : 
    2512              :   return false;
    2513              : }
    2514              : 
    2515              : /* Return the reg-note of kind KIND in insn INSN, if there is one.
    2516              :    If DATUM is nonzero, look for one whose datum is DATUM.  */
    2517              : 
    2518              : rtx
    2519   8184068333 : find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
    2520              : {
    2521   8184068333 :   rtx link;
    2522              : 
    2523   8184068333 :   gcc_checking_assert (insn);
    2524              : 
    2525              :   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
    2526   8184068333 :   if (! INSN_P (insn))
    2527              :     return 0;
    2528   8068466734 :   if (datum == 0)
    2529              :     {
    2530  16080627599 :       for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    2531   9586514763 :         if (REG_NOTE_KIND (link) == kind)
    2532              :           return link;
    2533              :       return 0;
    2534              :     }
    2535              : 
    2536    690025080 :   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    2537    394841616 :     if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
    2538              :       return link;
    2539              :   return 0;
    2540              : }
    2541              : 
    2542              : /* Return the reg-note of kind KIND in insn INSN which applies to register
    2543              :    number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
    2544              :    the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
    2545              :    it might be the case that the note overlaps REGNO.  */
    2546              : 
    2547              : rtx
    2548    392420402 : find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
    2549              : {
    2550    392420402 :   rtx link;
    2551              : 
    2552              :   /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
    2553    392420402 :   if (! INSN_P (insn))
    2554              :     return 0;
    2555              : 
    2556    582150609 :   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    2557    366597975 :     if (REG_NOTE_KIND (link) == kind
    2558              :         /* Verify that it is a register, so that scratch and MEM won't cause a
    2559              :            problem here.  */
    2560    257758936 :         && REG_P (XEXP (link, 0))
    2561    257758936 :         && REGNO (XEXP (link, 0)) <= regno
    2562    573542113 :         && END_REGNO (XEXP (link, 0)) > regno)
    2563              :       return link;
    2564              :   return 0;
    2565              : }
    2566              : 
    2567              : /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
    2568              :    has such a note.  */
    2569              : 
    2570              : rtx
    2571   1757897172 : find_reg_equal_equiv_note (const_rtx insn)
    2572              : {
    2573   1757897172 :   rtx link;
    2574              : 
    2575   1757897172 :   if (!INSN_P (insn))
    2576              :     return 0;
    2577              : 
    2578   3037531416 :   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    2579   1378316609 :     if (REG_NOTE_KIND (link) == REG_EQUAL
    2580   1378316609 :         || REG_NOTE_KIND (link) == REG_EQUIV)
    2581              :       {
    2582              :         /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
    2583              :            insns that have multiple sets.  Checking single_set to
    2584              :            make sure of this is not the proper check, as explained
    2585              :            in the comment in set_unique_reg_note.
    2586              : 
    2587              :            This should be changed into an assert.  */
    2588     92379590 :         if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
    2589              :           return 0;
    2590     92379590 :         return link;
    2591              :       }
    2592              :   return NULL;
    2593              : }
    2594              : 
    2595              : /* Check whether INSN is a single_set whose source is known to be
    2596              :    equivalent to a constant.  Return that constant if so, otherwise
    2597              :    return null.  */
    2598              : 
    2599              : rtx
    2600      2219836 : find_constant_src (const rtx_insn *insn)
    2601              : {
    2602      2219836 :   rtx note, set, x;
    2603              : 
    2604      2219836 :   set = single_set (insn);
    2605      2219836 :   if (set)
    2606              :     {
    2607      2219836 :       x = avoid_constant_pool_reference (SET_SRC (set));
    2608      2219836 :       if (CONSTANT_P (x))
    2609              :         return x;
    2610              :     }
    2611              : 
    2612      1548789 :   note = find_reg_equal_equiv_note (insn);
    2613      1548789 :   if (note && CONSTANT_P (XEXP (note, 0)))
    2614          610 :     return XEXP (note, 0);
    2615              : 
    2616              :   return NULL_RTX;
    2617              : }
    2618              : 
    2619              : /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
    2620              :    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
    2621              : 
    2622              : bool
    2623     88105068 : find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
    2624              : {
    2625              :   /* If it's not a CALL_INSN, it can't possibly have a
    2626              :      CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
    2627     88105068 :   if (!CALL_P (insn))
    2628              :     return false;
    2629              : 
    2630     88105068 :   gcc_assert (datum);
    2631              : 
    2632     88105068 :   if (!REG_P (datum))
    2633              :     {
    2634        33604 :       rtx link;
    2635              : 
    2636        33604 :       for (link = CALL_INSN_FUNCTION_USAGE (insn);
    2637        69500 :            link;
    2638        35896 :            link = XEXP (link, 1))
    2639        35896 :         if (GET_CODE (XEXP (link, 0)) == code
    2640        35896 :             && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
    2641              :           return true;
    2642              :     }
    2643              :   else
    2644              :     {
    2645     88071464 :       unsigned int regno = REGNO (datum);
    2646              : 
    2647              :       /* CALL_INSN_FUNCTION_USAGE information cannot contain references
    2648              :          to pseudo registers, so don't bother checking.  */
    2649              : 
    2650     88071464 :       if (regno < FIRST_PSEUDO_REGISTER)
    2651              :         {
    2652     81120236 :           unsigned int end_regno = END_REGNO (datum);
    2653     81120236 :           unsigned int i;
    2654              : 
    2655    142267710 :           for (i = regno; i < end_regno; i++)
    2656     81120236 :             if (find_regno_fusage (insn, code, i))
    2657              :               return true;
    2658              :         }
    2659              :     }
    2660              : 
    2661              :   return false;
    2662              : }
    2663              : 
    2664              : /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
    2665              :    in the CALL_INSN_FUNCTION_USAGE information of INSN.  */
    2666              : 
    2667              : bool
    2668     81343471 : find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
    2669              : {
    2670     81343471 :   rtx link;
    2671              : 
    2672              :   /* CALL_INSN_FUNCTION_USAGE information cannot contain references
    2673              :      to pseudo registers, so don't bother checking.  */
    2674              : 
    2675     81343471 :   if (regno >= FIRST_PSEUDO_REGISTER
    2676     81288447 :       || !CALL_P (insn) )
    2677              :     return false;
    2678              : 
    2679    241974779 :   for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    2680              :     {
    2681    180674736 :       rtx op, reg;
    2682              : 
    2683    180674736 :       if (GET_CODE (op = XEXP (link, 0)) == code
    2684     51432030 :           && REG_P (reg = XEXP (op, 0))
    2685     51423409 :           && REGNO (reg) <= regno
    2686    214835641 :           && END_REGNO (reg) > regno)
    2687              :         return true;
    2688              :     }
    2689              : 
    2690              :   return false;
    2691              : }
    2692              : 
    2693              : 
    2694              : /* Return true if KIND is an integer REG_NOTE.  */
    2695              : 
    2696              : static bool
    2697            0 : int_reg_note_p (enum reg_note kind)
    2698              : {
    2699            0 :   return kind == REG_BR_PROB;
    2700              : }
    2701              : 
    2702              : /* Allocate a register note with kind KIND and datum DATUM.  LIST is
    2703              :    stored as the pointer to the next register note.  */
    2704              : 
    2705              : rtx
    2706    758175497 : alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
    2707              : {
    2708    758175497 :   rtx note;
    2709              : 
    2710    758175497 :   gcc_checking_assert (!int_reg_note_p (kind));
    2711    758175497 :   switch (kind)
    2712              :     {
    2713        28364 :     case REG_LABEL_TARGET:
    2714        28364 :     case REG_LABEL_OPERAND:
    2715        28364 :     case REG_TM:
    2716              :       /* These types of register notes use an INSN_LIST rather than an
    2717              :          EXPR_LIST, so that copying is done right and dumps look
    2718              :          better.  */
    2719        28364 :       note = alloc_INSN_LIST (datum, list);
    2720        28364 :       PUT_REG_NOTE_KIND (note, kind);
    2721        28364 :       break;
    2722              : 
    2723    758147133 :     default:
    2724    758147133 :       note = alloc_EXPR_LIST (kind, datum, list);
    2725    758147133 :       break;
    2726              :     }
    2727              : 
    2728    758175497 :   return note;
    2729              : }
    2730              : 
    2731              : /* Add register note with kind KIND and datum DATUM to INSN.  */
    2732              : 
    2733              : void
    2734    751355117 : add_reg_note (rtx insn, enum reg_note kind, rtx datum)
    2735              : {
    2736    751355117 :   REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
    2737    751355117 : }
    2738              : 
    2739              : /* Add an integer register note with kind KIND and datum DATUM to INSN.  */
    2740              : 
    2741              : void
    2742      5269025 : add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
    2743              : {
    2744      5269025 :   gcc_checking_assert (int_reg_note_p (kind));
    2745      5269025 :   REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
    2746              :                                        datum, REG_NOTES (insn));
    2747      5269025 : }
    2748              : 
    2749              : /* Add a REG_ARGS_SIZE note to INSN with value VALUE.  */
    2750              : 
    2751              : void
    2752      5478570 : add_args_size_note (rtx_insn *insn, poly_int64 value)
    2753              : {
    2754      5478570 :   gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX));
    2755      8969777 :   add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode));
    2756      5478570 : }
    2757              : 
    2758              : /* Add a register note like NOTE to INSN.  */
    2759              : 
    2760              : void
    2761            0 : add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
    2762              : {
    2763            0 :   if (GET_CODE (note) == INT_LIST)
    2764            0 :     add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
    2765              :   else
    2766            0 :     add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
    2767            0 : }
    2768              : 
    2769              : /* Duplicate NOTE and return the copy.  */
    2770              : rtx
    2771      2273035 : duplicate_reg_note (rtx note)
    2772              : {
    2773      2273035 :   reg_note kind = REG_NOTE_KIND (note);
    2774              : 
    2775      2273035 :   if (GET_CODE (note) == INT_LIST)
    2776       296547 :     return gen_rtx_INT_LIST ((machine_mode) kind, XINT (note, 0), NULL_RTX);
    2777      1976488 :   else if (GET_CODE (note) == EXPR_LIST)
    2778      1976488 :     return alloc_reg_note (kind, copy_insn_1 (XEXP (note, 0)), NULL_RTX);
    2779              :   else
    2780            0 :     return alloc_reg_note (kind, XEXP (note, 0), NULL_RTX);
    2781              : }
    2782              : 
    2783              : /* Remove register note NOTE from the REG_NOTES of INSN.  */
    2784              : 
    2785              : void
    2786      8261995 : remove_note (rtx_insn *insn, const_rtx note)
    2787              : {
    2788      8261995 :   rtx link;
    2789              : 
    2790      8261995 :   if (note == NULL_RTX)
    2791              :     return;
    2792              : 
    2793      7441382 :   if (REG_NOTES (insn) == note)
    2794      6950637 :     REG_NOTES (insn) = XEXP (note, 1);
    2795              :   else
    2796       981955 :     for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    2797       981955 :       if (XEXP (link, 1) == note)
    2798              :         {
    2799       490745 :           XEXP (link, 1) = XEXP (note, 1);
    2800       490745 :           break;
    2801              :         }
    2802              : 
    2803      7441382 :   switch (REG_NOTE_KIND (note))
    2804              :     {
    2805      2650295 :     case REG_EQUAL:
    2806      2650295 :     case REG_EQUIV:
    2807      2650295 :       df_notes_rescan (insn);
    2808      2650295 :       break;
    2809              :     default:
    2810              :       break;
    2811              :     }
    2812              : }
    2813              : 
    2814              : /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.
    2815              :    If NO_RESCAN is false and any notes were removed, call
    2816              :    df_notes_rescan.  Return true if any note has been removed.  */
    2817              : 
    2818              : bool
    2819        33780 : remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan)
    2820              : {
    2821        33780 :   rtx *loc;
    2822        33780 :   bool ret = false;
    2823              : 
    2824        33780 :   loc = &REG_NOTES (insn);
    2825        41109 :   while (*loc)
    2826              :     {
    2827         7329 :       enum reg_note kind = REG_NOTE_KIND (*loc);
    2828         7329 :       if (kind == REG_EQUAL || kind == REG_EQUIV)
    2829              :         {
    2830          382 :           *loc = XEXP (*loc, 1);
    2831          382 :           ret = true;
    2832              :         }
    2833              :       else
    2834         6947 :         loc = &XEXP (*loc, 1);
    2835              :     }
    2836        33780 :   if (ret && !no_rescan)
    2837          382 :     df_notes_rescan (insn);
    2838        33780 :   return ret;
    2839              : }
    2840              : 
    2841              : /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */
    2842              : 
    2843              : void
    2844      3917459 : remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
    2845              : {
    2846      3917459 :   df_ref eq_use;
    2847              : 
    2848      3917459 :   if (!df)
    2849              :     return;
    2850              : 
    2851              :   /* This loop is a little tricky.  We cannot just go down the chain because
    2852              :      it is being modified by some actions in the loop.  So we just iterate
    2853              :      over the head.  We plan to drain the list anyway.  */
    2854      4054359 :   while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
    2855              :     {
    2856       136900 :       rtx_insn *insn = DF_REF_INSN (eq_use);
    2857       136900 :       rtx note = find_reg_equal_equiv_note (insn);
    2858              : 
    2859              :       /* This assert is generally triggered when someone deletes a REG_EQUAL
    2860              :          or REG_EQUIV note by hacking the list manually rather than calling
    2861              :          remove_note.  */
    2862       136900 :       gcc_assert (note);
    2863              : 
    2864       136900 :       remove_note (insn, note);
    2865              :     }
    2866              : }
    2867              : 
    2868              : /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
    2869              :    return 1 if it is found.  A simple equality test is used to determine if
    2870              :    NODE matches.  */
    2871              : 
    2872              : bool
    2873           26 : in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
    2874              : {
    2875           26 :   const_rtx x;
    2876              : 
    2877           26 :   for (x = listp; x; x = XEXP (x, 1))
    2878            0 :     if (node == XEXP (x, 0))
    2879              :       return true;
    2880              : 
    2881              :   return false;
    2882              : }
    2883              : 
    2884              : /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
    2885              :    remove that entry from the list if it is found.
    2886              : 
    2887              :    A simple equality test is used to determine if NODE matches.  */
    2888              : 
    2889              : void
    2890      7662673 : remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
    2891              : {
    2892      7662673 :   rtx_insn_list *temp = *listp;
    2893      7662673 :   rtx_insn_list *prev = NULL;
    2894              : 
    2895      7679134 :   while (temp)
    2896              :     {
    2897        16487 :       if (node == temp->insn ())
    2898              :         {
    2899              :           /* Splice the node out of the list.  */
    2900           26 :           if (prev)
    2901            0 :             XEXP (prev, 1) = temp->next ();
    2902              :           else
    2903           26 :             *listp = temp->next ();
    2904              : 
    2905           26 :           gcc_checking_assert (!in_insn_list_p (temp->next (), node));
    2906              :           return;
    2907              :         }
    2908              : 
    2909        16461 :       prev = temp;
    2910        16461 :       temp = temp->next ();
    2911              :     }
    2912              : }
    2913              : 
    2914              : /* Return true if X contains any volatile instructions.  These are instructions
    2915              :    which may cause unpredictable machine state instructions, and thus no
    2916              :    instructions or register uses should be moved or combined across them.
    2917              :    This includes only volatile asms and UNSPEC_VOLATILE instructions.  */
    2918              : 
    2919              : bool
    2920    696260085 : volatile_insn_p (const_rtx x)
    2921              : {
    2922    696260085 :   const RTX_CODE code = GET_CODE (x);
    2923    696260085 :   switch (code)
    2924              :     {
    2925              :     case LABEL_REF:
    2926              :     case SYMBOL_REF:
    2927              :     case CONST:
    2928              :     CASE_CONST_ANY:
    2929              :     case PC:
    2930              :     case REG:
    2931              :     case SCRATCH:
    2932              :     case CLOBBER:
    2933              :     case ADDR_VEC:
    2934              :     case ADDR_DIFF_VEC:
    2935              :     case CALL:
    2936              :     case MEM:
    2937              :       return false;
    2938              : 
    2939              :     case UNSPEC_VOLATILE:
    2940              :       return true;
    2941              : 
    2942       136775 :     case ASM_INPUT:
    2943       136775 :     case ASM_OPERANDS:
    2944       136775 :       if (MEM_VOLATILE_P (x))
    2945              :         return true;
    2946              : 
    2947    324645457 :     default:
    2948    324645457 :       break;
    2949              :     }
    2950              : 
    2951              :   /* Recursively scan the operands of this expression.  */
    2952              : 
    2953    324645457 :   {
    2954    324645457 :     const char *const fmt = GET_RTX_FORMAT (code);
    2955    324645457 :     int i;
    2956              : 
    2957    889885939 :     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    2958              :       {
    2959    565540070 :         if (fmt[i] == 'e')
    2960              :           {
    2961    441188678 :             if (volatile_insn_p (XEXP (x, i)))
    2962              :               return true;
    2963              :           }
    2964    124351392 :         else if (fmt[i] == 'E')
    2965              :           {
    2966              :             int j;
    2967     70719149 :             for (j = 0; j < XVECLEN (x, i); j++)
    2968     48681507 :               if (volatile_insn_p (XVECEXP (x, i, j)))
    2969              :                 return true;
    2970              :           }
    2971              :       }
    2972              :   }
    2973              :   return false;
    2974              : }
    2975              : 
    2976              : /* Return true if X contains any volatile memory references
    2977              :    UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */
    2978              : 
    2979              : bool
    2980   5183180262 : volatile_refs_p (const_rtx x)
    2981              : {
    2982   5183180262 :   const RTX_CODE code = GET_CODE (x);
    2983   5183180262 :   switch (code)
    2984              :     {
    2985              :     case LABEL_REF:
    2986              :     case SYMBOL_REF:
    2987              :     case CONST:
    2988              :     CASE_CONST_ANY:
    2989              :     case PC:
    2990              :     case REG:
    2991              :     case SCRATCH:
    2992              :     case CLOBBER:
    2993              :     case ADDR_VEC:
    2994              :     case ADDR_DIFF_VEC:
    2995              :       return false;
    2996              : 
    2997              :     case UNSPEC_VOLATILE:
    2998              :       return true;
    2999              : 
    3000    393235549 :     case MEM:
    3001    393235549 :     case ASM_INPUT:
    3002    393235549 :     case ASM_OPERANDS:
    3003    393235549 :       if (MEM_VOLATILE_P (x))
    3004              :         return true;
    3005              : 
    3006   2206960225 :     default:
    3007   2206960225 :       break;
    3008              :     }
    3009              : 
    3010              :   /* Recursively scan the operands of this expression.  */
    3011              : 
    3012   2206960225 :   {
    3013   2206960225 :     const char *const fmt = GET_RTX_FORMAT (code);
    3014   2206960225 :     int i;
    3015              : 
    3016   6488034414 :     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    3017              :       {
    3018   4297601105 :         if (fmt[i] == 'e')
    3019              :           {
    3020   3823092578 :             if (volatile_refs_p (XEXP (x, i)))
    3021              :               return true;
    3022              :           }
    3023    474508527 :         else if (fmt[i] == 'E')
    3024              :           {
    3025              :             int j;
    3026    161633195 :             for (j = 0; j < XVECLEN (x, i); j++)
    3027    112168880 :               if (volatile_refs_p (XVECEXP (x, i, j)))
    3028              :                 return true;
    3029              :           }
    3030              :       }
    3031              :   }
    3032              :   return false;
    3033              : }
    3034              : 
    3035              : /* Similar to above, except that it also rejects register pre- and post-
    3036              :    incrementing.  */
    3037              : 
    3038              : bool
    3039   5336172388 : side_effects_p (const_rtx x)
    3040              : {
    3041   5336172388 :   const RTX_CODE code = GET_CODE (x);
    3042   5336172388 :   switch (code)
    3043              :     {
    3044              :     case LABEL_REF:
    3045              :     case SYMBOL_REF:
    3046              :     case CONST:
    3047              :     CASE_CONST_ANY:
    3048              :     case PC:
    3049              :     case REG:
    3050              :     case SCRATCH:
    3051              :     case ADDR_VEC:
    3052              :     case ADDR_DIFF_VEC:
    3053              :     case VAR_LOCATION:
    3054              :       return false;
    3055              : 
    3056     62386957 :     case CLOBBER:
    3057              :       /* Reject CLOBBER with a non-VOID mode.  These are made by combine.cc
    3058              :          when some combination can't be done.  If we see one, don't think
    3059              :          that we can simplify the expression.  */
    3060     62386957 :       return (GET_MODE (x) != VOIDmode);
    3061              : 
    3062              :     case PRE_INC:
    3063              :     case PRE_DEC:
    3064              :     case POST_INC:
    3065              :     case POST_DEC:
    3066              :     case PRE_MODIFY:
    3067              :     case POST_MODIFY:
    3068              :     case CALL:
    3069              :     case UNSPEC_VOLATILE:
    3070              :       return true;
    3071              : 
    3072    326962680 :     case MEM:
    3073    326962680 :     case ASM_INPUT:
    3074    326962680 :     case ASM_OPERANDS:
    3075    326962680 :       if (MEM_VOLATILE_P (x))
    3076              :         return true;
    3077              : 
    3078   1891360270 :     default:
    3079   1891360270 :       break;
    3080              :     }
    3081              : 
    3082              :   /* Recursively scan the operands of this expression.  */
    3083              : 
    3084   1891360270 :   {
    3085   1891360270 :     const char *fmt = GET_RTX_FORMAT (code);
    3086   1891360270 :     int i;
    3087              : 
    3088   5625776490 :     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    3089              :       {
    3090   3784185171 :         if (fmt[i] == 'e')
    3091              :           {
    3092   3347990352 :             if (side_effects_p (XEXP (x, i)))
    3093              :               return true;
    3094              :           }
    3095    436194819 :         else if (fmt[i] == 'E')
    3096              :           {
    3097              :             int j;
    3098    235791854 :             for (j = 0; j < XVECLEN (x, i); j++)
    3099    159234632 :               if (side_effects_p (XVECEXP (x, i, j)))
    3100              :                 return true;
    3101              :           }
    3102              :       }
    3103              :   }
    3104              :   return false;
    3105              : }
    3106              : 
    3107              : /* Return true if evaluating rtx X might cause a trap.
    3108              :    FLAGS controls how to consider MEMs.  A true means the context
    3109              :    of the access may have changed from the original, such that the
    3110              :    address may have become invalid.  */
    3111              : 
    3112              : bool
    3113   8989661445 : may_trap_p_1 (const_rtx x, unsigned flags)
    3114              : {
    3115   8989661445 :   int i;
    3116   8989661445 :   enum rtx_code code;
    3117   8989661445 :   const char *fmt;
    3118              : 
    3119              :   /* We make no distinction currently, but this function is part of
    3120              :      the internal target-hooks ABI so we keep the parameter as
    3121              :      "unsigned flags".  */
    3122   8989661445 :   bool code_changed = flags != 0;
    3123              : 
    3124   8989661445 :   if (x == 0)
    3125              :     return false;
    3126   8989659559 :   code = GET_CODE (x);
    3127   8989659559 :   switch (code)
    3128              :     {
    3129              :       /* Handle these cases quickly.  */
    3130              :     CASE_CONST_ANY:
    3131              :     case SYMBOL_REF:
    3132              :     case LABEL_REF:
    3133              :     case CONST:
    3134              :     case PC:
    3135              :     case REG:
    3136              :     case SCRATCH:
    3137              :       return false;
    3138              : 
    3139      5835718 :     case UNSPEC:
    3140      5835718 :       return targetm.unspec_may_trap_p (x, flags);
    3141              : 
    3142              :     case UNSPEC_VOLATILE:
    3143              :     case ASM_INPUT:
    3144              :     case TRAP_IF:
    3145              :       return true;
    3146              : 
    3147       188382 :     case ASM_OPERANDS:
    3148       188382 :       return MEM_VOLATILE_P (x);
    3149              : 
    3150              :       /* Memory ref can trap unless it's a static var or a stack slot.  */
    3151   1069722776 :     case MEM:
    3152              :       /* Recognize specific pattern of stack checking probes.  */
    3153   1069722776 :       if (flag_stack_check
    3154         7726 :           && MEM_VOLATILE_P (x)
    3155   1069723576 :           && XEXP (x, 0) == stack_pointer_rtx)
    3156              :         return true;
    3157   1069721977 :       if (/* MEM_NOTRAP_P only relates to the actual position of the memory
    3158              :              reference; moving it out of context such as when moving code
    3159              :              when optimizing, might cause its address to become invalid.  */
    3160              :           code_changed
    3161   1069721977 :           || !MEM_NOTRAP_P (x))
    3162              :         {
    3163    526922483 :           poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1;
    3164    451625727 :           return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
    3165    451625727 :                                         GET_MODE (x), code_changed);
    3166              :         }
    3167              : 
    3168              :       return false;
    3169              : 
    3170              :       /* Division by a non-constant might trap.  */
    3171       998764 :     case DIV:
    3172       998764 :     case MOD:
    3173       998764 :     case UDIV:
    3174       998764 :     case UMOD:
    3175       998764 :       if (HONOR_SNANS (x))
    3176              :         return true;
    3177       998097 :       if (FLOAT_MODE_P (GET_MODE (x)))
    3178       421005 :         return flag_trapping_math;
    3179       577092 :       if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
    3180              :         return true;
    3181        80473 :       if (GET_CODE (XEXP (x, 1)) == CONST_VECTOR)
    3182              :         {
    3183              :           /* For CONST_VECTOR, return 1 if any element is or might be zero.  */
    3184            0 :           unsigned int n_elts;
    3185            0 :           rtx op = XEXP (x, 1);
    3186            0 :           if (!GET_MODE_NUNITS (GET_MODE (op)).is_constant (&n_elts))
    3187              :             {
    3188              :               if (!CONST_VECTOR_DUPLICATE_P (op))
    3189    294316191 :                 return true;
    3190              :               for (unsigned i = 0; i < (unsigned int) XVECLEN (op, 0); i++)
    3191              :                 if (CONST_VECTOR_ENCODED_ELT (op, i) == const0_rtx)
    3192              :                   return true;
    3193              :             }
    3194              :           else
    3195            0 :             for (unsigned i = 0; i < n_elts; i++)
    3196            0 :               if (CONST_VECTOR_ELT (op, i) == const0_rtx)
    3197              :                 return true;
    3198              :         }
    3199              :       break;
    3200              : 
    3201              :     case EXPR_LIST:
    3202              :       /* An EXPR_LIST is used to represent a function call.  This
    3203              :          certainly may trap.  */
    3204              :       return true;
    3205              : 
    3206    185004388 :     case GE:
    3207    185004388 :     case GT:
    3208    185004388 :     case LE:
    3209    185004388 :     case LT:
    3210    185004388 :     case LTGT:
    3211    185004388 :     case COMPARE:
    3212              :     /* Treat min/max similar as comparisons.  */
    3213    185004388 :     case SMIN:
    3214    185004388 :     case SMAX:
    3215              :       /* Some floating point comparisons may trap.  */
    3216    185004388 :       if (!flag_trapping_math)
    3217              :         break;
    3218              :       /* ??? There is no machine independent way to check for tests that trap
    3219              :          when COMPARE is used, though many targets do make this distinction.
    3220              :          For instance, sparc uses CCFPE for compares which generate exceptions
    3221              :          and CCFP for compares which do not generate exceptions.  */
    3222    183390280 :       if (HONOR_NANS (x))
    3223              :         return true;
    3224              :       /* But often the compare has some CC mode, so check operand
    3225              :          modes as well.  */
    3226    183363762 :       if (HONOR_NANS (XEXP (x, 0))
    3227    183363762 :           || HONOR_NANS (XEXP (x, 1)))
    3228      2188311 :         return true;
    3229              :       break;
    3230              : 
    3231     28053179 :     case EQ:
    3232     28053179 :     case NE:
    3233     28053179 :       if (HONOR_SNANS (x))
    3234              :         return true;
    3235              :       /* Often comparison is CC mode, so check operand modes.  */
    3236     28053162 :       if (HONOR_SNANS (XEXP (x, 0))
    3237     28053162 :           || HONOR_SNANS (XEXP (x, 1)))
    3238            0 :         return true;
    3239              :       break;
    3240              : 
    3241       353837 :     case FIX:
    3242       353837 :     case UNSIGNED_FIX:
    3243              :       /* Conversion of floating point might trap.  */
    3244       353837 :       if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
    3245              :         return true;
    3246              :       break;
    3247              : 
    3248              :     case PARALLEL:
    3249              :     case NEG:
    3250              :     case ABS:
    3251              :     case SUBREG:
    3252              :     case VEC_MERGE:
    3253              :     case VEC_SELECT:
    3254              :     case VEC_CONCAT:
    3255              :     case VEC_DUPLICATE:
    3256              :       /* These operations don't trap even with floating point.  */
    3257              :       break;
    3258              : 
    3259   3059696117 :     default:
    3260              :       /* Any floating arithmetic may trap.  */
    3261   3059696117 :       if (FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
    3262              :         return true;
    3263              :     }
    3264              : 
    3265   3684854010 :   fmt = GET_RTX_FORMAT (code);
    3266   9919174101 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    3267              :     {
    3268   6484782326 :       if (fmt[i] == 'e')
    3269              :         {
    3270   6035009021 :           if (may_trap_p_1 (XEXP (x, i), flags))
    3271              :             return true;
    3272              :         }
    3273    449773305 :       else if (fmt[i] == 'E')
    3274              :         {
    3275              :           int j;
    3276    967929712 :           for (j = 0; j < XVECLEN (x, i); j++)
    3277    667101078 :             if (may_trap_p_1 (XVECEXP (x, i, j), flags))
    3278              :               return true;
    3279              :         }
    3280              :     }
    3281              :   return false;
    3282              : }
    3283              : 
    3284              : /* Return true if evaluating rtx X might cause a trap.  */
    3285              : 
    3286              : bool
    3287   2269625951 : may_trap_p (const_rtx x)
    3288              : {
    3289   2269625951 :   return may_trap_p_1 (x, 0);
    3290              : }
    3291              : 
    3292              : /* Same as above, but additionally return true if evaluating rtx X might
    3293              :    cause a fault.  We define a fault for the purpose of this function as a
    3294              :    erroneous execution condition that cannot be encountered during the normal
    3295              :    execution of a valid program; the typical example is an unaligned memory
    3296              :    access on a strict alignment machine.  The compiler guarantees that it
    3297              :    doesn't generate code that will fault from a valid program, but this
    3298              :    guarantee doesn't mean anything for individual instructions.  Consider
    3299              :    the following example:
    3300              : 
    3301              :       struct S { int d; union { char *cp; int *ip; }; };
    3302              : 
    3303              :       int foo(struct S *s)
    3304              :       {
    3305              :         if (s->d == 1)
    3306              :           return *s->ip;
    3307              :         else
    3308              :           return *s->cp;
    3309              :       }
    3310              : 
    3311              :    on a strict alignment machine.  In a valid program, foo will never be
    3312              :    invoked on a structure for which d is equal to 1 and the underlying
    3313              :    unique field of the union not aligned on a 4-byte boundary, but the
    3314              :    expression *s->ip might cause a fault if considered individually.
    3315              : 
    3316              :    At the RTL level, potentially problematic expressions will almost always
    3317              :    verify may_trap_p; for example, the above dereference can be emitted as
    3318              :    (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
    3319              :    However, suppose that foo is inlined in a caller that causes s->cp to
    3320              :    point to a local character variable and guarantees that s->d is not set
    3321              :    to 1; foo may have been effectively translated into pseudo-RTL as:
    3322              : 
    3323              :       if ((reg:SI) == 1)
    3324              :         (set (reg:SI) (mem:SI (%fp - 7)))
    3325              :       else
    3326              :         (set (reg:QI) (mem:QI (%fp - 7)))
    3327              : 
    3328              :    Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
    3329              :    memory reference to a stack slot, but it will certainly cause a fault
    3330              :    on a strict alignment machine.  */
    3331              : 
    3332              : bool
    3333     10562846 : may_trap_or_fault_p (const_rtx x)
    3334              : {
    3335     10562846 :   return may_trap_p_1 (x, 1);
    3336              : }
    3337              : 
    3338              : /* Replace any occurrence of FROM in X with TO.  The function does
    3339              :    not enter into CONST_DOUBLE for the replace.
    3340              : 
    3341              :    Note that copying is not done so X must not be shared unless all copies
    3342              :    are to be modified.
    3343              : 
    3344              :    ALL_REGS is true if we want to replace all REGs equal to FROM, not just
    3345              :    those pointer-equal ones.  */
    3346              : 
    3347              : rtx
    3348      8779030 : replace_rtx (rtx x, rtx from, rtx to, bool all_regs)
    3349              : {
    3350      8779030 :   int i, j;
    3351      8779030 :   const char *fmt;
    3352              : 
    3353      8779030 :   if (x == from)
    3354              :     return to;
    3355              : 
    3356              :   /* Allow this function to make replacements in EXPR_LISTs.  */
    3357      6375290 :   if (x == 0)
    3358              :     return 0;
    3359              : 
    3360      6375290 :   if (all_regs
    3361            0 :       && REG_P (x)
    3362            0 :       && REG_P (from)
    3363      6375290 :       && REGNO (x) == REGNO (from))
    3364              :     {
    3365            0 :       gcc_assert (GET_MODE (x) == GET_MODE (from));
    3366              :       return to;
    3367              :     }
    3368      6375290 :   else if (GET_CODE (x) == SUBREG)
    3369              :     {
    3370        48037 :       rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to, all_regs);
    3371              : 
    3372        48037 :       if (CONST_SCALAR_INT_P (new_rtx))
    3373              :         {
    3374            2 :           x = simplify_subreg (GET_MODE (x), new_rtx,
    3375            1 :                                GET_MODE (SUBREG_REG (x)),
    3376            1 :                                SUBREG_BYTE (x));
    3377            1 :           gcc_assert (x);
    3378              :         }
    3379              :       else
    3380        48036 :         SUBREG_REG (x) = new_rtx;
    3381              : 
    3382        48037 :       return x;
    3383              :     }
    3384      6327253 :   else if (GET_CODE (x) == ZERO_EXTEND)
    3385              :     {
    3386       192370 :       rtx new_rtx = replace_rtx (XEXP (x, 0), from, to, all_regs);
    3387              : 
    3388       192370 :       if (CONST_SCALAR_INT_P (new_rtx))
    3389              :         {
    3390            2 :           x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
    3391            1 :                                         new_rtx, GET_MODE (XEXP (x, 0)));
    3392            1 :           gcc_assert (x);
    3393              :         }
    3394              :       else
    3395       192369 :         XEXP (x, 0) = new_rtx;
    3396              : 
    3397       192370 :       return x;
    3398              :     }
    3399              : 
    3400      6134883 :   fmt = GET_RTX_FORMAT (GET_CODE (x));
    3401     15218937 :   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    3402              :     {
    3403      9084054 :       if (fmt[i] == 'e')
    3404      5934257 :         XEXP (x, i) = replace_rtx (XEXP (x, i), from, to, all_regs);
    3405      3149797 :       else if (fmt[i] == 'E')
    3406       136850 :         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    3407       113406 :           XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j),
    3408              :                                            from, to, all_regs);
    3409              :     }
    3410              : 
    3411              :   return x;
    3412              : }
    3413              : 
    3414              : /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL.  Also track
    3415              :    the change in LABEL_NUSES if UPDATE_LABEL_NUSES.  */
    3416              : 
    3417              : void
    3418        11170 : replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
    3419              : {
    3420              :   /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long.  */
    3421        11170 :   rtx x = *loc;
    3422        11170 :   if (JUMP_TABLE_DATA_P (x))
    3423              :     {
    3424           12 :       x = PATTERN (x);
    3425           12 :       rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
    3426           12 :       int len = GET_NUM_ELEM (vec);
    3427          138 :       for (int i = 0; i < len; ++i)
    3428              :         {
    3429          126 :           rtx ref = RTVEC_ELT (vec, i);
    3430          126 :           if (XEXP (ref, 0) == old_label)
    3431              :             {
    3432            0 :               XEXP (ref, 0) = new_label;
    3433            0 :               if (update_label_nuses)
    3434              :                 {
    3435            0 :                   ++LABEL_NUSES (new_label);
    3436            0 :                   --LABEL_NUSES (old_label);
    3437              :                 }
    3438              :             }
    3439              :         }
    3440           12 :       return;
    3441              :     }
    3442              : 
    3443              :   /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
    3444              :      field.  This is not handled by the iterator because it doesn't
    3445              :      handle unprinted ('0') fields.  */
    3446        11158 :   if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
    3447         1932 :     JUMP_LABEL (x) = new_label;
    3448              : 
    3449        11158 :   subrtx_ptr_iterator::array_type array;
    3450       109354 :   FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
    3451              :     {
    3452        98196 :       rtx *loc = *iter;
    3453        98196 :       if (rtx x = *loc)
    3454              :         {
    3455        87659 :           if (GET_CODE (x) == SYMBOL_REF
    3456        87659 :               && CONSTANT_POOL_ADDRESS_P (x))
    3457              :             {
    3458          339 :               rtx c = get_pool_constant (x);
    3459          339 :               if (rtx_referenced_p (old_label, c))
    3460              :                 {
    3461              :                   /* Create a copy of constant C; replace the label inside
    3462              :                      but do not update LABEL_NUSES because uses in constant pool
    3463              :                      are not counted.  */
    3464            0 :                   rtx new_c = copy_rtx (c);
    3465            0 :                   replace_label (&new_c, old_label, new_label, false);
    3466              : 
    3467              :                   /* Add the new constant NEW_C to constant pool and replace
    3468              :                      the old reference to constant by new reference.  */
    3469            0 :                   rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
    3470            0 :                   *loc = replace_rtx (x, x, XEXP (new_mem, 0));
    3471              :                 }
    3472              :             }
    3473              : 
    3474        87659 :           if ((GET_CODE (x) == LABEL_REF
    3475        83495 :                || GET_CODE (x) == INSN_LIST)
    3476         6092 :               && XEXP (x, 0) == old_label)
    3477              :             {
    3478         5772 :               XEXP (x, 0) = new_label;
    3479         5772 :               if (update_label_nuses)
    3480              :                 {
    3481            0 :                   ++LABEL_NUSES (new_label);
    3482            0 :                   --LABEL_NUSES (old_label);
    3483              :                 }
    3484              :             }
    3485              :         }
    3486              :     }
    3487        11158 : }
    3488              : 
    3489              : void
    3490        11170 : replace_label_in_insn (rtx_insn *insn, rtx_insn *old_label,
    3491              :                        rtx_insn *new_label, bool update_label_nuses)
    3492              : {
    3493        11170 :   rtx insn_as_rtx = insn;
    3494        11170 :   replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
    3495        11170 :   gcc_checking_assert (insn_as_rtx == insn);
    3496        11170 : }
    3497              : 
    3498              : /* Return true if X is referenced in BODY.  */
    3499              : 
    3500              : bool
    3501       351728 : rtx_referenced_p (const_rtx x, const_rtx body)
    3502              : {
    3503       351728 :   subrtx_iterator::array_type array;
    3504      1722971 :   FOR_EACH_SUBRTX (iter, array, body, ALL)
    3505      1394319 :     if (const_rtx y = *iter)
    3506              :       {
    3507              :         /* Check if a label_ref Y refers to label X.  */
    3508      1388148 :         if (GET_CODE (y) == LABEL_REF
    3509        12125 :             && LABEL_P (x)
    3510      1400270 :             && label_ref_label (y) == x)
    3511        23076 :           return true;
    3512              : 
    3513      1388148 :         if (rtx_equal_p (x, y))
    3514              :           return true;
    3515              : 
    3516              :         /* If Y is a reference to pool constant traverse the constant.  */
    3517      1365072 :         if (GET_CODE (y) == SYMBOL_REF
    3518      1365072 :             && CONSTANT_POOL_ADDRESS_P (y))
    3519         7016 :           iter.substitute (get_pool_constant (y));
    3520              :       }
    3521       328652 :   return false;
    3522       351728 : }
    3523              : 
    3524              : /* If INSN is a tablejump return true and store the label (before jump table) to
    3525              :    *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */
    3526              : 
    3527              : bool
    3528    107323254 : tablejump_p (const rtx_insn *insn, rtx_insn **labelp,
    3529              :              rtx_jump_table_data **tablep)
    3530              : {
    3531    107323254 :   if (!JUMP_P (insn))
    3532              :     return false;
    3533              : 
    3534     80892589 :   rtx target = JUMP_LABEL (insn);
    3535     80892589 :   if (target == NULL_RTX || ANY_RETURN_P (target))
    3536              :     return false;
    3537              : 
    3538     77304530 :   rtx_insn *label = as_a<rtx_insn *> (target);
    3539     77304530 :   rtx_insn *table = next_insn (label);
    3540     77304530 :   if (table == NULL_RTX || !JUMP_TABLE_DATA_P (table))
    3541              :     return false;
    3542              : 
    3543       131697 :   if (labelp)
    3544        87926 :     *labelp = label;
    3545       131697 :   if (tablep)
    3546       127620 :     *tablep = as_a <rtx_jump_table_data *> (table);
    3547              :   return true;
    3548              : }
    3549              : 
    3550              : /* For INSN known to satisfy tablejump_p, determine if it actually is a
    3551              :    CASESI.  Return the insn pattern if so, NULL_RTX otherwise.  */
    3552              : 
    3553              : rtx
    3554        24975 : tablejump_casesi_pattern (const rtx_insn *insn)
    3555              : {
    3556        24975 :   rtx tmp;
    3557              : 
    3558        24975 :   if ((tmp = single_set (insn)) != NULL
    3559        24975 :       && SET_DEST (tmp) == pc_rtx
    3560        24975 :       && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
    3561        24975 :       && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    3562            0 :     return tmp;
    3563              : 
    3564              :   return NULL_RTX;
    3565              : }
    3566              : 
    3567              : /* A subroutine of computed_jump_p, return true if X contains a REG or MEM or
    3568              :    constant that is not in the constant pool and not in the condition
    3569              :    of an IF_THEN_ELSE.  */
    3570              : 
    3571              : static bool
    3572         1916 : computed_jump_p_1 (const_rtx x)
    3573              : {
    3574         1916 :   const enum rtx_code code = GET_CODE (x);
    3575         1916 :   int i, j;
    3576         1916 :   const char *fmt;
    3577              : 
    3578         1916 :   switch (code)
    3579              :     {
    3580              :     case LABEL_REF:
    3581              :     case PC:
    3582              :       return false;
    3583              : 
    3584              :     case CONST:
    3585              :     CASE_CONST_ANY:
    3586              :     case SYMBOL_REF:
    3587              :     case REG:
    3588              :       return true;
    3589              : 
    3590          323 :     case MEM:
    3591          323 :       return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
    3592           14 :                 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
    3593              : 
    3594            0 :     case IF_THEN_ELSE:
    3595            0 :       return (computed_jump_p_1 (XEXP (x, 1))
    3596            0 :               || computed_jump_p_1 (XEXP (x, 2)));
    3597              : 
    3598            0 :     default:
    3599            0 :       break;
    3600              :     }
    3601              : 
    3602            0 :   fmt = GET_RTX_FORMAT (code);
    3603            0 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    3604              :     {
    3605            0 :       if (fmt[i] == 'e'
    3606            0 :           && computed_jump_p_1 (XEXP (x, i)))
    3607              :         return true;
    3608              : 
    3609            0 :       else if (fmt[i] == 'E')
    3610            0 :         for (j = 0; j < XVECLEN (x, i); j++)
    3611            0 :           if (computed_jump_p_1 (XVECEXP (x, i, j)))
    3612              :             return true;
    3613              :     }
    3614              : 
    3615              :   return false;
    3616              : }
    3617              : 
    3618              : /* Return true if INSN is an indirect jump (aka computed jump).
    3619              : 
    3620              :    Tablejumps and casesi insns are not considered indirect jumps;
    3621              :    we can recognize them by a (use (label_ref)).  */
    3622              : 
    3623              : bool
    3624     46200467 : computed_jump_p (const rtx_insn *insn)
    3625              : {
    3626     46200467 :   int i;
    3627     46200467 :   if (JUMP_P (insn))
    3628              :     {
    3629     42195142 :       rtx pat = PATTERN (insn);
    3630              : 
    3631              :       /* If we have a JUMP_LABEL set, we're not a computed jump.  */
    3632     42195142 :       if (JUMP_LABEL (insn) != NULL)
    3633              :         return false;
    3634              : 
    3635         2436 :       if (GET_CODE (pat) == PARALLEL)
    3636              :         {
    3637          489 :           int len = XVECLEN (pat, 0);
    3638          489 :           bool has_use_labelref = false;
    3639              : 
    3640         1467 :           for (i = len - 1; i >= 0; i--)
    3641          978 :             if (GET_CODE (XVECEXP (pat, 0, i)) == USE
    3642            0 :                 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
    3643              :                     == LABEL_REF))
    3644              :               {
    3645              :                 has_use_labelref = true;
    3646              :                 break;
    3647              :               }
    3648              : 
    3649          489 :           if (! has_use_labelref)
    3650         1467 :             for (i = len - 1; i >= 0; i--)
    3651          978 :               if (GET_CODE (XVECEXP (pat, 0, i)) == SET
    3652            0 :                   && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
    3653          978 :                   && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
    3654              :                 return true;
    3655              :         }
    3656         1947 :       else if (GET_CODE (pat) == SET
    3657         1916 :                && SET_DEST (pat) == pc_rtx
    3658         3863 :                && computed_jump_p_1 (SET_SRC (pat)))
    3659              :         return true;
    3660              :     }
    3661              :   return false;
    3662              : }
    3663              : 
    3664              : 
    3665              : 
    3666              : /* MEM has a PRE/POST-INC/DEC/MODIFY address X.  Extract the operands of
    3667              :    the equivalent add insn and pass the result to FN, using DATA as the
    3668              :    final argument.  */
    3669              : 
    3670              : static int
    3671     19691256 : for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
    3672              : {
    3673     19691256 :   rtx x = XEXP (mem, 0);
    3674     19691256 :   switch (GET_CODE (x))
    3675              :     {
    3676      2315256 :     case PRE_INC:
    3677      2315256 :     case POST_INC:
    3678      2315256 :       {
    3679      4630512 :         poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
    3680      2315256 :         rtx r1 = XEXP (x, 0);
    3681      2315256 :         rtx c = gen_int_mode (size, GET_MODE (r1));
    3682      2315256 :         return fn (mem, x, r1, r1, c, data);
    3683              :       }
    3684              : 
    3685     16981561 :     case PRE_DEC:
    3686     16981561 :     case POST_DEC:
    3687     16981561 :       {
    3688     33963122 :         poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
    3689     16981561 :         rtx r1 = XEXP (x, 0);
    3690     16981561 :         rtx c = gen_int_mode (-size, GET_MODE (r1));
    3691     16981561 :         return fn (mem, x, r1, r1, c, data);
    3692              :       }
    3693              : 
    3694       394439 :     case PRE_MODIFY:
    3695       394439 :     case POST_MODIFY:
    3696       394439 :       {
    3697       394439 :         rtx r1 = XEXP (x, 0);
    3698       394439 :         rtx add = XEXP (x, 1);
    3699       394439 :         return fn (mem, x, r1, add, NULL, data);
    3700              :       }
    3701              : 
    3702            0 :     default:
    3703            0 :       gcc_unreachable ();
    3704              :     }
    3705              : }
    3706              : 
    3707              : /* Traverse *LOC looking for MEMs that have autoinc addresses.
    3708              :    For each such autoinc operation found, call FN, passing it
    3709              :    the innermost enclosing MEM, the operation itself, the RTX modified
    3710              :    by the operation, two RTXs (the second may be NULL) that, once
    3711              :    added, represent the value to be held by the modified RTX
    3712              :    afterwards, and DATA.  FN is to return 0 to continue the
    3713              :    traversal or any other value to have it returned to the caller of
    3714              :    for_each_inc_dec.  */
    3715              : 
    3716              : int
    3717   1020075320 : for_each_inc_dec (rtx x,
    3718              :                   for_each_inc_dec_fn fn,
    3719              :                   void *data)
    3720              : {
    3721   1020075320 :   subrtx_var_iterator::array_type array;
    3722   5418351668 :   FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    3723              :     {
    3724   4398276348 :       rtx mem = *iter;
    3725   4398276348 :       if (mem
    3726   4398276348 :           && MEM_P (mem)
    3727    254231011 :           && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
    3728              :         {
    3729     19691256 :           int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
    3730     19691256 :           if (res != 0)
    3731            0 :             return res;
    3732     19691256 :           iter.skip_subrtxes ();
    3733              :         }
    3734              :     }
    3735   1020075320 :   return 0;
    3736   1020075320 : }
    3737              : 
    3738              : 
    3739              : /* Searches X for any reference to REGNO, returning the rtx of the
    3740              :    reference found if any.  Otherwise, returns NULL_RTX.  */
    3741              : 
    3742              : rtx
    3743            0 : regno_use_in (unsigned int regno, rtx x)
    3744              : {
    3745            0 :   const char *fmt;
    3746            0 :   int i, j;
    3747            0 :   rtx tem;
    3748              : 
    3749            0 :   if (REG_P (x) && REGNO (x) == regno)
    3750              :     return x;
    3751              : 
    3752            0 :   fmt = GET_RTX_FORMAT (GET_CODE (x));
    3753            0 :   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    3754              :     {
    3755            0 :       if (fmt[i] == 'e')
    3756              :         {
    3757            0 :           if ((tem = regno_use_in (regno, XEXP (x, i))))
    3758              :             return tem;
    3759              :         }
    3760            0 :       else if (fmt[i] == 'E')
    3761            0 :         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    3762            0 :           if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
    3763              :             return tem;
    3764              :     }
    3765              : 
    3766              :   return NULL_RTX;
    3767              : }
    3768              : 
    3769              : /* Return a value indicating whether OP, an operand of a commutative
    3770              :    operation, is preferred as the first or second operand.  The more
    3771              :    positive the value, the stronger the preference for being the first
    3772              :    operand.  */
    3773              : 
    3774              : int
    3775   2212420614 : commutative_operand_precedence (rtx op)
    3776              : {
    3777   2212420614 :   enum rtx_code code = GET_CODE (op);
    3778              : 
    3779              :   /* Constants always become the second operand.  Prefer "nice" constants.  */
    3780   2212420614 :   if (code == CONST_INT)
    3781              :     return -10;
    3782              :   if (code == CONST_WIDE_INT)
    3783              :     return -9;
    3784              :   if (code == CONST_POLY_INT)
    3785              :     return -8;
    3786              :   if (code == CONST_DOUBLE)
    3787              :     return -8;
    3788              :   if (code == CONST_FIXED)
    3789              :     return -8;
    3790   1316743913 :   op = avoid_constant_pool_reference (op);
    3791   1316743913 :   code = GET_CODE (op);
    3792              : 
    3793   1316743913 :   switch (GET_RTX_CLASS (code))
    3794              :     {
    3795     28033741 :     case RTX_CONST_OBJ:
    3796     28033741 :       if (code == CONST_INT)
    3797              :         return -7;
    3798              :       if (code == CONST_WIDE_INT)
    3799              :         return -6;
    3800              :       if (code == CONST_POLY_INT)
    3801              :         return -5;
    3802              :       if (code == CONST_DOUBLE)
    3803              :         return -5;
    3804              :       if (code == CONST_FIXED)
    3805              :         return -5;
    3806              :       return -4;
    3807              : 
    3808     42082867 :     case RTX_EXTRA:
    3809              :       /* SUBREGs of objects should come second.  */
    3810     42082867 :       if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
    3811              :         return -3;
    3812              :       return 0;
    3813              : 
    3814    976089992 :     case RTX_OBJ:
    3815              :       /* Complex expressions should be the first, so decrease priority
    3816              :          of objects.  Prefer pointer objects over non pointer objects.  */
    3817    880886613 :       if ((REG_P (op) && REG_POINTER (op))
    3818   1465932083 :           || (MEM_P (op) && MEM_POINTER (op)))
    3819    409668861 :         return -1;
    3820              :       return -2;
    3821              : 
    3822              :     case RTX_COMM_ARITH:
    3823              :       /* Prefer operands that are themselves commutative to be first.
    3824              :          This helps to make things linear.  In particular,
    3825              :          (and (and (reg) (reg)) (not (reg))) is canonical.  */
    3826              :       return 4;
    3827              : 
    3828     81185736 :     case RTX_BIN_ARITH:
    3829              :       /* If only one operand is a binary expression, it will be the first
    3830              :          operand.  In particular,  (plus (minus (reg) (reg)) (neg (reg)))
    3831              :          is canonical, although it will usually be further simplified.  */
    3832     81185736 :       return 2;
    3833              : 
    3834     26250129 :     case RTX_UNARY:
    3835              :       /* Then prefer NEG and NOT.  */
    3836     26250129 :       if (code == NEG || code == NOT)
    3837              :         return 1;
    3838              :       /* FALLTHRU */
    3839              : 
    3840              :     default:
    3841              :       return 0;
    3842              :     }
    3843              : }
    3844              : 
    3845              : /* Return true iff it is necessary to swap operands of commutative operation
    3846              :    in order to canonicalize expression.  */
    3847              : 
    3848              : bool
    3849    980499286 : swap_commutative_operands_p (rtx x, rtx y)
    3850              : {
    3851    980499286 :   return (commutative_operand_precedence (x)
    3852    980499286 :           < commutative_operand_precedence (y));
    3853              : }
    3854              : 
    3855              : /* Return true if X is an autoincrement side effect and the register is
    3856              :    not the stack pointer.  */
    3857              : bool
    3858            0 : auto_inc_p (const_rtx x)
    3859              : {
    3860            0 :   switch (GET_CODE (x))
    3861              :     {
    3862            0 :     case PRE_INC:
    3863            0 :     case POST_INC:
    3864            0 :     case PRE_DEC:
    3865            0 :     case POST_DEC:
    3866            0 :     case PRE_MODIFY:
    3867            0 :     case POST_MODIFY:
    3868              :       /* There are no REG_INC notes for SP.  */
    3869            0 :       if (XEXP (x, 0) != stack_pointer_rtx)
    3870            0 :         return true;
    3871              :     default:
    3872              :       break;
    3873              :     }
    3874              :   return false;
    3875              : }
    3876              : 
    3877              : /* Return true if IN contains a piece of rtl that has the address LOC.  */
    3878              : bool
    3879      1062808 : loc_mentioned_in_p (rtx *loc, const_rtx in)
    3880              : {
    3881      1062808 :   enum rtx_code code;
    3882      1062808 :   const char *fmt;
    3883      1062808 :   int i, j;
    3884              : 
    3885      1062808 :   if (!in)
    3886              :     return false;
    3887              : 
    3888      1062808 :   code = GET_CODE (in);
    3889      1062808 :   fmt = GET_RTX_FORMAT (code);
    3890      2123392 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    3891              :     {
    3892      1686560 :       if (fmt[i] == 'e')
    3893              :         {
    3894      1042607 :           if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
    3895       625260 :             return true;
    3896              :         }
    3897       643953 :       else if (fmt[i] == 'E')
    3898        27031 :         for (j = XVECLEN (in, i) - 1; j >= 0; j--)
    3899        18732 :           if (loc == &XVECEXP (in, i, j)
    3900        18732 :               || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
    3901          716 :             return true;
    3902              :     }
    3903              :   return false;
    3904              : }
    3905              : 
    3906              : /* Reinterpret a subreg as a bit extraction from an integer and return
    3907              :    the position of the least significant bit of the extracted value.
    3908              :    In other words, if the extraction were performed as a shift right
    3909              :    and mask, return the number of bits to shift right.
    3910              : 
    3911              :    The outer value of the subreg has OUTER_BYTES bytes and starts at
    3912              :    byte offset SUBREG_BYTE within an inner value of INNER_BYTES bytes.  */
    3913              : 
    3914              : poly_uint64
    3915     48000391 : subreg_size_lsb (poly_uint64 outer_bytes,
    3916              :                  poly_uint64 inner_bytes,
    3917              :                  poly_uint64 subreg_byte)
    3918              : {
    3919     48000391 :   poly_uint64 subreg_end, trailing_bytes, byte_pos;
    3920              : 
    3921              :   /* A paradoxical subreg begins at bit position 0.  */
    3922     48000391 :   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
    3923     48000391 :   if (maybe_gt (outer_bytes, inner_bytes))
    3924              :     {
    3925        43319 :       gcc_checking_assert (known_eq (subreg_byte, 0U));
    3926        43319 :       return 0;
    3927              :     }
    3928              : 
    3929     47957072 :   subreg_end = subreg_byte + outer_bytes;
    3930     47957072 :   trailing_bytes = inner_bytes - subreg_end;
    3931     47957072 :   if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    3932              :     byte_pos = trailing_bytes;
    3933     47957072 :   else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    3934     47957072 :     byte_pos = subreg_byte;
    3935              :   else
    3936              :     {
    3937              :       /* When bytes and words have opposite endianness, we must be able
    3938              :          to split offsets into words and bytes at compile time.  */
    3939              :       poly_uint64 leading_word_part
    3940              :         = force_align_down (subreg_byte, UNITS_PER_WORD);
    3941              :       poly_uint64 trailing_word_part
    3942              :         = force_align_down (trailing_bytes, UNITS_PER_WORD);
    3943              :       /* If the subreg crosses a word boundary ensure that
    3944              :          it also begins and ends on a word boundary.  */
    3945              :       gcc_assert (known_le (subreg_end - leading_word_part,
    3946              :                             (unsigned int) UNITS_PER_WORD)
    3947              :                   || (known_eq (leading_word_part, subreg_byte)
    3948              :                       && known_eq (trailing_word_part, trailing_bytes)));
    3949              :       if (WORDS_BIG_ENDIAN)
    3950              :         byte_pos = trailing_word_part + (subreg_byte - leading_word_part);
    3951              :       else
    3952              :         byte_pos = leading_word_part + (trailing_bytes - trailing_word_part);
    3953              :     }
    3954              : 
    3955     47957072 :   return byte_pos * BITS_PER_UNIT;
    3956              : }
    3957              : 
    3958              : /* Given a subreg X, return the bit offset where the subreg begins
    3959              :    (counting from the least significant bit of the reg).  */
    3960              : 
    3961              : poly_uint64
    3962      2964201 : subreg_lsb (const_rtx x)
    3963              : {
    3964      5928402 :   return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
    3965      2964201 :                        SUBREG_BYTE (x));
    3966              : }
    3967              : 
    3968              : /* Return the subreg byte offset for a subreg whose outer value has
    3969              :    OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
    3970              :    there are LSB_SHIFT *bits* between the lsb of the outer value and the
    3971              :    lsb of the inner value.  This is the inverse of the calculation
    3972              :    performed by subreg_lsb_1 (which converts byte offsets to bit shifts).  */
    3973              : 
    3974              : poly_uint64
    3975     40041333 : subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes,
    3976              :                              poly_uint64 lsb_shift)
    3977              : {
    3978              :   /* A paradoxical subreg begins at bit position 0.  */
    3979     40041333 :   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
    3980     40041333 :   if (maybe_gt (outer_bytes, inner_bytes))
    3981              :     {
    3982            0 :       gcc_checking_assert (known_eq (lsb_shift, 0U));
    3983            0 :       return 0;
    3984              :     }
    3985              : 
    3986     40041333 :   poly_uint64 lower_bytes = exact_div (lsb_shift, BITS_PER_UNIT);
    3987     40041333 :   poly_uint64 upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
    3988     40041333 :   if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
    3989              :     return upper_bytes;
    3990     40041333 :   else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
    3991     40041333 :     return lower_bytes;
    3992              :   else
    3993              :     {
    3994              :       /* When bytes and words have opposite endianness, we must be able
    3995              :          to split offsets into words and bytes at compile time.  */
    3996              :       poly_uint64 lower_word_part = force_align_down (lower_bytes,
    3997              :                                                       UNITS_PER_WORD);
    3998              :       poly_uint64 upper_word_part = force_align_down (upper_bytes,
    3999              :                                                       UNITS_PER_WORD);
    4000              :       if (WORDS_BIG_ENDIAN)
    4001              :         return upper_word_part + (lower_bytes - lower_word_part);
    4002              :       else
    4003              :         return lower_word_part + (upper_bytes - upper_word_part);
    4004              :     }
    4005              : }
    4006              : 
    4007              : /* Fill in information about a subreg of a hard register.
    4008              :    xregno - A regno of an inner hard subreg_reg (or what will become one).
    4009              :    xmode  - The mode of xregno.
    4010              :    offset - The byte offset.
    4011              :    ymode  - The mode of a top level SUBREG (or what may become one).
    4012              :    info   - Pointer to structure to fill in.
    4013              : 
    4014              :    Rather than considering one particular inner register (and thus one
    4015              :    particular "outer" register) in isolation, this function really uses
    4016              :    XREGNO as a model for a sequence of isomorphic hard registers.  Thus the
    4017              :    function does not check whether adding INFO->offset to XREGNO gives
    4018              :    a valid hard register; even if INFO->offset + XREGNO is out of range,
    4019              :    there might be another register of the same type that is in range.
    4020              :    Likewise it doesn't check whether targetm.hard_regno_mode_ok accepts
    4021              :    the new register, since that can depend on things like whether the final
    4022              :    register number is even or odd.  Callers that want to check whether
    4023              :    this particular subreg can be replaced by a simple (reg ...) should
    4024              :    use simplify_subreg_regno.  */
    4025              : 
    4026              : void
    4027     33997122 : subreg_get_info (unsigned int xregno, machine_mode xmode,
    4028              :                  poly_uint64 offset, machine_mode ymode,
    4029              :                  struct subreg_info *info)
    4030              : {
    4031     33997122 :   unsigned int nregs_xmode, nregs_ymode;
    4032              : 
    4033     33997122 :   gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
    4034              : 
    4035     67994244 :   poly_uint64 xsize = GET_MODE_SIZE (xmode);
    4036     67994244 :   poly_uint64 ysize = GET_MODE_SIZE (ymode);
    4037              : 
    4038     33997122 :   bool rknown = false;
    4039              : 
    4040              :   /* If the register representation of a non-scalar mode has holes in it,
    4041              :      we expect the scalar units to be concatenated together, with the holes
    4042              :      distributed evenly among the scalar units.  Each scalar unit must occupy
    4043              :      at least one register.  */
    4044     33997122 :   if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    4045              :     {
    4046              :       /* As a consequence, we must be dealing with a constant number of
    4047              :          scalars, and thus a constant offset and number of units.  */
    4048            0 :       HOST_WIDE_INT coffset = offset.to_constant ();
    4049            0 :       HOST_WIDE_INT cysize = ysize.to_constant ();
    4050            0 :       nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
    4051            0 :       unsigned int nunits = GET_MODE_NUNITS (xmode).to_constant ();
    4052            0 :       scalar_mode xmode_unit = GET_MODE_INNER (xmode);
    4053            0 :       gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
    4054            0 :       gcc_assert (nregs_xmode
    4055              :                   == (nunits
    4056              :                       * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
    4057            0 :       gcc_assert (hard_regno_nregs (xregno, xmode)
    4058              :                   == hard_regno_nregs (xregno, xmode_unit) * nunits);
    4059              : 
    4060              :       /* You can only ask for a SUBREG of a value with holes in the middle
    4061              :          if you don't cross the holes.  (Such a SUBREG should be done by
    4062              :          picking a different register class, or doing it in memory if
    4063              :          necessary.)  An example of a value with holes is XCmode on 32-bit
    4064              :          x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
    4065              :          3 for each part, but in memory it's two 128-bit parts.
    4066              :          Padding is assumed to be at the end (not necessarily the 'high part')
    4067              :          of each unit.  */
    4068            0 :       if ((coffset / GET_MODE_SIZE (xmode_unit) + 1 < nunits)
    4069            0 :           && (coffset / GET_MODE_SIZE (xmode_unit)
    4070            0 :               != ((coffset + cysize - 1) / GET_MODE_SIZE (xmode_unit))))
    4071              :         {
    4072            0 :           info->representable_p = false;
    4073            0 :           rknown = true;
    4074              :         }
    4075              :     }
    4076              :   else
    4077     33997122 :     nregs_xmode = hard_regno_nregs (xregno, xmode);
    4078              : 
    4079     33997122 :   nregs_ymode = hard_regno_nregs (xregno, ymode);
    4080              : 
    4081              :   /* Subreg sizes must be ordered, so that we can tell whether they are
    4082              :      partial, paradoxical or complete.  */
    4083     33997122 :   gcc_checking_assert (ordered_p (xsize, ysize));
    4084              : 
    4085              :   /* Paradoxical subregs are otherwise valid.  */
    4086     33997122 :   if (!rknown && known_eq (offset, 0U) && maybe_gt (ysize, xsize))
    4087              :     {
    4088     13688620 :       info->representable_p = true;
    4089              :       /* If this is a big endian paradoxical subreg, which uses more
    4090              :          actual hard registers than the original register, we must
    4091              :          return a negative offset so that we find the proper highpart
    4092              :          of the register.
    4093              : 
    4094              :          We assume that the ordering of registers within a multi-register
    4095              :          value has a consistent endianness: if bytes and register words
    4096              :          have different endianness, the hard registers that make up a
    4097              :          multi-register value must be at least word-sized.  */
    4098     13688620 :       if (REG_WORDS_BIG_ENDIAN)
    4099              :         info->offset = (int) nregs_xmode - (int) nregs_ymode;
    4100              :       else
    4101     13688620 :         info->offset = 0;
    4102     13688620 :       info->nregs = nregs_ymode;
    4103     13688620 :       return;
    4104              :     }
    4105              : 
    4106              :   /* If registers store different numbers of bits in the different
    4107              :      modes, we cannot generally form this subreg.  */
    4108     20308502 :   poly_uint64 regsize_xmode, regsize_ymode;
    4109     17463583 :   if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
    4110            0 :       && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
    4111     20308502 :       && multiple_p (xsize, nregs_xmode, &regsize_xmode)
    4112     20308502 :       && multiple_p (ysize, nregs_ymode, &regsize_ymode))
    4113              :     {
    4114     20308502 :       if (!rknown
    4115     20308502 :           && ((nregs_ymode > 1 && maybe_gt (regsize_xmode, regsize_ymode))
    4116     20308490 :               || (nregs_xmode > 1 && maybe_gt (regsize_ymode, regsize_xmode))))
    4117              :         {
    4118          119 :           info->representable_p = false;
    4119          119 :           if (!can_div_away_from_zero_p (ysize, regsize_xmode, &info->nregs)
    4120          119 :               || !can_div_trunc_p (offset, regsize_xmode, &info->offset))
    4121              :             /* Checked by validate_subreg.  We must know at compile time
    4122              :                which inner registers are being accessed.  */
    4123              :             gcc_unreachable ();
    4124     33477997 :           return;
    4125              :         }
    4126              :       /* It's not valid to extract a subreg of mode YMODE at OFFSET that
    4127              :          would go outside of XMODE.  */
    4128     20308383 :       if (!rknown && maybe_gt (ysize + offset, xsize))
    4129              :         {
    4130            0 :           info->representable_p = false;
    4131            0 :           info->nregs = nregs_ymode;
    4132            0 :           if (!can_div_trunc_p (offset, regsize_xmode, &info->offset))
    4133              :             /* Checked by validate_subreg.  We must know at compile time
    4134              :                which inner registers are being accessed.  */
    4135              :             gcc_unreachable ();
    4136            0 :           return;
    4137              :         }
    4138              :       /* Quick exit for the simple and common case of extracting whole
    4139              :          subregisters from a multiregister value.  */
    4140              :       /* ??? It would be better to integrate this into the code below,
    4141              :          if we can generalize the concept enough and figure out how
    4142              :          odd-sized modes can coexist with the other weird cases we support.  */
    4143     20308383 :       HOST_WIDE_INT count;
    4144     20308383 :       if (!rknown
    4145              :           && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
    4146     20308383 :           && known_eq (regsize_xmode, regsize_ymode)
    4147     20308383 :           && constant_multiple_p (offset, regsize_ymode, &count))
    4148              :         {
    4149     12929671 :           info->representable_p = true;
    4150     12929671 :           info->nregs = nregs_ymode;
    4151     12929671 :           info->offset = count;
    4152     12929671 :           gcc_assert (info->offset + info->nregs <= (int) nregs_xmode);
    4153              :           return;
    4154              :         }
    4155              :     }
    4156              : 
    4157              :   /* Lowpart subregs are otherwise valid.  */
    4158      7378712 :   if (!rknown && known_eq (offset, subreg_lowpart_offset (ymode, xmode)))
    4159              :     {
    4160      6859587 :       info->representable_p = true;
    4161      6859587 :       rknown = true;
    4162              : 
    4163      6859587 :       if (known_eq (offset, 0U) || nregs_xmode == nregs_ymode)
    4164              :         {
    4165      6859587 :           info->offset = 0;
    4166      6859587 :           info->nregs = nregs_ymode;
    4167      6859587 :           return;
    4168              :         }
    4169              :     }
    4170              : 
    4171              :   /* Set NUM_BLOCKS to the number of independently-representable YMODE
    4172              :      values there are in (reg:XMODE XREGNO).  We can view the register
    4173              :      as consisting of this number of independent "blocks", where each
    4174              :      block occupies NREGS_YMODE registers and contains exactly one
    4175              :      representable YMODE value.  */
    4176       519125 :   gcc_assert ((nregs_xmode % nregs_ymode) == 0);
    4177       519125 :   unsigned int num_blocks = nregs_xmode / nregs_ymode;
    4178              : 
    4179              :   /* Calculate the number of bytes in each block.  This must always
    4180              :      be exact, otherwise we don't know how to verify the constraint.
    4181              :      These conditions may be relaxed but subreg_regno_offset would
    4182              :      need to be redesigned.  */
    4183       519125 :   poly_uint64 bytes_per_block = exact_div (xsize, num_blocks);
    4184              : 
    4185              :   /* Get the number of the first block that contains the subreg and the byte
    4186              :      offset of the subreg from the start of that block.  */
    4187       519125 :   unsigned int block_number;
    4188       519125 :   poly_uint64 subblock_offset;
    4189       519125 :   if (!can_div_trunc_p (offset, bytes_per_block, &block_number,
    4190              :                         &subblock_offset))
    4191              :     /* Checked by validate_subreg.  We must know at compile time which
    4192              :        inner registers are being accessed.  */
    4193              :     gcc_unreachable ();
    4194              : 
    4195       519125 :   if (!rknown)
    4196              :     {
    4197              :       /* Only the lowpart of each block is representable.  */
    4198       519125 :       info->representable_p
    4199       519125 :         = known_eq (subblock_offset,
    4200              :                     subreg_size_lowpart_offset (ysize, bytes_per_block));
    4201       519125 :       rknown = true;
    4202              :     }
    4203              : 
    4204              :   /* We assume that the ordering of registers within a multi-register
    4205              :      value has a consistent endianness: if bytes and register words
    4206              :      have different endianness, the hard registers that make up a
    4207              :      multi-register value must be at least word-sized.  */
    4208       519125 :   if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN)
    4209              :     /* The block number we calculated above followed memory endianness.
    4210              :        Convert it to register endianness by counting back from the end.
    4211              :        (Note that, because of the assumption above, each block must be
    4212              :        at least word-sized.)  */
    4213              :     info->offset = (num_blocks - block_number - 1) * nregs_ymode;
    4214              :   else
    4215       519125 :     info->offset = block_number * nregs_ymode;
    4216       519125 :   info->nregs = nregs_ymode;
    4217              : }
    4218              : 
    4219              : /* This function returns the regno offset of a subreg expression.
    4220              :    xregno - A regno of an inner hard subreg_reg (or what will become one).
    4221              :    xmode  - The mode of xregno.
    4222              :    offset - The byte offset.
    4223              :    ymode  - The mode of a top level SUBREG (or what may become one).
    4224              :    RETURN - The regno offset which would be used.  */
    4225              : unsigned int
    4226      5447328 : subreg_regno_offset (unsigned int xregno, machine_mode xmode,
    4227              :                      poly_uint64 offset, machine_mode ymode)
    4228              : {
    4229      5447328 :   struct subreg_info info;
    4230      5447328 :   subreg_get_info (xregno, xmode, offset, ymode, &info);
    4231      5447328 :   return info.offset;
    4232              : }
    4233              : 
    4234              : /* This function returns true when the offset is representable via
    4235              :    subreg_offset in the given regno.
    4236              :    xregno - A regno of an inner hard subreg_reg (or what will become one).
    4237              :    xmode  - The mode of xregno.
    4238              :    offset - The byte offset.
    4239              :    ymode  - The mode of a top level SUBREG (or what may become one).
    4240              :    RETURN - Whether the offset is representable.  */
    4241              : bool
    4242            0 : subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
    4243              :                                poly_uint64 offset, machine_mode ymode)
    4244              : {
    4245            0 :   struct subreg_info info;
    4246            0 :   subreg_get_info (xregno, xmode, offset, ymode, &info);
    4247            0 :   return info.representable_p;
    4248              : }
    4249              : 
    4250              : /* Return the number of a YMODE register to which
    4251              : 
    4252              :        (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
    4253              : 
    4254              :    can be simplified.  Return -1 if the subreg can't be simplified.
    4255              : 
    4256              :    XREGNO is a hard register number.  ALLOW_STACK_REGS is true if
    4257              :    we should allow subregs of stack_pointer_rtx, frame_pointer_rtx.
    4258              :    and arg_pointer_rtx (which are normally expected to be the unique
    4259              :    way of referring to their respective registers).  */
    4260              : 
    4261              : 
    4262              : int
    4263     29319656 : simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
    4264              :                        poly_uint64 offset, machine_mode ymode,
    4265              :                        bool allow_stack_regs)
    4266              : {
    4267     29319656 :   struct subreg_info info;
    4268     29319656 :   unsigned int yregno;
    4269              : 
    4270              :   /* Give the backend a chance to disallow the mode change.  */
    4271     29319656 :   if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
    4272     29319656 :       && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
    4273     29319656 :       && !REG_CAN_CHANGE_MODE_P (xregno, xmode, ymode))
    4274              :     return -1;
    4275              : 
    4276     28674145 :   if (!allow_stack_regs)
    4277              :     {
    4278              :       /* We shouldn't simplify stack-related registers.  */
    4279     28320494 :       if ((!reload_completed || frame_pointer_needed)
    4280     24845296 :           && xregno == FRAME_POINTER_REGNUM)
    4281              :         return -1;
    4282              : 
    4283     28213725 :       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
    4284              :           && xregno == ARG_POINTER_REGNUM)
    4285              :         return -1;
    4286              : 
    4287     28110865 :       if (xregno == STACK_POINTER_REGNUM
    4288              :           /* We should convert hard stack register in LRA if it is
    4289              :              possible.  */
    4290       104004 :           && ! lra_in_progress)
    4291              :         return -1;
    4292              :     }
    4293              : 
    4294              :   /* Try to get the register offset.  */
    4295     28362281 :   subreg_get_info (xregno, xmode, offset, ymode, &info);
    4296     28362281 :   if (!info.representable_p)
    4297              :     return -1;
    4298              : 
    4299              :   /* Make sure that the offsetted register value is in range.  */
    4300     27902292 :   yregno = xregno + info.offset;
    4301     27902292 :   if (!HARD_REGISTER_NUM_P (yregno))
    4302              :     return -1;
    4303              : 
    4304              :   /* See whether (reg:YMODE YREGNO) is valid.
    4305              : 
    4306              :      ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
    4307              :      This is a kludge to work around how complex FP arguments are passed
    4308              :      on IA-64 and should be fixed.  See PR target/49226.  */
    4309     27890141 :   if (!targetm.hard_regno_mode_ok (yregno, ymode)
    4310     27890141 :       && targetm.hard_regno_mode_ok (xregno, xmode))
    4311              :     return -1;
    4312              : 
    4313     27645222 :   return (int) yregno;
    4314              : }
    4315              : 
    4316              : /* A wrapper around simplify_subreg_regno that uses subreg_lowpart_offset
    4317              :    (xmode, ymode) as the offset.  */
    4318              : 
    4319              : int
    4320            0 : lowpart_subreg_regno (unsigned int regno, machine_mode xmode,
    4321              :                       machine_mode ymode)
    4322              : {
    4323            0 :   poly_uint64 offset = subreg_lowpart_offset (xmode, ymode);
    4324            0 :   return simplify_subreg_regno (regno, xmode, offset, ymode);
    4325              : }
    4326              : 
    4327              : /* Return the final regno that a subreg expression refers to.  */
    4328              : unsigned int
    4329        11257 : subreg_regno (const_rtx x)
    4330              : {
    4331        11257 :   unsigned int ret;
    4332        11257 :   rtx subreg = SUBREG_REG (x);
    4333        11257 :   int regno = REGNO (subreg);
    4334              : 
    4335        22514 :   ret = regno + subreg_regno_offset (regno,
    4336        11257 :                                      GET_MODE (subreg),
    4337        11257 :                                      SUBREG_BYTE (x),
    4338        11257 :                                      GET_MODE (x));
    4339        11257 :   return ret;
    4340              : 
    4341              : }
    4342              : 
    4343              : /* Return the number of registers that a subreg expression refers
    4344              :    to.  */
    4345              : unsigned int
    4346       180895 : subreg_nregs (const_rtx x)
    4347              : {
    4348       180895 :   return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
    4349              : }
    4350              : 
    4351              : /* Return the number of registers that a subreg REG with REGNO
    4352              :    expression refers to.  This is a copy of the rtlanal.cc:subreg_nregs
    4353              :    changed so that the regno can be passed in. */
    4354              : 
    4355              : unsigned int
    4356       180895 : subreg_nregs_with_regno (unsigned int regno, const_rtx x)
    4357              : {
    4358       180895 :   struct subreg_info info;
    4359       180895 :   rtx subreg = SUBREG_REG (x);
    4360              : 
    4361       180895 :   subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
    4362              :                    &info);
    4363       180895 :   return info.nregs;
    4364              : }
    4365              : 
    4366              : struct parms_set_data
    4367              : {
    4368              :   int nregs;
    4369              :   HARD_REG_SET regs;
    4370              : };
    4371              : 
    4372              : /* Helper function for noticing stores to parameter registers.  */
    4373              : static void
    4374        66964 : parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
    4375              : {
    4376        66964 :   struct parms_set_data *const d = (struct parms_set_data *) data;
    4377        66962 :   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
    4378       133926 :       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    4379              :     {
    4380        66636 :       CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
    4381        66636 :       d->nregs--;
    4382              :     }
    4383        66964 : }
    4384              : 
    4385              : /* Look backward for first parameter to be loaded.
    4386              :    Note that loads of all parameters will not necessarily be
    4387              :    found if CSE has eliminated some of them (e.g., an argument
    4388              :    to the outer function is passed down as a parameter).
    4389              :    Do not skip BOUNDARY.  */
    4390              : rtx_insn *
    4391        40988 : find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
    4392              : {
    4393        40988 :   struct parms_set_data parm;
    4394        40988 :   rtx p;
    4395        40988 :   rtx_insn *before, *first_set;
    4396              : 
    4397              :   /* Since different machines initialize their parameter registers
    4398              :      in different orders, assume nothing.  Collect the set of all
    4399              :      parameter registers.  */
    4400        40988 :   CLEAR_HARD_REG_SET (parm.regs);
    4401        40988 :   parm.nregs = 0;
    4402       122952 :   for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    4403        81964 :     if (GET_CODE (XEXP (p, 0)) == USE
    4404        81802 :         && REG_P (XEXP (XEXP (p, 0), 0))
    4405       151013 :         && !STATIC_CHAIN_REG_P (XEXP (XEXP (p, 0), 0)))
    4406              :       {
    4407        68774 :         gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
    4408              : 
    4409              :         /* We only care about registers which can hold function
    4410              :            arguments.  */
    4411        68774 :         if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
    4412         1784 :           continue;
    4413              : 
    4414        66990 :         SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
    4415        66990 :         parm.nregs++;
    4416              :       }
    4417              :   before = call_insn;
    4418              :   first_set = call_insn;
    4419              : 
    4420              :   /* Search backward for the first set of a register in this set.  */
    4421       107624 :   while (parm.nregs && before != boundary)
    4422              :     {
    4423        66964 :       before = PREV_INSN (before);
    4424              : 
    4425              :       /* It is possible that some loads got CSEed from one call to
    4426              :          another.  Stop in that case.  */
    4427        66964 :       if (CALL_P (before))
    4428              :         break;
    4429              : 
    4430              :       /* Our caller needs either ensure that we will find all sets
    4431              :          (in case code has not been optimized yet), or take care
    4432              :          for possible labels in a way by setting boundary to preceding
    4433              :          CODE_LABEL.  */
    4434        66964 :       if (LABEL_P (before))
    4435              :         {
    4436            0 :           gcc_assert (before == boundary);
    4437              :           break;
    4438              :         }
    4439              : 
    4440        66964 :       if (INSN_P (before))
    4441              :         {
    4442        66964 :           int nregs_old = parm.nregs;
    4443        66964 :           note_stores (before, parms_set, &parm);
    4444              :           /* If we found something that did not set a parameter reg,
    4445              :              we're done.  Do not keep going, as that might result
    4446              :              in hoisting an insn before the setting of a pseudo
    4447              :              that is used by the hoisted insn. */
    4448        66964 :           if (nregs_old != parm.nregs)
    4449              :             first_set = before;
    4450              :           else
    4451              :             break;
    4452              :         }
    4453              :     }
    4454        40988 :   return first_set;
    4455              : }
    4456              : 
    4457              : /* Return true if we should avoid inserting code between INSN and preceding
    4458              :    call instruction.  */
    4459              : 
    4460              : bool
    4461     11215757 : keep_with_call_p (const rtx_insn *insn)
    4462              : {
    4463     11215757 :   rtx set;
    4464              : 
    4465     11215757 :   if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    4466              :     {
    4467      7559328 :       if (REG_P (SET_DEST (set))
    4468      2044908 :           && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
    4469      2044908 :           && fixed_regs[REGNO (SET_DEST (set))]
    4470      7726013 :           && general_operand (SET_SRC (set), VOIDmode))
    4471              :         return true;
    4472      7558977 :       if (REG_P (SET_SRC (set))
    4473       788652 :           && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
    4474       501315 :           && REG_P (SET_DEST (set))
    4475      7719853 :           && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
    4476              :         return true;
    4477              :       /* There may be a stack pop just after the call and before the store
    4478              :          of the return register.  Search for the actual store when deciding
    4479              :          if we can break or not.  */
    4480      7558977 :       if (SET_DEST (set) == stack_pointer_rtx)
    4481              :         {
    4482              :           /* This CONST_CAST is okay because next_nonnote_insn just
    4483              :              returns its argument and we assign it to a const_rtx
    4484              :              variable.  */
    4485       165191 :           const rtx_insn *i2
    4486       165191 :             = next_nonnote_insn (const_cast<rtx_insn *> (insn));
    4487       165191 :           if (i2 && keep_with_call_p (i2))
    4488              :             return true;
    4489              :         }
    4490              :     }
    4491              :   return false;
    4492              : }
    4493              : 
    4494              : /* Return true if LABEL is a target of JUMP_INSN.  This applies only
    4495              :    to non-complex jumps.  That is, direct unconditional, conditional,
    4496              :    and tablejumps, but not computed jumps or returns.  It also does
    4497              :    not apply to the fallthru case of a conditional jump.  */
    4498              : 
    4499              : bool
    4500     24178835 : label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
    4501              : {
    4502     24178835 :   rtx tmp = JUMP_LABEL (jump_insn);
    4503     24178835 :   rtx_jump_table_data *table;
    4504              : 
    4505     24178835 :   if (label == tmp)
    4506              :     return true;
    4507              : 
    4508      4022578 :   if (tablejump_p (jump_insn, NULL, &table))
    4509              :     {
    4510            0 :       rtvec vec = table->get_labels ();
    4511            0 :       int i, veclen = GET_NUM_ELEM (vec);
    4512              : 
    4513            0 :       for (i = 0; i < veclen; ++i)
    4514            0 :         if (XEXP (RTVEC_ELT (vec, i), 0) == label)
    4515              :           return true;
    4516              :     }
    4517              : 
    4518      4022578 :   if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
    4519              :     return true;
    4520              : 
    4521              :   return false;
    4522              : }
    4523              : 
    4524              : 
    4525              : /* Return an estimate of the cost of computing rtx X.
    4526              :    One use is in cse, to decide which expression to keep in the hash table.
    4527              :    Another is in rtl generation, to pick the cheapest way to multiply.
    4528              :    Other uses like the latter are expected in the future.
    4529              : 
    4530              :    X appears as operand OPNO in an expression with code OUTER_CODE.
    4531              :    SPEED specifies whether costs optimized for speed or size should
    4532              :    be returned.  */
    4533              : 
    4534              : int
    4535  12591382313 : rtx_cost (rtx x, machine_mode mode, enum rtx_code outer_code,
    4536              :           int opno, bool speed)
    4537              : {
    4538  12591382313 :   int i, j;
    4539  12591382313 :   enum rtx_code code;
    4540  12591382313 :   const char *fmt;
    4541  12591382313 :   int total;
    4542  12591382313 :   int factor;
    4543  12591382313 :   unsigned mode_size;
    4544              : 
    4545  12591382313 :   if (x == 0)
    4546              :     return 0;
    4547              : 
    4548  12591382313 :   if (GET_CODE (x) == SET)
    4549              :     /* A SET doesn't have a mode, so let's look at the SET_DEST to get
    4550              :        the mode for the factor.  */
    4551     47675604 :     mode = GET_MODE (SET_DEST (x));
    4552  12543706709 :   else if (GET_MODE (x) != VOIDmode)
    4553   9668790299 :     mode = GET_MODE (x);
    4554              : 
    4555  25182764626 :   mode_size = estimated_poly_value (GET_MODE_SIZE (mode));
    4556              : 
    4557              :   /* A size N times larger than UNITS_PER_WORD likely needs N times as
    4558              :      many insns, taking N times as long.  */
    4559  13097818513 :   factor = mode_size > UNITS_PER_WORD ? mode_size / UNITS_PER_WORD : 1;
    4560              : 
    4561              :   /* Compute the default costs of certain things.
    4562              :      Note that targetm.rtx_costs can override the defaults.  */
    4563              : 
    4564  12591382313 :   code = GET_CODE (x);
    4565  12591382313 :   switch (code)
    4566              :     {
    4567   1758388068 :     case MULT:
    4568   1758388068 :     case FMA:
    4569   1758388068 :     case SS_MULT:
    4570   1758388068 :     case US_MULT:
    4571   1758388068 :     case SMUL_HIGHPART:
    4572   1758388068 :     case UMUL_HIGHPART:
    4573              :       /* Multiplication has time-complexity O(N*N), where N is the
    4574              :          number of units (translated from digits) when using
    4575              :          schoolbook long multiplication.  */
    4576   1758388068 :       total = factor * factor * COSTS_N_INSNS (5);
    4577   1758388068 :       break;
    4578     72486234 :     case DIV:
    4579     72486234 :     case UDIV:
    4580     72486234 :     case MOD:
    4581     72486234 :     case UMOD:
    4582     72486234 :     case SS_DIV:
    4583     72486234 :     case US_DIV:
    4584              :       /* Similarly, complexity for schoolbook long division.  */
    4585     72486234 :       total = factor * factor * COSTS_N_INSNS (7);
    4586     72486234 :       break;
    4587            0 :     case USE:
    4588              :       /* Used in combine.cc as a marker.  */
    4589            0 :       total = 0;
    4590            0 :       break;
    4591  10760508011 :     default:
    4592  10760508011 :       total = factor * COSTS_N_INSNS (1);
    4593              :     }
    4594              : 
    4595  12591382313 :   switch (code)
    4596              :     {
    4597              :     case REG:
    4598              :       return 0;
    4599              : 
    4600     11762184 :     case SUBREG:
    4601     11762184 :       total = 0;
    4602              :       /* If we can't tie these modes, make this expensive.  The larger
    4603              :          the mode, the more expensive it is.  */
    4604     11762184 :       if (!targetm.modes_tieable_p (mode, GET_MODE (SUBREG_REG (x))))
    4605      4307947 :         return COSTS_N_INSNS (2 + factor);
    4606              :       break;
    4607              : 
    4608     17277128 :     case TRUNCATE:
    4609     17277128 :       if (targetm.modes_tieable_p (mode, GET_MODE (XEXP (x, 0))))
    4610              :         {
    4611      3851965 :           total = 0;
    4612      3851965 :           break;
    4613              :         }
    4614              :       /* FALLTHRU */
    4615   7702810706 :     default:
    4616   7702810706 :       if (targetm.rtx_costs (x, mode, outer_code, opno, &total, speed))
    4617   3531950419 :         return total;
    4618              :       break;
    4619              :     }
    4620              : 
    4621              :   /* Sum the costs of the sub-rtx's, plus cost of this operation,
    4622              :      which is already in total.  */
    4623              : 
    4624   4182166489 :   fmt = GET_RTX_FORMAT (code);
    4625  12455520008 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    4626   8273353519 :     if (fmt[i] == 'e')
    4627   8204184217 :       total += rtx_cost (XEXP (x, i), mode, code, i, speed);
    4628     69169302 :     else if (fmt[i] == 'E')
    4629     11817444 :       for (j = 0; j < XVECLEN (x, i); j++)
    4630      6730266 :         total += rtx_cost (XVECEXP (x, i, j), mode, code, i, speed);
    4631              : 
    4632   4182166489 :   return total;
    4633              : }
    4634              : 
    4635              : /* Fill in the structure C with information about both speed and size rtx
    4636              :    costs for X, which is operand OPNO in an expression with code OUTER.  */
    4637              : 
    4638              : void
    4639      2219746 : get_full_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno,
    4640              :                    struct full_rtx_costs *c)
    4641              : {
    4642      2219746 :   c->speed = rtx_cost (x, mode, outer, opno, true);
    4643      2219746 :   c->size = rtx_cost (x, mode, outer, opno, false);
    4644      2219746 : }
    4645              : 
    4646              : 
    4647              : /* Return cost of address expression X.
    4648              :    Expect that X is properly formed address reference.
    4649              : 
    4650              :    SPEED parameter specify whether costs optimized for speed or size should
    4651              :    be returned.  */
    4652              : 
    4653              : int
    4654     10858238 : address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
    4655              : {
    4656              :   /* We may be asked for cost of various unusual addresses, such as operands
    4657              :      of push instruction.  It is not worthwhile to complicate writing
    4658              :      of the target hook by such cases.  */
    4659              : 
    4660     10858238 :   if (!memory_address_addr_space_p (mode, x, as))
    4661              :     return 1000;
    4662              : 
    4663     10773597 :   return targetm.address_cost (x, mode, as, speed);
    4664              : }
    4665              : 
    4666              : /* If the target doesn't override, compute the cost as with arithmetic.  */
    4667              : 
    4668              : int
    4669            0 : default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
    4670              : {
    4671            0 :   return rtx_cost (x, Pmode, MEM, 0, speed);
    4672              : }
    4673              : 
    4674              : 
    4675              : unsigned HOST_WIDE_INT
    4676    693673653 : nonzero_bits (const_rtx x, machine_mode mode)
    4677              : {
    4678    693673653 :   if (mode == VOIDmode)
    4679            0 :     mode = GET_MODE (x);
    4680    693673653 :   scalar_int_mode int_mode;
    4681    693673653 :   if (!is_a <scalar_int_mode> (mode, &int_mode))
    4682     20374620 :     return GET_MODE_MASK (mode);
    4683    673299033 :   return cached_nonzero_bits (x, int_mode, NULL_RTX, VOIDmode, 0);
    4684              : }
    4685              : 
    4686              : unsigned int
    4687    245783048 : num_sign_bit_copies (const_rtx x, machine_mode mode)
    4688              : {
    4689    245783048 :   if (mode == VOIDmode)
    4690            1 :     mode = GET_MODE (x);
    4691    245783048 :   scalar_int_mode int_mode;
    4692    245783048 :   if (!is_a <scalar_int_mode> (mode, &int_mode))
    4693              :     return 1;
    4694    225728510 :   return cached_num_sign_bit_copies (x, int_mode, NULL_RTX, VOIDmode, 0);
    4695              : }
    4696              : 
    4697              : /* Return true if nonzero_bits1 might recurse into both operands
    4698              :    of X.  */
    4699              : 
    4700              : static inline bool
    4701   1425310930 : nonzero_bits_binary_arith_p (const_rtx x)
    4702              : {
    4703   1425310930 :   if (!ARITHMETIC_P (x))
    4704              :     return false;
    4705    248986216 :   switch (GET_CODE (x))
    4706              :     {
    4707              :     case AND:
    4708              :     case XOR:
    4709              :     case IOR:
    4710              :     case UMIN:
    4711              :     case UMAX:
    4712              :     case SMIN:
    4713              :     case SMAX:
    4714              :     case PLUS:
    4715              :     case MINUS:
    4716              :     case MULT:
    4717              :     case DIV:
    4718              :     case UDIV:
    4719              :     case MOD:
    4720              :     case UMOD:
    4721              :       return true;
    4722              :     default:
    4723              :       return false;
    4724              :     }
    4725              : }
    4726              : 
    4727              : /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
    4728              :    It avoids exponential behavior in nonzero_bits1 when X has
    4729              :    identical subexpressions on the first or the second level.  */
    4730              : 
    4731              : static unsigned HOST_WIDE_INT
    4732   1147264813 : cached_nonzero_bits (const_rtx x, scalar_int_mode mode, const_rtx known_x,
    4733              :                      machine_mode known_mode,
    4734              :                      unsigned HOST_WIDE_INT known_ret)
    4735              : {
    4736   1147264813 :   if (x == known_x && mode == known_mode)
    4737              :     return known_ret;
    4738              : 
    4739              :   /* Try to find identical subexpressions.  If found call
    4740              :      nonzero_bits1 on X with the subexpressions as KNOWN_X and the
    4741              :      precomputed value for the subexpression as KNOWN_RET.  */
    4742              : 
    4743   1144967592 :   if (nonzero_bits_binary_arith_p (x))
    4744              :     {
    4745    140793646 :       rtx x0 = XEXP (x, 0);
    4746    140793646 :       rtx x1 = XEXP (x, 1);
    4747              : 
    4748              :       /* Check the first level.  */
    4749    140793646 :       if (x0 == x1)
    4750        59723 :         return nonzero_bits1 (x, mode, x0, mode,
    4751              :                               cached_nonzero_bits (x0, mode, known_x,
    4752        59723 :                                                    known_mode, known_ret));
    4753              : 
    4754              :       /* Check the second level.  */
    4755    140733923 :       if (nonzero_bits_binary_arith_p (x0)
    4756    140733923 :           && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
    4757      1124508 :         return nonzero_bits1 (x, mode, x1, mode,
    4758              :                               cached_nonzero_bits (x1, mode, known_x,
    4759      1124508 :                                                    known_mode, known_ret));
    4760              : 
    4761    139609415 :       if (nonzero_bits_binary_arith_p (x1)
    4762    139609415 :           && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
    4763         6361 :         return nonzero_bits1 (x, mode, x0, mode,
    4764              :                               cached_nonzero_bits (x0, mode, known_x,
    4765         6361 :                                                    known_mode, known_ret));
    4766              :     }
    4767              : 
    4768   1143777000 :   return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
    4769              : }
    4770              : 
    4771              : /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
    4772              :    We don't let nonzero_bits recur into num_sign_bit_copies, because that
    4773              :    is less useful.  We can't allow both, because that results in exponential
    4774              :    run time recursion.  There is a nullstone testcase that triggered
    4775              :    this.  This macro avoids accidental uses of num_sign_bit_copies.  */
    4776              : #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
    4777              : 
    4778              : /* Given an expression, X, compute which bits in X can be nonzero.
    4779              :    We don't care about bits outside of those defined in MODE.
    4780              : 
    4781              :    For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
    4782              :    an arithmetic operation, we can do better.  */
    4783              : 
    4784              : static unsigned HOST_WIDE_INT
    4785   1144967592 : nonzero_bits1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
    4786              :                machine_mode known_mode,
    4787              :                unsigned HOST_WIDE_INT known_ret)
    4788              : {
    4789   1144967592 :   unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
    4790   1144967592 :   unsigned HOST_WIDE_INT inner_nz;
    4791   1144967592 :   enum rtx_code code = GET_CODE (x);
    4792   1144967592 :   machine_mode inner_mode;
    4793   1144967592 :   unsigned int inner_width;
    4794   1144967592 :   scalar_int_mode xmode;
    4795              : 
    4796   1144967592 :   unsigned int mode_width = GET_MODE_PRECISION (mode);
    4797              : 
    4798              :   /* For unary ops like ffs or popcount we want to determine the number of
    4799              :      nonzero bits from the operand.  This only matters with very large
    4800              :      vector modes.  A
    4801              :        (popcount:DI (V128BImode)
    4802              :      should not get a nonzero-bit mask of (1 << 7) - 1 as that could
    4803              :      lead to incorrect optimizations based on it, see PR123501.  */
    4804   1144967592 :   unsigned int op_mode_width = mode_width;
    4805   1144967592 :   machine_mode op_mode = mode;
    4806   1144967592 :   if (UNARY_P (x))
    4807              :     {
    4808     16106773 :       const_rtx op = XEXP (x, 0);
    4809     16106773 :       if (GET_MODE_PRECISION (GET_MODE (op)).is_constant ())
    4810              :         {
    4811     16106773 :           op_mode = GET_MODE (op);
    4812     16106773 :           op_mode_width = GET_MODE_PRECISION (op_mode).to_constant ();
    4813              :         }
    4814              :     }
    4815              : 
    4816   1144967592 :   if (CONST_INT_P (x))
    4817              :     {
    4818    118047087 :       if (SHORT_IMMEDIATES_SIGN_EXTEND
    4819              :           && INTVAL (x) > 0
    4820              :           && mode_width < BITS_PER_WORD
    4821              :           && (UINTVAL (x) & (HOST_WIDE_INT_1U << (mode_width - 1))) != 0)
    4822              :         return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
    4823              : 
    4824    118047087 :       return UINTVAL (x);
    4825              :     }
    4826              : 
    4827   1026920505 :   if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    4828              :     return nonzero;
    4829   1026589921 :   unsigned int xmode_width = GET_MODE_PRECISION (xmode);
    4830              : 
    4831              :   /* If X is wider than MODE, use its mode instead.  */
    4832   1026589921 :   if (xmode_width > mode_width)
    4833              :     {
    4834     18649047 :       mode = xmode;
    4835     18649047 :       nonzero = GET_MODE_MASK (mode);
    4836     18649047 :       mode_width = xmode_width;
    4837              :     }
    4838              : 
    4839   1026589921 :   if (mode_width > HOST_BITS_PER_WIDE_INT)
    4840              :     /* Our only callers in this case look for single bit values.  So
    4841              :        just return the mode mask.  Those tests will then be false.  */
    4842              :     return nonzero;
    4843              : 
    4844              :   /* If MODE is wider than X, but both are a single word for both the host
    4845              :      and target machines, we can compute this from which bits of the object
    4846              :      might be nonzero in its own mode, taking into account the fact that, on
    4847              :      CISC machines, accessing an object in a wider mode generally causes the
    4848              :      high-order bits to become undefined, so they are not known to be zero.
    4849              :      We extend this reasoning to RISC machines for operations that might not
    4850              :      operate on the full registers.  */
    4851   1025232040 :   if (mode_width > xmode_width
    4852    112185981 :       && xmode_width <= BITS_PER_WORD
    4853              :       && xmode_width <= HOST_BITS_PER_WIDE_INT
    4854              :       && !(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
    4855              :     {
    4856     95864398 :       nonzero &= cached_nonzero_bits (x, xmode,
    4857              :                                       known_x, known_mode, known_ret);
    4858     95864398 :       nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode);
    4859     95864398 :       return nonzero;
    4860              :     }
    4861              : 
    4862              :   /* Please keep nonzero_bits_binary_arith_p above in sync with
    4863              :      the code in the switch below.  */
    4864    929367642 :   switch (code)
    4865              :     {
    4866    512512378 :     case REG:
    4867              : #if defined(POINTERS_EXTEND_UNSIGNED)
    4868              :       /* If pointers extend unsigned and this is a pointer in Pmode, say that
    4869              :          all the bits above ptr_mode are known to be zero.  */
    4870              :       /* As we do not know which address space the pointer is referring to,
    4871              :          we can do this only if the target does not support different pointer
    4872              :          or address modes depending on the address space.  */
    4873    512512378 :       if (target_default_pointer_address_modes_p ()
    4874              :           && POINTERS_EXTEND_UNSIGNED
    4875    575001945 :           && xmode == Pmode
    4876    325152151 :           && REG_POINTER (x)
    4877    598787638 :           && !targetm.have_ptr_extend ())
    4878     86275260 :         nonzero &= GET_MODE_MASK (ptr_mode);
    4879              : #endif
    4880              : 
    4881              :       /* Include declared information about alignment of pointers.  */
    4882              :       /* ??? We don't properly preserve REG_POINTER changes across
    4883              :          pointer-to-integer casts, so we can't trust it except for
    4884              :          things that we know must be pointers.  See execute/960116-1.c.  */
    4885    512512378 :       if ((x == stack_pointer_rtx
    4886    511512783 :            || x == frame_pointer_rtx
    4887    497203817 :            || x == arg_pointer_rtx)
    4888    527371078 :           && REGNO_POINTER_ALIGN (REGNO (x)))
    4889              :         {
    4890     15858295 :           unsigned HOST_WIDE_INT alignment
    4891     15858295 :             = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
    4892              : 
    4893              : #ifdef PUSH_ROUNDING
    4894              :           /* If PUSH_ROUNDING is defined, it is possible for the
    4895              :              stack to be momentarily aligned only to that amount,
    4896              :              so we pick the least alignment.  */
    4897     15858295 :           if (x == stack_pointer_rtx && targetm.calls.push_argument (0))
    4898              :             {
    4899       775128 :               poly_uint64 rounded_1 = PUSH_ROUNDING (poly_int64 (1));
    4900       775128 :               alignment = MIN (known_alignment (rounded_1), alignment);
    4901              :             }
    4902              : #endif
    4903              : 
    4904     15858295 :           nonzero &= ~(alignment - 1);
    4905              :         }
    4906              : 
    4907    512512378 :       {
    4908    512512378 :         unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
    4909    512512378 :         rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, xmode, mode,
    4910              :                                                   &nonzero_for_hook);
    4911              : 
    4912    512512378 :         if (new_rtx)
    4913            6 :           nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
    4914              :                                                    known_mode, known_ret);
    4915              : 
    4916    512512378 :         return nonzero_for_hook;
    4917              :       }
    4918              : 
    4919              :     case MEM:
    4920              :       /* In many, if not most, RISC machines, reading a byte from memory
    4921              :          zeros the rest of the register.  Noticing that fact saves a lot
    4922              :          of extra zero-extends.  */
    4923              :       if (load_extend_op (xmode) == ZERO_EXTEND)
    4924              :         nonzero &= GET_MODE_MASK (xmode);
    4925              :       break;
    4926              : 
    4927      9544661 :     case EQ:  case NE:
    4928      9544661 :     case UNEQ:  case LTGT:
    4929      9544661 :     case GT:  case GTU:  case UNGT:
    4930      9544661 :     case LT:  case LTU:  case UNLT:
    4931      9544661 :     case GE:  case GEU:  case UNGE:
    4932      9544661 :     case LE:  case LEU:  case UNLE:
    4933      9544661 :     case UNORDERED: case ORDERED:
    4934              :       /* If this produces an integer result, we know which bits are set.
    4935              :          Code here used to clear bits outside the mode of X, but that is
    4936              :          now done above.  */
    4937              :       /* Mind that MODE is the mode the caller wants to look at this
    4938              :          operation in, and not the actual operation mode.  We can wind
    4939              :          up with (subreg:DI (gt:V4HI x y)), and we don't have anything
    4940              :          that describes the results of a vector compare.  */
    4941      9544661 :       if (GET_MODE_CLASS (xmode) == MODE_INT
    4942      9544661 :           && mode_width <= HOST_BITS_PER_WIDE_INT)
    4943   1144967592 :         nonzero = STORE_FLAG_VALUE;
    4944              :       break;
    4945              : 
    4946      1016606 :     case NEG:
    4947              : #if 0
    4948              :       /* Disabled to avoid exponential mutual recursion between nonzero_bits
    4949              :          and num_sign_bit_copies.  */
    4950              :       if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
    4951              :         nonzero = 1;
    4952              : #endif
    4953              : 
    4954      1016606 :       if (xmode_width < mode_width)
    4955            0 :         nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode));
    4956              :       break;
    4957              : 
    4958              :     case ABS:
    4959              : #if 0
    4960              :       /* Disabled to avoid exponential mutual recursion between nonzero_bits
    4961              :          and num_sign_bit_copies.  */
    4962              :       if (num_sign_bit_copies (XEXP (x, 0), xmode) == xmode_width)
    4963              :         nonzero = 1;
    4964              : #endif
    4965              :       break;
    4966              : 
    4967         9723 :     case TRUNCATE:
    4968         9723 :       nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
    4969              :                                        known_x, known_mode, known_ret)
    4970         9723 :                   & GET_MODE_MASK (mode));
    4971         9723 :       break;
    4972              : 
    4973      6702521 :     case ZERO_EXTEND:
    4974      6702521 :       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
    4975              :                                       known_x, known_mode, known_ret);
    4976      6702521 :       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
    4977      6702521 :         nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
    4978              :       break;
    4979              : 
    4980      1866340 :     case SIGN_EXTEND:
    4981              :       /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
    4982              :          Otherwise, show all the bits in the outer mode but not the inner
    4983              :          may be nonzero.  */
    4984      1866340 :       inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
    4985              :                                       known_x, known_mode, known_ret);
    4986      1866340 :       if (GET_MODE (XEXP (x, 0)) != VOIDmode)
    4987              :         {
    4988      1866340 :           inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
    4989      1866340 :           if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
    4990      1828113 :             inner_nz |= (GET_MODE_MASK (mode)
    4991      1828113 :                          & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
    4992              :         }
    4993              : 
    4994      1866340 :       nonzero &= inner_nz;
    4995      1866340 :       break;
    4996              : 
    4997     16298290 :     case AND:
    4998     16298290 :       nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
    4999              :                                        known_x, known_mode, known_ret)
    5000     16298290 :                  & cached_nonzero_bits (XEXP (x, 1), mode,
    5001              :                                         known_x, known_mode, known_ret);
    5002     16298290 :       break;
    5003              : 
    5004     10544438 :     case XOR:   case IOR:
    5005     10544438 :     case UMIN:  case UMAX:  case SMIN:  case SMAX:
    5006     10544438 :       {
    5007     10544438 :         unsigned HOST_WIDE_INT nonzero0
    5008     10544438 :            = cached_nonzero_bits (XEXP (x, 0), mode,
    5009              :                                   known_x, known_mode, known_ret);
    5010              : 
    5011              :         /* Don't call nonzero_bits for the second time if it cannot change
    5012              :            anything.  */
    5013     10544438 :         if ((nonzero & nonzero0) != nonzero)
    5014      9988822 :           nonzero &= nonzero0
    5015      4994411 :                      | cached_nonzero_bits (XEXP (x, 1), mode,
    5016              :                                             known_x, known_mode, known_ret);
    5017              :       }
    5018              :       break;
    5019              : 
    5020     94156889 :     case PLUS:  case MINUS:
    5021     94156889 :     case MULT:
    5022     94156889 :     case DIV:   case UDIV:
    5023     94156889 :     case MOD:   case UMOD:
    5024              :       /* We can apply the rules of arithmetic to compute the number of
    5025              :          high- and low-order zero bits of these operations.  We start by
    5026              :          computing the width (position of the highest-order nonzero bit)
    5027              :          and the number of low-order zero bits for each value.  */
    5028     94156889 :       {
    5029     94156889 :         unsigned HOST_WIDE_INT nz0
    5030     94156889 :           = cached_nonzero_bits (XEXP (x, 0), mode,
    5031              :                                  known_x, known_mode, known_ret);
    5032     94156889 :         unsigned HOST_WIDE_INT nz1
    5033     94156889 :           = cached_nonzero_bits (XEXP (x, 1), mode,
    5034              :                                  known_x, known_mode, known_ret);
    5035     94156889 :         int sign_index = xmode_width - 1;
    5036     94156889 :         int width0 = floor_log2 (nz0) + 1;
    5037     94156889 :         int width1 = floor_log2 (nz1) + 1;
    5038     94156889 :         int low0 = ctz_or_zero (nz0);
    5039     94156889 :         int low1 = ctz_or_zero (nz1);
    5040     94156889 :         unsigned HOST_WIDE_INT op0_maybe_minusp
    5041     94156889 :           = nz0 & (HOST_WIDE_INT_1U << sign_index);
    5042     94156889 :         unsigned HOST_WIDE_INT op1_maybe_minusp
    5043              :           = nz1 & (HOST_WIDE_INT_1U << sign_index);
    5044     94156889 :         unsigned int result_width = mode_width;
    5045     94156889 :         int result_low = 0;
    5046              : 
    5047     94156889 :         switch (code)
    5048              :           {
    5049     69691892 :           case PLUS:
    5050     69691892 :             result_width = MAX (width0, width1) + 1;
    5051     69691892 :             result_low = MIN (low0, low1);
    5052              :             break;
    5053     15318289 :           case MINUS:
    5054     15318289 :             result_low = MIN (low0, low1);
    5055              :             break;
    5056      7379027 :           case MULT:
    5057      7379027 :             result_width = width0 + width1;
    5058      7379027 :             result_low = low0 + low1;
    5059      7379027 :             break;
    5060       676719 :           case DIV:
    5061       676719 :             if (width1 == 0)
    5062              :               break;
    5063       667009 :             if (!op0_maybe_minusp && !op1_maybe_minusp)
    5064        23091 :               result_width = width0;
    5065              :             break;
    5066       284409 :           case UDIV:
    5067       284409 :             if (width1 == 0)
    5068              :               break;
    5069       283541 :             result_width = width0;
    5070       283541 :             break;
    5071       411028 :           case MOD:
    5072       411028 :             if (width1 == 0)
    5073              :               break;
    5074       403696 :             if (!op0_maybe_minusp && !op1_maybe_minusp)
    5075        21341 :               result_width = MIN (width0, width1);
    5076       403696 :             result_low = MIN (low0, low1);
    5077              :             break;
    5078       395525 :           case UMOD:
    5079       395525 :             if (width1 == 0)
    5080              :               break;
    5081       395421 :             result_width = MIN (width0, width1);
    5082       395421 :             result_low = MIN (low0, low1);
    5083              :             break;
    5084            0 :           default:
    5085            0 :             gcc_unreachable ();
    5086              :           }
    5087              : 
    5088              :         /* Note that mode_width <= HOST_BITS_PER_WIDE_INT, see above.  */
    5089     94156889 :         if (result_width < mode_width)
    5090      4097722 :           nonzero &= (HOST_WIDE_INT_1U << result_width) - 1;
    5091              : 
    5092     94156889 :         if (result_low > 0)
    5093              :           {
    5094      6888457 :             if (result_low < HOST_BITS_PER_WIDE_INT)
    5095      6888445 :               nonzero &= ~((HOST_WIDE_INT_1U << result_low) - 1);
    5096              :             else
    5097              :               nonzero = 0;
    5098              :           }
    5099              :       }
    5100              :       break;
    5101              : 
    5102      1179581 :     case ZERO_EXTRACT:
    5103      1179581 :       if (CONST_INT_P (XEXP (x, 1))
    5104      1179204 :           && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
    5105      1179021 :         nonzero &= (HOST_WIDE_INT_1U << INTVAL (XEXP (x, 1))) - 1;
    5106              :       break;
    5107              : 
    5108     75933572 :     case SUBREG:
    5109              :       /* If this is a SUBREG formed for a promoted variable that has
    5110              :          been zero-extended, we know that at least the high-order bits
    5111              :          are zero, though others might be too.  */
    5112     75933572 :       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
    5113        39362 :         nonzero = GET_MODE_MASK (xmode)
    5114        39362 :                   & cached_nonzero_bits (SUBREG_REG (x), xmode,
    5115              :                                          known_x, known_mode, known_ret);
    5116              : 
    5117              :       /* If the inner mode is a single word for both the host and target
    5118              :          machines, we can compute this from which bits of the inner
    5119              :          object might be nonzero.  */
    5120     75933572 :       inner_mode = GET_MODE (SUBREG_REG (x));
    5121     75933572 :       if (GET_MODE_PRECISION (inner_mode).is_constant (&inner_width)
    5122     81078015 :           && inner_width <= BITS_PER_WORD
    5123              :           && inner_width <= HOST_BITS_PER_WIDE_INT)
    5124              :         {
    5125     71813491 :           nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
    5126              :                                           known_x, known_mode, known_ret);
    5127              : 
    5128              :           /* On a typical CISC machine, accessing an object in a wider mode
    5129              :              causes the high-order bits to become undefined.  So they are
    5130              :              not known to be zero.
    5131              : 
    5132              :              On a typical RISC machine, we only have to worry about the way
    5133              :              loads are extended.  Otherwise, if we get a reload for the inner
    5134              :              part, it may be loaded from the stack, and then we may lose all
    5135              :              the zero bits that existed before the store to the stack.  */
    5136     71813491 :           rtx_code extend_op;
    5137     71813491 :           if ((!WORD_REGISTER_OPERATIONS
    5138              :                || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
    5139              :                    ? val_signbit_known_set_p (inner_mode, nonzero)
    5140              :                    : extend_op != ZERO_EXTEND)
    5141              :                || !MEM_P (SUBREG_REG (x)))
    5142              :               && xmode_width > inner_width)
    5143     54803021 :             nonzero
    5144     54803021 :               |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
    5145              :         }
    5146              :       break;
    5147              : 
    5148     56183641 :     case ASHIFT:
    5149     56183641 :     case ASHIFTRT:
    5150     56183641 :     case LSHIFTRT:
    5151     56183641 :     case ROTATE:
    5152     56183641 :     case ROTATERT:
    5153              :       /* The nonzero bits are in two classes: any bits within MODE
    5154              :          that aren't in xmode are always significant.  The rest of the
    5155              :          nonzero bits are those that are significant in the operand of
    5156              :          the shift when shifted the appropriate number of bits.  This
    5157              :          shows that high-order bits are cleared by the right shift and
    5158              :          low-order bits by left shifts.  */
    5159     56183641 :       if (CONST_INT_P (XEXP (x, 1))
    5160     54641552 :           && INTVAL (XEXP (x, 1)) >= 0
    5161     54641406 :           && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
    5162     54641318 :           && INTVAL (XEXP (x, 1)) < xmode_width)
    5163              :         {
    5164     54641247 :           int count = INTVAL (XEXP (x, 1));
    5165     54641247 :           unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (xmode);
    5166     54641247 :           unsigned HOST_WIDE_INT op_nonzero
    5167     54641247 :             = cached_nonzero_bits (XEXP (x, 0), mode,
    5168              :                                    known_x, known_mode, known_ret);
    5169     54641247 :           unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
    5170     54641247 :           unsigned HOST_WIDE_INT outer = 0;
    5171              : 
    5172     54641247 :           if (mode_width > xmode_width)
    5173            0 :             outer = (op_nonzero & nonzero & ~mode_mask);
    5174              : 
    5175     54641247 :           switch (code)
    5176              :             {
    5177     31727454 :             case ASHIFT:
    5178     31727454 :               inner <<= count;
    5179     31727454 :               break;
    5180              : 
    5181     14605379 :             case LSHIFTRT:
    5182     14605379 :               inner >>= count;
    5183     14605379 :               break;
    5184              : 
    5185      8194410 :             case ASHIFTRT:
    5186      8194410 :               inner >>= count;
    5187              : 
    5188              :               /* If the sign bit may have been nonzero before the shift, we
    5189              :                  need to mark all the places it could have been copied to
    5190              :                  by the shift as possibly nonzero.  */
    5191      8194410 :               if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count)))
    5192      8180391 :                 inner |= (((HOST_WIDE_INT_1U << count) - 1)
    5193      8180391 :                           << (xmode_width - count));
    5194              :               break;
    5195              : 
    5196        72608 :             case ROTATE:
    5197        72608 :               inner = (inner << (count % xmode_width)
    5198        72608 :                        | (inner >> (xmode_width - (count % xmode_width))))
    5199              :                       & mode_mask;
    5200        72608 :               break;
    5201              : 
    5202        41396 :             case ROTATERT:
    5203        41396 :               inner = (inner >> (count % xmode_width)
    5204        41396 :                        | (inner << (xmode_width - (count % xmode_width))))
    5205              :                       & mode_mask;
    5206        41396 :               break;
    5207              : 
    5208              :             default:
    5209              :               gcc_unreachable ();
    5210              :             }
    5211              : 
    5212     54641247 :           nonzero &= (outer | inner);
    5213              :         }
    5214              :       break;
    5215              : 
    5216         5084 :     case FFS:
    5217         5084 :     case POPCOUNT:
    5218              :       /* This is at most the number of bits in the mode.  */
    5219         5084 :       nonzero = (HOST_WIDE_INT_UC (2) << (floor_log2 (op_mode_width))) - 1;
    5220         5084 :       break;
    5221              : 
    5222       797927 :     case CLZ:
    5223              :       /* If CLZ has a known value at zero, then the nonzero bits are
    5224              :          that value, plus the number of bits in the mode minus one.
    5225              :          If we have a different operand mode, don't try to get nonzero
    5226              :          bits as currently nonzero is not a poly_int.  */
    5227       797927 :       if (op_mode == mode
    5228      1595842 :           && CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
    5229         1147 :         nonzero
    5230         2294 :           |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
    5231              :       else
    5232              :         nonzero = -1;
    5233              :       break;
    5234              : 
    5235        47196 :     case CTZ:
    5236              :       /* If CTZ has a known value at zero, then the nonzero bits are
    5237              :          that value, plus the number of bits in the mode minus one.
    5238              :          See above for op_mode != mode.  */
    5239        47196 :       if (op_mode == mode
    5240        94392 :           && CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
    5241         1363 :         nonzero
    5242         2726 :           |= (HOST_WIDE_INT_1U << (floor_log2 (mode_width))) - 1;
    5243              :       else
    5244              :         nonzero = -1;
    5245              :       break;
    5246              : 
    5247            8 :     case CLRSB:
    5248              :       /* This is at most the number of bits in the mode minus 1.  */
    5249            8 :       nonzero = (HOST_WIDE_INT_1U << (floor_log2 (op_mode_width))) - 1;
    5250            8 :       break;
    5251              : 
    5252              :     case PARITY:
    5253   1144967592 :       nonzero = 1;
    5254              :       break;
    5255              : 
    5256      3817654 :     case IF_THEN_ELSE:
    5257      3817654 :       {
    5258      3817654 :         unsigned HOST_WIDE_INT nonzero_true
    5259      3817654 :           = cached_nonzero_bits (XEXP (x, 1), mode,
    5260              :                                  known_x, known_mode, known_ret);
    5261              : 
    5262              :         /* Don't call nonzero_bits for the second time if it cannot change
    5263              :            anything.  */
    5264      3817654 :         if ((nonzero & nonzero_true) != nonzero)
    5265      3142478 :           nonzero &= nonzero_true
    5266      1571239 :                      | cached_nonzero_bits (XEXP (x, 2), mode,
    5267              :                                             known_x, known_mode, known_ret);
    5268              :       }
    5269              :       break;
    5270              : 
    5271              :     default:
    5272              :       break;
    5273              :     }
    5274              : 
    5275              :   return nonzero;
    5276              : }
    5277              : 
    5278              : /* See the macro definition above.  */
    5279              : #undef cached_num_sign_bit_copies
    5280              : 
    5281              : 
    5282              : /* Return true if num_sign_bit_copies1 might recurse into both operands
    5283              :    of X.  */
    5284              : 
    5285              : static inline bool
    5286    442691519 : num_sign_bit_copies_binary_arith_p (const_rtx x)
    5287              : {
    5288    442691519 :   if (!ARITHMETIC_P (x))
    5289              :     return false;
    5290     80311656 :   switch (GET_CODE (x))
    5291              :     {
    5292              :     case IOR:
    5293              :     case AND:
    5294              :     case XOR:
    5295              :     case SMIN:
    5296              :     case SMAX:
    5297              :     case UMIN:
    5298              :     case UMAX:
    5299              :     case PLUS:
    5300              :     case MINUS:
    5301              :     case MULT:
    5302              :       return true;
    5303              :     default:
    5304              :       return false;
    5305              :     }
    5306              : }
    5307              : 
    5308              : /* The function cached_num_sign_bit_copies is a wrapper around
    5309              :    num_sign_bit_copies1.  It avoids exponential behavior in
    5310              :    num_sign_bit_copies1 when X has identical subexpressions on the
    5311              :    first or the second level.  */
    5312              : 
    5313              : static unsigned int
    5314    346551149 : cached_num_sign_bit_copies (const_rtx x, scalar_int_mode mode,
    5315              :                             const_rtx known_x, machine_mode known_mode,
    5316              :                             unsigned int known_ret)
    5317              : {
    5318    346551149 :   if (x == known_x && mode == known_mode)
    5319              :     return known_ret;
    5320              : 
    5321              :   /* Try to find identical subexpressions.  If found call
    5322              :      num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
    5323              :      the precomputed value for the subexpression as KNOWN_RET.  */
    5324              : 
    5325    344784952 :   if (num_sign_bit_copies_binary_arith_p (x))
    5326              :     {
    5327     49346241 :       rtx x0 = XEXP (x, 0);
    5328     49346241 :       rtx x1 = XEXP (x, 1);
    5329              : 
    5330              :       /* Check the first level.  */
    5331     49346241 :       if (x0 == x1)
    5332        17248 :         return
    5333        17248 :           num_sign_bit_copies1 (x, mode, x0, mode,
    5334              :                                 cached_num_sign_bit_copies (x0, mode, known_x,
    5335              :                                                             known_mode,
    5336        17248 :                                                             known_ret));
    5337              : 
    5338              :       /* Check the second level.  */
    5339     49328993 :       if (num_sign_bit_copies_binary_arith_p (x0)
    5340     49328993 :           && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
    5341       751419 :         return
    5342       751419 :           num_sign_bit_copies1 (x, mode, x1, mode,
    5343              :                                 cached_num_sign_bit_copies (x1, mode, known_x,
    5344              :                                                             known_mode,
    5345       751419 :                                                             known_ret));
    5346              : 
    5347     48577574 :       if (num_sign_bit_copies_binary_arith_p (x1)
    5348     48577574 :           && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
    5349          414 :         return
    5350          414 :           num_sign_bit_copies1 (x, mode, x0, mode,
    5351              :                                 cached_num_sign_bit_copies (x0, mode, known_x,
    5352              :                                                             known_mode,
    5353          414 :                                                             known_ret));
    5354              :     }
    5355              : 
    5356    344015871 :   return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
    5357              : }
    5358              : 
    5359              : /* Return the number of bits at the high-order end of X that are known to
    5360              :    be equal to the sign bit.  X will be used in mode MODE.  The returned
    5361              :    value will always be between 1 and the number of bits in MODE.  */
    5362              : 
    5363              : static unsigned int
    5364    344784952 : num_sign_bit_copies1 (const_rtx x, scalar_int_mode mode, const_rtx known_x,
    5365              :                       machine_mode known_mode,
    5366              :                       unsigned int known_ret)
    5367              : {
    5368    344784952 :   enum rtx_code code = GET_CODE (x);
    5369    344784952 :   unsigned int bitwidth = GET_MODE_PRECISION (mode);
    5370    344784952 :   int num0, num1, result;
    5371    344784952 :   unsigned HOST_WIDE_INT nonzero;
    5372              : 
    5373    344784952 :   if (CONST_INT_P (x))
    5374              :     {
    5375              :       /* If the constant is negative, take its 1's complement and remask.
    5376              :          Then see how many zero bits we have.  */
    5377     44276695 :       nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
    5378     44276695 :       if (bitwidth <= HOST_BITS_PER_WIDE_INT
    5379     43974081 :           && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
    5380     19468280 :         nonzero = (~nonzero) & GET_MODE_MASK (mode);
    5381              : 
    5382     44276695 :       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
    5383              :     }
    5384              : 
    5385    300508257 :   scalar_int_mode xmode, inner_mode;
    5386    505737158 :   if (!is_a <scalar_int_mode> (GET_MODE (x), &xmode))
    5387              :     return 1;
    5388              : 
    5389    300175178 :   unsigned int xmode_width = GET_MODE_PRECISION (xmode);
    5390              : 
    5391              :   /* For a smaller mode, just ignore the high bits.  */
    5392    300175178 :   if (bitwidth < xmode_width)
    5393              :     {
    5394        37884 :       num0 = cached_num_sign_bit_copies (x, xmode,
    5395              :                                          known_x, known_mode, known_ret);
    5396        37884 :       return MAX (1, num0 - (int) (xmode_width - bitwidth));
    5397              :     }
    5398              : 
    5399    300137294 :   if (bitwidth > xmode_width)
    5400              :     {
    5401              :       /* If this machine does not do all register operations on the entire
    5402              :          register and MODE is wider than the mode of X, we can say nothing
    5403              :          at all about the high-order bits.  We extend this reasoning to RISC
    5404              :          machines for operations that might not operate on full registers.  */
    5405              :       if (!(WORD_REGISTER_OPERATIONS && word_register_operation_p (x)))
    5406              :         return 1;
    5407              : 
    5408              :       /* Likewise on machines that do, if the mode of the object is smaller
    5409              :          than a word and loads of that size don't sign extend, we can say
    5410              :          nothing about the high order bits.  */
    5411              :       if (xmode_width < BITS_PER_WORD
    5412              :           && load_extend_op (xmode) != SIGN_EXTEND)
    5413              :         return 1;
    5414              :     }
    5415              : 
    5416              :   /* Please keep num_sign_bit_copies_binary_arith_p above in sync with
    5417              :      the code in the switch below.  */
    5418    300137282 :   switch (code)
    5419              :     {
    5420    157114776 :     case REG:
    5421              : 
    5422              : #if defined(POINTERS_EXTEND_UNSIGNED)
    5423              :       /* If pointers extend signed and this is a pointer in Pmode, say that
    5424              :          all the bits above ptr_mode are known to be sign bit copies.  */
    5425              :       /* As we do not know which address space the pointer is referring to,
    5426              :          we can do this only if the target does not support different pointer
    5427              :          or address modes depending on the address space.  */
    5428    157114776 :       if (target_default_pointer_address_modes_p ()
    5429              :           && ! POINTERS_EXTEND_UNSIGNED && xmode == Pmode
    5430              :           && mode == Pmode && REG_POINTER (x)
    5431              :           && !targetm.have_ptr_extend ())
    5432              :         return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
    5433              : #endif
    5434              : 
    5435    157114776 :       {
    5436    157114776 :         unsigned int copies_for_hook = 1, copies = 1;
    5437    157114776 :         rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, xmode, mode,
    5438              :                                                          &copies_for_hook);
    5439              : 
    5440    157114776 :         if (new_rtx)
    5441            5 :           copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
    5442              :                                                known_mode, known_ret);
    5443              : 
    5444    157114776 :         if (copies > 1 || copies_for_hook > 1)
    5445     22548585 :           return MAX (copies, copies_for_hook);
    5446              : 
    5447              :         /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
    5448              :       }
    5449    134566191 :       break;
    5450              : 
    5451              :     case MEM:
    5452              :       /* Some RISC machines sign-extend all loads of smaller than a word.  */
    5453              :       if (load_extend_op (xmode) == SIGN_EXTEND)
    5454              :         return MAX (1, ((int) bitwidth - (int) xmode_width + 1));
    5455              :       break;
    5456              : 
    5457     19964268 :     case SUBREG:
    5458              :       /* If this is a SUBREG for a promoted object that is sign-extended
    5459              :          and we are looking at it in a wider mode, we know that at least the
    5460              :          high-order bits are known to be sign bit copies.  */
    5461              : 
    5462     19964268 :       if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
    5463              :         {
    5464            0 :           num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
    5465              :                                              known_x, known_mode, known_ret);
    5466            0 :           return MAX ((int) bitwidth - (int) xmode_width + 1, num0);
    5467              :         }
    5468              : 
    5469     19964268 :       if (is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)), &inner_mode))
    5470              :         {
    5471              :           /* For a smaller object, just ignore the high bits.  */
    5472     19758582 :           if (bitwidth <= GET_MODE_PRECISION (inner_mode))
    5473              :             {
    5474      6232483 :               num0 = cached_num_sign_bit_copies (SUBREG_REG (x), inner_mode,
    5475              :                                                  known_x, known_mode,
    5476              :                                                  known_ret);
    5477      6232483 :               return MAX (1, num0 - (int) (GET_MODE_PRECISION (inner_mode)
    5478              :                                            - bitwidth));
    5479              :             }
    5480              : 
    5481              :           /* For paradoxical SUBREGs on machines where all register operations
    5482              :              affect the entire register, just look inside.  Note that we are
    5483              :              passing MODE to the recursive call, so the number of sign bit
    5484              :              copies will remain relative to that mode, not the inner mode.
    5485              : 
    5486              :              This works only if loads sign extend.  Otherwise, if we get a
    5487              :              reload for the inner part, it may be loaded from the stack, and
    5488              :              then we lose all sign bit copies that existed before the store
    5489              :              to the stack.  */
    5490              :           if (WORD_REGISTER_OPERATIONS
    5491              :               && load_extend_op (inner_mode) == SIGN_EXTEND
    5492              :               && paradoxical_subreg_p (x)
    5493              :               && MEM_P (SUBREG_REG (x)))
    5494              :             return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
    5495              :                                                known_x, known_mode, known_ret);
    5496              :         }
    5497              :       break;
    5498              : 
    5499         2819 :     case SIGN_EXTRACT:
    5500         2819 :       if (CONST_INT_P (XEXP (x, 1)))
    5501         2819 :         return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
    5502              :       break;
    5503              : 
    5504      1877367 :     case SIGN_EXTEND:
    5505      1877367 :       if (is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
    5506      1877367 :         return (bitwidth - GET_MODE_PRECISION (inner_mode)
    5507      1877367 :                 + cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
    5508      1877367 :                                               known_x, known_mode, known_ret));
    5509              :       break;
    5510              : 
    5511           86 :     case TRUNCATE:
    5512              :       /* For a smaller object, just ignore the high bits.  */
    5513           86 :       inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
    5514           86 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), inner_mode,
    5515              :                                          known_x, known_mode, known_ret);
    5516           86 :       return MAX (1, (num0 - (int) (GET_MODE_PRECISION (inner_mode)
    5517              :                                     - bitwidth)));
    5518              : 
    5519      1055151 :     case NOT:
    5520      1055151 :       return cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5521      1055151 :                                          known_x, known_mode, known_ret);
    5522              : 
    5523        21186 :     case ROTATE:       case ROTATERT:
    5524              :       /* If we are rotating left by a number of bits less than the number
    5525              :          of sign bit copies, we can just subtract that amount from the
    5526              :          number.  */
    5527        21186 :       if (CONST_INT_P (XEXP (x, 1))
    5528        11800 :           && INTVAL (XEXP (x, 1)) >= 0
    5529        11797 :           && INTVAL (XEXP (x, 1)) < (int) bitwidth)
    5530              :         {
    5531        11797 :           num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5532              :                                              known_x, known_mode, known_ret);
    5533        11797 :           return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
    5534              :                                  : (int) bitwidth - INTVAL (XEXP (x, 1))));
    5535              :         }
    5536              :       break;
    5537              : 
    5538       764446 :     case NEG:
    5539              :       /* In general, this subtracts one sign bit copy.  But if the value
    5540              :          is known to be positive, the number of sign bit copies is the
    5541              :          same as that of the input.  Finally, if the input has just one bit
    5542              :          that might be nonzero, all the bits are copies of the sign bit.  */
    5543       764446 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5544              :                                          known_x, known_mode, known_ret);
    5545       764446 :       if (bitwidth > HOST_BITS_PER_WIDE_INT)
    5546        28338 :         return num0 > 1 ? num0 - 1 : 1;
    5547              : 
    5548       736108 :       nonzero = nonzero_bits (XEXP (x, 0), mode);
    5549       736108 :       if (nonzero == 1)
    5550              :         return bitwidth;
    5551              : 
    5552       342232 :       if (num0 > 1
    5553        87600 :           && ((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero))
    5554        45889 :         num0--;
    5555              : 
    5556       342232 :       return num0;
    5557              : 
    5558      5765345 :     case IOR:   case AND:   case XOR:
    5559      5765345 :     case SMIN:  case SMAX:  case UMIN:  case UMAX:
    5560              :       /* Logical operations will preserve the number of sign-bit copies.
    5561              :          MIN and MAX operations always return one of the operands.  */
    5562      5765345 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5563              :                                          known_x, known_mode, known_ret);
    5564      5765345 :       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
    5565              :                                          known_x, known_mode, known_ret);
    5566              : 
    5567              :       /* If num1 is clearing some of the top bits then regardless of
    5568              :          the other term, we are guaranteed to have at least that many
    5569              :          high-order zero bits.  */
    5570      5765345 :       if (code == AND
    5571      5765345 :           && num1 > 1
    5572      2297399 :           && bitwidth <= HOST_BITS_PER_WIDE_INT
    5573      2287005 :           && CONST_INT_P (XEXP (x, 1))
    5574      2088901 :           && (UINTVAL (XEXP (x, 1))
    5575      2088901 :               & (HOST_WIDE_INT_1U << (bitwidth - 1))) == 0)
    5576              :         return num1;
    5577              : 
    5578              :       /* Similarly for IOR when setting high-order bits.  */
    5579      4240218 :       if (code == IOR
    5580      4240218 :           && num1 > 1
    5581       472710 :           && bitwidth <= HOST_BITS_PER_WIDE_INT
    5582       471163 :           && CONST_INT_P (XEXP (x, 1))
    5583       140523 :           && (UINTVAL (XEXP (x, 1))
    5584       140523 :               & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
    5585              :         return num1;
    5586              : 
    5587      4236089 :       return MIN (num0, num1);
    5588              : 
    5589     42293787 :     case PLUS:  case MINUS:
    5590              :       /* For addition and subtraction, we can have a 1-bit carry.  However,
    5591              :          if we are subtracting 1 from a positive number, there will not
    5592              :          be such a carry.  Furthermore, if the positive number is known to
    5593              :          be 0 or 1, we know the result is either -1 or 0.  */
    5594              : 
    5595     42293787 :       if (code == PLUS && XEXP (x, 1) == constm1_rtx
    5596      1326087 :           && bitwidth <= HOST_BITS_PER_WIDE_INT)
    5597              :         {
    5598      1321235 :           nonzero = nonzero_bits (XEXP (x, 0), mode);
    5599      1321235 :           if (((HOST_WIDE_INT_1U << (bitwidth - 1)) & nonzero) == 0)
    5600       108477 :             return (nonzero == 1 || nonzero == 0 ? bitwidth
    5601       102761 :                     : bitwidth - floor_log2 (nonzero) - 1);
    5602              :         }
    5603              : 
    5604     42185310 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5605              :                                          known_x, known_mode, known_ret);
    5606     42185310 :       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
    5607              :                                          known_x, known_mode, known_ret);
    5608     42185310 :       result = MAX (1, MIN (num0, num1) - 1);
    5609              : 
    5610     42185310 :       return result;
    5611              : 
    5612      1286749 :     case MULT:
    5613              :       /* The number of bits of the product is the sum of the number of
    5614              :          bits of both terms.  However, unless one of the terms if known
    5615              :          to be positive, we must allow for an additional bit since negating
    5616              :          a negative number can remove one sign bit copy.  */
    5617              : 
    5618      1286749 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5619              :                                          known_x, known_mode, known_ret);
    5620      1286749 :       num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
    5621              :                                          known_x, known_mode, known_ret);
    5622              : 
    5623      1286749 :       result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
    5624      1286749 :       if (result > 0
    5625      1286749 :           && (bitwidth > HOST_BITS_PER_WIDE_INT
    5626       331745 :               || (((nonzero_bits (XEXP (x, 0), mode)
    5627       331745 :                     & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
    5628       181042 :                   && ((nonzero_bits (XEXP (x, 1), mode)
    5629              :                        & (HOST_WIDE_INT_1U << (bitwidth - 1)))
    5630       181042 :                       != 0))))
    5631        30857 :         result--;
    5632              : 
    5633      1286749 :       return MAX (1, result);
    5634              : 
    5635       121956 :     case UDIV:
    5636              :       /* The result must be <= the first operand.  If the first operand
    5637              :          has the high bit set, we know nothing about the number of sign
    5638              :          bit copies.  */
    5639       121956 :       if (bitwidth > HOST_BITS_PER_WIDE_INT)
    5640              :         return 1;
    5641       121956 :       else if ((nonzero_bits (XEXP (x, 0), mode)
    5642       121956 :                 & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
    5643              :         return 1;
    5644              :       else
    5645        22847 :         return cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5646        22847 :                                            known_x, known_mode, known_ret);
    5647              : 
    5648       118704 :     case UMOD:
    5649              :       /* The result must be <= the second operand.  If the second operand
    5650              :          has (or just might have) the high bit set, we know nothing about
    5651              :          the number of sign bit copies.  */
    5652       118704 :       if (bitwidth > HOST_BITS_PER_WIDE_INT)
    5653              :         return 1;
    5654       118704 :       else if ((nonzero_bits (XEXP (x, 1), mode)
    5655       118704 :                 & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
    5656              :         return 1;
    5657              :       else
    5658        30943 :         return cached_num_sign_bit_copies (XEXP (x, 1), mode,
    5659        30943 :                                            known_x, known_mode, known_ret);
    5660              : 
    5661       210836 :     case DIV:
    5662              :       /* Similar to unsigned division, except that we have to worry about
    5663              :          the case where the divisor is negative, in which case we have
    5664              :          to add 1.  */
    5665       210836 :       result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5666              :                                            known_x, known_mode, known_ret);
    5667       210836 :       if (result > 1
    5668       210836 :           && (bitwidth > HOST_BITS_PER_WIDE_INT
    5669        17820 :               || (nonzero_bits (XEXP (x, 1), mode)
    5670        17820 :                   & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
    5671        15272 :         result--;
    5672              : 
    5673       210836 :       return result;
    5674              : 
    5675       133232 :     case MOD:
    5676       133232 :       result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
    5677              :                                            known_x, known_mode, known_ret);
    5678       133232 :       if (result > 1
    5679       133232 :           && (bitwidth > HOST_BITS_PER_WIDE_INT
    5680        22935 :               || (nonzero_bits (XEXP (x, 1), mode)
    5681        22935 :                   & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0))
    5682        10116 :         result--;
    5683              : 
    5684       133232 :       return result;
    5685              : 
    5686       942355 :     case ASHIFTRT:
    5687              :       /* Shifts by a constant add to the number of bits equal to the
    5688              :          sign bit.  */
    5689       942355 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5690              :                                          known_x, known_mode, known_ret);
    5691       942355 :       if (CONST_INT_P (XEXP (x, 1))
    5692       902899 :           && INTVAL (XEXP (x, 1)) > 0
    5693       902899 :           && INTVAL (XEXP (x, 1)) < xmode_width)
    5694       902899 :         num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
    5695              : 
    5696       942355 :       return num0;
    5697              : 
    5698      8926318 :     case ASHIFT:
    5699              :       /* Left shifts destroy copies.  */
    5700      8926318 :       if (!CONST_INT_P (XEXP (x, 1))
    5701      8730708 :           || INTVAL (XEXP (x, 1)) < 0
    5702      8730564 :           || INTVAL (XEXP (x, 1)) >= (int) bitwidth
    5703      8730522 :           || INTVAL (XEXP (x, 1)) >= xmode_width)
    5704              :         return 1;
    5705              : 
    5706      8730522 :       num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
    5707              :                                          known_x, known_mode, known_ret);
    5708      8730522 :       return MAX (1, num0 - INTVAL (XEXP (x, 1)));
    5709              : 
    5710       764398 :     case IF_THEN_ELSE:
    5711       764398 :       num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
    5712              :                                          known_x, known_mode, known_ret);
    5713       764398 :       num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
    5714              :                                          known_x, known_mode, known_ret);
    5715       764398 :       return MIN (num0, num1);
    5716              : 
    5717      2340404 :     case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    5718      2340404 :     case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    5719      2340404 :     case GEU: case GTU: case LEU: case LTU:
    5720      2340404 :     case UNORDERED: case ORDERED:
    5721              :       /* If the constant is negative, take its 1's complement and remask.
    5722              :          Then see how many zero bits we have.  */
    5723      2340404 :       nonzero = STORE_FLAG_VALUE;
    5724      2340404 :       if (bitwidth <= HOST_BITS_PER_WIDE_INT
    5725      2340404 :           && (nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))) != 0)
    5726            0 :         nonzero = (~nonzero) & GET_MODE_MASK (mode);
    5727              : 
    5728      2340404 :       return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
    5729              : 
    5730              :     default:
    5731              :       break;
    5732              :     }
    5733              : 
    5734              :   /* If we haven't been able to figure it out by one of the above rules,
    5735              :      see if some of the high-order bits are known to be zero.  If so,
    5736              :      count those bits and return one less than that amount.  If we can't
    5737              :      safely compute the mask for this mode, always return BITWIDTH.  */
    5738              : 
    5739    204740464 :   bitwidth = GET_MODE_PRECISION (mode);
    5740    204740464 :   if (bitwidth > HOST_BITS_PER_WIDE_INT)
    5741              :     return 1;
    5742              : 
    5743    198886885 :   nonzero = nonzero_bits (x, mode);
    5744    198886885 :   return nonzero & (HOST_WIDE_INT_1U << (bitwidth - 1))
    5745    204027950 :          ? 1 : bitwidth - floor_log2 (nonzero) - 1;
    5746              : }
    5747              : 
    5748              : /* Calculate the rtx_cost of a single instruction pattern.  A return value of
    5749              :    zero indicates an instruction pattern without a known cost.  */
    5750              : 
    5751              : int
    5752    150583992 : pattern_cost (rtx pat, bool speed)
    5753              : {
    5754    150583992 :   int i, cost;
    5755    150583992 :   rtx set;
    5756              : 
    5757              :   /* Extract the single set rtx from the instruction pattern.  We
    5758              :      can't use single_set since we only have the pattern.  We also
    5759              :      consider PARALLELs of a normal set and a single comparison.  In
    5760              :      that case we use the cost of the non-comparison SET operation,
    5761              :      which is most-likely to be the real cost of this operation.  */
    5762    150583992 :   if (GET_CODE (pat) == SET)
    5763              :     set = pat;
    5764     66995052 :   else if (GET_CODE (pat) == PARALLEL)
    5765              :     {
    5766              :       set = NULL_RTX;
    5767              :       rtx comparison = NULL_RTX;
    5768              : 
    5769     46199557 :       for (i = 0; i < XVECLEN (pat, 0); i++)
    5770              :         {
    5771     31081413 :           rtx x = XVECEXP (pat, 0, i);
    5772     31081413 :           if (GET_CODE (x) == SET)
    5773              :             {
    5774     15719460 :               if (GET_CODE (SET_SRC (x)) == COMPARE
    5775     15443901 :                   || GET_MODE_CLASS (GET_MODE (SET_DEST (x))) == MODE_CC)
    5776              :                 {
    5777       341899 :                   if (comparison)
    5778              :                     return 0;
    5779              :                   comparison = x;
    5780              :                 }
    5781              :               else
    5782              :                 {
    5783     15377561 :                   if (set)
    5784              :                     return 0;
    5785              :                   set = x;
    5786              :                 }
    5787              :             }
    5788              :         }
    5789              : 
    5790     15118144 :       if (!set && comparison)
    5791              :         set = comparison;
    5792              : 
    5793     14971504 :       if (!set)
    5794              :         return 0;
    5795              :     }
    5796              :   else
    5797              :     return 0;
    5798              : 
    5799     98634489 :   cost = set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)), speed);
    5800     98634489 :   return MAX (COSTS_N_INSNS (1), cost);
    5801              : }
    5802              : 
    5803              : /* Calculate the cost of a single instruction.  A return value of zero
    5804              :    indicates an instruction pattern without a known cost.  */
    5805              : 
    5806              : int
    5807    148450269 : insn_cost (rtx_insn *insn, bool speed)
    5808              : {
    5809    148450269 :   if (targetm.insn_cost)
    5810    148450269 :     return targetm.insn_cost (insn, speed);
    5811              : 
    5812            0 :   return pattern_cost (PATTERN (insn), speed);
    5813              : }
    5814              : 
    5815              : /* Returns estimate on cost of computing SEQ.  */
    5816              : 
    5817              : unsigned
    5818      2076302 : seq_cost (const rtx_insn *seq, bool speed)
    5819              : {
    5820      2076302 :   unsigned cost = 0;
    5821      2076302 :   rtx set;
    5822              : 
    5823      5442365 :   for (; seq; seq = NEXT_INSN (seq))
    5824              :     {
    5825      3366063 :       set = single_set (seq);
    5826      3366063 :       if (set)
    5827      3357699 :         cost += set_rtx_cost (set, speed);
    5828         8364 :       else if (NONDEBUG_INSN_P (seq))
    5829              :         {
    5830         8031 :           int this_cost = insn_cost (const_cast<struct rtx_insn *> (seq),
    5831              :                                      speed);
    5832         8031 :           if (this_cost > 0)
    5833          698 :             cost += this_cost;
    5834              :           else
    5835         7333 :             cost++;
    5836              :         }
    5837              :     }
    5838              : 
    5839      2076302 :   return cost;
    5840              : }
    5841              : 
    5842              : /* Given an insn INSN and condition COND, return the condition in a
    5843              :    canonical form to simplify testing by callers.  Specifically:
    5844              : 
    5845              :    (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
    5846              :    (2) Both operands will be machine operands.
    5847              :    (3) If an operand is a constant, it will be the second operand.
    5848              :    (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
    5849              :        for GE, GEU, and LEU.
    5850              : 
    5851              :    If the condition cannot be understood, or is an inequality floating-point
    5852              :    comparison which needs to be reversed, 0 will be returned.
    5853              : 
    5854              :    If REVERSE is nonzero, then reverse the condition prior to canonizing it.
    5855              : 
    5856              :    If EARLIEST is nonzero, it is a pointer to a place where the earliest
    5857              :    insn used in locating the condition was found.  If a replacement test
    5858              :    of the condition is desired, it should be placed in front of that
    5859              :    insn and we will be sure that the inputs are still valid.
    5860              : 
    5861              :    If WANT_REG is nonzero, we wish the condition to be relative to that
    5862              :    register, if possible.  Therefore, do not canonicalize the condition
    5863              :    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
    5864              :    to be a compare to a CC mode register.
    5865              : 
    5866              :    If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
    5867              :    and at INSN.  */
    5868              : 
    5869              : rtx
    5870     37966063 : canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
    5871              :                         rtx_insn **earliest,
    5872              :                         rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
    5873              : {
    5874     37966063 :   enum rtx_code code;
    5875     37966063 :   rtx_insn *prev = insn;
    5876     37966063 :   const_rtx set;
    5877     37966063 :   rtx tem;
    5878     37966063 :   rtx op0, op1;
    5879     37966063 :   int reverse_code = 0;
    5880     37966063 :   machine_mode mode;
    5881     37966063 :   basic_block bb = BLOCK_FOR_INSN (insn);
    5882              : 
    5883     37966063 :   code = GET_CODE (cond);
    5884     37966063 :   mode = GET_MODE (cond);
    5885     37966063 :   op0 = XEXP (cond, 0);
    5886     37966063 :   op1 = XEXP (cond, 1);
    5887              : 
    5888     37966063 :   if (reverse)
    5889      1892027 :     code = reversed_comparison_code (cond, insn);
    5890     37966063 :   if (code == UNKNOWN)
    5891              :     return 0;
    5892              : 
    5893     37966063 :   if (earliest)
    5894     18184859 :     *earliest = insn;
    5895              : 
    5896              :   /* If we are comparing a register with zero, see if the register is set
    5897              :      in the previous insn to a COMPARE or a comparison operation.  Perform
    5898              :      the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
    5899              :      in cse.cc  */
    5900              : 
    5901     80795718 :   while ((GET_RTX_CLASS (code) == RTX_COMPARE
    5902              :           || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
    5903     80795718 :          && op1 == CONST0_RTX (GET_MODE (op0))
    5904    141694032 :          && op0 != want_reg)
    5905              :     {
    5906              :       /* Set nonzero when we find something of interest.  */
    5907     60898314 :       rtx x = 0;
    5908              : 
    5909              :       /* If this is a COMPARE, pick up the two things being compared.  */
    5910     60898314 :       if (GET_CODE (op0) == COMPARE)
    5911              :         {
    5912            0 :           op1 = XEXP (op0, 1);
    5913            0 :           op0 = XEXP (op0, 0);
    5914            0 :           continue;
    5915              :         }
    5916     60898314 :       else if (!REG_P (op0))
    5917              :         break;
    5918              : 
    5919              :       /* Go back to the previous insn.  Stop if it is not an INSN.  We also
    5920              :          stop if it isn't a single set or if it has a REG_INC note because
    5921              :          we don't want to bother dealing with it.  */
    5922              : 
    5923     55866095 :       prev = prev_nonnote_nondebug_insn (prev);
    5924              : 
    5925     55866095 :       if (prev == 0
    5926     55763781 :           || !NONJUMP_INSN_P (prev)
    5927              :           || FIND_REG_INC_NOTE (prev, NULL_RTX)
    5928              :           /* In cfglayout mode, there do not have to be labels at the
    5929              :              beginning of a block, or jumps at the end, so the previous
    5930              :              conditions would not stop us when we reach bb boundary.  */
    5931    106517378 :           || BLOCK_FOR_INSN (prev) != bb)
    5932              :         break;
    5933              : 
    5934     50550490 :       set = set_of (op0, prev);
    5935              : 
    5936     50550490 :       if (set
    5937     50550490 :           && (GET_CODE (set) != SET
    5938     43823560 :               || !rtx_equal_p (SET_DEST (set), op0)))
    5939              :         break;
    5940              : 
    5941              :       /* If this is setting OP0, get what it sets it to if it looks
    5942              :          relevant.  */
    5943     50448009 :       if (set)
    5944              :         {
    5945     43721079 :           machine_mode inner_mode = GET_MODE (SET_DEST (set));
    5946              : #ifdef FLOAT_STORE_FLAG_VALUE
    5947              :           REAL_VALUE_TYPE fsfv;
    5948              : #endif
    5949              : 
    5950              :           /* ??? We may not combine comparisons done in a CCmode with
    5951              :              comparisons not done in a CCmode.  This is to aid targets
    5952              :              like Alpha that have an IEEE compliant EQ instruction, and
    5953              :              a non-IEEE compliant BEQ instruction.  The use of CCmode is
    5954              :              actually artificial, simply to prevent the combination, but
    5955              :              should not affect other platforms.
    5956              : 
    5957              :              However, we must allow VOIDmode comparisons to match either
    5958              :              CCmode or non-CCmode comparison, because some ports have
    5959              :              modeless comparisons inside branch patterns.
    5960              : 
    5961              :              ??? This mode check should perhaps look more like the mode check
    5962              :              in simplify_comparison in combine.  */
    5963     43721079 :           if (((GET_MODE_CLASS (mode) == MODE_CC)
    5964     43721079 :                != (GET_MODE_CLASS (inner_mode) == MODE_CC))
    5965     37307062 :               && mode != VOIDmode
    5966            0 :               && inner_mode != VOIDmode)
    5967              :             break;
    5968     43721079 :           if (GET_CODE (SET_SRC (set)) == COMPARE
    5969     43721079 :               || (((code == NE
    5970      4709210 :                     || (code == LT
    5971       156003 :                         && val_signbit_known_set_p (inner_mode,
    5972              :                                                     STORE_FLAG_VALUE))
    5973              : #ifdef FLOAT_STORE_FLAG_VALUE
    5974              :                     || (code == LT
    5975              :                         && SCALAR_FLOAT_MODE_P (inner_mode)
    5976              :                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
    5977              :                             REAL_VALUE_NEGATIVE (fsfv)))
    5978              : #endif
    5979              :                     ))
    5980      2816315 :                   && COMPARISON_P (SET_SRC (set))))
    5981     36504621 :             x = SET_SRC (set);
    5982      7216458 :           else if (((code == EQ
    5983      3988074 :                      || (code == GE
    5984       130105 :                          && val_signbit_known_set_p (inner_mode,
    5985              :                                                      STORE_FLAG_VALUE))
    5986              : #ifdef FLOAT_STORE_FLAG_VALUE
    5987              :                      || (code == GE
    5988              :                          && SCALAR_FLOAT_MODE_P (inner_mode)
    5989              :                          && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
    5990              :                              REAL_VALUE_NEGATIVE (fsfv)))
    5991              : #endif
    5992              :                      ))
    5993      7216458 :                    && COMPARISON_P (SET_SRC (set)))
    5994              :             {
    5995              :               reverse_code = 1;
    5996              :               x = SET_SRC (set);
    5997              :             }
    5998      6887591 :           else if ((code == EQ || code == NE)
    5999      5406765 :                    && GET_CODE (SET_SRC (set)) == XOR)
    6000              :             /* Handle sequences like:
    6001              : 
    6002              :                (set op0 (xor X Y))
    6003              :                ...(eq|ne op0 (const_int 0))...
    6004              : 
    6005              :                in which case:
    6006              : 
    6007              :                (eq op0 (const_int 0)) reduces to (eq X Y)
    6008              :                (ne op0 (const_int 0)) reduces to (ne X Y)
    6009              : 
    6010              :                This is the form used by MIPS16, for example.  */
    6011              :             x = SET_SRC (set);
    6012              :           else
    6013              :             break;
    6014              :         }
    6015              : 
    6016      6726930 :       else if (reg_set_p (op0, prev))
    6017              :         /* If this sets OP0, but not directly, we have to give up.  */
    6018              :         break;
    6019              : 
    6020     43568545 :       if (x)
    6021              :         {
    6022              :           /* If the caller is expecting the condition to be valid at INSN,
    6023              :              make sure X doesn't change before INSN.  */
    6024     36841615 :           if (valid_at_insn_p)
    6025     23447011 :             if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
    6026              :               break;
    6027     36102725 :           if (COMPARISON_P (x))
    6028       130676 :             code = GET_CODE (x);
    6029     36102725 :           if (reverse_code)
    6030              :             {
    6031        84745 :               code = reversed_comparison_code (x, prev);
    6032        84745 :               if (code == UNKNOWN)
    6033              :                 return 0;
    6034              :               reverse_code = 0;
    6035              :             }
    6036              : 
    6037     36102725 :           op0 = XEXP (x, 0), op1 = XEXP (x, 1);
    6038     36102725 :           if (earliest)
    6039     17540448 :             *earliest = prev;
    6040              :         }
    6041              :     }
    6042              : 
    6043              :   /* If constant is first, put it last.  */
    6044     37966063 :   if (CONSTANT_P (op0))
    6045        21207 :     code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
    6046              : 
    6047              :   /* If OP0 is the result of a comparison, we weren't able to find what
    6048              :      was really being compared, so fail.  */
    6049     37966063 :   if (!allow_cc_mode
    6050     20548566 :       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    6051              :     return 0;
    6052              : 
    6053              :   /* Canonicalize any ordered comparison with integers involving equality
    6054              :      if we can do computations in the relevant mode and we do not
    6055              :      overflow.  */
    6056              : 
    6057     36698890 :   scalar_int_mode op0_mode;
    6058     36698890 :   if (CONST_INT_P (op1)
    6059     24750603 :       && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
    6060     60811560 :       && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT)
    6061              :     {
    6062     24083233 :       HOST_WIDE_INT const_val = INTVAL (op1);
    6063     24083233 :       unsigned HOST_WIDE_INT uconst_val = const_val;
    6064     24083233 :       unsigned HOST_WIDE_INT max_val
    6065     24083233 :         = (unsigned HOST_WIDE_INT) GET_MODE_MASK (op0_mode);
    6066              : 
    6067     24083233 :       switch (code)
    6068              :         {
    6069       913761 :         case LE:
    6070       913761 :           if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
    6071       913761 :             code = LT, op1 = gen_int_mode (const_val + 1, op0_mode);
    6072              :           break;
    6073              : 
    6074              :         /* When cross-compiling, const_val might be sign-extended from
    6075              :            BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
    6076       301996 :         case GE:
    6077       301996 :           if ((const_val & max_val)
    6078       301996 :               != (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (op0_mode) - 1)))
    6079       301996 :             code = GT, op1 = gen_int_mode (const_val - 1, op0_mode);
    6080              :           break;
    6081              : 
    6082       676596 :         case LEU:
    6083       676596 :           if (uconst_val < max_val)
    6084       667080 :             code = LTU, op1 = gen_int_mode (uconst_val + 1, op0_mode);
    6085              :           break;
    6086              : 
    6087       121009 :         case GEU:
    6088       121009 :           if (uconst_val != 0)
    6089       121004 :             code = GTU, op1 = gen_int_mode (uconst_val - 1, op0_mode);
    6090              :           break;
    6091              : 
    6092              :         default:
    6093              :           break;
    6094              :         }
    6095              :     }
    6096              : 
    6097              :   /* We promised to return a comparison.  */
    6098     36698890 :   rtx ret = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
    6099     36698890 :   if (COMPARISON_P (ret))
    6100              :     return ret;
    6101              :   return 0;
    6102              : }
    6103              : 
    6104              : /* Given a jump insn JUMP, return the condition that will cause it to branch
    6105              :    to its JUMP_LABEL.  If the condition cannot be understood, or is an
    6106              :    inequality floating-point comparison which needs to be reversed, 0 will
    6107              :    be returned.
    6108              : 
    6109              :    If EARLIEST is nonzero, it is a pointer to a place where the earliest
    6110              :    insn used in locating the condition was found.  If a replacement test
    6111              :    of the condition is desired, it should be placed in front of that
    6112              :    insn and we will be sure that the inputs are still valid.  If EARLIEST
    6113              :    is null, the returned condition will be valid at INSN.
    6114              : 
    6115              :    If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
    6116              :    compare CC mode register.
    6117              : 
    6118              :    VALID_AT_INSN_P is the same as for canonicalize_condition.  */
    6119              : 
    6120              : rtx
    6121     37189397 : get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
    6122              :                int valid_at_insn_p)
    6123              : {
    6124     37189397 :   rtx cond;
    6125     37189397 :   int reverse;
    6126     37189397 :   rtx set;
    6127              : 
    6128              :   /* If this is not a standard conditional jump, we can't parse it.  */
    6129     37189397 :   if (!JUMP_P (jump)
    6130     37189397 :       || ! any_condjump_p (jump))
    6131      3643480 :     return 0;
    6132     33545917 :   set = pc_set (jump);
    6133              : 
    6134     33545917 :   cond = XEXP (SET_SRC (set), 0);
    6135              : 
    6136              :   /* If this branches to JUMP_LABEL when the condition is false, reverse
    6137              :      the condition.  */
    6138     33545917 :   reverse
    6139     67091834 :     = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
    6140     33545917 :       && label_ref_label (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
    6141              : 
    6142     33545917 :   return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
    6143     33545917 :                                  allow_cc_mode, valid_at_insn_p);
    6144              : }
    6145              : 
    6146              : /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
    6147              :    TARGET_MODE_REP_EXTENDED.
    6148              : 
    6149              :    Note that we assume that the property of
    6150              :    TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
    6151              :    narrower than mode B.  I.e., if A is a mode narrower than B then in
    6152              :    order to be able to operate on it in mode B, mode A needs to
    6153              :    satisfy the requirements set by the representation of mode B.  */
    6154              : 
    6155              : static void
    6156       280826 : init_num_sign_bit_copies_in_rep (void)
    6157              : {
    6158       280826 :   opt_scalar_int_mode in_mode_iter;
    6159       280826 :   scalar_int_mode mode;
    6160              : 
    6161      2246608 :   FOR_EACH_MODE_IN_CLASS (in_mode_iter, MODE_INT)
    6162      7863128 :     FOR_EACH_MODE_UNTIL (mode, in_mode_iter.require ())
    6163              :       {
    6164      5897346 :         scalar_int_mode in_mode = in_mode_iter.require ();
    6165      5897346 :         scalar_int_mode i;
    6166              : 
    6167              :         /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
    6168              :            extends to the next widest mode.  */
    6169      5897346 :         gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
    6170              :                     || GET_MODE_WIDER_MODE (mode).require () == in_mode);
    6171              : 
    6172              :         /* We are in in_mode.  Count how many bits outside of mode
    6173              :            have to be copies of the sign-bit.  */
    6174     21623602 :         FOR_EACH_MODE (i, mode, in_mode)
    6175              :           {
    6176              :             /* This must always exist (for the last iteration it will be
    6177              :                IN_MODE).  */
    6178     15726256 :             scalar_int_mode wider = GET_MODE_WIDER_MODE (i).require ();
    6179              : 
    6180     15726256 :             if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
    6181              :                 /* We can only check sign-bit copies starting from the
    6182              :                    top-bit.  In order to be able to check the bits we
    6183              :                    have already seen we pretend that subsequent bits
    6184              :                    have to be sign-bit copies too.  */
    6185     15726256 :                 || num_sign_bit_copies_in_rep [in_mode][mode])
    6186            0 :               num_sign_bit_copies_in_rep [in_mode][mode]
    6187            0 :                 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
    6188              :           }
    6189              :       }
    6190       280826 : }
    6191              : 
    6192              : /* Suppose that truncation from the machine mode of X to MODE is not a
    6193              :    no-op.  See if there is anything special about X so that we can
    6194              :    assume it already contains a truncated value of MODE.  */
    6195              : 
    6196              : bool
    6197            0 : truncated_to_mode (machine_mode mode, const_rtx x)
    6198              : {
    6199              :   /* This register has already been used in MODE without explicit
    6200              :      truncation.  */
    6201            0 :   if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
    6202              :     return true;
    6203              : 
    6204              :   /* See if we already satisfy the requirements of MODE.  If yes we
    6205              :      can just switch to MODE.  */
    6206            0 :   if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
    6207            0 :       && (num_sign_bit_copies (x, GET_MODE (x))
    6208            0 :           >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
    6209              :     return true;
    6210              : 
    6211              :   return false;
    6212              : }
    6213              : 
    6214              : /* Return true if RTX code CODE has a single sequence of zero or more
    6215              :    "e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
    6216              :    entry in that case.  */
    6217              : 
    6218              : static bool
    6219     43247204 : setup_reg_subrtx_bounds (unsigned int code)
    6220              : {
    6221     43247204 :   const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
    6222     43247204 :   unsigned int i = 0;
    6223     60939242 :   for (; format[i] != 'e'; ++i)
    6224              :     {
    6225     27240122 :       if (!format[i])
    6226              :         /* No subrtxes.  Leave start and count as 0.  */
    6227              :         return true;
    6228     19938646 :       if (format[i] == 'E' || format[i] == 'V')
    6229              :         return false;
    6230              :     }
    6231              : 
    6232              :   /* Record the sequence of 'e's.  */
    6233     33699120 :   rtx_all_subrtx_bounds[code].start = i;
    6234     54761070 :   do
    6235     54761070 :     ++i;
    6236     54761070 :   while (format[i] == 'e');
    6237     33699120 :   rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
    6238              :   /* rtl-iter.h relies on this.  */
    6239     33699120 :   gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
    6240              : 
    6241     37630684 :   for (; format[i]; ++i)
    6242      5335694 :     if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
    6243              :       return false;
    6244              : 
    6245              :   return true;
    6246              : }
    6247              : 
    6248              : /* Initialize rtx_all_subrtx_bounds.  */
    6249              : void
    6250       280826 : init_rtlanal (void)
    6251              : {
    6252       280826 :   int i;
    6253     43528030 :   for (i = 0; i < NUM_RTX_CODE; i++)
    6254              :     {
    6255     43247204 :       if (!setup_reg_subrtx_bounds (i))
    6256      3650738 :         rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
    6257     43247204 :       if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
    6258     40438944 :         rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
    6259              :     }
    6260              : 
    6261       280826 :   init_num_sign_bit_copies_in_rep ();
    6262       280826 : }
    6263              : 
    6264              : /* Check whether this is a constant pool constant.  */
    6265              : bool
    6266        11659 : constant_pool_constant_p (rtx x)
    6267              : {
    6268        11659 :   x = avoid_constant_pool_reference (x);
    6269        11659 :   return CONST_DOUBLE_P (x);
    6270              : }
    6271              : 
    6272              : /* If M is a bitmask that selects a field of low-order bits within an item but
    6273              :    not the entire word, return the length of the field.  Return -1 otherwise.
    6274              :    M is used in machine mode MODE.  */
    6275              : 
    6276              : int
    6277         8144 : low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
    6278              : {
    6279         8144 :   if (mode != VOIDmode)
    6280              :     {
    6281         8144 :       if (!HWI_COMPUTABLE_MODE_P (mode))
    6282              :         return -1;
    6283         8144 :       m &= GET_MODE_MASK (mode);
    6284              :     }
    6285              : 
    6286         8144 :   return exact_log2 (m + 1);
    6287              : }
    6288              : 
    6289              : /* Return the mode of MEM's address.  */
    6290              : 
    6291              : scalar_int_mode
    6292    179708108 : get_address_mode (rtx mem)
    6293              : {
    6294    179708108 :   machine_mode mode;
    6295              : 
    6296    179708108 :   gcc_assert (MEM_P (mem));
    6297    179708108 :   mode = GET_MODE (XEXP (mem, 0));
    6298    179708108 :   if (mode != VOIDmode)
    6299    179200970 :     return as_a <scalar_int_mode> (mode);
    6300       530781 :   return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
    6301              : }
    6302              : 
    6303              : /* Split up a CONST_DOUBLE or integer constant rtx
    6304              :    into two rtx's for single words,
    6305              :    storing in *FIRST the word that comes first in memory in the target
    6306              :    and in *SECOND the other.
    6307              : 
    6308              :    TODO: This function needs to be rewritten to work on any size
    6309              :    integer.  */
    6310              : 
    6311              : void
    6312            0 : split_double (rtx value, rtx *first, rtx *second)
    6313              : {
    6314            0 :   if (CONST_INT_P (value))
    6315              :     {
    6316            0 :       if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
    6317              :         {
    6318              :           /* In this case the CONST_INT holds both target words.
    6319              :              Extract the bits from it into two word-sized pieces.
    6320              :              Sign extend each half to HOST_WIDE_INT.  */
    6321            0 :           unsigned HOST_WIDE_INT low, high;
    6322            0 :           unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
    6323            0 :           unsigned bits_per_word = BITS_PER_WORD;
    6324              : 
    6325              :           /* Set sign_bit to the most significant bit of a word.  */
    6326            0 :           sign_bit = 1;
    6327            0 :           sign_bit <<= bits_per_word - 1;
    6328              : 
    6329              :           /* Set mask so that all bits of the word are set.  We could
    6330              :              have used 1 << BITS_PER_WORD instead of basing the
    6331              :              calculation on sign_bit.  However, on machines where
    6332              :              HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
    6333              :              compiler warning, even though the code would never be
    6334              :              executed.  */
    6335            0 :           mask = sign_bit << 1;
    6336            0 :           mask--;
    6337              : 
    6338              :           /* Set sign_extend as any remaining bits.  */
    6339            0 :           sign_extend = ~mask;
    6340              : 
    6341              :           /* Pick the lower word and sign-extend it.  */
    6342            0 :           low = INTVAL (value);
    6343            0 :           low &= mask;
    6344            0 :           if (low & sign_bit)
    6345            0 :             low |= sign_extend;
    6346              : 
    6347              :           /* Pick the higher word, shifted to the least significant
    6348              :              bits, and sign-extend it.  */
    6349            0 :           high = INTVAL (value);
    6350            0 :           high >>= bits_per_word - 1;
    6351            0 :           high >>= 1;
    6352            0 :           high &= mask;
    6353            0 :           if (high & sign_bit)
    6354            0 :             high |= sign_extend;
    6355              : 
    6356              :           /* Store the words in the target machine order.  */
    6357            0 :           if (WORDS_BIG_ENDIAN)
    6358              :             {
    6359              :               *first = GEN_INT (high);
    6360              :               *second = GEN_INT (low);
    6361              :             }
    6362              :           else
    6363              :             {
    6364            0 :               *first = GEN_INT (low);
    6365            0 :               *second = GEN_INT (high);
    6366              :             }
    6367              :         }
    6368              :       else
    6369              :         {
    6370              :           /* The rule for using CONST_INT for a wider mode
    6371              :              is that we regard the value as signed.
    6372              :              So sign-extend it.  */
    6373            0 :           rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
    6374            0 :           if (WORDS_BIG_ENDIAN)
    6375              :             {
    6376              :               *first = high;
    6377              :               *second = value;
    6378              :             }
    6379              :           else
    6380              :             {
    6381            0 :               *first = value;
    6382            0 :               *second = high;
    6383              :             }
    6384              :         }
    6385              :     }
    6386            0 :   else if (GET_CODE (value) == CONST_WIDE_INT)
    6387              :     {
    6388              :       /* All of this is scary code and needs to be converted to
    6389              :          properly work with any size integer.  */
    6390            0 :       gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
    6391            0 :       if (WORDS_BIG_ENDIAN)
    6392              :         {
    6393              :           *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
    6394              :           *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
    6395              :         }
    6396              :       else
    6397              :         {
    6398            0 :           *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
    6399            0 :           *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
    6400              :         }
    6401              :     }
    6402            0 :   else if (!CONST_DOUBLE_P (value))
    6403              :     {
    6404            0 :       if (WORDS_BIG_ENDIAN)
    6405              :         {
    6406              :           *first = const0_rtx;
    6407              :           *second = value;
    6408              :         }
    6409              :       else
    6410              :         {
    6411            0 :           *first = value;
    6412            0 :           *second = const0_rtx;
    6413              :         }
    6414              :     }
    6415            0 :   else if (GET_MODE (value) == VOIDmode
    6416              :            /* This is the old way we did CONST_DOUBLE integers.  */
    6417            0 :            || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
    6418              :     {
    6419              :       /* In an integer, the words are defined as most and least significant.
    6420              :          So order them by the target's convention.  */
    6421            0 :       if (WORDS_BIG_ENDIAN)
    6422              :         {
    6423              :           *first = GEN_INT (CONST_DOUBLE_HIGH (value));
    6424              :           *second = GEN_INT (CONST_DOUBLE_LOW (value));
    6425              :         }
    6426              :       else
    6427              :         {
    6428            0 :           *first = GEN_INT (CONST_DOUBLE_LOW (value));
    6429            0 :           *second = GEN_INT (CONST_DOUBLE_HIGH (value));
    6430              :         }
    6431              :     }
    6432              :   else
    6433              :     {
    6434            0 :       long l[2];
    6435              : 
    6436              :       /* Note, this converts the REAL_VALUE_TYPE to the target's
    6437              :          format, splits up the floating point double and outputs
    6438              :          exactly 32 bits of it into each of l[0] and l[1] --
    6439              :          not necessarily BITS_PER_WORD bits.  */
    6440            0 :       REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (value), l);
    6441              : 
    6442              :       /* If 32 bits is an entire word for the target, but not for the host,
    6443              :          then sign-extend on the host so that the number will look the same
    6444              :          way on the host that it would on the target.  See for instance
    6445              :          simplify_unary_operation.  The #if is needed to avoid compiler
    6446              :          warnings.  */
    6447              : 
    6448              : #if HOST_BITS_PER_LONG > 32
    6449            0 :       if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
    6450              :         {
    6451            0 :           if (l[0] & ((long) 1 << 31))
    6452            0 :             l[0] |= ((unsigned long) (-1) << 32);
    6453            0 :           if (l[1] & ((long) 1 << 31))
    6454            0 :             l[1] |= ((unsigned long) (-1) << 32);
    6455              :         }
    6456              : #endif
    6457              : 
    6458            0 :       *first = GEN_INT (l[0]);
    6459            0 :       *second = GEN_INT (l[1]);
    6460              :     }
    6461            0 : }
    6462              : 
    6463              : /* Return true if X is a sign_extract or zero_extract from the least
    6464              :    significant bit.  */
    6465              : 
    6466              : static bool
    6467    211324727 : lsb_bitfield_op_p (rtx x)
    6468              : {
    6469            0 :   if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
    6470              :     {
    6471            0 :       machine_mode mode = GET_MODE (XEXP (x, 0));
    6472            0 :       HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
    6473            0 :       HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
    6474            0 :       poly_int64 remaining_bits = GET_MODE_PRECISION (mode) - len;
    6475              : 
    6476            0 :       return known_eq (pos, BITS_BIG_ENDIAN ? remaining_bits : 0);
    6477              :     }
    6478              :   return false;
    6479              : }
    6480              : 
    6481              : /* Strip outer address "mutations" from LOC and return a pointer to the
    6482              :    inner value.  If OUTER_CODE is nonnull, store the code of the innermost
    6483              :    stripped expression there.
    6484              : 
    6485              :    "Mutations" either convert between modes or apply some kind of
    6486              :    extension, truncation or alignment.  */
    6487              : 
    6488              : rtx *
    6489    211322555 : strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
    6490              : {
    6491    211396693 :   for (;;)
    6492              :     {
    6493    211396693 :       enum rtx_code code = GET_CODE (*loc);
    6494    211396693 :       if (GET_RTX_CLASS (code) == RTX_UNARY)
    6495              :         /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
    6496              :            used to convert between pointer sizes.  */
    6497        71966 :         loc = &XEXP (*loc, 0);
    6498    211324727 :       else if (lsb_bitfield_op_p (*loc))
    6499              :         /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
    6500              :            acts as a combined truncation and extension.  */
    6501            0 :         loc = &XEXP (*loc, 0);
    6502    211324727 :       else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
    6503              :         /* (and ... (const_int -X)) is used to align to X bytes.  */
    6504         2160 :         loc = &XEXP (*loc, 0);
    6505    211322567 :       else if (code == SUBREG
    6506        30383 :                && (!OBJECT_P (SUBREG_REG (*loc))
    6507        30381 :                    || CONSTANT_P (SUBREG_REG (*loc)))
    6508    211322579 :                && subreg_lowpart_p (*loc))
    6509              :         /* (subreg (operator ...) ...) inside AND is used for mode
    6510              :            conversion too.  It is also used for load-address operations
    6511              :            in which an extension can be done for free, such as:
    6512              : 
    6513              :              (zero_extend:DI
    6514              :                (subreg:SI (plus:DI (reg:DI R) (symbol_ref:DI "foo") 0)))
    6515              : 
    6516              :            The latter usage also covers subregs of plain "displacements",
    6517              :            such as:
    6518              : 
    6519              :              (zero_extend:DI (subreg:SI (symbol_ref:DI "foo") 0))
    6520              : 
    6521              :            The inner address should then be the symbol_ref, not the subreg,
    6522              :            similarly to the plus case above.
    6523              : 
    6524              :            In contrast, the subreg in:
    6525              : 
    6526              :              (zero_extend:DI (subreg:SI (reg:DI R) 0))
    6527              : 
    6528              :            should be treated as the base, since it should be replaced by
    6529              :            an SImode hard register during register allocation.  */
    6530           12 :         loc = &SUBREG_REG (*loc);
    6531              :       else
    6532    211322555 :         return loc;
    6533        74138 :       if (outer_code)
    6534        74138 :         *outer_code = code;
    6535              :     }
    6536              : }
    6537              : 
    6538              : /* Return true if CODE applies some kind of scale.  The scaled value is
    6539              :    is the first operand and the scale is the second.  */
    6540              : 
    6541              : static bool
    6542     66207604 : binary_scale_code_p (enum rtx_code code)
    6543              : {
    6544     66207604 :   return (code == MULT
    6545     66207604 :           || code == ASHIFT
    6546              :           /* Needed by ARM targets.  */
    6547              :           || code == ASHIFTRT
    6548              :           || code == LSHIFTRT
    6549     64013550 :           || code == ROTATE
    6550     64013550 :           || code == ROTATERT);
    6551              : }
    6552              : 
    6553              : /* Return true if X appears to be a valid base or index term.  */
    6554              : static bool
    6555    132415208 : valid_base_or_index_term_p (rtx x)
    6556              : {
    6557    132415208 :   if (GET_CODE (x) == SCRATCH)
    6558              :     return true;
    6559              :   /* Handle what appear to be eliminated forms of a register.  If we reach
    6560              :      here, the elimination occurs outside of the outermost PLUS tree,
    6561              :      and so the elimination offset cannot be treated as a displacement
    6562              :      of the main address.  Instead, we need to treat the whole PLUS as
    6563              :      the base or index term.  The address can only be made legitimate by
    6564              :      reloading the PLUS.  */
    6565    132415208 :   if (GET_CODE (x) == PLUS && CONST_SCALAR_INT_P (XEXP (x, 1)))
    6566            0 :     x = XEXP (x, 0);
    6567    132415208 :   if (GET_CODE (x) == SUBREG)
    6568        52082 :     x = SUBREG_REG (x);
    6569    132415208 :   return REG_P (x) || MEM_P (x);
    6570              : }
    6571              : 
    6572              : /* If *INNER can be interpreted as a base, return a pointer to the inner term
    6573              :    (see address_info).  Return null otherwise.  */
    6574              : 
    6575              : static rtx *
    6576     66207604 : get_base_term (rtx *inner)
    6577              : {
    6578     66207604 :   if (GET_CODE (*inner) == LO_SUM)
    6579            0 :     inner = strip_address_mutations (&XEXP (*inner, 0));
    6580     66207604 :   if (valid_base_or_index_term_p (*inner))
    6581     64013550 :     return inner;
    6582              :   return 0;
    6583              : }
    6584              : 
    6585              : /* If *INNER can be interpreted as an index, return a pointer to the inner term
    6586              :    (see address_info).  Return null otherwise.  */
    6587              : 
    6588              : static rtx *
    6589     66207604 : get_index_term (rtx *inner)
    6590              : {
    6591              :   /* At present, only constant scales are allowed.  */
    6592     66207604 :   if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
    6593      2194054 :     inner = strip_address_mutations (&XEXP (*inner, 0));
    6594     66207604 :   if (valid_base_or_index_term_p (*inner))
    6595     66207604 :     return inner;
    6596              :   return 0;
    6597              : }
    6598              : 
    6599              : /* Set the segment part of address INFO to LOC, given that INNER is the
    6600              :    unmutated value.  */
    6601              : 
    6602              : static void
    6603           17 : set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
    6604              : {
    6605           17 :   gcc_assert (!info->segment);
    6606           17 :   info->segment = loc;
    6607           17 :   info->segment_term = inner;
    6608           17 : }
    6609              : 
    6610              : /* Set the base part of address INFO to LOC, given that INNER is the
    6611              :    unmutated value.  */
    6612              : 
    6613              : static void
    6614     64557520 : set_address_base (struct address_info *info, rtx *loc, rtx *inner)
    6615              : {
    6616     64557520 :   gcc_assert (!info->base);
    6617     64557520 :   info->base = loc;
    6618     64557520 :   info->base_term = inner;
    6619     64557520 : }
    6620              : 
    6621              : /* Set the index part of address INFO to LOC, given that INNER is the
    6622              :    unmutated value.  */
    6623              : 
    6624              : static void
    6625      3732831 : set_address_index (struct address_info *info, rtx *loc, rtx *inner)
    6626              : {
    6627      3732831 :   gcc_assert (!info->index);
    6628      3732831 :   info->index = loc;
    6629      3732831 :   info->index_term = inner;
    6630      3732831 : }
    6631              : 
    6632              : /* Set the displacement part of address INFO to LOC, given that INNER
    6633              :    is the constant term.  */
    6634              : 
    6635              : static void
    6636     65792104 : set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
    6637              : {
    6638     65792104 :   gcc_assert (!info->disp);
    6639     65792104 :   info->disp = loc;
    6640     65792104 :   info->disp_term = inner;
    6641     65792104 : }
    6642              : 
    6643              : /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
    6644              :    rest of INFO accordingly.  */
    6645              : 
    6646              : static void
    6647      1978850 : decompose_incdec_address (struct address_info *info)
    6648              : {
    6649      1978850 :   info->autoinc_p = true;
    6650              : 
    6651      1978850 :   rtx *base = &XEXP (*info->inner, 0);
    6652      1978850 :   set_address_base (info, base, base);
    6653      1978850 :   gcc_checking_assert (info->base == info->base_term);
    6654              : 
    6655              :   /* These addresses are only valid when the size of the addressed
    6656              :      value is known.  */
    6657      1978850 :   gcc_checking_assert (info->mode != VOIDmode);
    6658      1978850 : }
    6659              : 
    6660              : /* INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
    6661              :    of INFO accordingly.  */
    6662              : 
    6663              : static void
    6664       103897 : decompose_automod_address (struct address_info *info)
    6665              : {
    6666       103897 :   info->autoinc_p = true;
    6667              : 
    6668       103897 :   rtx *base = &XEXP (*info->inner, 0);
    6669       103897 :   set_address_base (info, base, base);
    6670       103897 :   gcc_checking_assert (info->base == info->base_term);
    6671              : 
    6672       103897 :   rtx plus = XEXP (*info->inner, 1);
    6673       103897 :   gcc_assert (GET_CODE (plus) == PLUS);
    6674              : 
    6675       103897 :   info->base_term2 = &XEXP (plus, 0);
    6676       103897 :   gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
    6677              : 
    6678       103897 :   rtx *step = &XEXP (plus, 1);
    6679       103897 :   rtx *inner_step = strip_address_mutations (step);
    6680       103897 :   if (CONSTANT_P (*inner_step))
    6681       103897 :     set_address_disp (info, step, inner_step);
    6682              :   else
    6683            0 :     set_address_index (info, step, inner_step);
    6684       103897 : }
    6685              : 
    6686              : /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
    6687              :    values in [PTR, END).  Return a pointer to the end of the used array.  */
    6688              : 
    6689              : static rtx **
    6690    131895828 : extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
    6691              : {
    6692    188745627 :   rtx x = *loc;
    6693    188745627 :   if (GET_CODE (x) == PLUS)
    6694              :     {
    6695     56849799 :       ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
    6696     56849799 :       ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
    6697              :     }
    6698              :   else
    6699              :     {
    6700    131895828 :       gcc_assert (ptr != end);
    6701    131895828 :       *ptr++ = loc;
    6702              :     }
    6703    131895828 :   return ptr;
    6704              : }
    6705              : 
    6706              : /* Evaluate the likelihood of X being a base or index value, returning
    6707              :    positive if it is likely to be a base, negative if it is likely to be
    6708              :    an index, and 0 if we can't tell.  Make the magnitude of the return
    6709              :    value reflect the amount of confidence we have in the answer.
    6710              : 
    6711              :    MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */
    6712              : 
    6713              : static int
    6714      3077554 : baseness (rtx x, machine_mode mode, addr_space_t as,
    6715              :           enum rtx_code outer_code, enum rtx_code index_code)
    6716              : {
    6717              :   /* Believe *_POINTER unless the address shape requires otherwise.  */
    6718      3077554 :   if (REG_P (x) && REG_POINTER (x))
    6719              :     return 2;
    6720      1756270 :   if (MEM_P (x) && MEM_POINTER (x))
    6721              :     return 2;
    6722              : 
    6723      1756270 :   if (REG_P (x) && HARD_REGISTER_P (x))
    6724              :     {
    6725              :       /* X is a hard register.  If it only fits one of the base
    6726              :          or index classes, choose that interpretation.  */
    6727           12 :       int regno = REGNO (x);
    6728           12 :       bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
    6729           12 :       bool index_p = REGNO_OK_FOR_INDEX_P (regno);
    6730           12 :       if (base_p != index_p)
    6731            0 :         return base_p ? 1 : -1;
    6732              :     }
    6733              :   return 0;
    6734              : }
    6735              : 
    6736              : /* INFO->INNER describes a normal, non-automodified address.
    6737              :    Fill in the rest of INFO accordingly.  */
    6738              : 
    6739              : static void
    6740     75046029 : decompose_normal_address (struct address_info *info)
    6741              : {
    6742              :   /* Treat the address as the sum of up to four values.  */
    6743     75046029 :   rtx *ops[4];
    6744     75046029 :   size_t n_ops = extract_plus_operands (info->inner, ops,
    6745     75046029 :                                         ops + ARRAY_SIZE (ops)) - ops;
    6746              : 
    6747              :   /* If there is more than one component, any base component is in a PLUS.  */
    6748     75046029 :   if (n_ops > 1)
    6749     55357018 :     info->base_outer_code = PLUS;
    6750              : 
    6751              :   /* Try to classify each sum operand now.  Leave those that could be
    6752              :      either a base or an index in OPS.  */
    6753              :   rtx *inner_ops[4];
    6754              :   size_t out = 0;
    6755    206941857 :   for (size_t in = 0; in < n_ops; ++in)
    6756              :     {
    6757    131895828 :       rtx *loc = ops[in];
    6758    131895828 :       rtx *inner = strip_address_mutations (loc);
    6759    131895828 :       if (CONSTANT_P (*inner))
    6760     65688207 :         set_address_disp (info, loc, inner);
    6761     66207621 :       else if (GET_CODE (*inner) == UNSPEC)
    6762           17 :         set_address_segment (info, loc, inner);
    6763              :       else
    6764              :         {
    6765              :           /* The only other possibilities are a base or an index.  */
    6766     66207604 :           rtx *base_term = get_base_term (inner);
    6767     66207604 :           rtx *index_term = get_index_term (inner);
    6768     66207604 :           gcc_assert (base_term || index_term);
    6769     66207604 :           if (!base_term)
    6770      2194054 :             set_address_index (info, loc, index_term);
    6771     64013550 :           else if (!index_term)
    6772            0 :             set_address_base (info, loc, base_term);
    6773              :           else
    6774              :             {
    6775     64013550 :               gcc_assert (base_term == index_term);
    6776     64013550 :               ops[out] = loc;
    6777     64013550 :               inner_ops[out] = base_term;
    6778     64013550 :               ++out;
    6779              :             }
    6780              :         }
    6781              :     }
    6782              : 
    6783              :   /* Classify the remaining OPS members as bases and indexes.  */
    6784     75046029 :   if (out == 1)
    6785              :     {
    6786              :       /* If we haven't seen a base or an index yet, assume that this is
    6787              :          the base.  If we were confident that another term was the base
    6788              :          or index, treat the remaining operand as the other kind.  */
    6789     60935996 :       if (!info->base)
    6790     60935996 :         set_address_base (info, ops[0], inner_ops[0]);
    6791              :       else
    6792            0 :         set_address_index (info, ops[0], inner_ops[0]);
    6793              :     }
    6794     14110033 :   else if (out == 2)
    6795              :     {
    6796      1538777 :       auto address_mode = targetm.addr_space.address_mode (info->as);
    6797      1538777 :       rtx inner_op0 = *inner_ops[0];
    6798      1538777 :       rtx inner_op1 = *inner_ops[1];
    6799      1538777 :       int base;
    6800              :       /* If one inner operand has the expected mode for a base and the other
    6801              :          doesn't, assume that the other one is the index.  This is useful
    6802              :          for addresses such as:
    6803              : 
    6804              :            (plus (zero_extend X) Y)
    6805              : 
    6806              :          zero_extend is not in itself enough to assume an index, since bases
    6807              :          can be zero-extended on POINTERS_EXTEND_UNSIGNED targets.  But if
    6808              :          Y has address mode and X doesn't, there should be little doubt that
    6809              :          Y is the base.  */
    6810      1538777 :       if (GET_MODE (inner_op0) == address_mode
    6811      1538777 :           && GET_MODE (inner_op1) != address_mode)
    6812              :         base = 0;
    6813      1538777 :       else if (GET_MODE (inner_op1) == address_mode
    6814      1538777 :                && GET_MODE (inner_op0) != address_mode)
    6815              :         base = 1;
    6816              :       /* In the event of a tie, assume the base comes first.  */
    6817      1538777 :       else if (baseness (inner_op0, info->mode, info->as, PLUS,
    6818      1538777 :                          GET_CODE (*ops[1]))
    6819      1538777 :                >= baseness (inner_op1, info->mode, info->as, PLUS,
    6820      1538777 :                             GET_CODE (*ops[0])))
    6821              :         base = 0;
    6822              :       else
    6823         7731 :         base = 1;
    6824      1538777 :       set_address_base (info, ops[base], inner_ops[base]);
    6825      1538777 :       set_address_index (info, ops[1 - base], inner_ops[1 - base]);
    6826              :     }
    6827              :   else
    6828     12571256 :     gcc_assert (out == 0);
    6829     75046029 : }
    6830              : 
    6831              : /* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
    6832              :    or VOIDmode if not known.  AS is the address space associated with LOC.
    6833              :    OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */
    6834              : 
    6835              : void
    6836     77128776 : decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
    6837              :                    addr_space_t as, enum rtx_code outer_code)
    6838              : {
    6839     77128776 :   memset (info, 0, sizeof (*info));
    6840     77128776 :   info->mode = mode;
    6841     77128776 :   info->as = as;
    6842     77128776 :   info->addr_outer_code = outer_code;
    6843     77128776 :   info->outer = loc;
    6844     77128776 :   info->inner = strip_address_mutations (loc, &outer_code);
    6845     77128776 :   info->base_outer_code = outer_code;
    6846     77128776 :   switch (GET_CODE (*info->inner))
    6847              :     {
    6848      1978850 :     case PRE_DEC:
    6849      1978850 :     case PRE_INC:
    6850      1978850 :     case POST_DEC:
    6851      1978850 :     case POST_INC:
    6852      1978850 :       decompose_incdec_address (info);
    6853      1978850 :       break;
    6854              : 
    6855       103897 :     case PRE_MODIFY:
    6856       103897 :     case POST_MODIFY:
    6857       103897 :       decompose_automod_address (info);
    6858       103897 :       break;
    6859              : 
    6860     75046029 :     default:
    6861     75046029 :       decompose_normal_address (info);
    6862     75046029 :       break;
    6863              :     }
    6864     77128776 : }
    6865              : 
    6866              : /* Describe address operand LOC in INFO.  */
    6867              : 
    6868              : void
    6869      3437514 : decompose_lea_address (struct address_info *info, rtx *loc)
    6870              : {
    6871      3437514 :   decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
    6872      3437514 : }
    6873              : 
    6874              : /* Describe the address of MEM X in INFO.  */
    6875              : 
    6876              : void
    6877     73678171 : decompose_mem_address (struct address_info *info, rtx x)
    6878              : {
    6879     73678171 :   gcc_assert (MEM_P (x));
    6880     73678171 :   decompose_address (info, &XEXP (x, 0), GET_MODE (x),
    6881     73678171 :                      MEM_ADDR_SPACE (x), MEM);
    6882     73678171 : }
    6883              : 
    6884              : /* Update INFO after a change to the address it describes.  */
    6885              : 
    6886              : void
    6887        13091 : update_address (struct address_info *info)
    6888              : {
    6889        13091 :   decompose_address (info, info->outer, info->mode, info->as,
    6890              :                      info->addr_outer_code);
    6891        13091 : }
    6892              : 
    6893              : /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
    6894              :    more complicated than that.  */
    6895              : 
    6896              : HOST_WIDE_INT
    6897            0 : get_index_scale (const struct address_info *info)
    6898              : {
    6899            0 :   rtx index = *info->index;
    6900            0 :   if (GET_CODE (index) == MULT
    6901            0 :       && CONST_INT_P (XEXP (index, 1))
    6902            0 :       && info->index_term == &XEXP (index, 0))
    6903            0 :     return INTVAL (XEXP (index, 1));
    6904              : 
    6905            0 :   if (GET_CODE (index) == ASHIFT
    6906            0 :       && CONST_INT_P (XEXP (index, 1))
    6907            0 :       && info->index_term == &XEXP (index, 0))
    6908            0 :     return HOST_WIDE_INT_1 << INTVAL (XEXP (index, 1));
    6909              : 
    6910            0 :   if (info->index == info->index_term)
    6911            0 :     return 1;
    6912              : 
    6913              :   return 0;
    6914              : }
    6915              : 
    6916              : /* Return the "index code" of INFO, in the form required by
    6917              :    ok_for_base_p_1.  */
    6918              : 
    6919              : enum rtx_code
    6920     33260886 : get_index_code (const struct address_info *info)
    6921              : {
    6922     33260886 :   if (info->index)
    6923      1521064 :     return GET_CODE (*info->index);
    6924              : 
    6925     31739822 :   if (info->disp)
    6926     25912983 :     return GET_CODE (*info->disp);
    6927              : 
    6928              :   return SCRATCH;
    6929              : }
    6930              : 
    6931              : /* Return true if RTL X contains a SYMBOL_REF.  */
    6932              : 
    6933              : bool
    6934       751965 : contains_symbol_ref_p (const_rtx x)
    6935              : {
    6936       751965 :   subrtx_iterator::array_type array;
    6937      3101368 :   FOR_EACH_SUBRTX (iter, array, x, ALL)
    6938      2426783 :     if (SYMBOL_REF_P (*iter))
    6939        77380 :       return true;
    6940              : 
    6941       674585 :   return false;
    6942       751965 : }
    6943              : 
    6944              : /* Return true if RTL X contains a SYMBOL_REF or LABEL_REF.  */
    6945              : 
    6946              : bool
    6947       360029 : contains_symbolic_reference_p (const_rtx x)
    6948              : {
    6949       360029 :   subrtx_iterator::array_type array;
    6950       832843 :   FOR_EACH_SUBRTX (iter, array, x, ALL)
    6951       477367 :     if (SYMBOL_REF_P (*iter) || GET_CODE (*iter) == LABEL_REF)
    6952         4553 :       return true;
    6953              : 
    6954       355476 :   return false;
    6955       360029 : }
    6956              : 
    6957              : /* Return true if RTL X contains a constant pool address.  */
    6958              : 
    6959              : bool
    6960            0 : contains_constant_pool_address_p (const_rtx x)
    6961              : {
    6962            0 :   subrtx_iterator::array_type array;
    6963            0 :   FOR_EACH_SUBRTX (iter, array, x, ALL)
    6964            0 :     if (SYMBOL_REF_P (*iter) && CONSTANT_POOL_ADDRESS_P (*iter))
    6965            0 :       return true;
    6966              : 
    6967            0 :   return false;
    6968            0 : }
    6969              : 
    6970              : 
    6971              : /* Return true if X contains a thread-local symbol.  */
    6972              : 
    6973              : bool
    6974            0 : tls_referenced_p (const_rtx x)
    6975              : {
    6976            0 :   if (!targetm.have_tls)
    6977              :     return false;
    6978              : 
    6979            0 :   subrtx_iterator::array_type array;
    6980            0 :   FOR_EACH_SUBRTX (iter, array, x, ALL)
    6981            0 :     if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
    6982            0 :       return true;
    6983            0 :   return false;
    6984            0 : }
    6985              : 
    6986              : /* Process recursively X of INSN and add REG_INC notes if necessary.  */
    6987              : void
    6988            0 : add_auto_inc_notes (rtx_insn *insn, rtx x)
    6989              : {
    6990            0 :   enum rtx_code code = GET_CODE (x);
    6991            0 :   const char *fmt;
    6992            0 :   int i, j;
    6993              : 
    6994            0 :   if (code == MEM && auto_inc_p (XEXP (x, 0)))
    6995              :     {
    6996            0 :       add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
    6997            0 :       return;
    6998              :     }
    6999              : 
    7000              :   /* Scan all X sub-expressions.  */
    7001            0 :   fmt = GET_RTX_FORMAT (code);
    7002            0 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    7003              :     {
    7004            0 :       if (fmt[i] == 'e')
    7005            0 :         add_auto_inc_notes (insn, XEXP (x, i));
    7006            0 :       else if (fmt[i] == 'E')
    7007            0 :         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    7008            0 :           add_auto_inc_notes (insn, XVECEXP (x, i, j));
    7009              :     }
    7010              : }
    7011              : 
    7012              : /* Return true if INSN is the second element of a pair of macro-fused
    7013              :    single_sets, both of which having the same register output as another.  */
    7014              : bool
    7015     67463292 : single_output_fused_pair_p (rtx_insn *insn)
    7016              : {
    7017     67463292 :   rtx set, prev_set;
    7018     67463292 :   rtx_insn *prev;
    7019              : 
    7020     67463292 :   return INSN_P (insn)
    7021     67463292 :          && SCHED_GROUP_P (insn)
    7022      4114056 :          && (prev = prev_nonnote_nondebug_insn (insn))
    7023      4114056 :          && (set = single_set (insn)) != NULL_RTX
    7024      4114056 :          && (prev_set = single_set (prev))
    7025              :              != NULL_RTX
    7026      4113912 :          && REG_P (SET_DEST (set))
    7027            0 :          && REG_P (SET_DEST (prev_set))
    7028     67463292 :          && (!reload_completed
    7029            0 :              || REGNO (SET_DEST (set)) == REGNO (SET_DEST (prev_set)));
    7030              : }
    7031              : 
    7032              : /* Return true if X is register asm.  */
    7033              : 
    7034              : bool
    7035     18350829 : register_asm_p (const_rtx x)
    7036              : {
    7037     18350829 :   return (REG_P (x)
    7038     18350829 :           && REG_EXPR (x) != NULL_TREE
    7039      8969895 :           && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (x))
    7040      2802698 :           && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (x))
    7041     18397681 :           && DECL_REGISTER (REG_EXPR (x)));
    7042              : }
    7043              : 
    7044              : /* Return true if, for all OP of mode OP_MODE:
    7045              : 
    7046              :      (vec_select:RESULT_MODE OP SEL)
    7047              : 
    7048              :    is equivalent to the highpart RESULT_MODE of OP.  */
    7049              : 
    7050              : bool
    7051            0 : vec_series_highpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
    7052              : {
    7053            0 :   int nunits;
    7054            0 :   if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
    7055            0 :       && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
    7056              :     {
    7057            0 :       int offset = BYTES_BIG_ENDIAN ? 0 : nunits - XVECLEN (sel, 0);
    7058            0 :       return rtvec_series_p (XVEC (sel, 0), offset);
    7059              :     }
    7060              :   return false;
    7061              : }
    7062              : 
    7063              : /* Return true if, for all OP of mode OP_MODE:
    7064              : 
    7065              :      (vec_select:RESULT_MODE OP SEL)
    7066              : 
    7067              :    is equivalent to the lowpart RESULT_MODE of OP.  */
    7068              : 
    7069              : bool
    7070      5263362 : vec_series_lowpart_p (machine_mode result_mode, machine_mode op_mode, rtx sel)
    7071              : {
    7072      5263362 :   int nunits;
    7073      5263362 :   if (GET_MODE_NUNITS (op_mode).is_constant (&nunits)
    7074      5263362 :       && targetm.can_change_mode_class (op_mode, result_mode, ALL_REGS))
    7075              :     {
    7076       669435 :       int offset = BYTES_BIG_ENDIAN ? nunits - XVECLEN (sel, 0) : 0;
    7077       669435 :       return rtvec_series_p (XVEC (sel, 0), offset);
    7078              :     }
    7079              :   return false;
    7080              : }
    7081              : 
    7082              : /* Return true if X contains a paradoxical subreg.  */
    7083              : 
    7084              : bool
    7085      1188392 : contains_paradoxical_subreg_p (rtx x)
    7086              : {
    7087      1188392 :   subrtx_var_iterator::array_type array;
    7088      5039382 :   FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    7089              :     {
    7090      3901483 :       x = *iter;
    7091      3901483 :       if (SUBREG_P (x) && paradoxical_subreg_p (x))
    7092        50493 :         return true;
    7093              :     }
    7094      1137899 :   return false;
    7095      1188392 : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.