LCOV - code coverage report
Current view: top level - gcc - explow.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 75.7 % 877 664
Test Date: 2026-02-28 14:20:25 Functions: 91.5 % 47 43
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Subroutines for manipulating rtx's in semantically interesting ways.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "target.h"
      25              : #include "function.h"
      26              : #include "rtl.h"
      27              : #include "tree.h"
      28              : #include "memmodel.h"
      29              : #include "tm_p.h"
      30              : #include "optabs.h"
      31              : #include "expmed.h"
      32              : #include "profile-count.h"
      33              : #include "emit-rtl.h"
      34              : #include "recog.h"
      35              : #include "diagnostic-core.h"
      36              : #include "stor-layout.h"
      37              : #include "langhooks.h"
      38              : #include "except.h"
      39              : #include "dojump.h"
      40              : #include "explow.h"
      41              : #include "expr.h"
      42              : #include "stringpool.h"
      43              : #include "common/common-target.h"
      44              : #include "output.h"
      45              : 
      46              : static rtx break_out_memory_refs (rtx);
      47              : 
      48              : 
      49              : /* Truncate and perhaps sign-extend C as appropriate for MODE.  */
      50              : 
      51              : HOST_WIDE_INT
      52   5800904446 : trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
      53              : {
      54              :   /* Not scalar_int_mode because we also allow pointer bound modes.  */
      55   5800904446 :   scalar_mode smode = as_a <scalar_mode> (mode);
      56   5800904446 :   int width = GET_MODE_PRECISION (smode);
      57              : 
      58              :   /* You want to truncate to a _what_?  */
      59   5800904446 :   gcc_assert (SCALAR_INT_MODE_P (mode));
      60              : 
      61              :   /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
      62   5800904446 :   if (smode == BImode)
      63            0 :     return c & 1 ? STORE_FLAG_VALUE : 0;
      64              : 
      65              :   /* Sign-extend for the requested mode.  */
      66              : 
      67   5800904446 :   if (width < HOST_BITS_PER_WIDE_INT)
      68              :     {
      69   4665748902 :       HOST_WIDE_INT sign = 1;
      70   4665748902 :       sign <<= width - 1;
      71   4665748902 :       c &= (sign << 1) - 1;
      72   4665748902 :       c ^= sign;
      73   4665748902 :       c -= sign;
      74              :     }
      75              : 
      76              :   return c;
      77              : }
      78              : 
      79              : /* Likewise for polynomial values, using the sign-extended representation
      80              :    for each individual coefficient.  */
      81              : 
      82              : poly_int64
      83   1238459017 : trunc_int_for_mode (poly_int64 x, machine_mode mode)
      84              : {
      85   2476918034 :   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      86   1238459017 :     x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
      87   1238459017 :   return x;
      88              : }
      89              : 
      90              : /* Return an rtx for the sum of X and the integer C, given that X has
      91              :    mode MODE.  INPLACE is true if X can be modified inplace or false
      92              :    if it must be treated as immutable.  */
      93              : 
      94              : rtx
      95    730294283 : plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
      96              : {
      97    730294283 :   RTX_CODE code;
      98    730294283 :   rtx y;
      99    730294283 :   rtx tem;
     100    730294283 :   int all_constant = 0;
     101              : 
     102    730294283 :   gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
     103              : 
     104    730294283 :   if (known_eq (c, 0))
     105              :     return x;
     106              : 
     107    688107131 :  restart:
     108              : 
     109    688769759 :   code = GET_CODE (x);
     110    688769759 :   y = x;
     111              : 
     112    688769759 :   switch (code)
     113              :     {
     114    494418264 :     CASE_CONST_SCALAR_INT:
     115    494418264 :       return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
     116       471780 :     case MEM:
     117              :       /* If this is a reference to the constant pool, try replacing it with
     118              :          a reference to a new constant.  If the resulting address isn't
     119              :          valid, don't return it because we have no way to validize it.  */
     120       471780 :       if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
     121       471780 :           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
     122              :         {
     123            0 :           rtx cst = get_pool_constant (XEXP (x, 0));
     124              : 
     125            0 :           if (GET_CODE (cst) == CONST_VECTOR
     126            0 :               && GET_MODE_INNER (GET_MODE (cst)) == mode)
     127              :             {
     128            0 :               cst = gen_lowpart (mode, cst);
     129            0 :               gcc_assert (cst);
     130              :             }
     131            0 :           else if (GET_MODE (cst) == VOIDmode
     132            0 :                    && get_pool_mode (XEXP (x, 0)) != mode)
     133              :             break;
     134            0 :           if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
     135              :             {
     136            0 :               tem = plus_constant (mode, cst, c);
     137            0 :               tem = force_const_mem (GET_MODE (x), tem);
     138              :               /* Targets may disallow some constants in the constant pool, thus
     139              :                  force_const_mem may return NULL_RTX.  */
     140            0 :               if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
     141            0 :                 return tem;
     142              :             }
     143              :         }
     144              :       break;
     145              : 
     146       662628 :     case CONST:
     147              :       /* If adding to something entirely constant, set a flag
     148              :          so that we can add a CONST around the result.  */
     149       662628 :       if (inplace && shared_const_p (x))
     150              :         inplace = false;
     151       662628 :       x = XEXP (x, 0);
     152       662628 :       all_constant = 1;
     153       662628 :       goto restart;
     154              : 
     155              :     case SYMBOL_REF:
     156              :     case LABEL_REF:
     157              :       all_constant = 1;
     158              :       break;
     159              : 
     160     39636115 :     case PLUS:
     161              :       /* The interesting case is adding the integer to a sum.  Look
     162              :          for constant term in the sum and combine with C.  For an
     163              :          integer constant term or a constant term that is not an
     164              :          explicit integer, we combine or group them together anyway.
     165              : 
     166              :          We may not immediately return from the recursive call here, lest
     167              :          all_constant gets lost.  */
     168              : 
     169     39636115 :       if (CONSTANT_P (XEXP (x, 1)))
     170              :         {
     171     37783480 :           rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
     172     37783480 :           if (term == const0_rtx)
     173       307885 :             x = XEXP (x, 0);
     174     37475595 :           else if (inplace)
     175            0 :             XEXP (x, 1) = term;
     176              :           else
     177     37475595 :             x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
     178     37783480 :           c = 0;
     179              :         }
     180      1852635 :       else if (rtx *const_loc = find_constant_term_loc (&y))
     181              :         {
     182            0 :           if (!inplace)
     183              :             {
     184              :               /* We need to be careful since X may be shared and we can't
     185              :                  modify it in place.  */
     186            0 :               x = copy_rtx (x);
     187            0 :               const_loc = find_constant_term_loc (&x);
     188              :             }
     189            0 :           *const_loc = plus_constant (mode, *const_loc, c, true);
     190            0 :           c = 0;
     191              :         }
     192              :       break;
     193              : 
     194              :     default:
     195              :       if (CONST_POLY_INT_P (x))
     196              :         return immed_wide_int_const (const_poly_int_value (x) + c, mode);
     197              :       break;
     198              :     }
     199              : 
     200    193688867 :   if (maybe_ne (c, 0))
     201    155905387 :     x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
     202              : 
     203    193688867 :   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
     204              :     return x;
     205    193687284 :   else if (all_constant)
     206      7570690 :     return gen_rtx_CONST (mode, x);
     207              :   else
     208              :     return x;
     209              : }
     210              : 
     211              : /* If X is a sum, return a new sum like X but lacking any constant terms.
     212              :    Add all the removed constant terms into *CONSTPTR.
     213              :    X itself is not altered.  The result != X if and only if
     214              :    it is not isomorphic to X.  */
     215              : 
     216              : rtx
     217       629983 : eliminate_constant_term (rtx x, rtx *constptr)
     218              : {
     219       629983 :   rtx x0, x1;
     220       629983 :   rtx tem;
     221              : 
     222       629983 :   if (GET_CODE (x) != PLUS)
     223              :     return x;
     224              : 
     225              :   /* First handle constants appearing at this level explicitly.  */
     226       282030 :   if (CONST_INT_P (XEXP (x, 1))
     227       105976 :       && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
     228              :                                            XEXP (x, 1))) != 0
     229       388006 :       && CONST_INT_P (tem))
     230              :     {
     231       105976 :       *constptr = tem;
     232       105976 :       return eliminate_constant_term (XEXP (x, 0), constptr);
     233              :     }
     234              : 
     235       176054 :   tem = const0_rtx;
     236       176054 :   x0 = eliminate_constant_term (XEXP (x, 0), &tem);
     237       176054 :   x1 = eliminate_constant_term (XEXP (x, 1), &tem);
     238       176054 :   if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
     239            0 :       && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
     240              :                                            *constptr, tem)) != 0
     241       176054 :       && CONST_INT_P (tem))
     242              :     {
     243            0 :       *constptr = tem;
     244            0 :       return gen_rtx_PLUS (GET_MODE (x), x0, x1);
     245              :     }
     246              : 
     247              :   return x;
     248              : }
     249              : 
     250              : 
     251              : /* Return a copy of X in which all memory references
     252              :    and all constants that involve symbol refs
     253              :    have been replaced with new temporary registers.
     254              :    Also emit code to load the memory locations and constants
     255              :    into those registers.
     256              : 
     257              :    If X contains no such constants or memory references,
     258              :    X itself (not a copy) is returned.
     259              : 
     260              :    If a constant is found in the address that is not a legitimate constant
     261              :    in an insn, it is left alone in the hope that it might be valid in the
     262              :    address.
     263              : 
     264              :    X may contain no arithmetic except addition, subtraction and multiplication.
     265              :    Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
     266              : 
     267              : static rtx
     268     40835128 : break_out_memory_refs (rtx x)
     269              : {
     270     40835128 :   if (MEM_P (x)
     271     40835128 :       || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
     272     12700575 :           && GET_MODE (x) != VOIDmode))
     273       492801 :     x = force_reg (GET_MODE (x), x);
     274     40342327 :   else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
     275     27014193 :            || GET_CODE (x) == MULT)
     276              :     {
     277     13757314 :       rtx op0 = break_out_memory_refs (XEXP (x, 0));
     278     13757314 :       rtx op1 = break_out_memory_refs (XEXP (x, 1));
     279              : 
     280     13757314 :       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
     281       218187 :         x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
     282              :     }
     283              : 
     284     40835128 :   return x;
     285              : }
     286              : 
     287              : /* Given X, a memory address in address space AS' pointer mode, convert it to
     288              :    an address in the address space's address mode, or vice versa (TO_MODE says
     289              :    which way).  We take advantage of the fact that pointers are not allowed to
     290              :    overflow by commuting arithmetic operations over conversions so that address
     291              :    arithmetic insns can be used. IN_CONST is true if this conversion is inside
     292              :    a CONST. NO_EMIT is true if no insns should be emitted, and instead
     293              :    it should return NULL if it can't be simplified without emitting insns.  */
     294              : 
     295              : rtx
     296     31891949 : convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
     297              :                                      rtx x, addr_space_t as ATTRIBUTE_UNUSED,
     298              :                                      bool in_const ATTRIBUTE_UNUSED,
     299              :                                      bool no_emit ATTRIBUTE_UNUSED)
     300              : {
     301              : #ifndef POINTERS_EXTEND_UNSIGNED
     302              :   gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
     303              :   return x;
     304              : #else /* defined(POINTERS_EXTEND_UNSIGNED) */
     305     31891949 :   scalar_int_mode pointer_mode, address_mode, from_mode;
     306     31891949 :   rtx temp;
     307     31891949 :   enum rtx_code code;
     308              : 
     309              :   /* If X already has the right mode, just return it.  */
     310     31891949 :   if (GET_MODE (x) == to_mode)
     311              :     return x;
     312              : 
     313         4444 :   pointer_mode = targetm.addr_space.pointer_mode (as);
     314         4444 :   address_mode = targetm.addr_space.address_mode (as);
     315         4444 :   from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
     316              : 
     317              :   /* Here we handle some special cases.  If none of them apply, fall through
     318              :      to the default case.  */
     319         4444 :   switch (GET_CODE (x))
     320              :     {
     321         4244 :     CASE_CONST_SCALAR_INT:
     322        12732 :       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
     323              :         code = TRUNCATE;
     324         4244 :       else if (POINTERS_EXTEND_UNSIGNED < 0)
     325              :         break;
     326         4244 :       else if (POINTERS_EXTEND_UNSIGNED > 0)
     327         4244 :         code = ZERO_EXTEND;
     328              :       else
     329              :         code = SIGN_EXTEND;
     330         4244 :       temp = simplify_unary_operation (code, to_mode, x, from_mode);
     331         4244 :       if (temp)
     332              :         return temp;
     333              :       break;
     334              : 
     335           13 :     case SUBREG:
     336            1 :       if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
     337           13 :           && GET_MODE (SUBREG_REG (x)) == to_mode)
     338              :         return SUBREG_REG (x);
     339              :       break;
     340              : 
     341            0 :     case LABEL_REF:
     342            0 :       temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
     343            0 :       LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
     344            0 :       return temp;
     345              : 
     346          136 :     case SYMBOL_REF:
     347          136 :       temp = shallow_copy_rtx (x);
     348          136 :       PUT_MODE (temp, to_mode);
     349          136 :       return temp;
     350              : 
     351            1 :     case CONST:
     352            1 :       {
     353            1 :         auto *last = no_emit ? nullptr : get_last_insn ();
     354            1 :         temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
     355              :                                                     true, no_emit);
     356            1 :         if (temp && (no_emit || last == get_last_insn ()))
     357            1 :           return gen_rtx_CONST (to_mode, temp);
     358              :         return temp;
     359              :       }
     360              : 
     361           28 :     case PLUS:
     362           28 :     case MULT:
     363              :       /* For addition we can safely permute the conversion and addition
     364              :          operation if one operand is a constant and converting the constant
     365              :          does not change it or if one operand is a constant and we are
     366              :          using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
     367              :          We can always safely permute them if we are making the address
     368              :          narrower. Inside a CONST RTL, this is safe for both pointers
     369              :          zero or sign extended as pointers cannot wrap. */
     370           56 :       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
     371           28 :           || (GET_CODE (x) == PLUS
     372           22 :               && CONST_INT_P (XEXP (x, 1))
     373            9 :               && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
     374              :                   || XEXP (x, 1) == convert_memory_address_addr_space_1
     375            8 :                                      (to_mode, XEXP (x, 1), as, in_const,
     376              :                                       no_emit)
     377            8 :                   || POINTERS_EXTEND_UNSIGNED < 0)))
     378              :         {
     379            9 :           temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
     380              :                                                       as, in_const, no_emit);
     381            9 :           return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
     382              :                                          temp, XEXP (x, 1))
     383              :                        : temp);
     384              :         }
     385              :       break;
     386              : 
     387            0 :     case UNSPEC:
     388              :       /* Assume that all UNSPECs in a constant address can be converted
     389              :          operand-by-operand.  We could add a target hook if some targets
     390              :          require different behavior.  */
     391            0 :       if (in_const && GET_MODE (x) == from_mode)
     392              :         {
     393            0 :           unsigned int n = XVECLEN (x, 0);
     394            0 :           rtvec v = gen_rtvec (n);
     395            0 :           for (unsigned int i = 0; i < n; ++i)
     396              :             {
     397            0 :               rtx op = XVECEXP (x, 0, i);
     398            0 :               if (GET_MODE (op) == from_mode)
     399            0 :                 op = convert_memory_address_addr_space_1 (to_mode, op, as,
     400              :                                                           in_const, no_emit);
     401            0 :               RTVEC_ELT (v, i) = op;
     402              :             }
     403            0 :           return gen_rtx_UNSPEC (to_mode, v, XINT (x, 1));
     404              :         }
     405              :       break;
     406              : 
     407              :     default:
     408              :       break;
     409              :     }
     410              : 
     411           42 :   if (no_emit)
     412              :     return NULL_RTX;
     413              : 
     414           42 :   return convert_modes (to_mode, from_mode,
     415           42 :                         x, POINTERS_EXTEND_UNSIGNED);
     416              : #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
     417              : }
     418              : 
     419              : /* Given X, a memory address in address space AS' pointer mode, convert it to
     420              :    an address in the address space's address mode, or vice versa (TO_MODE says
     421              :    which way).  We take advantage of the fact that pointers are not allowed to
     422              :    overflow by commuting arithmetic operations over conversions so that address
     423              :    arithmetic insns can be used.  */
     424              : 
     425              : rtx
     426     31891910 : convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
     427              :                                    addr_space_t as)
     428              : {
     429     31891910 :   return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
     430              : }
     431              : 
     432              : 
     433              : /* Return something equivalent to X but valid as a memory address for something
     434              :    of mode MODE in the named address space AS.  When X is not itself valid,
     435              :    this works by copying X or subexpressions of it into registers.  */
     436              : 
     437              : rtx
     438     29960445 : memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
     439              : {
     440     29960445 :   rtx oldx = x;
     441     29960445 :   scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
     442              : 
     443     29960445 :   x = convert_memory_address_addr_space (address_mode, x, as);
     444              : 
     445              :   /* By passing constant addresses through registers
     446              :      we get a chance to cse them.  */
     447     29960445 :   if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
     448      1254259 :     x = force_reg (address_mode, x);
     449              : 
     450              :   /* We get better cse by rejecting indirect addressing at this stage.
     451              :      Let the combiner create indirect addresses where appropriate.
     452              :      For now, generate the code so that the subexpressions useful to share
     453              :      are visible.  But not if cse won't be done!  */
     454              :   else
     455              :     {
     456     28706186 :       if (! cse_not_expected && !REG_P (x))
     457     13320500 :         x = break_out_memory_refs (x);
     458              : 
     459              :       /* At this point, any valid address is accepted.  */
     460     28706186 :       if (memory_address_addr_space_p (mode, x, as))
     461     28022810 :         goto done;
     462              : 
     463              :       /* If it was valid before but breaking out memory refs invalidated it,
     464              :          use it the old way.  */
     465       683376 :       if (memory_address_addr_space_p (mode, oldx, as))
     466              :         {
     467        11346 :           x = oldx;
     468        11346 :           goto done;
     469              :         }
     470              : 
     471              :       /* Perform machine-dependent transformations on X
     472              :          in certain cases.  This is not necessary since the code
     473              :          below can handle all possible cases, but machine-dependent
     474              :          transformations can make better code.  */
     475       672030 :       {
     476       672030 :         rtx orig_x = x;
     477       672030 :         x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
     478       672030 :         if (orig_x != x && memory_address_addr_space_p (mode, x, as))
     479       163601 :           goto done;
     480              :       }
     481              : 
     482              :       /* PLUS and MULT can appear in special ways
     483              :          as the result of attempts to make an address usable for indexing.
     484              :          Usually they are dealt with by calling force_operand, below.
     485              :          But a sum containing constant terms is special
     486              :          if removing them makes the sum a valid address:
     487              :          then we generate that address in a register
     488              :          and index off of it.  We do this because it often makes
     489              :          shorter code, and because the addresses thus generated
     490              :          in registers often become common subexpressions.  */
     491       508429 :       if (GET_CODE (x) == PLUS)
     492              :         {
     493       171899 :           rtx constant_term = const0_rtx;
     494       171899 :           rtx y = eliminate_constant_term (x, &constant_term);
     495       171899 :           if (constant_term == const0_rtx
     496       171899 :               || ! memory_address_addr_space_p (mode, y, as))
     497       171889 :             x = force_operand (x, NULL_RTX);
     498              :           else
     499              :             {
     500           10 :               y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
     501           10 :               if (! memory_address_addr_space_p (mode, y, as))
     502            8 :                 x = force_operand (x, NULL_RTX);
     503              :               else
     504              :                 x = y;
     505              :             }
     506              :         }
     507              : 
     508       336530 :       else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
     509            0 :         x = force_operand (x, NULL_RTX);
     510              : 
     511              :       /* If we have a register that's an invalid address,
     512              :          it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
     513       336530 :       else if (REG_P (x))
     514            0 :         x = copy_to_reg (x);
     515              : 
     516              :       /* Last resort: copy the value to a register, since
     517              :          the register is a valid address.  */
     518              :       else
     519       336530 :         x = force_reg (address_mode, x);
     520              :     }
     521              : 
     522     29960445 :  done:
     523              : 
     524     29960445 :   gcc_assert (memory_address_addr_space_p (mode, x, as));
     525              :   /* If we didn't change the address, we are done.  Otherwise, mark
     526              :      a reg as a pointer if we have REG or REG + CONST_INT.  */
     527     29960445 :   if (oldx == x)
     528              :     return x;
     529      2399110 :   else if (REG_P (x))
     530      2142104 :     mark_reg_pointer (x, BITS_PER_UNIT);
     531       257006 :   else if (GET_CODE (x) == PLUS
     532       257003 :            && REG_P (XEXP (x, 0))
     533       121418 :            && CONST_INT_P (XEXP (x, 1)))
     534          374 :     mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
     535              : 
     536              :   /* OLDX may have been the address on a temporary.  Update the address
     537              :      to indicate that X is now used.  */
     538      2399110 :   update_temp_slot_address (oldx, x);
     539              : 
     540      2399110 :   return x;
     541              : }
     542              : 
     543              : /* Convert a mem ref into one with a valid memory address.
     544              :    Pass through anything else unchanged.  */
     545              : 
     546              : rtx
     547      5959117 : validize_mem (rtx ref)
     548              : {
     549      5959117 :   if (!MEM_P (ref))
     550              :     return ref;
     551      3458177 :   ref = use_anchored_address (ref);
     552      6916354 :   if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
     553      3458177 :                                    MEM_ADDR_SPACE (ref)))
     554              :     return ref;
     555              : 
     556              :   /* Don't alter REF itself, since that is probably a stack slot.  */
     557        25203 :   return replace_equiv_address (ref, XEXP (ref, 0));
     558              : }
     559              : 
     560              : /* If X is a memory reference to a member of an object block, try rewriting
     561              :    it to use an anchor instead.  Return the new memory reference on success
     562              :    and the old one on failure.  */
     563              : 
     564              : rtx
     565     24157220 : use_anchored_address (rtx x)
     566              : {
     567     24157220 :   rtx base;
     568     24157220 :   HOST_WIDE_INT offset;
     569     24157220 :   machine_mode mode;
     570              : 
     571     24157220 :   if (!flag_section_anchors)
     572              :     return x;
     573              : 
     574            0 :   if (!MEM_P (x))
     575              :     return x;
     576              : 
     577              :   /* Split the address into a base and offset.  */
     578            0 :   base = XEXP (x, 0);
     579            0 :   offset = 0;
     580            0 :   if (GET_CODE (base) == CONST
     581            0 :       && GET_CODE (XEXP (base, 0)) == PLUS
     582            0 :       && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
     583              :     {
     584            0 :       offset += INTVAL (XEXP (XEXP (base, 0), 1));
     585            0 :       base = XEXP (XEXP (base, 0), 0);
     586              :     }
     587              : 
     588              :   /* Check whether BASE is suitable for anchors.  */
     589            0 :   if (GET_CODE (base) != SYMBOL_REF
     590            0 :       || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
     591            0 :       || SYMBOL_REF_ANCHOR_P (base)
     592            0 :       || SYMBOL_REF_BLOCK (base) == NULL
     593            0 :       || !targetm.use_anchors_for_symbol_p (base))
     594            0 :     return x;
     595              : 
     596              :   /* Decide where BASE is going to be.  */
     597            0 :   place_block_symbol (base);
     598              : 
     599              :   /* Get the anchor we need to use.  */
     600            0 :   offset += SYMBOL_REF_BLOCK_OFFSET (base);
     601            0 :   base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
     602            0 :                              SYMBOL_REF_TLS_MODEL (base));
     603              : 
     604              :   /* Work out the offset from the anchor.  */
     605            0 :   offset -= SYMBOL_REF_BLOCK_OFFSET (base);
     606              : 
     607              :   /* If we're going to run a CSE pass, force the anchor into a register.
     608              :      We will then be able to reuse registers for several accesses, if the
     609              :      target costs say that that's worthwhile.  */
     610            0 :   mode = GET_MODE (base);
     611            0 :   if (!cse_not_expected)
     612            0 :     base = force_reg (mode, base);
     613              : 
     614            0 :   return replace_equiv_address (x, plus_constant (mode, base, offset));
     615              : }
     616              : 
     617              : /* Copy the value or contents of X to a new temp reg and return that reg.  */
     618              : 
     619              : rtx
     620       745447 : copy_to_reg (rtx x)
     621              : {
     622       745447 :   rtx temp = gen_reg_rtx (GET_MODE (x));
     623              : 
     624              :   /* If not an operand, must be an address with PLUS and MULT so
     625              :      do the computation.  */
     626       745447 :   if (! general_operand (x, VOIDmode))
     627           54 :     x = force_operand (x, temp);
     628              : 
     629       745447 :   if (x != temp)
     630       745437 :     emit_move_insn (temp, x);
     631              : 
     632       745447 :   return temp;
     633              : }
     634              : 
     635              : /* Like copy_to_reg but always give the new register mode Pmode
     636              :    in case X is a constant.  */
     637              : 
     638              : rtx
     639       466746 : copy_addr_to_reg (rtx x)
     640              : {
     641       606173 :   return copy_to_mode_reg (Pmode, x);
     642              : }
     643              : 
     644              : /* Like copy_to_reg but always give the new register mode MODE
     645              :    in case X is a constant.  */
     646              : 
     647              : rtx
     648      3329311 : copy_to_mode_reg (machine_mode mode, rtx x)
     649              : {
     650      3329311 :   rtx temp = gen_reg_rtx (mode);
     651              : 
     652              :   /* If not an operand, must be an address with PLUS and MULT so
     653              :      do the computation.  */
     654      3329311 :   if (! general_operand (x, VOIDmode))
     655       599744 :     x = force_operand (x, temp);
     656              : 
     657      3329311 :   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
     658      3329311 :   if (x != temp)
     659      2975903 :     emit_move_insn (temp, x);
     660      3329311 :   return temp;
     661              : }
     662              : 
     663              : /* Load X into a register if it is not already one.
     664              :    Use mode MODE for the register.
     665              :    X should be valid for mode MODE, but it may be a constant which
     666              :    is valid for all integer modes; that's why caller must specify MODE.
     667              : 
     668              :    The caller must not alter the value in the register we return,
     669              :    since we mark it as a "constant" register.  */
     670              : 
     671              : rtx
     672      8467803 : force_reg (machine_mode mode, rtx x)
     673              : {
     674      8467803 :   rtx temp, set;
     675      8467803 :   rtx_insn *insn;
     676              : 
     677      8467803 :   if (REG_P (x))
     678              :     return x;
     679              : 
     680      7195596 :   if (general_operand (x, mode))
     681              :     {
     682      6974705 :       temp = gen_reg_rtx (mode);
     683      6974705 :       insn = emit_move_insn (temp, x);
     684              :     }
     685              :   else
     686              :     {
     687       220891 :       temp = force_operand (x, NULL_RTX);
     688       220891 :       if (REG_P (temp))
     689         2884 :         insn = get_last_insn ();
     690              :       else
     691              :         {
     692       218007 :           rtx temp2 = gen_reg_rtx (mode);
     693       218007 :           insn = emit_move_insn (temp2, temp);
     694       218007 :           temp = temp2;
     695              :         }
     696              :     }
     697              : 
     698              :   /* Let optimizers know that TEMP's value never changes
     699              :      and that X can be substituted for it.  Don't get confused
     700              :      if INSN set something else (such as a SUBREG of TEMP).  */
     701      7195596 :   if (CONSTANT_P (x)
     702      2718934 :       && (set = single_set (insn)) != 0
     703      2718934 :       && SET_DEST (set) == temp
     704      9912745 :       && ! rtx_equal_p (x, SET_SRC (set)))
     705       605589 :     set_unique_reg_note (insn, REG_EQUAL, x);
     706              : 
     707              :   /* Let optimizers know that TEMP is a pointer, and if so, the
     708              :      known alignment of that pointer.  */
     709      7195596 :   {
     710      7195596 :     unsigned align = 0;
     711      7195596 :     if (GET_CODE (x) == SYMBOL_REF)
     712              :       {
     713      1441426 :         align = BITS_PER_UNIT;
     714      1441426 :         if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
     715      1438224 :           align = DECL_ALIGN (SYMBOL_REF_DECL (x));
     716              :       }
     717      5754170 :     else if (GET_CODE (x) == LABEL_REF)
     718              :       align = BITS_PER_UNIT;
     719      5745104 :     else if (GET_CODE (x) == CONST
     720       122568 :              && GET_CODE (XEXP (x, 0)) == PLUS
     721       111133 :              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
     722       108472 :              && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
     723              :       {
     724       108472 :         rtx s = XEXP (XEXP (x, 0), 0);
     725       108472 :         rtx c = XEXP (XEXP (x, 0), 1);
     726       108472 :         unsigned sa, ca;
     727              : 
     728       108472 :         sa = BITS_PER_UNIT;
     729       108472 :         if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
     730       108472 :           sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
     731              : 
     732       108472 :         if (INTVAL (c) == 0)
     733              :           align = sa;
     734              :         else
     735              :           {
     736       108472 :             ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
     737       108472 :             align = MIN (sa, ca);
     738              :           }
     739              :       }
     740              : 
     741      7183328 :     if (align || (MEM_P (x) && MEM_POINTER (x)))
     742      2507532 :       mark_reg_pointer (temp, align);
     743              :   }
     744              : 
     745              :   return temp;
     746              : }
     747              : 
     748              : /* Like simplify_gen_subreg, but force OP into a new register if the
     749              :    subreg cannot be formed directly.  */
     750              : 
     751              : rtx
     752       819397 : force_subreg (machine_mode outermode, rtx op,
     753              :               machine_mode innermode, poly_uint64 byte)
     754              : {
     755       819397 :   rtx x = simplify_gen_subreg (outermode, op, innermode, byte);
     756       819397 :   if (x)
     757              :     return x;
     758              : 
     759         6831 :   auto *start = get_last_insn ();
     760         6831 :   op = copy_to_mode_reg (innermode, op);
     761         6831 :   rtx res = simplify_gen_subreg (outermode, op, innermode, byte);
     762         6831 :   if (!res)
     763         6819 :     delete_insns_since (start);
     764              :   return res;
     765              : }
     766              : 
     767              : /* Try to return an rvalue expression for the OUTERMODE lowpart of OP,
     768              :    which has mode INNERMODE.  Allow OP to be forced into a new register
     769              :    if necessary.
     770              : 
     771              :    Return null on failure.  */
     772              : 
     773              : rtx
     774         5217 : force_lowpart_subreg (machine_mode outermode, rtx op,
     775              :                       machine_mode innermode)
     776              : {
     777         5217 :   auto byte = subreg_lowpart_offset (outermode, innermode);
     778         5217 :   return force_subreg (outermode, op, innermode, byte);
     779              : }
     780              : 
     781              : /* Try to return an rvalue expression for the OUTERMODE highpart of OP,
     782              :    which has mode INNERMODE.  Allow OP to be forced into a new register
     783              :    if necessary.
     784              : 
     785              :    Return null on failure.  */
     786              : 
     787              : rtx
     788           91 : force_highpart_subreg (machine_mode outermode, rtx op,
     789              :                        machine_mode innermode)
     790              : {
     791           91 :   auto byte = subreg_highpart_offset (outermode, innermode);
     792           91 :   return force_subreg (outermode, op, innermode, byte);
     793              : }
     794              : 
     795              : /* If X is a memory ref, copy its contents to a new temp reg and return
     796              :    that reg.  Otherwise, return X.  */
     797              : 
     798              : rtx
     799      1158628 : force_not_mem (rtx x)
     800              : {
     801      1158628 :   rtx temp;
     802              : 
     803      1158628 :   if (!MEM_P (x) || GET_MODE (x) == BLKmode)
     804              :     return x;
     805              : 
     806        10512 :   temp = gen_reg_rtx (GET_MODE (x));
     807              : 
     808        10512 :   if (MEM_POINTER (x))
     809         1260 :     REG_POINTER (temp) = 1;
     810              : 
     811        10512 :   emit_move_insn (temp, x);
     812        10512 :   return temp;
     813              : }
     814              : 
     815              : /* Copy X to TARGET (if it's nonzero and a reg)
     816              :    or to a new temp reg and return that reg.
     817              :    MODE is the mode to use for X in case it is a constant.  */
     818              : 
     819              : rtx
     820       177353 : copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
     821              : {
     822       177353 :   rtx temp;
     823              : 
     824       177353 :   if (target && REG_P (target))
     825              :     temp = target;
     826              :   else
     827       177334 :     temp = gen_reg_rtx (mode);
     828              : 
     829       177353 :   emit_move_insn (temp, x);
     830       177353 :   return temp;
     831              : }
     832              : 
     833              : /* Return the mode to use to pass or return a scalar of TYPE and MODE.
     834              :    PUNSIGNEDP points to the signedness of the type and may be adjusted
     835              :    to show what signedness to use on extension operations.
     836              : 
     837              :    FOR_RETURN is nonzero if the caller is promoting the return value
     838              :    of FNDECL, else it is for promoting args.  */
     839              : 
     840              : machine_mode
     841     35221496 : promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
     842              :                        const_tree funtype, int for_return)
     843              : {
     844              :   /* Called without a type node for a libcall.  */
     845     35221496 :   if (type == NULL_TREE)
     846              :     {
     847       204486 :       if (INTEGRAL_MODE_P (mode))
     848        33838 :         return targetm.calls.promote_function_mode (NULL_TREE, mode,
     849              :                                                     punsignedp, funtype,
     850        33838 :                                                     for_return);
     851              :       else
     852              :         return mode;
     853              :     }
     854              : 
     855     35017010 :   switch (TREE_CODE (type))
     856              :     {
     857        42034 :     case BITINT_TYPE:
     858        42034 :       if (TYPE_MODE (type) == BLKmode)
     859              :         return mode;
     860              : 
     861        25249 :       struct bitint_info info;
     862        25249 :       bool ok;
     863        25249 :       ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
     864        25249 :       gcc_assert (ok);
     865              : 
     866        25249 :       if (!info.extended)
     867              :         return mode;
     868              :       /* FALLTHRU */
     869     31921646 :     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
     870     31921646 :     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
     871     31921646 :     case POINTER_TYPE:   case REFERENCE_TYPE:
     872     31921646 :       return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
     873     31921646 :                                                   for_return);
     874              : 
     875              :     default:
     876              :       return mode;
     877              :     }
     878              : }
     879              : /* Return the mode to use to store a scalar of TYPE and MODE.
     880              :    PUNSIGNEDP points to the signedness of the type and may be adjusted
     881              :    to show what signedness to use on extension operations.  */
     882              : 
     883              : machine_mode
     884     76245126 : promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
     885              :               int *punsignedp ATTRIBUTE_UNUSED)
     886              : {
     887              : #ifdef PROMOTE_MODE
     888     76245126 :   enum tree_code code;
     889     76245126 :   int unsignedp;
     890     76245126 :   scalar_mode smode;
     891              : #endif
     892              : 
     893              :   /* For libcalls this is invoked without TYPE from the backends
     894              :      TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     895              :      case.  */
     896     76245126 :   if (type == NULL_TREE)
     897              :     return mode;
     898              : 
     899              :   /* FIXME: this is the same logic that was there until GCC 4.4, but we
     900              :      probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     901              :      is not defined.  The affected targets are M32C, S390, SPARC.  */
     902              : #ifdef PROMOTE_MODE
     903     76245126 :   code = TREE_CODE (type);
     904     76245126 :   unsignedp = *punsignedp;
     905              : 
     906     76245126 :   switch (code)
     907              :     {
     908        33206 :     case BITINT_TYPE:
     909        33206 :       if (TYPE_MODE (type) == BLKmode)
     910              :         return mode;
     911              : 
     912        33194 :       struct bitint_info info;
     913        33194 :       bool ok;
     914        33194 :       ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
     915        33194 :       gcc_assert (ok);
     916              : 
     917        33194 :       if (!info.extended)
     918              :         return mode;
     919              :       /* FALLTHRU */
     920     55017836 :     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
     921     55017836 :     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
     922              :       /* Values of these types always have scalar mode.  */
     923     55017836 :       smode = as_a <scalar_mode> (mode);
     924     55017836 :       PROMOTE_MODE (smode, unsignedp, type);
     925     55017836 :       *punsignedp = unsignedp;
     926     55017836 :       return smode;
     927              : 
     928              : #ifdef POINTERS_EXTEND_UNSIGNED
     929     16996677 :     case REFERENCE_TYPE:
     930     16996677 :     case POINTER_TYPE:
     931     16996677 :       *punsignedp = POINTERS_EXTEND_UNSIGNED;
     932     16996677 :       return targetm.addr_space.address_mode
     933     16996677 :                (TYPE_ADDR_SPACE (TREE_TYPE (type)));
     934              : #endif
     935              : 
     936              :     default:
     937              :       return mode;
     938              :     }
     939              : #else
     940              :   return mode;
     941              : #endif
     942              : }
     943              : 
     944              : 
     945              : /* Use one of promote_mode or promote_function_mode to find the promoted
     946              :    mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
     947              :    of DECL after promotion.  */
     948              : 
     949              : machine_mode
     950      8839398 : promote_decl_mode (const_tree decl, int *punsignedp)
     951              : {
     952      8839398 :   tree type = TREE_TYPE (decl);
     953      8839398 :   int unsignedp = TYPE_UNSIGNED (type);
     954      8839398 :   machine_mode mode = DECL_MODE (decl);
     955      8839398 :   machine_mode pmode;
     956              : 
     957      8839398 :   if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
     958      7129862 :     pmode = promote_function_mode (type, mode, &unsignedp,
     959      3564931 :                                    TREE_TYPE (current_function_decl), 1);
     960      5274467 :   else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
     961      9429506 :     pmode = promote_function_mode (type, mode, &unsignedp,
     962      4714753 :                                    TREE_TYPE (current_function_decl), 2);
     963              :   else
     964       559714 :     pmode = promote_mode (type, mode, &unsignedp);
     965              : 
     966      8839398 :   if (punsignedp)
     967       962644 :     *punsignedp = unsignedp;
     968      8839398 :   return pmode;
     969              : }
     970              : 
     971              : /* Return the promoted mode for name.  If it is a named SSA_NAME, it
     972              :    is the same as promote_decl_mode.  Otherwise, it is the promoted
     973              :    mode of a temp decl of same type as the SSA_NAME, if we had created
     974              :    one.  */
     975              : 
     976              : machine_mode
     977     80273597 : promote_ssa_mode (const_tree name, int *punsignedp)
     978              : {
     979     80273597 :   gcc_assert (TREE_CODE (name) == SSA_NAME);
     980              : 
     981              :   /* Partitions holding parms and results must be promoted as expected
     982              :      by function.cc.  */
     983     80273597 :   if (SSA_NAME_VAR (name)
     984     21654340 :       && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
     985     16990208 :           || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
     986              :     {
     987      8260366 :       machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
     988      8260366 :       if (mode != BLKmode)
     989              :         return mode;
     990              :     }
     991              : 
     992     72013333 :   tree type = TREE_TYPE (name);
     993     72013333 :   int unsignedp = TYPE_UNSIGNED (type);
     994     72013333 :   machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
     995     72013333 :   if (punsignedp)
     996      2148712 :     *punsignedp = unsignedp;
     997              : 
     998              :   return pmode;
     999              : }
    1000              : 
    1001              : 
    1002              : 
    1003              : /* Controls the behavior of {anti_,}adjust_stack.  */
    1004              : static bool suppress_reg_args_size;
    1005              : 
    1006              : /* A helper for adjust_stack and anti_adjust_stack.  */
    1007              : 
    1008              : static void
    1009      1925189 : adjust_stack_1 (rtx adjust, bool anti_p)
    1010              : {
    1011      1925189 :   rtx temp;
    1012      1925189 :   rtx_insn *insn;
    1013              : 
    1014              :   /* Hereafter anti_p means subtract_p.  */
    1015      1925189 :   if (!STACK_GROWS_DOWNWARD)
    1016              :     anti_p = !anti_p;
    1017              : 
    1018      4061161 :   temp = expand_binop (Pmode,
    1019              :                        anti_p ? sub_optab : add_optab,
    1020              :                        stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
    1021              :                        OPTAB_LIB_WIDEN);
    1022              : 
    1023      1925189 :   if (temp != stack_pointer_rtx)
    1024            0 :     insn = emit_move_insn (stack_pointer_rtx, temp);
    1025              :   else
    1026              :     {
    1027      1925189 :       insn = get_last_insn ();
    1028      1925189 :       temp = single_set (insn);
    1029      1925189 :       gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    1030              :     }
    1031              : 
    1032      1925189 :   if (!suppress_reg_args_size)
    1033      1896992 :     add_args_size_note (insn, stack_pointer_delta);
    1034      1925189 : }
    1035              : 
    1036              : /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
    1037              :    This pops when ADJUST is positive.  ADJUST need not be constant.  */
    1038              : 
    1039              : void
    1040       876834 : adjust_stack (rtx adjust)
    1041              : {
    1042       876834 :   if (adjust == const0_rtx)
    1043       876834 :     return;
    1044              : 
    1045              :   /* We expect all variable sized adjustments to be multiple of
    1046              :      PREFERRED_STACK_BOUNDARY.  */
    1047       876834 :   poly_int64 const_adjust;
    1048       876834 :   if (poly_int_rtx_p (adjust, &const_adjust))
    1049       876834 :     stack_pointer_delta -= const_adjust;
    1050              : 
    1051       876834 :   adjust_stack_1 (adjust, false);
    1052              : }
    1053              : 
    1054              : /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
    1055              :    This pushes when ADJUST is positive.  ADJUST need not be constant.  */
    1056              : 
    1057              : void
    1058      3257521 : anti_adjust_stack (rtx adjust)
    1059              : {
    1060      3257521 :   if (adjust == const0_rtx)
    1061      3257521 :     return;
    1062              : 
    1063              :   /* We expect all variable sized adjustments to be multiple of
    1064              :      PREFERRED_STACK_BOUNDARY.  */
    1065      1048355 :   poly_int64 const_adjust;
    1066      1048355 :   if (poly_int_rtx_p (adjust, &const_adjust))
    1067      1048355 :     stack_pointer_delta += const_adjust;
    1068              : 
    1069      1048355 :   adjust_stack_1 (adjust, true);
    1070              : }
    1071              : 
    1072              : /* Round the size of a block to be pushed up to the boundary required
    1073              :    by this machine.  SIZE is the desired size, which need not be constant.  */
    1074              : 
    1075              : static rtx
    1076        28154 : round_push (rtx size)
    1077              : {
    1078        28154 :   rtx align_rtx, alignm1_rtx;
    1079              : 
    1080        28154 :   if (!SUPPORTS_STACK_ALIGNMENT
    1081        28154 :       || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    1082              :     {
    1083            0 :       int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
    1084              : 
    1085            0 :       if (align == 1)
    1086              :         return size;
    1087              : 
    1088            0 :       if (CONST_INT_P (size))
    1089              :         {
    1090            0 :           HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
    1091              : 
    1092            0 :           if (INTVAL (size) != new_size)
    1093            0 :             size = GEN_INT (new_size);
    1094            0 :           return size;
    1095              :         }
    1096              : 
    1097            0 :       align_rtx = GEN_INT (align);
    1098            0 :       alignm1_rtx = GEN_INT (align - 1);
    1099              :     }
    1100              :   else
    1101              :     {
    1102              :       /* If crtl->preferred_stack_boundary might still grow, use
    1103              :          virtual_preferred_stack_boundary_rtx instead.  This will be
    1104              :          substituted by the right value in vregs pass and optimized
    1105              :          during combine.  */
    1106        28154 :       align_rtx = virtual_preferred_stack_boundary_rtx;
    1107        28834 :       alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
    1108              :                                    NULL_RTX);
    1109              :     }
    1110              : 
    1111              :   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
    1112              :      but we know it can't.  So add ourselves and then do
    1113              :      TRUNC_DIV_EXPR.  */
    1114        28834 :   size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
    1115              :                        NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1116        28834 :   size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
    1117              :                         NULL_RTX, 1);
    1118        28154 :   size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
    1119              : 
    1120        28154 :   return size;
    1121              : }
    1122              : 
    1123              : /* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
    1124              :    to a previously-created save area.  If no save area has been allocated,
    1125              :    this function will allocate one.  If a save area is specified, it
    1126              :    must be of the proper mode.  */
    1127              : 
    1128              : void
    1129         3698 : emit_stack_save (enum save_level save_level, rtx *psave)
    1130              : {
    1131         3698 :   rtx sa = *psave;
    1132              :   /* The default is that we use a move insn and save in a Pmode object.  */
    1133         3698 :   rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
    1134         3698 :   machine_mode mode = STACK_SAVEAREA_MODE (save_level);
    1135              : 
    1136              :   /* See if this machine has anything special to do for this kind of save.  */
    1137         3698 :   switch (save_level)
    1138              :     {
    1139         1979 :     case SAVE_BLOCK:
    1140         1979 :       if (targetm.have_save_stack_block ())
    1141            0 :         fcn = targetm.gen_save_stack_block;
    1142              :       break;
    1143            0 :     case SAVE_FUNCTION:
    1144            0 :       if (targetm.have_save_stack_function ())
    1145            0 :         fcn = targetm.gen_save_stack_function;
    1146              :       break;
    1147         1719 :     case SAVE_NONLOCAL:
    1148         1719 :       if (targetm.have_save_stack_nonlocal ())
    1149         1719 :         fcn = targetm.gen_save_stack_nonlocal;
    1150              :       break;
    1151              :     default:
    1152              :       break;
    1153              :     }
    1154              : 
    1155              :   /* If there is no save area and we have to allocate one, do so.  Otherwise
    1156              :      verify the save area is the proper mode.  */
    1157              : 
    1158         3698 :   if (sa == 0)
    1159              :     {
    1160         2462 :       if (mode != VOIDmode)
    1161              :         {
    1162         2462 :           if (save_level == SAVE_NONLOCAL)
    1163          966 :             *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
    1164              :           else
    1165         1979 :             *psave = sa = gen_reg_rtx (mode);
    1166              :         }
    1167              :     }
    1168              : 
    1169         2462 :   do_pending_stack_adjust ();
    1170         3698 :   if (sa != 0)
    1171         3698 :     sa = validize_mem (sa);
    1172         3698 :   emit_insn (fcn (sa, stack_pointer_rtx));
    1173         3698 : }
    1174              : 
    1175              : /* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
    1176              :    area made by emit_stack_save.  If it is zero, we have nothing to do.  */
    1177              : 
    1178              : void
    1179         3256 : emit_stack_restore (enum save_level save_level, rtx sa)
    1180              : {
    1181              :   /* The default is that we use a move insn.  */
    1182         3256 :   rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
    1183              : 
    1184              :   /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
    1185              :      STACK_POINTER and HARD_FRAME_POINTER.
    1186              :      If stack_realign_fp, the x86 backend emits a prologue that aligns only
    1187              :      STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
    1188              :      aligned variables, which is reflected in ix86_can_eliminate.
    1189              :      We normally still have the realigned STACK_POINTER that we can use.
    1190              :      But if there is a stack restore still present at reload, it can trigger
    1191              :      mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
    1192              :      FRAME_POINTER into a hard reg.
    1193              :      To prevent this situation, we force need_drap if we emit a stack
    1194              :      restore.  */
    1195         3256 :   if (SUPPORTS_STACK_ALIGNMENT)
    1196         3256 :     crtl->need_drap = true;
    1197              : 
    1198              :   /* See if this machine has anything special to do for this kind of save.  */
    1199         3256 :   switch (save_level)
    1200              :     {
    1201         1870 :     case SAVE_BLOCK:
    1202         1870 :       if (targetm.have_restore_stack_block ())
    1203            0 :         fcn = targetm.gen_restore_stack_block;
    1204              :       break;
    1205            0 :     case SAVE_FUNCTION:
    1206            0 :       if (targetm.have_restore_stack_function ())
    1207            0 :         fcn = targetm.gen_restore_stack_function;
    1208              :       break;
    1209         1386 :     case SAVE_NONLOCAL:
    1210         1386 :       if (targetm.have_restore_stack_nonlocal ())
    1211         1386 :         fcn = targetm.gen_restore_stack_nonlocal;
    1212              :       break;
    1213              :     default:
    1214              :       break;
    1215              :     }
    1216              : 
    1217         3256 :   if (sa != 0)
    1218              :     {
    1219         3256 :       sa = validize_mem (sa);
    1220              :       /* These clobbers prevent the scheduler from moving
    1221              :          references to variable arrays below the code
    1222              :          that deletes (pops) the arrays.  */
    1223         3256 :       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
    1224         3256 :       emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    1225              :     }
    1226              : 
    1227         3256 :   discard_pending_stack_adjust ();
    1228              : 
    1229         3256 :   emit_insn (fcn (stack_pointer_rtx, sa));
    1230         3256 : }
    1231              : 
    1232              : /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
    1233              :    function.  This should be called whenever we allocate or deallocate
    1234              :    dynamic stack space.  */
    1235              : 
    1236              : void
    1237          395 : update_nonlocal_goto_save_area (void)
    1238              : {
    1239          395 :   tree t_save;
    1240          395 :   rtx r_save;
    1241              : 
    1242              :   /* The nonlocal_goto_save_area object is an array of N pointers.  The
    1243              :      first one is used for the frame pointer save; the rest are sized by
    1244              :      STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
    1245              :      of the stack save area slots.  */
    1246          395 :   t_save = build4 (ARRAY_REF,
    1247          395 :                    TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
    1248          395 :                    cfun->nonlocal_goto_save_area,
    1249              :                    integer_one_node, NULL_TREE, NULL_TREE);
    1250          395 :   r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
    1251              : 
    1252          395 :   emit_stack_save (SAVE_NONLOCAL, &r_save);
    1253          395 : }
    1254              : 
    1255              : /* Record a new stack level for the current function.  This should be called
    1256              :    whenever we allocate or deallocate dynamic stack space.  */
    1257              : 
    1258              : void
    1259        29895 : record_new_stack_level (void)
    1260              : {
    1261              :   /* Record the new stack level for nonlocal gotos.  */
    1262        29895 :   if (cfun->nonlocal_goto_save_area)
    1263            2 :     update_nonlocal_goto_save_area ();
    1264              : 
    1265              :   /* Record the new stack level for SJLJ exceptions.  */
    1266        29895 :   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    1267            0 :     update_sjlj_context ();
    1268        29895 : }
    1269              : 
    1270              : /* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */
    1271              : 
    1272              : rtx
    1273        28154 : align_dynamic_address (rtx target, unsigned required_align)
    1274              : {
    1275        28154 :   if (required_align == BITS_PER_UNIT)
    1276              :     return target;
    1277              : 
    1278              :   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
    1279              :      but we know it can't.  So add ourselves and then do
    1280              :      TRUNC_DIV_EXPR.  */
    1281        27337 :   target = expand_binop (Pmode, add_optab, target,
    1282        26679 :                          gen_int_mode (required_align / BITS_PER_UNIT - 1,
    1283        26679 :                                        Pmode),
    1284              :                          NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1285        27337 :   target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
    1286        26679 :                           gen_int_mode (required_align / BITS_PER_UNIT,
    1287        26679 :                                         Pmode),
    1288              :                           NULL_RTX, 1);
    1289        27337 :   target = expand_mult (Pmode, target,
    1290        26679 :                         gen_int_mode (required_align / BITS_PER_UNIT,
    1291        26679 :                                       Pmode),
    1292              :                         NULL_RTX, 1);
    1293              : 
    1294        26679 :   return target;
    1295              : }
    1296              : 
    1297              : /* Return an rtx through *PSIZE, representing the size of an area of memory to
    1298              :    be dynamically pushed on the stack.
    1299              : 
    1300              :    *PSIZE is an rtx representing the size of the area.
    1301              : 
    1302              :    SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
    1303              :    parameter may be zero.  If so, a proper value will be extracted
    1304              :    from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
    1305              : 
    1306              :    REQUIRED_ALIGN is the alignment (in bits) required for the region
    1307              :    of memory.
    1308              : 
    1309              :    If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
    1310              :    the additional size returned.  */
    1311              : void
    1312        28154 : get_dynamic_stack_size (rtx *psize, unsigned size_align,
    1313              :                         unsigned required_align,
    1314              :                         HOST_WIDE_INT *pstack_usage_size)
    1315              : {
    1316        28154 :   rtx size = *psize;
    1317              : 
    1318              :   /* Ensure the size is in the proper mode.  */
    1319        28780 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    1320        12259 :     size = convert_to_mode (Pmode, size, 1);
    1321              : 
    1322        28154 :   if (CONST_INT_P (size))
    1323              :     {
    1324         4626 :       unsigned HOST_WIDE_INT lsb;
    1325              : 
    1326         4626 :       lsb = INTVAL (size);
    1327         4626 :       lsb &= -lsb;
    1328              : 
    1329              :       /* Watch out for overflow truncating to "unsigned".  */
    1330         4626 :       if (lsb > UINT_MAX / BITS_PER_UNIT)
    1331              :         size_align = 1u << (HOST_BITS_PER_INT - 1);
    1332              :       else
    1333         4626 :         size_align = (unsigned)lsb * BITS_PER_UNIT;
    1334              :     }
    1335        23528 :   else if (size_align < BITS_PER_UNIT)
    1336              :     size_align = BITS_PER_UNIT;
    1337              : 
    1338              :   /* We can't attempt to minimize alignment necessary, because we don't
    1339              :      know the final value of preferred_stack_boundary yet while executing
    1340              :      this code.  */
    1341        28154 :   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    1342         7316 :     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    1343              : 
    1344              :   /* We will need to ensure that the address we return is aligned to
    1345              :      REQUIRED_ALIGN.  At this point in the compilation, we don't always
    1346              :      know the final value of the STACK_DYNAMIC_OFFSET used in function.cc
    1347              :      (it might depend on the size of the outgoing parameter lists, for
    1348              :      example), so we must preventively align the value.  We leave space
    1349              :      in SIZE for the hole that might result from the alignment operation.  */
    1350              : 
    1351        28154 :   unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
    1352        28154 :   if (known_align == 0)
    1353            0 :     known_align = BITS_PER_UNIT;
    1354        28154 :   if (required_align > known_align)
    1355              :     {
    1356        17853 :       unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
    1357        18467 :       size = plus_constant (Pmode, size, extra);
    1358        17853 :       size = force_operand (size, NULL_RTX);
    1359        17853 :       if (size_align > known_align)
    1360              :         size_align = known_align;
    1361              : 
    1362        17853 :       if (flag_stack_usage_info && pstack_usage_size)
    1363            0 :         *pstack_usage_size += extra;
    1364              :     }
    1365              : 
    1366              :   /* Round the size to a multiple of the required stack alignment.
    1367              :      Since the stack is presumed to be rounded before this allocation,
    1368              :      this will maintain the required alignment.
    1369              : 
    1370              :      If the stack grows downward, we could save an insn by subtracting
    1371              :      SIZE from the stack pointer and then aligning the stack pointer.
    1372              :      The problem with this is that the stack pointer may be unaligned
    1373              :      between the execution of the subtraction and alignment insns and
    1374              :      some machines do not allow this.  Even on those that do, some
    1375              :      signal handlers malfunction if a signal should occur between those
    1376              :      insns.  Since this is an extremely rare event, we have no reliable
    1377              :      way of knowing which systems have this problem.  So we avoid even
    1378              :      momentarily mis-aligning the stack.  */
    1379        28154 :   if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    1380              :     {
    1381        28154 :       size = round_push (size);
    1382              : 
    1383        28154 :       if (flag_stack_usage_info && pstack_usage_size)
    1384              :         {
    1385            1 :           int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
    1386            1 :           *pstack_usage_size =
    1387            1 :             (*pstack_usage_size + align - 1) / align * align;
    1388              :         }
    1389              :     }
    1390              : 
    1391        28154 :   *psize = size;
    1392        28154 : }
    1393              : 
    1394              : /* Return the number of bytes to "protect" on the stack for -fstack-check.
    1395              : 
    1396              :    "protect" in the context of -fstack-check means how many bytes we need
    1397              :    to always ensure are available on the stack; as a consequence, this is
    1398              :    also how many bytes are first skipped when probing the stack.
    1399              : 
    1400              :    On some targets we want to reuse the -fstack-check prologue support
    1401              :    to give a degree of protection against stack clashing style attacks.
    1402              : 
    1403              :    In that scenario we do not want to skip bytes before probing as that
    1404              :    would render the stack clash protections useless.
    1405              : 
    1406              :    So we never use STACK_CHECK_PROTECT directly.  Instead we indirectly
    1407              :    use it through this helper, which allows to provide different values
    1408              :    for -fstack-check and -fstack-clash-protection.  */
    1409              : 
    1410              : HOST_WIDE_INT
    1411            0 : get_stack_check_protect (void)
    1412              : {
    1413            0 :   if (flag_stack_clash_protection)
    1414              :     return 0;
    1415              : 
    1416            0 :  return STACK_CHECK_PROTECT;
    1417              : }
    1418              : 
    1419              : /* Return an rtx representing the address of an area of memory dynamically
    1420              :    pushed on the stack.
    1421              : 
    1422              :    Any required stack pointer alignment is preserved.
    1423              : 
    1424              :    SIZE is an rtx representing the size of the area.
    1425              : 
    1426              :    SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
    1427              :    parameter may be zero.  If so, a proper value will be extracted
    1428              :    from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
    1429              : 
    1430              :    REQUIRED_ALIGN is the alignment (in bits) required for the region
    1431              :    of memory.
    1432              : 
    1433              :    MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
    1434              :    no such upper bound is known.
    1435              : 
    1436              :    If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
    1437              :    stack space allocated by the generated code cannot be added with itself
    1438              :    in the course of the execution of the function.  It is always safe to
    1439              :    pass FALSE here and the following criterion is sufficient in order to
    1440              :    pass TRUE: every path in the CFG that starts at the allocation point and
    1441              :    loops to it executes the associated deallocation code.  */
    1442              : 
    1443              : rtx
    1444        28448 : allocate_dynamic_stack_space (rtx size, unsigned size_align,
    1445              :                               unsigned required_align,
    1446              :                               HOST_WIDE_INT max_size,
    1447              :                               bool cannot_accumulate)
    1448              : {
    1449        28448 :   HOST_WIDE_INT stack_usage_size = -1;
    1450        28448 :   rtx_code_label *final_label;
    1451        28448 :   rtx final_target, target;
    1452        28448 :   rtx addr = (virtuals_instantiated
    1453        28448 :               ? plus_constant (Pmode, stack_pointer_rtx,
    1454              :                                get_stack_dynamic_offset ())
    1455        28448 :               : virtual_stack_dynamic_rtx);
    1456              : 
    1457              :   /* If we're asking for zero bytes, it doesn't matter what we point
    1458              :      to since we can't dereference it.  But return a reasonable
    1459              :      address anyway.  */
    1460        28448 :   if (size == const0_rtx)
    1461              :     return addr;
    1462              : 
    1463              :   /* Otherwise, show we're calling alloca or equivalent.  */
    1464        28154 :   cfun->calls_alloca = 1;
    1465              : 
    1466              :   /* If stack usage info is requested, look into the size we are passed.
    1467              :      We need to do so this early to avoid the obfuscation that may be
    1468              :      introduced later by the various alignment operations.  */
    1469        28154 :   if (flag_stack_usage_info)
    1470              :     {
    1471            1 :       if (CONST_INT_P (size))
    1472            0 :         stack_usage_size = INTVAL (size);
    1473            1 :       else if (REG_P (size))
    1474              :         {
    1475              :           /* Look into the last emitted insn and see if we can deduce
    1476              :              something for the register.  */
    1477            1 :           rtx_insn *insn;
    1478            1 :           rtx set, note;
    1479            1 :           insn = get_last_insn ();
    1480            1 :           if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
    1481              :             {
    1482            1 :               if (CONST_INT_P (SET_SRC (set)))
    1483            0 :                 stack_usage_size = INTVAL (SET_SRC (set));
    1484            1 :               else if ((note = find_reg_equal_equiv_note (insn))
    1485            1 :                        && CONST_INT_P (XEXP (note, 0)))
    1486            0 :                 stack_usage_size = INTVAL (XEXP (note, 0));
    1487              :             }
    1488              :         }
    1489              : 
    1490              :       /* If the size is not constant, try the maximum size.  */
    1491            1 :       if (stack_usage_size < 0)
    1492            1 :         stack_usage_size = max_size;
    1493              : 
    1494              :       /* If the size is still not constant, we can't say anything.  */
    1495            1 :       if (stack_usage_size < 0)
    1496              :         {
    1497            1 :           current_function_has_unbounded_dynamic_stack_size = 1;
    1498            1 :           stack_usage_size = 0;
    1499              :         }
    1500              :     }
    1501              : 
    1502        28154 :   get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);
    1503              : 
    1504        28834 :   target = gen_reg_rtx (Pmode);
    1505              : 
    1506              :   /* The size is supposed to be fully adjusted at this point so record it
    1507              :      if stack usage info is requested.  */
    1508        28154 :   if (flag_stack_usage_info)
    1509              :     {
    1510            1 :       current_function_dynamic_stack_size += stack_usage_size;
    1511              : 
    1512              :       /* ??? This is gross but the only safe stance in the absence
    1513              :          of stack usage oriented flow analysis.  */
    1514            1 :       if (!cannot_accumulate)
    1515            0 :         current_function_has_unbounded_dynamic_stack_size = 1;
    1516              :     }
    1517              : 
    1518        28154 :   do_pending_stack_adjust ();
    1519              : 
    1520        28154 :   final_label = NULL;
    1521        28154 :   final_target = NULL_RTX;
    1522              : 
    1523              :   /* If we are splitting the stack, we need to ask the backend whether
    1524              :      there is enough room on the current stack.  If there isn't, or if
    1525              :      the backend doesn't know how to tell is, then we need to call a
    1526              :      function to allocate memory in some other way.  This memory will
    1527              :      be released when we release the current stack segment.  The
    1528              :      effect is that stack allocation becomes less efficient, but at
    1529              :      least it doesn't cause a stack overflow.  */
    1530        28154 :   if (flag_split_stack)
    1531              :     {
    1532           10 :       rtx_code_label *available_label;
    1533           10 :       rtx ask, space, func;
    1534              : 
    1535           10 :       available_label = NULL;
    1536              : 
    1537           10 :       if (targetm.have_split_stack_space_check ())
    1538              :         {
    1539           10 :           available_label = gen_label_rtx ();
    1540              : 
    1541              :           /* This instruction will branch to AVAILABLE_LABEL if there
    1542              :              are SIZE bytes available on the stack.  */
    1543           10 :           emit_insn (targetm.gen_split_stack_space_check
    1544           10 :                      (size, available_label));
    1545              :         }
    1546              : 
    1547              :       /* The __morestack_allocate_stack_space function will allocate
    1548              :          memory using malloc.  If the alignment of the memory returned
    1549              :          by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
    1550              :          make sure we allocate enough space.  */
    1551           10 :       if (MALLOC_ABI_ALIGNMENT >= required_align)
    1552            6 :         ask = size;
    1553              :       else
    1554            4 :         ask = expand_binop (Pmode, add_optab, size,
    1555            4 :                             gen_int_mode (required_align / BITS_PER_UNIT - 1,
    1556            4 :                                           Pmode),
    1557              :                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1558              : 
    1559           10 :       func = init_one_libfunc ("__morestack_allocate_stack_space");
    1560              : 
    1561           10 :       space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
    1562           10 :                                        ask, Pmode);
    1563              : 
    1564           10 :       if (available_label == NULL_RTX)
    1565              :         return space;
    1566              : 
    1567           10 :       final_target = gen_reg_rtx (Pmode);
    1568              : 
    1569           10 :       emit_move_insn (final_target, space);
    1570              : 
    1571           10 :       final_label = gen_label_rtx ();
    1572           10 :       emit_jump (final_label);
    1573              : 
    1574           10 :       emit_label (available_label);
    1575              :     }
    1576              : 
    1577              :  /* We ought to be called always on the toplevel and stack ought to be aligned
    1578              :     properly.  */
    1579        56308 :   gcc_assert (multiple_p (stack_pointer_delta,
    1580              :                           PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
    1581              : 
    1582              :   /* If needed, check that we have the required amount of stack.  Take into
    1583              :      account what has already been checked.  */
    1584        28154 :   if (STACK_CHECK_MOVING_SP)
    1585              :     ;
    1586              :   else if (flag_stack_check == GENERIC_STACK_CHECK)
    1587              :     probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
    1588              :                        size);
    1589              :   else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    1590              :     probe_stack_range (get_stack_check_protect (), size);
    1591              : 
    1592              :   /* Don't let anti_adjust_stack emit notes.  */
    1593        28154 :   suppress_reg_args_size = true;
    1594              : 
    1595              :   /* Perform the required allocation from the stack.  Some systems do
    1596              :      this differently than simply incrementing/decrementing from the
    1597              :      stack pointer, such as acquiring the space by calling malloc().  */
    1598        28154 :   if (targetm.have_allocate_stack ())
    1599              :     {
    1600            0 :       class expand_operand ops[2];
    1601              :       /* We don't have to check against the predicate for operand 0 since
    1602              :          TARGET is known to be a pseudo of the proper mode, which must
    1603              :          be valid for the operand.  */
    1604            0 :       create_fixed_operand (&ops[0], target);
    1605            0 :       create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
    1606            0 :       expand_insn (targetm.code_for_allocate_stack, 2, ops);
    1607              :     }
    1608              :   else
    1609              :     {
    1610        28154 :       poly_int64 saved_stack_pointer_delta;
    1611              : 
    1612        28154 :       if (!STACK_GROWS_DOWNWARD)
    1613              :         emit_move_insn (target, force_operand (addr, target));
    1614              : 
    1615              :       /* Check stack bounds if necessary.  */
    1616        28154 :       if (crtl->limit_stack)
    1617              :         {
    1618            0 :           rtx available;
    1619            0 :           rtx_code_label *space_available = gen_label_rtx ();
    1620            0 :           if (STACK_GROWS_DOWNWARD)
    1621            0 :             available = expand_binop (Pmode, sub_optab,
    1622              :                                       stack_pointer_rtx, stack_limit_rtx,
    1623              :                                       NULL_RTX, 1, OPTAB_WIDEN);
    1624              :           else
    1625              :             available = expand_binop (Pmode, sub_optab,
    1626              :                                       stack_limit_rtx, stack_pointer_rtx,
    1627              :                                       NULL_RTX, 1, OPTAB_WIDEN);
    1628              : 
    1629            0 :           emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
    1630              :                                    space_available);
    1631            0 :           if (targetm.have_trap ())
    1632            0 :             emit_insn (targetm.gen_trap ());
    1633              :           else
    1634            0 :             error ("stack limits not supported on this target");
    1635            0 :           emit_barrier ();
    1636            0 :           emit_label (space_available);
    1637              :         }
    1638              : 
    1639        28154 :       saved_stack_pointer_delta = stack_pointer_delta;
    1640              : 
    1641              :       /* If stack checking or stack clash protection is requested,
    1642              :          then probe the stack while allocating space from it.  */
    1643        28154 :       if (flag_stack_check && STACK_CHECK_MOVING_SP)
    1644            9 :         anti_adjust_stack_and_probe (size, false);
    1645        28145 :       else if (flag_stack_clash_protection)
    1646           16 :         anti_adjust_stack_and_probe_stack_clash (size);
    1647              :       else
    1648        28129 :         anti_adjust_stack (size);
    1649              : 
    1650              :       /* Even if size is constant, don't modify stack_pointer_delta.
    1651              :          The constant size alloca should preserve
    1652              :          crtl->preferred_stack_boundary alignment.  */
    1653        28154 :       stack_pointer_delta = saved_stack_pointer_delta;
    1654              : 
    1655        28154 :       if (STACK_GROWS_DOWNWARD)
    1656        28154 :         emit_move_insn (target, force_operand (addr, target));
    1657              :     }
    1658              : 
    1659        28154 :   suppress_reg_args_size = false;
    1660              : 
    1661              :   /* Finish up the split stack handling.  */
    1662        28154 :   if (final_label != NULL_RTX)
    1663              :     {
    1664           10 :       gcc_assert (flag_split_stack);
    1665           10 :       emit_move_insn (final_target, target);
    1666           10 :       emit_label (final_label);
    1667           10 :       target = final_target;
    1668              :     }
    1669              : 
    1670        28154 :   target = align_dynamic_address (target, required_align);
    1671              : 
    1672              :   /* Now that we've committed to a return value, mark its alignment.  */
    1673        28154 :   mark_reg_pointer (target, required_align);
    1674              : 
    1675              :   /* Record the new stack level.  */
    1676        28154 :   record_new_stack_level ();
    1677              : 
    1678        28154 :   return target;
    1679              : }
    1680              : 
    1681              : /* Return an rtx representing the address of an area of memory already
    1682              :    statically pushed onto the stack in the virtual stack vars area.  (It is
    1683              :    assumed that the area is allocated in the function prologue.)
    1684              : 
    1685              :    Any required stack pointer alignment is preserved.
    1686              : 
    1687              :    OFFSET is the offset of the area into the virtual stack vars area.
    1688              : 
    1689              :    REQUIRED_ALIGN is the alignment (in bits) required for the region
    1690              :    of memory.
    1691              : 
    1692              :    BASE is the rtx of the base of this virtual stack vars area.
    1693              :    The only time this is not `virtual_stack_vars_rtx` is when tagging pointers
    1694              :    on the stack.  */
    1695              : 
    1696              : rtx
    1697            0 : get_dynamic_stack_base (poly_int64 offset, unsigned required_align, rtx base)
    1698              : {
    1699            0 :   rtx target;
    1700              : 
    1701            0 :   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    1702            0 :     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    1703              : 
    1704            0 :   target = gen_reg_rtx (Pmode);
    1705            0 :   emit_move_insn (target, base);
    1706            0 :   target = expand_binop (Pmode, add_optab, target,
    1707            0 :                          gen_int_mode (offset, Pmode),
    1708              :                          NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1709            0 :   target = align_dynamic_address (target, required_align);
    1710              : 
    1711              :   /* Now that we've committed to a return value, mark its alignment.  */
    1712            0 :   mark_reg_pointer (target, required_align);
    1713              : 
    1714            0 :   return target;
    1715              : }
    1716              : 
    1717              : /* A front end may want to override GCC's stack checking by providing a
    1718              :    run-time routine to call to check the stack, so provide a mechanism for
    1719              :    calling that routine.  */
    1720              : 
    1721              : static GTY(()) rtx stack_check_libfunc;
    1722              : 
    1723              : void
    1724            0 : set_stack_check_libfunc (const char *libfunc_name)
    1725              : {
    1726            0 :   gcc_assert (stack_check_libfunc == NULL_RTX);
    1727            0 :   stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
    1728            0 :   tree ptype
    1729            0 :     = Pmode == ptr_mode
    1730            0 :       ? ptr_type_node
    1731            0 :       : lang_hooks.types.type_for_mode (Pmode, 1);
    1732            0 :   tree ftype
    1733            0 :     = build_function_type_list (void_type_node, ptype, NULL_TREE);
    1734            0 :   tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
    1735              :                           get_identifier (libfunc_name), ftype);
    1736            0 :   DECL_EXTERNAL (decl) = 1;
    1737            0 :   SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
    1738            0 : }
    1739              : 
    1740              : /* Emit one stack probe at ADDRESS, an address within the stack.  */
    1741              : 
    1742              : void
    1743          135 : emit_stack_probe (rtx address)
    1744              : {
    1745          135 :   if (targetm.have_probe_stack_address ())
    1746              :     {
    1747            0 :       class expand_operand ops[1];
    1748            0 :       insn_code icode = targetm.code_for_probe_stack_address;
    1749            0 :       create_address_operand (ops, address);
    1750            0 :       maybe_legitimize_operands (icode, 0, 1, ops);
    1751            0 :       expand_insn (icode, 1, ops);
    1752              :     }
    1753              :   else
    1754              :     {
    1755          135 :       rtx memref = gen_rtx_MEM (word_mode, address);
    1756              : 
    1757          135 :       MEM_VOLATILE_P (memref) = 1;
    1758          135 :       memref = validize_mem (memref);
    1759              : 
    1760              :       /* See if we have an insn to probe the stack.  */
    1761          135 :       if (targetm.have_probe_stack ())
    1762          135 :         emit_insn (targetm.gen_probe_stack (memref));
    1763              :       else
    1764            0 :         emit_move_insn (memref, const0_rtx);
    1765              :     }
    1766          135 : }
    1767              : 
    1768              : /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
    1769              :    FIRST is a constant and size is a Pmode RTX.  These are offsets from
    1770              :    the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
    1771              :    or subtract them from the stack pointer.  */
    1772              : 
    1773              : #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
    1774              : 
    1775              : #if STACK_GROWS_DOWNWARD
    1776              : #define STACK_GROW_OP MINUS
    1777              : #define STACK_GROW_OPTAB sub_optab
    1778              : #define STACK_GROW_OFF(off) -(off)
    1779              : #else
    1780              : #define STACK_GROW_OP PLUS
    1781              : #define STACK_GROW_OPTAB add_optab
    1782              : #define STACK_GROW_OFF(off) (off)
    1783              : #endif
    1784              : 
    1785              : void
    1786            0 : probe_stack_range (HOST_WIDE_INT first, rtx size)
    1787              : {
    1788              :   /* First ensure SIZE is Pmode.  */
    1789            0 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    1790            0 :     size = convert_to_mode (Pmode, size, 1);
    1791              : 
    1792              :   /* Next see if we have a function to check the stack.  */
    1793            0 :   if (stack_check_libfunc)
    1794              :     {
    1795            0 :       rtx addr = memory_address (Pmode,
    1796              :                                  gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1797              :                                                  stack_pointer_rtx,
    1798              :                                                  plus_constant (Pmode,
    1799              :                                                                 size, first)));
    1800            0 :       emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
    1801            0 :                          addr, Pmode);
    1802              :     }
    1803              : 
    1804              :   /* Next see if we have an insn to check the stack.  */
    1805            0 :   else if (targetm.have_check_stack ())
    1806              :     {
    1807            0 :       class expand_operand ops[1];
    1808            0 :       rtx addr = memory_address (Pmode,
    1809              :                                  gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1810              :                                                  stack_pointer_rtx,
    1811              :                                                  plus_constant (Pmode,
    1812              :                                                                 size, first)));
    1813            0 :       bool success;
    1814            0 :       create_input_operand (&ops[0], addr, Pmode);
    1815            0 :       success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
    1816            0 :       gcc_assert (success);
    1817              :     }
    1818              : 
    1819              :   /* Otherwise we have to generate explicit probes.  If we have a constant
    1820              :      small number of them to generate, that's the easy case.  */
    1821            0 :   else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    1822              :     {
    1823              :       HOST_WIDE_INT isize = INTVAL (size), i;
    1824              :       rtx addr;
    1825              : 
    1826              :       /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
    1827              :          it exceeds SIZE.  If only one probe is needed, this will not
    1828              :          generate any code.  Then probe at FIRST + SIZE.  */
    1829            0 :       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
    1830              :         {
    1831            0 :           addr = memory_address (Pmode,
    1832              :                                  plus_constant (Pmode, stack_pointer_rtx,
    1833              :                                                 STACK_GROW_OFF (first + i)));
    1834            0 :           emit_stack_probe (addr);
    1835              :         }
    1836              : 
    1837            0 :       addr = memory_address (Pmode,
    1838              :                              plus_constant (Pmode, stack_pointer_rtx,
    1839              :                                             STACK_GROW_OFF (first + isize)));
    1840            0 :       emit_stack_probe (addr);
    1841            0 :     }
    1842              : 
    1843              :   /* In the variable case, do the same as above, but in a loop.  Note that we
    1844              :      must be extra careful with variables wrapping around because we might be
    1845              :      at the very top (or the very bottom) of the address space and we have to
    1846              :      be able to handle this case properly; in particular, we use an equality
    1847              :      test for the loop condition.  */
    1848              :   else
    1849              :     {
    1850            0 :       rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
    1851            0 :       rtx_code_label *loop_lab = gen_label_rtx ();
    1852            0 :       rtx_code_label *end_lab = gen_label_rtx ();
    1853              : 
    1854              :       /* Step 1: round SIZE to the previous multiple of the interval.  */
    1855              : 
    1856              :       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
    1857            0 :       rounded_size
    1858            0 :         = simplify_gen_binary (AND, Pmode, size,
    1859            0 :                                gen_int_mode (-PROBE_INTERVAL, Pmode));
    1860            0 :       rounded_size_op = force_operand (rounded_size, NULL_RTX);
    1861              : 
    1862              : 
    1863              :       /* Step 2: compute initial and final value of the loop counter.  */
    1864              : 
    1865              :       /* TEST_ADDR = SP + FIRST.  */
    1866            0 :       test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1867              :                                                  stack_pointer_rtx,
    1868              :                                                  gen_int_mode (first, Pmode)),
    1869              :                                  NULL_RTX);
    1870              : 
    1871              :       /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
    1872            0 :       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1873              :                                                  test_addr,
    1874              :                                                  rounded_size_op), NULL_RTX);
    1875              : 
    1876              : 
    1877              :       /* Step 3: the loop
    1878              : 
    1879              :          while (TEST_ADDR != LAST_ADDR)
    1880              :            {
    1881              :              TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
    1882              :              probe at TEST_ADDR
    1883              :            }
    1884              : 
    1885              :          probes at FIRST + N * PROBE_INTERVAL for values of N from 1
    1886              :          until it is equal to ROUNDED_SIZE.  */
    1887              : 
    1888            0 :       emit_label (loop_lab);
    1889              : 
    1890              :       /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
    1891            0 :       emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
    1892              :                                end_lab);
    1893              : 
    1894              :       /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
    1895            0 :       temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
    1896            0 :                            gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
    1897              :                            1, OPTAB_WIDEN);
    1898              : 
    1899              :       /* There is no guarantee that expand_binop constructs its result
    1900              :          in TEST_ADDR.  So copy into TEST_ADDR if necessary.  */
    1901            0 :       if (temp != test_addr)
    1902            0 :         emit_move_insn (test_addr, temp);
    1903              : 
    1904              :       /* Probe at TEST_ADDR.  */
    1905            0 :       emit_stack_probe (test_addr);
    1906              : 
    1907            0 :       emit_jump (loop_lab);
    1908              : 
    1909            0 :       emit_label (end_lab);
    1910              : 
    1911              : 
    1912              :       /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
    1913              :          that SIZE is equal to ROUNDED_SIZE.  */
    1914              : 
    1915              :       /* TEMP = SIZE - ROUNDED_SIZE.  */
    1916            0 :       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
    1917            0 :       if (temp != const0_rtx)
    1918              :         {
    1919            0 :           rtx addr;
    1920              : 
    1921            0 :           if (CONST_INT_P (temp))
    1922              :             {
    1923              :               /* Use [base + disp} addressing mode if supported.  */
    1924            0 :               HOST_WIDE_INT offset = INTVAL (temp);
    1925            0 :               addr = memory_address (Pmode,
    1926              :                                      plus_constant (Pmode, last_addr,
    1927              :                                                     STACK_GROW_OFF (offset)));
    1928              :             }
    1929              :           else
    1930              :             {
    1931              :               /* Manual CSE if the difference is not known at compile-time.  */
    1932            0 :               temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
    1933            0 :               addr = memory_address (Pmode,
    1934              :                                      gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1935              :                                                      last_addr, temp));
    1936              :             }
    1937              : 
    1938            0 :           emit_stack_probe (addr);
    1939              :         }
    1940              :     }
    1941              : 
    1942              :   /* Make sure nothing is scheduled before we are done.  */
    1943            0 :   emit_insn (gen_blockage ());
    1944            0 : }
    1945              : 
    1946              : /* Compute parameters for stack clash probing a dynamic stack
    1947              :    allocation of SIZE bytes.
    1948              : 
    1949              :    We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.
    1950              : 
    1951              :    Additionally we conditionally dump the type of probing that will
    1952              :    be needed given the values computed.  */
    1953              : 
    1954              : void
    1955           16 : compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
    1956              :                                           rtx *residual,
    1957              :                                           HOST_WIDE_INT *probe_interval,
    1958              :                                           rtx size)
    1959              : {
    1960              :   /* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL */
    1961           16 :   *probe_interval
    1962           16 :     = 1 << param_stack_clash_protection_probe_interval;
    1963           16 :   *rounded_size = simplify_gen_binary (AND, Pmode, size,
    1964              :                                         GEN_INT (-*probe_interval));
    1965              : 
    1966              :   /* Compute the value of the stack pointer for the last iteration.
    1967              :      It's just SP + ROUNDED_SIZE.  */
    1968           16 :   rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
    1969           16 :   *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1970              :                                               stack_pointer_rtx,
    1971              :                                               rounded_size_op),
    1972              :                               NULL_RTX);
    1973              : 
    1974              :   /* Compute any residuals not allocated by the loop above.  Residuals
    1975              :      are just the ROUNDED_SIZE - SIZE.  */
    1976           16 :   *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
    1977              : 
    1978              :   /* Dump key information to make writing tests easy.  */
    1979           16 :   if (dump_file)
    1980              :     {
    1981            9 :       if (*rounded_size == CONST0_RTX (Pmode))
    1982            0 :         fprintf (dump_file,
    1983              :                  "Stack clash skipped dynamic allocation and probing loop.\n");
    1984            9 :       else if (CONST_INT_P (*rounded_size)
    1985            0 :                && INTVAL (*rounded_size) <= 4 * *probe_interval)
    1986            0 :         fprintf (dump_file,
    1987              :                  "Stack clash dynamic allocation and probing inline.\n");
    1988            9 :       else if (CONST_INT_P (*rounded_size))
    1989            0 :         fprintf (dump_file,
    1990              :                  "Stack clash dynamic allocation and probing in "
    1991              :                  "rotated loop.\n");
    1992              :       else
    1993            9 :         fprintf (dump_file,
    1994              :                  "Stack clash dynamic allocation and probing in loop.\n");
    1995              : 
    1996            9 :       if (*residual != CONST0_RTX (Pmode))
    1997            9 :         fprintf (dump_file,
    1998              :                  "Stack clash dynamic allocation and probing residuals.\n");
    1999              :       else
    2000            0 :         fprintf (dump_file,
    2001              :                  "Stack clash skipped dynamic allocation and "
    2002              :                  "probing residuals.\n");
    2003              :     }
    2004           16 : }
    2005              : 
    2006              : /* Emit the start of an allocate/probe loop for stack
    2007              :    clash protection.
    2008              : 
    2009              :    LOOP_LAB and END_LAB are returned for use when we emit the
    2010              :    end of the loop.
    2011              : 
    2012              :    LAST addr is the value for SP which stops the loop.  */
    2013              : void
    2014           16 : emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
    2015              :                                               rtx *end_lab,
    2016              :                                               rtx last_addr,
    2017              :                                               bool rotated)
    2018              : {
    2019              :   /* Essentially we want to emit any setup code, the top of loop
    2020              :      label and the comparison at the top of the loop.  */
    2021           16 :   *loop_lab = gen_label_rtx ();
    2022           16 :   *end_lab = gen_label_rtx ();
    2023              : 
    2024           16 :   emit_label (*loop_lab);
    2025           16 :   if (!rotated)
    2026           16 :     emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
    2027           16 :                              Pmode, 1, *end_lab);
    2028           16 : }
    2029              : 
    2030              : /* Emit the end of a stack clash probing loop.
    2031              : 
    2032              :    This consists of just the jump back to LOOP_LAB and
    2033              :    emitting END_LOOP after the loop.  */
    2034              : 
    2035              : void
    2036           16 : emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
    2037              :                                             rtx last_addr, bool rotated)
    2038              : {
    2039           16 :   if (rotated)
    2040            0 :     emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
    2041            0 :                              Pmode, 1, loop_lab);
    2042              :   else
    2043           16 :     emit_jump (loop_lab);
    2044              : 
    2045           16 :   emit_label (end_loop);
    2046              : 
    2047           16 : }
    2048              : 
    2049              : /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
    2050              :    while probing it.  This pushes when SIZE is positive.  SIZE need not
    2051              :    be constant.
    2052              : 
    2053              :    This is subtly different than anti_adjust_stack_and_probe to try and
    2054              :    prevent stack-clash attacks
    2055              : 
    2056              :      1. It must assume no knowledge of the probing state, any allocation
    2057              :         must probe.
    2058              : 
    2059              :         Consider the case of a 1 byte alloca in a loop.  If the sum of the
    2060              :         allocations is large, then this could be used to jump the guard if
    2061              :         probes were not emitted.
    2062              : 
    2063              :      2. It never skips probes, whereas anti_adjust_stack_and_probe will
    2064              :         skip the probe on the first PROBE_INTERVAL on the assumption it
    2065              :         was already done in the prologue and in previous allocations.
    2066              : 
    2067              :      3. It only allocates and probes SIZE bytes, it does not need to
    2068              :         allocate/probe beyond that because this probing style does not
    2069              :         guarantee signal handling capability if the guard is hit.  */
    2070              : 
    2071              : void
    2072           16 : anti_adjust_stack_and_probe_stack_clash (rtx size)
    2073              : {
    2074              :   /* First ensure SIZE is Pmode.  */
    2075           16 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    2076            0 :     size = convert_to_mode (Pmode, size, 1);
    2077              : 
    2078              :   /* We can get here with a constant size on some targets.  */
    2079           16 :   rtx rounded_size, last_addr, residual;
    2080           16 :   HOST_WIDE_INT probe_interval, probe_range;
    2081           16 :   bool target_probe_range_p = false;
    2082           16 :   compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
    2083              :                                             &residual, &probe_interval, size);
    2084              : 
    2085              :   /* Get the back-end specific probe ranges.  */
    2086           16 :   probe_range = targetm.stack_clash_protection_alloca_probe_range ();
    2087           16 :   target_probe_range_p = probe_range != 0;
    2088           16 :   gcc_assert (probe_range >= 0);
    2089              : 
    2090              :   /* If no back-end specific range defined, default to the top of the newly
    2091              :      allocated range.  */
    2092           16 :   if (probe_range == 0)
    2093           32 :     probe_range = probe_interval - GET_MODE_SIZE (word_mode);
    2094              : 
    2095           16 :   if (rounded_size != CONST0_RTX (Pmode))
    2096              :     {
    2097           16 :       if (CONST_INT_P (rounded_size)
    2098            0 :           && INTVAL (rounded_size) <= 4 * probe_interval)
    2099              :         {
    2100            0 :           for (HOST_WIDE_INT i = 0;
    2101            0 :                i < INTVAL (rounded_size);
    2102            0 :                i += probe_interval)
    2103              :             {
    2104            0 :               anti_adjust_stack (GEN_INT (probe_interval));
    2105              :               /* The prologue does not probe residuals.  Thus the offset
    2106              :                  here to probe just beyond what the prologue had already
    2107              :                  allocated.  */
    2108            0 :               emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
    2109            0 :                                                probe_range));
    2110              : 
    2111            0 :               emit_insn (gen_blockage ());
    2112              :             }
    2113              :         }
    2114              :       else
    2115              :         {
    2116           16 :           rtx loop_lab, end_loop;
    2117           16 :           bool rotate_loop = CONST_INT_P (rounded_size);
    2118           16 :           emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
    2119              :                                                         last_addr, rotate_loop);
    2120              : 
    2121           16 :           anti_adjust_stack (GEN_INT (probe_interval));
    2122              : 
    2123              :           /* The prologue does not probe residuals.  Thus the offset here
    2124              :              to probe just beyond what the prologue had already
    2125              :              allocated.  */
    2126           16 :           emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
    2127           16 :                                            probe_range));
    2128              : 
    2129           16 :           emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
    2130              :                                                       last_addr, rotate_loop);
    2131           16 :           emit_insn (gen_blockage ());
    2132              :         }
    2133              :     }
    2134              : 
    2135           16 :   if (residual != CONST0_RTX (Pmode))
    2136              :     {
    2137           16 :       rtx label = NULL_RTX;
    2138              :       /* RESIDUAL could be zero at runtime and in that case *sp could
    2139              :          hold live data.  Furthermore, we do not want to probe into the
    2140              :          red zone.
    2141              : 
    2142              :          If TARGET_PROBE_RANGE_P then the target has promised it's safe to
    2143              :          probe at offset 0.  In which case we no longer have to check for
    2144              :          RESIDUAL == 0.  However we still need to probe at the right offset
    2145              :          when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
    2146              : 
    2147              :          If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
    2148              :          on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time constant.
    2149              :          */
    2150           16 :       anti_adjust_stack (residual);
    2151              : 
    2152           16 :       if (!CONST_INT_P (residual))
    2153              :         {
    2154           16 :           label = gen_label_rtx ();
    2155           16 :           rtx_code op = target_probe_range_p ? LT : EQ;
    2156           16 :           rtx probe_cmp_value = target_probe_range_p
    2157            0 :             ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
    2158           16 :             : CONST0_RTX (GET_MODE (residual));
    2159              : 
    2160           16 :           if (target_probe_range_p)
    2161            0 :             emit_stack_probe (stack_pointer_rtx);
    2162              : 
    2163           16 :           emit_cmp_and_jump_insns (residual, probe_cmp_value,
    2164           16 :                                    op, NULL_RTX, Pmode, 1, label);
    2165              :         }
    2166              : 
    2167           16 :       rtx x = NULL_RTX;
    2168              : 
    2169              :       /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
    2170              :          by the ABI defined safe value.  */
    2171           16 :       if (!CONST_INT_P (residual) && target_probe_range_p)
    2172            0 :         x = GEN_INT (probe_range);
    2173              :       /* If RESIDUAL is a constant but smaller than the ABI defined safe value,
    2174              :          we still want to probe up, but the safest amount if a word.  */
    2175            0 :       else if (target_probe_range_p)
    2176              :         {
    2177            0 :           if (INTVAL (residual) <= probe_range)
    2178            0 :             x = GEN_INT (GET_MODE_SIZE (word_mode));
    2179              :           else
    2180            0 :             x = GEN_INT (probe_range);
    2181              :         }
    2182              :       else
    2183              :       /* If nothing else, probe at the top of the new allocation.  */
    2184           32 :         x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
    2185              : 
    2186           16 :       emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
    2187              : 
    2188           16 :       emit_insn (gen_blockage ());
    2189           16 :       if (!CONST_INT_P (residual))
    2190           16 :           emit_label (label);
    2191              :     }
    2192           16 : }
    2193              : 
    2194              : 
    2195              : /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
    2196              :    while probing it.  This pushes when SIZE is positive.  SIZE need not
    2197              :    be constant.  If ADJUST_BACK is true, adjust back the stack pointer
    2198              :    by plus SIZE at the end.  */
    2199              : 
    2200              : void
    2201           41 : anti_adjust_stack_and_probe (rtx size, bool adjust_back)
    2202              : {
    2203              :   /* We skip the probe for the first interval + a small dope of 4 words and
    2204              :      probe that many bytes past the specified size to maintain a protection
    2205              :      area at the botton of the stack.  */
    2206           41 :   const int dope = 4 * UNITS_PER_WORD;
    2207              : 
    2208              :   /* First ensure SIZE is Pmode.  */
    2209           41 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    2210            0 :     size = convert_to_mode (Pmode, size, 1);
    2211              : 
    2212              :   /* If we have a constant small number of probes to generate, that's the
    2213              :      easy case.  */
    2214           41 :   if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    2215              :     {
    2216              :       HOST_WIDE_INT isize = INTVAL (size), i;
    2217              :       bool first_probe = true;
    2218              : 
    2219              :       /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
    2220              :          values of N from 1 until it exceeds SIZE.  If only one probe is
    2221              :          needed, this will not generate any code.  Then adjust and probe
    2222              :          to PROBE_INTERVAL + SIZE.  */
    2223           32 :       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
    2224              :         {
    2225            0 :           if (first_probe)
    2226              :             {
    2227            0 :               anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
    2228            0 :               first_probe = false;
    2229              :             }
    2230              :           else
    2231            0 :             anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
    2232            0 :           emit_stack_probe (stack_pointer_rtx);
    2233              :         }
    2234              : 
    2235           32 :       if (first_probe)
    2236           32 :         anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
    2237              :       else
    2238            0 :         anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
    2239           32 :       emit_stack_probe (stack_pointer_rtx);
    2240           32 :     }
    2241              : 
    2242              :   /* In the variable case, do the same as above, but in a loop.  Note that we
    2243              :      must be extra careful with variables wrapping around because we might be
    2244              :      at the very top (or the very bottom) of the address space and we have to
    2245              :      be able to handle this case properly; in particular, we use an equality
    2246              :      test for the loop condition.  */
    2247              :   else
    2248              :     {
    2249            9 :       rtx rounded_size, rounded_size_op, last_addr, temp;
    2250            9 :       rtx_code_label *loop_lab = gen_label_rtx ();
    2251            9 :       rtx_code_label *end_lab = gen_label_rtx ();
    2252              : 
    2253              : 
    2254              :       /* Step 1: round SIZE to the previous multiple of the interval.  */
    2255              : 
    2256              :       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
    2257            9 :       rounded_size
    2258            9 :         = simplify_gen_binary (AND, Pmode, size,
    2259            9 :                                gen_int_mode (-PROBE_INTERVAL, Pmode));
    2260            9 :       rounded_size_op = force_operand (rounded_size, NULL_RTX);
    2261              : 
    2262              : 
    2263              :       /* Step 2: compute initial and final value of the loop counter.  */
    2264              : 
    2265              :       /* SP = SP_0 + PROBE_INTERVAL.  */
    2266            9 :       anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
    2267              : 
    2268              :       /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
    2269            9 :       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    2270              :                                                  stack_pointer_rtx,
    2271              :                                                  rounded_size_op), NULL_RTX);
    2272              : 
    2273              : 
    2274              :       /* Step 3: the loop
    2275              : 
    2276              :          while (SP != LAST_ADDR)
    2277              :            {
    2278              :              SP = SP + PROBE_INTERVAL
    2279              :              probe at SP
    2280              :            }
    2281              : 
    2282              :          adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
    2283              :          values of N from 1 until it is equal to ROUNDED_SIZE.  */
    2284              : 
    2285            9 :       emit_label (loop_lab);
    2286              : 
    2287              :       /* Jump to END_LAB if SP == LAST_ADDR.  */
    2288            9 :       emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
    2289            9 :                                Pmode, 1, end_lab);
    2290              : 
    2291              :       /* SP = SP + PROBE_INTERVAL and probe at SP.  */
    2292            9 :       anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
    2293            9 :       emit_stack_probe (stack_pointer_rtx);
    2294              : 
    2295            9 :       emit_jump (loop_lab);
    2296              : 
    2297            9 :       emit_label (end_lab);
    2298              : 
    2299              : 
    2300              :       /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
    2301              :          assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */
    2302              : 
    2303              :       /* TEMP = SIZE - ROUNDED_SIZE.  */
    2304            9 :       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
    2305            9 :       if (temp != const0_rtx)
    2306              :         {
    2307              :           /* Manual CSE if the difference is not known at compile-time.  */
    2308            9 :           if (GET_CODE (temp) != CONST_INT)
    2309            9 :             temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
    2310            9 :           anti_adjust_stack (temp);
    2311            9 :           emit_stack_probe (stack_pointer_rtx);
    2312              :         }
    2313              :     }
    2314              : 
    2315              :   /* Adjust back and account for the additional first interval.  */
    2316           41 :   if (adjust_back)
    2317           32 :     adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
    2318              :   else
    2319            9 :     adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
    2320           41 : }
    2321              : 
    2322              : /* Return an rtx representing the register or memory location
    2323              :    in which a scalar value of data type VALTYPE
    2324              :    was returned by a function call to function FUNC.
    2325              :    FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
    2326              :    function is known, otherwise 0.
    2327              :    OUTGOING is 1 if on a machine with register windows this function
    2328              :    should return the register in which the function will put its result
    2329              :    and 0 otherwise.  */
    2330              : 
    2331              : rtx
    2332    106239697 : hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
    2333              :                      int outgoing ATTRIBUTE_UNUSED)
    2334              : {
    2335    106239697 :   rtx val;
    2336              : 
    2337    210006378 :   val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
    2338              : 
    2339    106239697 :   if (REG_P (val)
    2340    105380258 :       && GET_MODE (val) == BLKmode)
    2341              :     {
    2342       131350 :       unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
    2343       131350 :       opt_scalar_int_mode tmpmode;
    2344              : 
    2345              :       /* int_size_in_bytes can return -1.  We don't need a check here
    2346              :          since the value of bytes will then be large enough that no
    2347              :          mode will match anyway.  */
    2348              : 
    2349       474154 :       FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
    2350              :         {
    2351              :           /* Have we found a large enough mode?  */
    2352       948308 :           if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
    2353              :             break;
    2354              :         }
    2355              : 
    2356       131350 :       PUT_MODE (val, tmpmode.require ());
    2357              :     }
    2358    106239697 :   return val;
    2359              : }
    2360              : 
    2361              : /* Return an rtx representing the register or memory location
    2362              :    in which a scalar value of mode MODE was returned by a library call.  */
    2363              : 
    2364              : rtx
    2365       105702 : hard_libcall_value (machine_mode mode, rtx fun)
    2366              : {
    2367       105702 :   return targetm.calls.libcall_value (mode, fun);
    2368              : }
    2369              : 
    2370              : /* Look up the tree code for a given rtx code
    2371              :    to provide the arithmetic operation for real_arithmetic.
    2372              :    The function returns an int because the caller may not know
    2373              :    what `enum tree_code' means.  */
    2374              : 
    2375              : int
    2376         5330 : rtx_to_tree_code (enum rtx_code code)
    2377              : {
    2378         5330 :   enum tree_code tcode;
    2379              : 
    2380         5330 :   switch (code)
    2381              :     {
    2382              :     case PLUS:
    2383              :       tcode = PLUS_EXPR;
    2384              :       break;
    2385              :     case MINUS:
    2386              :       tcode = MINUS_EXPR;
    2387              :       break;
    2388              :     case MULT:
    2389              :       tcode = MULT_EXPR;
    2390              :       break;
    2391              :     case DIV:
    2392              :       tcode = RDIV_EXPR;
    2393              :       break;
    2394              :     case SMIN:
    2395              :       tcode = MIN_EXPR;
    2396              :       break;
    2397              :     case SMAX:
    2398              :       tcode = MAX_EXPR;
    2399              :       break;
    2400              :     default:
    2401              :       tcode = LAST_AND_UNUSED_TREE_CODE;
    2402              :       break;
    2403              :     }
    2404         5330 :   return ((int) tcode);
    2405              : }
    2406              : 
    2407              : #include "gt-explow.h"
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.