LCOV - code coverage report
Current view: top level - gcc - function.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 83.3 % 2829 2357
Test Date: 2026-02-28 14:20:25 Functions: 90.3 % 165 149
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Expands front end tree to back end RTL for GCC.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : /* This file handles the generation of rtl code from tree structure
      21              :    at the level of the function as a whole.
      22              :    It creates the rtl expressions for parameters and auto variables
      23              :    and has full responsibility for allocating stack slots.
      24              : 
      25              :    `expand_function_start' is called at the beginning of a function,
      26              :    before the function body is parsed, and `expand_function_end' is
      27              :    called after parsing the body.
      28              : 
      29              :    Call `assign_stack_local' to allocate a stack slot for a local variable.
      30              :    This is usually done during the RTL generation for the function body,
      31              :    but it can also be done in the reload pass when a pseudo-register does
      32              :    not get a hard register.  */
      33              : 
      34              : #include "config.h"
      35              : #include "system.h"
      36              : #include "coretypes.h"
      37              : #include "backend.h"
      38              : #include "target.h"
      39              : #include "rtl.h"
      40              : #include "tree.h"
      41              : #include "gimple-expr.h"
      42              : #include "cfghooks.h"
      43              : #include "df.h"
      44              : #include "memmodel.h"
      45              : #include "tm_p.h"
      46              : #include "stringpool.h"
      47              : #include "expmed.h"
      48              : #include "optabs.h"
      49              : #include "opts.h"
      50              : #include "regs.h"
      51              : #include "emit-rtl.h"
      52              : #include "recog.h"
      53              : #include "rtl-error.h"
      54              : #include "hard-reg-set.h"
      55              : #include "alias.h"
      56              : #include "fold-const.h"
      57              : #include "stor-layout.h"
      58              : #include "varasm.h"
      59              : #include "except.h"
      60              : #include "dojump.h"
      61              : #include "explow.h"
      62              : #include "calls.h"
      63              : #include "expr.h"
      64              : #include "optabs-tree.h"
      65              : #include "output.h"
      66              : #include "langhooks.h"
      67              : #include "common/common-target.h"
      68              : #include "gimplify.h"
      69              : #include "tree-pass.h"
      70              : #include "cfgrtl.h"
      71              : #include "cfganal.h"
      72              : #include "cfgbuild.h"
      73              : #include "cfgcleanup.h"
      74              : #include "cfgexpand.h"
      75              : #include "shrink-wrap.h"
      76              : #include "toplev.h"
      77              : #include "rtl-iter.h"
      78              : #include "tree-dfa.h"
      79              : #include "tree-ssa.h"
      80              : #include "stringpool.h"
      81              : #include "attribs.h"
      82              : #include "gimple.h"
      83              : #include "options.h"
      84              : #include "function-abi.h"
      85              : #include "value-range.h"
      86              : #include "gimple-range.h"
      87              : #include "insn-attr.h"
      88              : 
      89              : /* So we can assign to cfun in this file.  */
      90              : #undef cfun
      91              : 
      92              : #ifndef STACK_ALIGNMENT_NEEDED
      93              : #define STACK_ALIGNMENT_NEEDED 1
      94              : #endif
      95              : 
      96              : #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
      97              : 
      98              : /* Round a value to the lowest integer less than it that is a multiple of
      99              :    the required alignment.  Avoid using division in case the value is
     100              :    negative.  Assume the alignment is a power of two.  */
     101              : #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
     102              : 
     103              : /* Similar, but round to the next highest integer that meets the
     104              :    alignment.  */
     105              : #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
     106              : 
     107              : /* Nonzero once virtual register instantiation has been done.
     108              :    assign_stack_local uses frame_pointer_rtx when this is nonzero.
     109              :    calls.cc:emit_library_call_value_1 uses it to set up
     110              :    post-instantiation libcalls.  */
     111              : int virtuals_instantiated;
     112              : 
     113              : /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
     114              : static GTY(()) int funcdef_no;
     115              : 
     116              : /* These variables hold pointers to functions to create and destroy
     117              :    target specific, per-function data structures.  */
     118              : struct machine_function * (*init_machine_status) (void);
     119              : 
     120              : /* The currently compiled function.  */
     121              : struct function *cfun = 0;
     122              : 
     123              : /* These hashes record the prologue and epilogue insns.  */
     124              : 
     125              : struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
     126              : {
     127   1153526823 :   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
     128              :   static bool equal (rtx a, rtx b) { return a == b; }
     129              : };
     130              : 
     131              : static GTY((cache))
     132              :   hash_table<insn_cache_hasher> *prologue_insn_hash;
     133              : static GTY((cache))
     134              :   hash_table<insn_cache_hasher> *epilogue_insn_hash;
     135              : 
     136              : 
     137              : hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
     138              : vec<tree, va_gc> *types_used_by_cur_var_decl;
     139              : 
     140              : /* Forward declarations.  */
     141              : 
     142              : static class temp_slot *find_temp_slot_from_address (rtx);
     143              : static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
     144              : static void pad_below (struct args_size *, machine_mode, tree);
     145              : static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
     146              : static int all_blocks (tree, tree *);
     147              : static tree *get_block_vector (tree, int *);
     148              : extern tree debug_find_var_in_block_tree (tree, tree);
     149              : /* We always define `record_insns' even if it's not used so that we
     150              :    can always export `prologue_epilogue_contains'.  */
     151              : static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     152              :      ATTRIBUTE_UNUSED;
     153              : static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
     154              : static void prepare_function_start (void);
     155              : static void do_clobber_return_reg (rtx, void *);
     156              : static void do_use_return_reg (rtx, void *);
     157              : 
     158              : 
     159              : /* Stack of nested functions.  */
     160              : /* Keep track of the cfun stack.  */
     161              : 
     162              : static vec<function *> function_context_stack;
     163              : 
     164              : /* Save the current context for compilation of a nested function.
     165              :    This is called from language-specific code.  */
     166              : 
     167              : void
     168    122536696 : push_function_context (void)
     169              : {
     170    122536696 :   if (cfun == 0)
     171           20 :     allocate_struct_function (NULL, false);
     172              : 
     173    122536696 :   function_context_stack.safe_push (cfun);
     174    122536696 :   set_cfun (NULL);
     175    122536696 : }
     176              : 
     177              : /* Restore the last saved context, at the end of a nested function.
     178              :    This function is called from language-specific code.  */
     179              : 
     180              : void
     181    122536675 : pop_function_context (void)
     182              : {
     183    122536675 :   struct function *p = function_context_stack.pop ();
     184    122536675 :   set_cfun (p);
     185    122536675 :   current_function_decl = p->decl;
     186              : 
     187              :   /* Reset variables that have known state during rtx generation.  */
     188    122536675 :   virtuals_instantiated = 0;
     189    122536675 :   generating_concat_p = 1;
     190    122536675 : }
     191              : 
     192              : /* Clear out all parts of the state in F that can safely be discarded
     193              :    after the function has been parsed, but not compiled, to let
     194              :    garbage collection reclaim the memory.  */
     195              : 
     196              : void
     197      1685618 : free_after_parsing (struct function *f)
     198              : {
     199      1685618 :   f->language = 0;
     200      1685618 : }
     201              : 
     202              : /* Clear out all parts of the state in F that can safely be discarded
     203              :    after the function has been compiled, to let garbage collection
     204              :    reclaim the memory.  */
     205              : 
     206              : void
     207      1691046 : free_after_compilation (struct function *f)
     208              : {
     209      1691046 :   prologue_insn_hash = NULL;
     210      1691046 :   epilogue_insn_hash = NULL;
     211              : 
     212      1691046 :   free (crtl->emit.regno_pointer_align);
     213              : 
     214      1691046 :   memset (crtl, 0, sizeof (struct rtl_data));
     215      1691046 :   f->eh = NULL;
     216      1691046 :   f->machine = NULL;
     217      1691046 :   f->cfg = NULL;
     218      1691046 :   f->curr_properties &= ~PROP_cfg;
     219      1691181 :   delete f->cond_uids;
     220              : 
     221      1691046 :   regno_reg_rtx = NULL;
     222      1691046 : }
     223              : 
     224              : /* Return size needed for stack frame based on slots so far allocated.
     225              :    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
     226              :    the caller may have to do that.  */
     227              : 
     228              : poly_int64
     229    149083539 : get_frame_size (void)
     230              : {
     231    149083539 :   if (FRAME_GROWS_DOWNWARD)
     232    149083539 :     return -frame_offset;
     233              :   else
     234              :     return frame_offset;
     235              : }
     236              : 
     237              : /* Issue an error message and return TRUE if frame OFFSET overflows in
     238              :    the signed target pointer arithmetics for function FUNC.  Otherwise
     239              :    return FALSE.  */
     240              : 
     241              : bool
     242      3933112 : frame_offset_overflow (poly_int64 offset, tree func)
     243              : {
     244      3933112 :   poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
     245      3933112 :   unsigned HOST_WIDE_INT limit
     246      3933112 :     = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
     247              :        /* Leave room for the fixed part of the frame.  */
     248      3933112 :        - 64 * UNITS_PER_WORD);
     249              : 
     250      7866224 :   if (!coeffs_in_range_p (size, 0U, limit))
     251              :     {
     252            0 :       unsigned HOST_WIDE_INT hwisize;
     253            0 :       if (size.is_constant (&hwisize))
     254            0 :         error_at (DECL_SOURCE_LOCATION (func),
     255              :                   "total size of local objects %wu exceeds maximum %wu",
     256              :                   hwisize, limit);
     257              :       else
     258              :         error_at (DECL_SOURCE_LOCATION (func),
     259              :                   "total size of local objects exceeds maximum %wu",
     260              :                   limit);
     261            0 :       return true;
     262              :     }
     263              : 
     264              :   return false;
     265              : }
     266              : 
     267              : /* Return the minimum spill slot alignment for a register of mode MODE.  */
     268              : 
     269              : unsigned int
     270      1420453 : spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
     271              : {
     272      1420453 :   return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
     273              : }
     274              : 
     275              : /* Return stack slot alignment in bits for TYPE and MODE.  */
     276              : 
     277              : static unsigned int
     278       153621 : get_stack_local_alignment (tree type, machine_mode mode)
     279              : {
     280       153621 :   unsigned int alignment;
     281              : 
     282       153621 :   if (mode == BLKmode)
     283        22008 :     alignment = BIGGEST_ALIGNMENT;
     284              :   else
     285       131613 :     alignment = GET_MODE_ALIGNMENT (mode);
     286              : 
     287              :   /* Allow the frond-end to (possibly) increase the alignment of this
     288              :      stack slot.  */
     289       153621 :   if (! type)
     290        55948 :     type = lang_hooks.types.type_for_mode (mode, 0);
     291              : 
     292       153621 :   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
     293              : }
     294              : 
     295              : /* Determine whether it is possible to fit a stack slot of size SIZE and
     296              :    alignment ALIGNMENT into an area in the stack frame that starts at
     297              :    frame offset START and has a length of LENGTH.  If so, store the frame
     298              :    offset to be used for the stack slot in *POFFSET and return true;
     299              :    return false otherwise.  This function will extend the frame size when
     300              :    given a start/length pair that lies at the end of the frame.  */
     301              : 
     302              : static bool
     303      2348014 : try_fit_stack_local (poly_int64 start, poly_int64 length,
     304              :                      poly_int64 size, unsigned int alignment,
     305              :                      poly_int64 *poffset)
     306              : {
     307      2348014 :   poly_int64 this_frame_offset;
     308      2348014 :   int frame_off, frame_alignment, frame_phase;
     309              : 
     310              :   /* Calculate how many bytes the start of local variables is off from
     311              :      stack alignment.  */
     312      2348014 :   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
     313      2348014 :   frame_off = targetm.starting_frame_offset () % frame_alignment;
     314      2348014 :   frame_phase = frame_off ? frame_alignment - frame_off : 0;
     315              : 
     316              :   /* Round the frame offset to the specified alignment.  */
     317              : 
     318      2348014 :   if (FRAME_GROWS_DOWNWARD)
     319      2348014 :     this_frame_offset
     320      2348014 :       = (aligned_lower_bound (start + length - size - frame_phase, alignment)
     321      2348014 :          + frame_phase);
     322              :   else
     323              :     this_frame_offset
     324              :       = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
     325              : 
     326              :   /* See if it fits.  If this space is at the edge of the frame,
     327              :      consider extending the frame to make it fit.  Our caller relies on
     328              :      this when allocating a new slot.  */
     329      2348014 :   if (maybe_lt (this_frame_offset, start))
     330              :     {
     331       482959 :       if (known_eq (frame_offset, start))
     332       359829 :         frame_offset = this_frame_offset;
     333              :       else
     334              :         return false;
     335              :     }
     336      1865055 :   else if (maybe_gt (this_frame_offset + size, start + length))
     337              :     {
     338            0 :       if (known_eq (frame_offset, start + length))
     339            0 :         frame_offset = this_frame_offset + size;
     340              :       else
     341              :         return false;
     342              :     }
     343              : 
     344      2224884 :   *poffset = this_frame_offset;
     345      2224884 :   return true;
     346              : }
     347              : 
     348              : /* Create a new frame_space structure describing free space in the stack
     349              :    frame beginning at START and ending at END, and chain it into the
     350              :    function's frame_space_list.  */
     351              : 
     352              : static void
     353       387469 : add_frame_space (poly_int64 start, poly_int64 end)
     354              : {
     355       387469 :   class frame_space *space = ggc_alloc<frame_space> ();
     356       387469 :   space->next = crtl->frame_space_list;
     357       387469 :   crtl->frame_space_list = space;
     358       387469 :   space->start = start;
     359       387469 :   space->length = end - start;
     360       387469 : }
     361              : 
     362              : /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
     363              :    with machine mode MODE.
     364              : 
     365              :    ALIGN controls the amount of alignment for the address of the slot:
     366              :    0 means according to MODE,
     367              :    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
     368              :    -2 means use BITS_PER_UNIT,
     369              :    positive specifies alignment boundary in bits.
     370              : 
     371              :    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
     372              :    alignment and ASLK_RECORD_PAD bit set if we should remember
     373              :    extra space we allocated for alignment purposes.  When we are
     374              :    called from assign_stack_temp_for_type, it is not set so we don't
     375              :    track the same stack slot in two independent lists.
     376              : 
     377              :    We do not round to stack_boundary here.  */
     378              : 
     379              : rtx
     380      2224884 : assign_stack_local_1 (machine_mode mode, poly_int64 size,
     381              :                       int align, int kind)
     382              : {
     383      2224884 :   rtx x, addr;
     384      2224884 :   poly_int64 bigend_correction = 0;
     385      2224884 :   poly_int64 slot_offset = 0, old_frame_offset;
     386      2224884 :   unsigned int alignment, alignment_in_bits;
     387              : 
     388      2224884 :   if (align == 0)
     389              :     {
     390         7003 :       alignment = get_stack_local_alignment (NULL, mode);
     391         7003 :       alignment /= BITS_PER_UNIT;
     392              :     }
     393      2217881 :   else if (align == -1)
     394              :     {
     395          947 :       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
     396          947 :       size = aligned_upper_bound (size, alignment);
     397              :     }
     398      2216934 :   else if (align == -2)
     399              :     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
     400              :   else
     401      2216934 :     alignment = align / BITS_PER_UNIT;
     402              : 
     403      2224884 :   alignment_in_bits = alignment * BITS_PER_UNIT;
     404              : 
     405              :   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
     406      2224884 :   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
     407              :     {
     408            0 :       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
     409            0 :       alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
     410              :     }
     411              : 
     412      2224884 :   if (SUPPORTS_STACK_ALIGNMENT)
     413              :     {
     414      2224884 :       if (crtl->stack_alignment_estimated < alignment_in_bits)
     415              :         {
     416         4382 :           if (!crtl->stack_realign_processed)
     417         4359 :             crtl->stack_alignment_estimated = alignment_in_bits;
     418              :           else
     419              :             {
     420              :               /* If stack is realigned and stack alignment value
     421              :                  hasn't been finalized, it is OK not to increase
     422              :                  stack_alignment_estimated.  The bigger alignment
     423              :                  requirement is recorded in stack_alignment_needed
     424              :                  below.  */
     425           23 :               gcc_assert (!crtl->stack_realign_finalized);
     426           23 :               if (!crtl->stack_realign_needed)
     427              :                 {
     428              :                   /* It is OK to reduce the alignment as long as the
     429              :                      requested size is 0 or the estimated stack
     430              :                      alignment >= mode alignment.  */
     431           23 :                   gcc_assert ((kind & ASLK_REDUCE_ALIGN)
     432              :                               || known_eq (size, 0)
     433              :                               || (crtl->stack_alignment_estimated
     434              :                                   >= GET_MODE_ALIGNMENT (mode)));
     435           23 :                   alignment_in_bits = crtl->stack_alignment_estimated;
     436           23 :                   alignment = alignment_in_bits / BITS_PER_UNIT;
     437              :                 }
     438              :             }
     439              :         }
     440              :     }
     441              : 
     442      2224884 :   if (crtl->stack_alignment_needed < alignment_in_bits)
     443        18327 :     crtl->stack_alignment_needed = alignment_in_bits;
     444      2224884 :   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
     445       289671 :     crtl->max_used_stack_slot_alignment = alignment_in_bits;
     446              : 
     447      2224884 :   if (mode != BLKmode || maybe_ne (size, 0))
     448              :     {
     449      1436619 :       if (kind & ASLK_RECORD_PAD)
     450              :         {
     451              :           class frame_space **psp;
     452              : 
     453      1476609 :           for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
     454              :             {
     455       184389 :               class frame_space *space = *psp;
     456       184389 :               if (!try_fit_stack_local (space->start, space->length, size,
     457              :                                         alignment, &slot_offset))
     458       123130 :                 continue;
     459        61259 :               *psp = space->next;
     460        61259 :               if (known_gt (slot_offset, space->start))
     461        25559 :                 add_frame_space (space->start, slot_offset);
     462        61259 :               if (known_lt (slot_offset + size, space->start + space->length))
     463        10773 :                 add_frame_space (slot_offset + size,
     464        10773 :                                  space->start + space->length);
     465        61259 :               goto found_space;
     466              :             }
     467              :         }
     468              :     }
     469              :   else if (!STACK_ALIGNMENT_NEEDED)
     470              :     {
     471              :       slot_offset = frame_offset;
     472              :       goto found_space;
     473              :     }
     474              : 
     475      2163625 :   old_frame_offset = frame_offset;
     476              : 
     477      2163625 :   if (FRAME_GROWS_DOWNWARD)
     478              :     {
     479      2163625 :       frame_offset -= size;
     480      2163625 :       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
     481              : 
     482      2163625 :       if (kind & ASLK_RECORD_PAD)
     483              :         {
     484      2080485 :           if (known_gt (slot_offset, frame_offset))
     485            0 :             add_frame_space (frame_offset, slot_offset);
     486      2080485 :           if (known_lt (slot_offset + size, old_frame_offset))
     487       351137 :             add_frame_space (slot_offset + size, old_frame_offset);
     488              :         }
     489              :     }
     490              :   else
     491              :     {
     492              :       frame_offset += size;
     493              :       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
     494              : 
     495              :       if (kind & ASLK_RECORD_PAD)
     496              :         {
     497              :           if (known_gt (slot_offset, old_frame_offset))
     498              :             add_frame_space (old_frame_offset, slot_offset);
     499              :           if (known_lt (slot_offset + size, frame_offset))
     500              :             add_frame_space (slot_offset + size, frame_offset);
     501              :         }
     502              :     }
     503              : 
     504      2224884 :  found_space:
     505              :   /* On a big-endian machine, if we are allocating more space than we will use,
     506              :      use the least significant bytes of those that are allocated.  */
     507      2224884 :   if (mode != BLKmode)
     508              :     {
     509              :       /* The slot size can sometimes be smaller than the mode size;
     510              :          e.g. the rs6000 port allocates slots with a vector mode
     511              :          that have the size of only one element.  However, the slot
     512              :          size must always be ordered wrt to the mode size, in the
     513              :          same way as for a subreg.  */
     514       663956 :       gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
     515              :       if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
     516              :         bigend_correction = size - GET_MODE_SIZE (mode);
     517              :     }
     518              : 
     519              :   /* If we have already instantiated virtual registers, return the actual
     520              :      address relative to the frame pointer.  */
     521      2224884 :   if (virtuals_instantiated)
     522      1836476 :     addr = plus_constant (Pmode, frame_pointer_rtx,
     523              :                           trunc_int_for_mode
     524      1547263 :                           (slot_offset + bigend_correction
     525      1836476 :                            + targetm.starting_frame_offset (), Pmode));
     526              :   else
     527       709958 :     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
     528              :                           trunc_int_for_mode
     529              :                           (slot_offset + bigend_correction,
     530       677621 :                            Pmode));
     531              : 
     532      2224884 :   x = gen_rtx_MEM (mode, addr);
     533      2224884 :   set_mem_align (x, alignment_in_bits);
     534      2224884 :   MEM_NOTRAP_P (x) = 1;
     535              : 
     536      2224884 :   vec_safe_push (stack_slot_list, x);
     537              : 
     538      2224884 :   if (frame_offset_overflow (frame_offset, current_function_decl))
     539            0 :     frame_offset = 0;
     540              : 
     541      2224884 :   return x;
     542              : }
     543              : 
     544              : /* Wrap up assign_stack_local_1 with last parameter as false.  */
     545              : 
     546              : rtx
     547      2141744 : assign_stack_local (machine_mode mode, poly_int64 size, int align)
     548              : {
     549      2141744 :   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
     550              : }
     551              : 
     552              : /* In order to evaluate some expressions, such as function calls returning
     553              :    structures in memory, we need to temporarily allocate stack locations.
     554              :    We record each allocated temporary in the following structure.
     555              : 
     556              :    Associated with each temporary slot is a nesting level.  When we pop up
     557              :    one level, all temporaries associated with the previous level are freed.
     558              :    Normally, all temporaries are freed after the execution of the statement
     559              :    in which they were created.  However, if we are inside a ({...}) grouping,
     560              :    the result may be in a temporary and hence must be preserved.  If the
     561              :    result could be in a temporary, we preserve it if we can determine which
     562              :    one it is in.  If we cannot determine which temporary may contain the
     563              :    result, all temporaries are preserved.  A temporary is preserved by
     564              :    pretending it was allocated at the previous nesting level.  */
     565              : 
     566              : class GTY(()) temp_slot {
     567              : public:
     568              :   /* Points to next temporary slot.  */
     569              :   class temp_slot *next;
     570              :   /* Points to previous temporary slot.  */
     571              :   class temp_slot *prev;
     572              :   /* The rtx to used to reference the slot.  */
     573              :   rtx slot;
     574              :   /* The size, in units, of the slot.  */
     575              :   poly_int64 size;
     576              :   /* The type of the object in the slot, or zero if it doesn't correspond
     577              :      to a type.  We use this to determine whether a slot can be reused.
     578              :      It can be reused if objects of the type of the new slot will always
     579              :      conflict with objects of the type of the old slot.  */
     580              :   tree type;
     581              :   /* The alignment (in bits) of the slot.  */
     582              :   unsigned int align;
     583              :   /* True if this temporary is currently in use.  */
     584              :   bool in_use;
     585              :   /* Nesting level at which this slot is being used.  */
     586              :   int level;
     587              :   /* The offset of the slot from the frame_pointer, including extra space
     588              :      for alignment.  This info is for combine_temp_slots.  */
     589              :   poly_int64 base_offset;
     590              :   /* The size of the slot, including extra space for alignment.  This
     591              :      info is for combine_temp_slots.  */
     592              :   poly_int64 full_size;
     593              : };
     594              : 
     595              : /* Entry for the below hash table.  */
     596              : struct GTY((for_user)) temp_slot_address_entry {
     597              :   hashval_t hash;
     598              :   rtx address;
     599              :   class temp_slot *temp_slot;
     600              : };
     601              : 
     602              : struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
     603              : {
     604              :   static hashval_t hash (temp_slot_address_entry *);
     605              :   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
     606              : };
     607              : 
     608              : /* A table of addresses that represent a stack slot.  The table is a mapping
     609              :    from address RTXen to a temp slot.  */
     610              : static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
     611              : static size_t n_temp_slots_in_use;
     612              : 
     613              : /* Removes temporary slot TEMP from LIST.  */
     614              : 
     615              : static void
     616       211462 : cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
     617              : {
     618            0 :   if (temp->next)
     619        27605 :     temp->next->prev = temp->prev;
     620       211462 :   if (temp->prev)
     621         7906 :     temp->prev->next = temp->next;
     622              :   else
     623       203556 :     *list = temp->next;
     624              : 
     625       211462 :   temp->prev = temp->next = NULL;
     626          677 : }
     627              : 
     628              : /* Inserts temporary slot TEMP to LIST.  */
     629              : 
     630              : static void
     631       294514 : insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
     632              : {
     633       294514 :   temp->next = *list;
     634            0 :   if (*list)
     635        79415 :     (*list)->prev = temp;
     636       294514 :   temp->prev = NULL;
     637       294514 :   *list = temp;
     638            0 : }
     639              : 
     640              : /* Returns the list of used temp slots at LEVEL.  */
     641              : 
     642              : static class temp_slot **
     643     65686173 : temp_slots_at_level (int level)
     644              : {
     645    129964427 :   if (level >= (int) vec_safe_length (used_temp_slots))
     646      1889286 :     vec_safe_grow_cleared (used_temp_slots, level + 1, true);
     647              : 
     648     65686173 :   return &(*used_temp_slots)[level];
     649              : }
     650              : 
     651              : /* Returns the maximal temporary slot level.  */
     652              : 
     653              : static int
     654      1320541 : max_slot_level (void)
     655              : {
     656            0 :   if (!used_temp_slots)
     657              :     return -1;
     658              : 
     659      1266728 :   return used_temp_slots->length () - 1;
     660              : }
     661              : 
     662              : /* Moves temporary slot TEMP to LEVEL.  */
     663              : 
     664              : static void
     665         1232 : move_slot_to_level (class temp_slot *temp, int level)
     666              : {
     667         1232 :   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
     668         1232 :   insert_slot_to_list (temp, temp_slots_at_level (level));
     669         1232 :   temp->level = level;
     670         1232 : }
     671              : 
     672              : /* Make temporary slot TEMP available.  */
     673              : 
     674              : static void
     675       146075 : make_slot_available (class temp_slot *temp)
     676              : {
     677       146075 :   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
     678       146075 :   insert_slot_to_list (temp, &avail_temp_slots);
     679       146075 :   temp->in_use = false;
     680       146075 :   temp->level = -1;
     681       146075 :   n_temp_slots_in_use--;
     682       146075 : }
     683              : 
     684              : /* Compute the hash value for an address -> temp slot mapping.
     685              :    The value is cached on the mapping entry.  */
     686              : static hashval_t
     687      8881400 : temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
     688              : {
     689      8881400 :   int do_not_record = 0;
     690      8881400 :   return hash_rtx (t->address, GET_MODE (t->address),
     691      8881400 :                    &do_not_record, NULL, false);
     692              : }
     693              : 
     694              : /* Return the hash value for an address -> temp slot mapping.  */
     695              : hashval_t
     696        29806 : temp_address_hasher::hash (temp_slot_address_entry *t)
     697              : {
     698        29806 :   return t->hash;
     699              : }
     700              : 
     701              : /* Compare two address -> temp slot mapping entries.  */
     702              : bool
     703        30623 : temp_address_hasher::equal (temp_slot_address_entry *t1,
     704              :                             temp_slot_address_entry *t2)
     705              : {
     706        30623 :   return exp_equiv_p (t1->address, t2->address, 0, true);
     707              : }
     708              : 
     709              : /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping.  */
     710              : static void
     711       146655 : insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
     712              : {
     713       146655 :   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
     714       146655 :   t->address = copy_rtx (address);
     715       146655 :   t->temp_slot = temp_slot;
     716       146655 :   t->hash = temp_slot_address_compute_hash (t);
     717       146655 :   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
     718       146655 : }
     719              : 
     720              : /* Remove an address -> temp slot mapping entry if the temp slot is
     721              :    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
     722              : int
     723         1055 : remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
     724              : {
     725         1055 :   const struct temp_slot_address_entry *t = *slot;
     726         1055 :   if (! t->temp_slot->in_use)
     727          604 :     temp_slot_address_table->clear_slot (slot);
     728         1055 :   return 1;
     729              : }
     730              : 
     731              : /* Remove all mappings of addresses to unused temp slots.  */
     732              : static void
     733       138954 : remove_unused_temp_slot_addresses (void)
     734              : {
     735              :   /* Use quicker clearing if there aren't any active temp slots.  */
     736       138954 :   if (n_temp_slots_in_use)
     737          434 :     temp_slot_address_table->traverse
     738         1489 :       <void *, remove_unused_temp_slot_addresses_1> (NULL);
     739              :   else
     740       138520 :     temp_slot_address_table->empty ();
     741       138954 : }
     742              : 
     743              : /* Find the temp slot corresponding to the object at address X.  */
     744              : 
     745              : static class temp_slot *
     746      8734745 : find_temp_slot_from_address (rtx x)
     747              : {
     748      8734745 :   class temp_slot *p;
     749      8734745 :   struct temp_slot_address_entry tmp, *t;
     750              : 
     751              :   /* First try the easy way:
     752              :      See if X exists in the address -> temp slot mapping.  */
     753      8734745 :   tmp.address = x;
     754      8734745 :   tmp.temp_slot = NULL;
     755      8734745 :   tmp.hash = temp_slot_address_compute_hash (&tmp);
     756      8734745 :   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
     757      8734745 :   if (t)
     758         1263 :     return t->temp_slot;
     759              : 
     760              :   /* If we have a sum involving a register, see if it points to a temp
     761              :      slot.  */
     762      1528753 :   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
     763      9910759 :       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
     764              :     return p;
     765      1528753 :   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
     766      8951679 :            && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
     767              :     return p;
     768              : 
     769              :   /* Last resort: Address is a virtual stack var address.  */
     770      8733482 :   poly_int64 offset;
     771      8733482 :   if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
     772              :     {
     773      1320541 :       int i;
     774      5763011 :       for (i = max_slot_level (); i >= 0; i--)
     775      3177267 :         for (p = *temp_slots_at_level (i); p; p = p->next)
     776         3050 :           if (known_in_range_p (offset, p->base_offset, p->full_size))
     777              :             return p;
     778              :     }
     779              : 
     780              :   return NULL;
     781              : }
     782              : 
     783              : /* Allocate a temporary stack slot and record it for possible later
     784              :    reuse.
     785              : 
     786              :    MODE is the machine mode to be given to the returned rtx.
     787              : 
     788              :    SIZE is the size in units of the space required.  We do no rounding here
     789              :    since assign_stack_local will do any required rounding.
     790              : 
     791              :    TYPE is the type that will be used for the stack slot.  */
     792              : 
     793              : rtx
     794       146618 : assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
     795              : {
     796       146618 :   unsigned int align;
     797       146618 :   class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
     798       146618 :   rtx slot;
     799              : 
     800       146618 :   gcc_assert (known_size_p (size));
     801              : 
     802       146618 :   align = get_stack_local_alignment (type, mode);
     803              : 
     804              :   /* Try to find an available, already-allocated temporary of the proper
     805              :      mode which meets the size and alignment requirements.  Choose the
     806              :      smallest one with the closest alignment.
     807              : 
     808              :      If assign_stack_temp is called outside of the tree->rtl expansion,
     809              :      we cannot reuse the stack slots (that may still refer to
     810              :      VIRTUAL_STACK_VARS_REGNUM).  */
     811       146618 :   if (!virtuals_instantiated)
     812              :     {
     813     11095438 :       for (p = avail_temp_slots; p; p = p->next)
     814              :         {
     815     10999193 :           if (p->align >= align
     816     10439437 :               && known_ge (p->size, size)
     817     10422502 :               && GET_MODE (p->slot) == mode
     818     10285612 :               && objects_must_conflict_p (p->type, type)
     819     11062893 :               && (best_p == 0
     820          222 :                   || (known_eq (best_p->size, p->size)
     821          120 :                       ? best_p->align > p->align
     822          102 :                       : known_ge (best_p->size, p->size))))
     823              :             {
     824        63560 :               if (p->align == align && known_eq (p->size, size))
     825              :                 {
     826        50373 :                   selected = p;
     827        50373 :                   cut_slot_from_list (selected, &avail_temp_slots);
     828        50373 :                   best_p = 0;
     829        50373 :                   break;
     830              :                 }
     831              :               best_p = p;
     832              :             }
     833              :         }
     834              :     }
     835              : 
     836              :   /* Make our best, if any, the one to use.  */
     837       146618 :   if (best_p)
     838              :     {
     839        13105 :       selected = best_p;
     840        13105 :       cut_slot_from_list (selected, &avail_temp_slots);
     841              : 
     842              :       /* If there are enough aligned bytes left over, make them into a new
     843              :          temp_slot so that the extra bytes don't get wasted.  Do this only
     844              :          for BLKmode slots, so that we can be sure of the alignment.  */
     845        13105 :       if (GET_MODE (best_p->slot) == BLKmode)
     846              :         {
     847        10784 :           int alignment = best_p->align / BITS_PER_UNIT;
     848        10784 :           poly_int64 rounded_size = aligned_upper_bound (size, alignment);
     849              : 
     850        10784 :           if (known_ge (best_p->size - rounded_size, alignment))
     851              :             {
     852          589 :               p = ggc_alloc<temp_slot> ();
     853          589 :               p->in_use = false;
     854          589 :               p->size = best_p->size - rounded_size;
     855          589 :               p->base_offset = best_p->base_offset + rounded_size;
     856          589 :               p->full_size = best_p->full_size - rounded_size;
     857          589 :               p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
     858          589 :               p->align = best_p->align;
     859          589 :               p->type = best_p->type;
     860          589 :               insert_slot_to_list (p, &avail_temp_slots);
     861              : 
     862          589 :               vec_safe_push (stack_slot_list, p->slot);
     863              : 
     864          589 :               best_p->size = rounded_size;
     865          589 :               best_p->full_size = rounded_size;
     866              :             }
     867              :         }
     868              :     }
     869              : 
     870              :   /* If we still didn't find one, make a new temporary.  */
     871       144297 :   if (selected == 0)
     872              :     {
     873        83140 :       poly_int64 frame_offset_old = frame_offset;
     874              : 
     875        83140 :       p = ggc_alloc<temp_slot> ();
     876              : 
     877              :       /* We are passing an explicit alignment request to assign_stack_local.
     878              :          One side effect of that is assign_stack_local will not round SIZE
     879              :          to ensure the frame offset remains suitably aligned.
     880              : 
     881              :          So for requests which depended on the rounding of SIZE, we go ahead
     882              :          and round it now.  We also make sure ALIGNMENT is at least
     883              :          BIGGEST_ALIGNMENT.  */
     884        92931 :       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
     885        83140 :       p->slot = assign_stack_local_1 (mode,
     886              :                                       (mode == BLKmode
     887         9884 :                                        ? aligned_upper_bound (size,
     888              :                                                               (int) align
     889              :                                                               / BITS_PER_UNIT)
     890              :                                        : size),
     891              :                                       align, 0);
     892              : 
     893        83140 :       p->align = align;
     894              : 
     895              :       /* The following slot size computation is necessary because we don't
     896              :          know the actual size of the temporary slot until assign_stack_local
     897              :          has performed all the frame alignment and size rounding for the
     898              :          requested temporary.  Note that extra space added for alignment
     899              :          can be either above or below this stack slot depending on which
     900              :          way the frame grows.  We include the extra space if and only if it
     901              :          is above this slot.  */
     902        83140 :       if (FRAME_GROWS_DOWNWARD)
     903        83140 :         p->size = frame_offset_old - frame_offset;
     904              :       else
     905              :         p->size = size;
     906              : 
     907              :       /* Now define the fields used by combine_temp_slots.  */
     908        83140 :       if (FRAME_GROWS_DOWNWARD)
     909              :         {
     910        83140 :           p->base_offset = frame_offset;
     911        83140 :           p->full_size = frame_offset_old - frame_offset;
     912              :         }
     913              :       else
     914              :         {
     915              :           p->base_offset = frame_offset_old;
     916              :           p->full_size = frame_offset - frame_offset_old;
     917              :         }
     918              : 
     919        83140 :       selected = p;
     920              :     }
     921              : 
     922       146618 :   p = selected;
     923       146618 :   p->in_use = true;
     924       146618 :   p->type = type;
     925       146618 :   p->level = temp_slot_level;
     926       146618 :   n_temp_slots_in_use++;
     927              : 
     928       146618 :   pp = temp_slots_at_level (p->level);
     929       146618 :   insert_slot_to_list (p, pp);
     930       146618 :   insert_temp_slot_address (XEXP (p->slot, 0), p);
     931              : 
     932              :   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
     933       146618 :   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
     934       146618 :   vec_safe_push (stack_slot_list, slot);
     935              : 
     936              :   /* If we know the alias set for the memory that will be used, use
     937              :      it.  If there's no TYPE, then we don't know anything about the
     938              :      alias set for the memory.  */
     939       146618 :   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
     940       146618 :   set_mem_align (slot, align);
     941              : 
     942              :   /* If a type is specified, set the relevant flags.  */
     943       146618 :   if (type != 0)
     944        97673 :     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
     945       146618 :   MEM_NOTRAP_P (slot) = 1;
     946              : 
     947       146618 :   return slot;
     948              : }
     949              : 
     950              : /* Allocate a temporary stack slot and record it for possible later
     951              :    reuse.  First two arguments are same as in preceding function.  */
     952              : 
     953              : rtx
     954        48945 : assign_stack_temp (machine_mode mode, poly_int64 size)
     955              : {
     956        48945 :   return assign_stack_temp_for_type (mode, size, NULL_TREE);
     957              : }
     958              : 
     959              : /* Assign a temporary.
     960              :    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
     961              :    and so that should be used in error messages.  In either case, we
     962              :    allocate of the given type.
     963              :    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
     964              :    it is 0 if a register is OK.
     965              :    DONT_PROMOTE is 1 if we should not promote values in register
     966              :    to wider modes.  */
     967              : 
     968              : rtx
     969       997353 : assign_temp (tree type_or_decl, int memory_required,
     970              :              int dont_promote ATTRIBUTE_UNUSED)
     971              : {
     972       997353 :   tree type, decl;
     973       997353 :   machine_mode mode;
     974              : #ifdef PROMOTE_MODE
     975       997353 :   int unsignedp;
     976              : #endif
     977              : 
     978       997353 :   if (DECL_P (type_or_decl))
     979            0 :     decl = type_or_decl, type = TREE_TYPE (decl);
     980              :   else
     981              :     decl = NULL, type = type_or_decl;
     982              : 
     983       997353 :   mode = TYPE_MODE (type);
     984              : #ifdef PROMOTE_MODE
     985       997353 :   unsignedp = TYPE_UNSIGNED (type);
     986              : #endif
     987              : 
     988              :   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     989              :      end.  See also create_tmp_var for the gimplification-time check.  */
     990       997353 :   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
     991              : 
     992       997353 :   if (mode == BLKmode || memory_required)
     993              :     {
     994        92094 :       poly_int64 size;
     995        92094 :       rtx tmp;
     996              : 
     997              :       /* Unfortunately, we don't yet know how to allocate variable-sized
     998              :          temporaries.  However, sometimes we can find a fixed upper limit on
     999              :          the size, so try that instead.  */
    1000        92094 :       if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
    1001            0 :         size = max_int_size_in_bytes (type);
    1002              : 
    1003              :       /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
    1004              :          problems with allocating the stack space.  */
    1005        92094 :       if (known_eq (size, 0))
    1006            0 :         size = 1;
    1007              : 
    1008              :       /* The size of the temporary may be too large to fit into an integer.  */
    1009              :       /* ??? Not sure this should happen except for user silliness, so limit
    1010              :          this to things that aren't compiler-generated temporaries.  The
    1011              :          rest of the time we'll die in assign_stack_temp_for_type.  */
    1012        92094 :       if (decl
    1013            0 :           && !known_size_p (size)
    1014        92094 :           && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
    1015              :         {
    1016            0 :           error ("size of variable %q+D is too large", decl);
    1017            0 :           size = 1;
    1018              :         }
    1019              : 
    1020        92094 :       tmp = assign_stack_temp_for_type (mode, size, type);
    1021        92094 :       return tmp;
    1022              :     }
    1023              : 
    1024              : #ifdef PROMOTE_MODE
    1025       905259 :   if (! dont_promote)
    1026            0 :     mode = promote_mode (type, mode, &unsignedp);
    1027              : #endif
    1028              : 
    1029       905259 :   return gen_reg_rtx (mode);
    1030              : }
    1031              : 
    1032              : /* Combine temporary stack slots which are adjacent on the stack.
    1033              : 
    1034              :    This allows for better use of already allocated stack space.  This is only
    1035              :    done for BLKmode slots because we can be sure that we won't have alignment
    1036              :    problems in this case.  */
    1037              : 
    1038              : static void
    1039       138954 : combine_temp_slots (void)
    1040              : {
    1041       138954 :   class temp_slot *p, *q, *next, *next_q;
    1042       138954 :   int num_slots;
    1043              : 
    1044              :   /* We can't combine slots, because the information about which slot
    1045              :      is in which alias set will be lost.  */
    1046       138954 :   if (flag_strict_aliasing)
    1047              :     return;
    1048              : 
    1049              :   /* If there are a lot of temp slots, don't do anything unless
    1050              :      high levels of optimization.  */
    1051        97022 :   if (! flag_expensive_optimizations)
    1052       427993 :     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
    1053       418909 :       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
    1054              :         return;
    1055              : 
    1056       270654 :   for (p = avail_temp_slots; p; p = next)
    1057              :     {
    1058       186160 :       int delete_p = 0;
    1059              : 
    1060       186160 :       next = p->next;
    1061              : 
    1062       186160 :       if (GET_MODE (p->slot) != BLKmode)
    1063       162594 :         continue;
    1064              : 
    1065        29536 :       for (q = p->next; q; q = next_q)
    1066              :         {
    1067         5980 :           int delete_q = 0;
    1068              : 
    1069         5980 :           next_q = q->next;
    1070              : 
    1071         5980 :           if (GET_MODE (q->slot) != BLKmode)
    1072         5240 :             continue;
    1073              : 
    1074          740 :           if (known_eq (p->base_offset + p->full_size, q->base_offset))
    1075              :             {
    1076              :               /* Q comes after P; combine Q into P.  */
    1077          667 :               p->size += q->size;
    1078        29536 :               p->full_size += q->full_size;
    1079              :               delete_q = 1;
    1080              :             }
    1081           73 :           else if (known_eq (q->base_offset + q->full_size, p->base_offset))
    1082              :             {
    1083              :               /* P comes after Q; combine P into Q.  */
    1084           10 :               q->size += p->size;
    1085           10 :               q->full_size += p->full_size;
    1086              :               delete_p = 1;
    1087              :               break;
    1088              :             }
    1089          667 :           if (delete_q)
    1090         1334 :             cut_slot_from_list (q, &avail_temp_slots);
    1091              :         }
    1092              : 
    1093              :       /* Either delete P or advance past it.  */
    1094        23566 :       if (delete_p)
    1095           20 :         cut_slot_from_list (p, &avail_temp_slots);
    1096              :     }
    1097              : }
    1098              : 
    1099              : /* Indicate that NEW_RTX is an alternate way of referring to the temp
    1100              :    slot that previously was known by OLD_RTX.  */
    1101              : 
    1102              : void
    1103     16299794 : update_temp_slot_address (rtx old_rtx, rtx new_rtx)
    1104              : {
    1105     17140323 :   class temp_slot *p;
    1106              : 
    1107     17140323 :   if (rtx_equal_p (old_rtx, new_rtx))
    1108              :     return;
    1109              : 
    1110      4929471 :   p = find_temp_slot_from_address (old_rtx);
    1111              : 
    1112              :   /* If we didn't find one, see if both OLD_RTX is a PLUS.  If so, and
    1113              :      NEW_RTX is a register, see if one operand of the PLUS is a
    1114              :      temporary location.  If so, NEW_RTX points into it.  Otherwise,
    1115              :      if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
    1116              :      in common between them.  If so, try a recursive call on those
    1117              :      values.  */
    1118      4929471 :   if (p == 0)
    1119              :     {
    1120      4929434 :       if (GET_CODE (old_rtx) != PLUS)
    1121              :         return;
    1122              : 
    1123       925595 :       if (REG_P (new_rtx))
    1124              :         {
    1125       290527 :           update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
    1126       290527 :           update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
    1127       290527 :           return;
    1128              :         }
    1129       635068 :       else if (GET_CODE (new_rtx) != PLUS)
    1130              :         return;
    1131              : 
    1132       635068 :       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
    1133       330051 :         update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
    1134       305017 :       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
    1135            0 :         update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
    1136       305017 :       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
    1137        32193 :         update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
    1138       272824 :       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
    1139       187758 :         update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
    1140              : 
    1141              :       return;
    1142              :     }
    1143              : 
    1144              :   /* Otherwise add an alias for the temp's address.  */
    1145           37 :   insert_temp_slot_address (new_rtx, p);
    1146              : }
    1147              : 
    1148              : /* If X could be a reference to a temporary slot, mark that slot as
    1149              :    belonging to the to one level higher than the current level.  If X
    1150              :    matched one of our slots, just mark that one.  Otherwise, we can't
    1151              :    easily predict which it is, so upgrade all of them.
    1152              : 
    1153              :    This is called when an ({...}) construct occurs and a statement
    1154              :    returns a value in memory.  */
    1155              : 
    1156              : void
    1157     23840976 : preserve_temp_slots (rtx x)
    1158              : {
    1159     23840976 :   class temp_slot *p = 0, *next;
    1160              : 
    1161     23840976 :   if (x == 0)
    1162              :     return;
    1163              : 
    1164              :   /* If X is a register that is being used as a pointer, see if we have
    1165              :      a temporary slot we know it points to.  */
    1166     10628498 :   if (REG_P (x) && REG_POINTER (x))
    1167      1748484 :     p = find_temp_slot_from_address (x);
    1168              : 
    1169              :   /* If X is not in memory or is at a constant address, it cannot be in
    1170              :      a temporary slot.  */
    1171     10628498 :   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    1172              :     return;
    1173              : 
    1174              :   /* First see if we can find a match.  */
    1175       661316 :   if (p == 0)
    1176       661316 :     p = find_temp_slot_from_address (XEXP (x, 0));
    1177              : 
    1178       661316 :   if (p != 0)
    1179              :     {
    1180         1226 :       if (p->level == temp_slot_level)
    1181         1226 :         move_slot_to_level (p, temp_slot_level - 1);
    1182         1226 :       return;
    1183              :     }
    1184              : 
    1185              :   /* Otherwise, preserve all non-kept slots at this level.  */
    1186       660096 :   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    1187              :     {
    1188            6 :       next = p->next;
    1189            6 :       move_slot_to_level (p, temp_slot_level - 1);
    1190              :     }
    1191              : }
    1192              : 
    1193              : /* Free all temporaries used so far.  This is normally called at the
    1194              :    end of generating code for a statement.  */
    1195              : 
    1196              : void
    1197     61555184 : free_temp_slots (void)
    1198              : {
    1199     61555184 :   class temp_slot *p, *next;
    1200     61555184 :   bool some_available = false;
    1201              : 
    1202     61701259 :   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    1203              :     {
    1204       146075 :       next = p->next;
    1205       146075 :       make_slot_available (p);
    1206       146075 :       some_available = true;
    1207              :     }
    1208              : 
    1209     61555184 :   if (some_available)
    1210              :     {
    1211       138954 :       remove_unused_temp_slot_addresses ();
    1212       138954 :       combine_temp_slots ();
    1213              :     }
    1214     61555184 : }
    1215              : 
    1216              : /* Push deeper into the nesting level for stack temporaries.  */
    1217              : 
    1218              : void
    1219     30142200 : push_temp_slots (void)
    1220              : {
    1221     30142200 :   temp_slot_level++;
    1222     30142200 : }
    1223              : 
    1224              : /* Pop a temporary nesting level.  All slots in use in the current level
    1225              :    are freed.  */
    1226              : 
    1227              : void
    1228     30142198 : pop_temp_slots (void)
    1229              : {
    1230     30142198 :   free_temp_slots ();
    1231     30142198 :   temp_slot_level--;
    1232     30142198 : }
    1233              : 
    1234              : /* Initialize temporary slots.  */
    1235              : 
    1236              : void
    1237      3163285 : init_temp_slots (void)
    1238              : {
    1239              :   /* We have not allocated any temporaries yet.  */
    1240      3163285 :   avail_temp_slots = 0;
    1241      3163285 :   vec_alloc (used_temp_slots, 0);
    1242      3163285 :   temp_slot_level = 0;
    1243      3163285 :   n_temp_slots_in_use = 0;
    1244              : 
    1245              :   /* Set up the table to map addresses to temp slots.  */
    1246      3163285 :   if (! temp_slot_address_table)
    1247       209218 :     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
    1248              :   else
    1249      2954067 :     temp_slot_address_table->empty ();
    1250      3163285 : }
    1251              : 
    1252              : /* Functions and data structures to keep track of the values hard regs
    1253              :    had at the start of the function.  */
    1254              : 
    1255              : /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
    1256              :    and has_hard_reg_initial_val..  */
    1257              : struct GTY(()) initial_value_pair {
    1258              :   rtx hard_reg;
    1259              :   rtx pseudo;
    1260              : };
    1261              : /* ???  This could be a VEC but there is currently no way to define an
    1262              :    opaque VEC type.  This could be worked around by defining struct
    1263              :    initial_value_pair in function.h.  */
    1264              : struct GTY(()) initial_value_struct {
    1265              :   int num_entries;
    1266              :   int max_entries;
    1267              :   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
    1268              : };
    1269              : 
    1270              : /* If a pseudo represents an initial hard reg (or expression), return
    1271              :    it, else return NULL_RTX.  */
    1272              : 
    1273              : rtx
    1274            0 : get_hard_reg_initial_reg (rtx reg)
    1275              : {
    1276            0 :   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
    1277            0 :   int i;
    1278              : 
    1279            0 :   if (ivs == 0)
    1280              :     return NULL_RTX;
    1281              : 
    1282            0 :   for (i = 0; i < ivs->num_entries; i++)
    1283            0 :     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
    1284            0 :       return ivs->entries[i].hard_reg;
    1285              : 
    1286              :   return NULL_RTX;
    1287              : }
    1288              : 
    1289              : /* Make sure that there's a pseudo register of mode MODE that stores the
    1290              :    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
    1291              : 
    1292              : rtx
    1293            0 : get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
    1294              : {
    1295            0 :   struct initial_value_struct *ivs;
    1296            0 :   rtx rv;
    1297              : 
    1298            0 :   rv = has_hard_reg_initial_val (mode, regno);
    1299            0 :   if (rv)
    1300              :     return rv;
    1301              : 
    1302            0 :   ivs = crtl->hard_reg_initial_vals;
    1303            0 :   if (ivs == 0)
    1304              :     {
    1305            0 :       ivs = ggc_alloc<initial_value_struct> ();
    1306            0 :       ivs->num_entries = 0;
    1307            0 :       ivs->max_entries = 5;
    1308            0 :       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
    1309            0 :       crtl->hard_reg_initial_vals = ivs;
    1310              :     }
    1311              : 
    1312            0 :   if (ivs->num_entries >= ivs->max_entries)
    1313              :     {
    1314            0 :       ivs->max_entries += 5;
    1315            0 :       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
    1316              :                                     ivs->max_entries);
    1317              :     }
    1318              : 
    1319            0 :   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
    1320            0 :   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
    1321              : 
    1322            0 :   return ivs->entries[ivs->num_entries++].pseudo;
    1323              : }
    1324              : 
    1325              : /* See if get_hard_reg_initial_val has been used to create a pseudo
    1326              :    for the initial value of hard register REGNO in mode MODE.  Return
    1327              :    the associated pseudo if so, otherwise return NULL.  */
    1328              : 
    1329              : rtx
    1330            0 : has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
    1331              : {
    1332            0 :   struct initial_value_struct *ivs;
    1333            0 :   int i;
    1334              : 
    1335            0 :   ivs = crtl->hard_reg_initial_vals;
    1336            0 :   if (ivs != 0)
    1337            0 :     for (i = 0; i < ivs->num_entries; i++)
    1338            0 :       if (GET_MODE (ivs->entries[i].hard_reg) == mode
    1339            0 :           && REGNO (ivs->entries[i].hard_reg) == regno)
    1340            0 :         return ivs->entries[i].pseudo;
    1341              : 
    1342              :   return NULL_RTX;
    1343              : }
    1344              : 
    1345              : void
    1346      1472140 : emit_initial_value_sets (void)
    1347              : {
    1348      1472140 :   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
    1349      1472140 :   int i;
    1350      1472140 :   rtx_insn *seq;
    1351              : 
    1352      1472140 :   if (ivs == 0)
    1353              :     return;
    1354              : 
    1355            0 :   start_sequence ();
    1356            0 :   for (i = 0; i < ivs->num_entries; i++)
    1357            0 :     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
    1358            0 :   seq = end_sequence ();
    1359              : 
    1360            0 :   emit_insn_at_entry (seq);
    1361              : }
    1362              : 
    1363              : /* Return the hardreg-pseudoreg initial values pair entry I and
    1364              :    TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
    1365              : bool
    1366            0 : initial_value_entry (int i, rtx *hreg, rtx *preg)
    1367              : {
    1368            0 :   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
    1369            0 :   if (!ivs || i >= ivs->num_entries)
    1370              :     return false;
    1371              : 
    1372            0 :   *hreg = ivs->entries[i].hard_reg;
    1373            0 :   *preg = ivs->entries[i].pseudo;
    1374            0 :   return true;
    1375              : }
    1376              : 
    1377              : /* These routines are responsible for converting virtual register references
    1378              :    to the actual hard register references once RTL generation is complete.
    1379              : 
    1380              :    The following four variables are used for communication between the
    1381              :    routines.  They contain the offsets of the virtual registers from their
    1382              :    respective hard registers.  */
    1383              : 
    1384              : static poly_int64 in_arg_offset;
    1385              : static poly_int64 var_offset;
    1386              : static poly_int64 dynamic_offset;
    1387              : static poly_int64 out_arg_offset;
    1388              : static poly_int64 cfa_offset;
    1389              : 
    1390              : /* In most machines, the stack pointer register is equivalent to the bottom
    1391              :    of the stack.  */
    1392              : 
    1393              : #ifndef STACK_POINTER_OFFSET
    1394              : #define STACK_POINTER_OFFSET    0
    1395              : #endif
    1396              : 
    1397              : #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
    1398              : #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
    1399              : #endif
    1400              : 
    1401              : /* If not defined, pick an appropriate default for the offset of dynamically
    1402              :    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
    1403              :    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
    1404              : 
    1405              : #ifndef STACK_DYNAMIC_OFFSET
    1406              : 
    1407              : /* The bottom of the stack points to the actual arguments.  If
    1408              :    REG_PARM_STACK_SPACE is defined, this includes the space for the register
    1409              :    parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
    1410              :    stack space for register parameters is not pushed by the caller, but
    1411              :    rather part of the fixed stack areas and hence not included in
    1412              :    `crtl->outgoing_args_size'.  Nevertheless, we must allow
    1413              :    for it when allocating stack dynamic objects.  */
    1414              : 
    1415              : #ifdef INCOMING_REG_PARM_STACK_SPACE
    1416              : #define STACK_DYNAMIC_OFFSET(FNDECL)    \
    1417              : ((ACCUMULATE_OUTGOING_ARGS                                                    \
    1418              :   ? (crtl->outgoing_args_size                                      \
    1419              :      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
    1420              :                                                : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
    1421              :   : 0) + (STACK_POINTER_OFFSET))
    1422              : #else
    1423              : #define STACK_DYNAMIC_OFFSET(FNDECL)    \
    1424              :   ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
    1425              :  + (STACK_POINTER_OFFSET))
    1426              : #endif
    1427              : #endif
    1428              : 
    1429              : 
    1430              : /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
    1431              :    is a virtual register, return the equivalent hard register and set the
    1432              :    offset indirectly through the pointer.  Otherwise, return 0.  */
    1433              : 
    1434              : static rtx
    1435    364151674 : instantiate_new_reg (rtx x, poly_int64 *poffset)
    1436              : {
    1437    364151674 :   rtx new_rtx;
    1438    364151674 :   poly_int64 offset;
    1439              : 
    1440    364151674 :   if (x == virtual_incoming_args_rtx)
    1441              :     {
    1442      3743452 :       if (stack_realign_drap)
    1443              :         {
    1444              :           /* Replace virtual_incoming_args_rtx with internal arg
    1445              :              pointer if DRAP is used to realign stack.  */
    1446        16465 :           new_rtx = crtl->args.internal_arg_pointer;
    1447        16465 :           offset = 0;
    1448              :         }
    1449              :       else
    1450      3726987 :         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    1451              :     }
    1452    360408222 :   else if (x == virtual_stack_vars_rtx)
    1453     18096015 :     new_rtx = frame_pointer_rtx, offset = var_offset;
    1454    342312207 :   else if (x == virtual_stack_dynamic_rtx)
    1455        53077 :     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
    1456    342259130 :   else if (x == virtual_outgoing_args_rtx)
    1457      1564617 :     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
    1458    340694513 :   else if (x == virtual_cfa_rtx)
    1459              :     {
    1460              : #ifdef FRAME_POINTER_CFA_OFFSET
    1461              :       new_rtx = frame_pointer_rtx;
    1462              : #else
    1463         1802 :       new_rtx = arg_pointer_rtx;
    1464              : #endif
    1465         1802 :       offset = cfa_offset;
    1466              :     }
    1467    340692711 :   else if (x == virtual_preferred_stack_boundary_rtx)
    1468              :     {
    1469       112616 :       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
    1470       112616 :       offset = 0;
    1471              :     }
    1472              :   else
    1473              :     return NULL_RTX;
    1474              : 
    1475     23571579 :   *poffset = offset;
    1476     23571579 :   return new_rtx;
    1477              : }
    1478              : 
    1479              : /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
    1480              :    registers present inside of *LOC.  The expression is simplified,
    1481              :    as much as possible, but is not to be considered "valid" in any sense
    1482              :    implied by the target.  Return true if any change is made.  */
    1483              : 
    1484              : static bool
    1485    205901175 : instantiate_virtual_regs_in_rtx (rtx *loc)
    1486              : {
    1487    205901175 :   if (!*loc)
    1488              :     return false;
    1489     92916818 :   bool changed = false;
    1490     92916818 :   subrtx_ptr_iterator::array_type array;
    1491    347381592 :   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    1492              :     {
    1493    254464774 :       rtx *loc = *iter;
    1494    254464774 :       if (rtx x = *loc)
    1495              :         {
    1496    230886432 :           rtx new_rtx;
    1497    230886432 :           poly_int64 offset;
    1498    230886432 :           switch (GET_CODE (x))
    1499              :             {
    1500     35656653 :             case REG:
    1501     35656653 :               new_rtx = instantiate_new_reg (x, &offset);
    1502     35656653 :               if (new_rtx)
    1503              :                 {
    1504      1654107 :                   *loc = plus_constant (GET_MODE (x), new_rtx, offset);
    1505      1654107 :                   changed = true;
    1506              :                 }
    1507     35656653 :               iter.skip_subrtxes ();
    1508     35656653 :               break;
    1509              : 
    1510     27810425 :             case PLUS:
    1511     27810425 :               new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
    1512     27810425 :               if (new_rtx)
    1513              :                 {
    1514     18841708 :                   XEXP (x, 0) = new_rtx;
    1515     18841708 :                   *loc = plus_constant (GET_MODE (x), x, offset, true);
    1516     18841708 :                   changed = true;
    1517     18841708 :                   iter.skip_subrtxes ();
    1518     18841708 :                   break;
    1519              :                 }
    1520              : 
    1521              :               /* FIXME -- from old code */
    1522              :               /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
    1523              :                  we can commute the PLUS and SUBREG because pointers into the
    1524              :                  frame are well-behaved.  */
    1525              :               break;
    1526              : 
    1527              :             default:
    1528              :               break;
    1529              :             }
    1530              :         }
    1531              :     }
    1532     92916818 :   return changed;
    1533     92916818 : }
    1534              : 
    1535              : /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
    1536              :    matches the predicate for insn CODE operand OPERAND.  */
    1537              : 
    1538              : static bool
    1539     29605455 : safe_insn_predicate (int code, int operand, rtx x)
    1540              : {
    1541     29605455 :   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
    1542              : }
    1543              : 
    1544              : /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
    1545              :    registers present inside of insn.  The result will be a valid insn.  */
    1546              : 
    1547              : static void
    1548     92916936 : instantiate_virtual_regs_in_insn (rtx_insn *insn)
    1549              : {
    1550     92916936 :   poly_int64 offset;
    1551     92916936 :   int insn_code, i;
    1552     92916936 :   bool any_change = false;
    1553     92916936 :   rtx set, new_rtx, x;
    1554     92916936 :   rtx_insn *seq;
    1555              : 
    1556              :   /* There are some special cases to be handled first.  */
    1557     92916936 :   set = single_set (insn);
    1558     92916936 :   if (set)
    1559              :     {
    1560              :       /* We're allowed to assign to a virtual register.  This is interpreted
    1561              :          to mean that the underlying register gets assigned the inverse
    1562              :          transformation.  This is used, for example, in the handling of
    1563              :          non-local gotos.  */
    1564     88730414 :       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
    1565     88730414 :       if (new_rtx)
    1566              :         {
    1567            0 :           start_sequence ();
    1568              : 
    1569            0 :           instantiate_virtual_regs_in_rtx (&SET_SRC (set));
    1570            0 :           x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
    1571            0 :                                    gen_int_mode (-offset, GET_MODE (new_rtx)));
    1572            0 :           x = force_operand (x, new_rtx);
    1573            0 :           if (x != new_rtx)
    1574            0 :             emit_move_insn (new_rtx, x);
    1575              : 
    1576            0 :           seq = end_sequence ();
    1577              : 
    1578            0 :           emit_insn_before (seq, insn);
    1579            0 :           delete_insn (insn);
    1580        18681 :           return;
    1581              :         }
    1582              : 
    1583              :       /* Handle a straight copy from a virtual register by generating a
    1584              :          new add insn.  The difference between this and falling through
    1585              :          to the generic case is avoiding a new pseudo and eliminating a
    1586              :          move insn in the initial rtl stream.  */
    1587     88730414 :       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
    1588     88730414 :       if (new_rtx
    1589       321211 :           && maybe_ne (offset, 0)
    1590         3841 :           && REG_P (SET_DEST (set))
    1591     88734255 :           && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
    1592              :         {
    1593         3841 :           start_sequence ();
    1594              : 
    1595         3841 :           x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
    1596              :                                    gen_int_mode (offset,
    1597         3841 :                                                  GET_MODE (SET_DEST (set))),
    1598              :                                    SET_DEST (set), 1, OPTAB_LIB_WIDEN);
    1599         3841 :           if (x != SET_DEST (set))
    1600            0 :             emit_move_insn (SET_DEST (set), x);
    1601              : 
    1602         3841 :           seq = end_sequence ();
    1603              : 
    1604         3841 :           emit_insn_before (seq, insn);
    1605         3841 :           delete_insn (insn);
    1606         3841 :           return;
    1607              :         }
    1608              : 
    1609     88726573 :       extract_insn (insn);
    1610     88726573 :       insn_code = INSN_CODE (insn);
    1611              : 
    1612              :       /* Handle a plus involving a virtual register by determining if the
    1613              :          operands remain valid if they're modified in place.  */
    1614     88726573 :       poly_int64 delta;
    1615     88726573 :       if (GET_CODE (SET_SRC (set)) == PLUS
    1616     10087003 :           && recog_data.n_operands >= 3
    1617     10022160 :           && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
    1618     10021605 :           && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
    1619     10021605 :           && poly_int_rtx_p (recog_data.operand[2], &delta)
    1620     96629971 :           && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
    1621              :         {
    1622      2340080 :           offset += delta;
    1623              : 
    1624              :           /* If the sum is zero, then replace with a plain move.  */
    1625      2340080 :           if (known_eq (offset, 0)
    1626        14840 :               && REG_P (SET_DEST (set))
    1627      2354920 :               && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
    1628              :             {
    1629        14840 :               start_sequence ();
    1630        14840 :               emit_move_insn (SET_DEST (set), new_rtx);
    1631        14840 :               seq = end_sequence ();
    1632              : 
    1633        14840 :               emit_insn_before (seq, insn);
    1634        14840 :               delete_insn (insn);
    1635        14840 :               return;
    1636              :             }
    1637              : 
    1638      2325240 :           x = gen_int_mode (offset, recog_data.operand_mode[2]);
    1639              : 
    1640              :           /* Using validate_change and apply_change_group here leaves
    1641              :              recog_data in an invalid state.  Since we know exactly what
    1642              :              we want to check, do those two by hand.  */
    1643      2325240 :           if (safe_insn_predicate (insn_code, 1, new_rtx)
    1644      2325240 :               && safe_insn_predicate (insn_code, 2, x))
    1645              :             {
    1646      2297086 :               *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
    1647      2297086 :               *recog_data.operand_loc[2] = recog_data.operand[2] = x;
    1648      2297086 :               any_change = true;
    1649              : 
    1650              :               /* Fall through into the regular operand fixup loop in
    1651              :                  order to take care of operands other than 1 and 2.  */
    1652              :             }
    1653              :         }
    1654              :     }
    1655              :   else
    1656              :     {
    1657      4186522 :       extract_insn (insn);
    1658      4186522 :       insn_code = INSN_CODE (insn);
    1659              :     }
    1660              : 
    1661              :   /* In the general case, we expect virtual registers to appear only in
    1662              :      operands, and then only as either bare registers or inside memories.  */
    1663    294258996 :   for (i = 0; i < recog_data.n_operands; ++i)
    1664              :     {
    1665    201360741 :       x = recog_data.operand[i];
    1666    201360741 :       switch (GET_CODE (x))
    1667              :         {
    1668     29007086 :         case MEM:
    1669     29007086 :           {
    1670     29007086 :             rtx addr = XEXP (x, 0);
    1671              : 
    1672     29007086 :             if (!instantiate_virtual_regs_in_rtx (&addr))
    1673     16722758 :               continue;
    1674              : 
    1675     12284328 :             start_sequence ();
    1676     12284328 :             x = replace_equiv_address (x, addr, true);
    1677              :             /* It may happen that the address with the virtual reg
    1678              :                was valid (e.g. based on the virtual stack reg, which might
    1679              :                be acceptable to the predicates with all offsets), whereas
    1680              :                the address now isn't anymore, for instance when the address
    1681              :                is still offsetted, but the base reg isn't virtual-stack-reg
    1682              :                anymore.  Below we would do a force_reg on the whole operand,
    1683              :                but this insn might actually only accept memory.  Hence,
    1684              :                before doing that last resort, try to reload the address into
    1685              :                a register, so this operand stays a MEM.  */
    1686     12284328 :             if (!safe_insn_predicate (insn_code, i, x))
    1687              :               {
    1688            0 :                 addr = force_reg (GET_MODE (addr), addr);
    1689            0 :                 x = replace_equiv_address (x, addr, true);
    1690              :               }
    1691     12284328 :             seq = end_sequence ();
    1692     12284328 :             if (seq)
    1693            0 :               emit_insn_before (seq, insn);
    1694              :           }
    1695     12284328 :           break;
    1696              : 
    1697    112602875 :         case REG:
    1698    112602875 :           new_rtx = instantiate_new_reg (x, &offset);
    1699    112602875 :           if (new_rtx == NULL)
    1700    112188408 :             continue;
    1701       414467 :           if (known_eq (offset, 0))
    1702              :             x = new_rtx;
    1703              :           else
    1704              :             {
    1705            0 :               start_sequence ();
    1706              : 
    1707              :               /* Careful, special mode predicates may have stuff in
    1708              :                  insn_data[insn_code].operand[i].mode that isn't useful
    1709              :                  to us for computing a new value.  */
    1710              :               /* ??? Recognize address_operand and/or "p" constraints
    1711              :                  to see if (plus new offset) is a valid before we put
    1712              :                  this through expand_simple_binop.  */
    1713            0 :               x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
    1714            0 :                                        gen_int_mode (offset, GET_MODE (x)),
    1715              :                                        NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1716            0 :               seq = end_sequence ();
    1717            0 :               emit_insn_before (seq, insn);
    1718              :             }
    1719              :           break;
    1720              : 
    1721      2717495 :         case SUBREG:
    1722      2717495 :           new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
    1723      2717495 :           if (new_rtx == NULL)
    1724      2717489 :             continue;
    1725            6 :           start_sequence ();
    1726            6 :           if (maybe_ne (offset, 0))
    1727            0 :             new_rtx = expand_simple_binop
    1728            0 :               (GET_MODE (new_rtx), PLUS, new_rtx,
    1729            0 :                gen_int_mode (offset, GET_MODE (new_rtx)),
    1730              :                NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1731           12 :           x = force_subreg (recog_data.operand_mode[i], new_rtx,
    1732            6 :                             GET_MODE (new_rtx), SUBREG_BYTE (x));
    1733            6 :           gcc_assert (x);
    1734            6 :           seq = end_sequence ();
    1735            6 :           emit_insn_before (seq, insn);
    1736            6 :           break;
    1737              : 
    1738     57033285 :         default:
    1739     57033285 :           continue;
    1740     57033285 :         }
    1741              : 
    1742              :       /* At this point, X contains the new value for the operand.
    1743              :          Validate the new value vs the insn predicate.  Note that
    1744              :          asm insns will have insn_code -1 here.  */
    1745     12698801 :       if (!safe_insn_predicate (insn_code, i, x))
    1746              :         {
    1747        56308 :           start_sequence ();
    1748        56308 :           if (REG_P (x))
    1749              :             {
    1750            0 :               gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
    1751            0 :               x = copy_to_reg (x);
    1752              :             }
    1753              :           else
    1754        56308 :             x = force_reg (insn_data[insn_code].operand[i].mode, x);
    1755        56308 :           seq = end_sequence ();
    1756        56308 :           if (seq)
    1757        56308 :             emit_insn_before (seq, insn);
    1758              :         }
    1759              : 
    1760     12698801 :       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
    1761     12698801 :       any_change = true;
    1762              :     }
    1763              : 
    1764     92898255 :   if (any_change)
    1765              :     {
    1766              :       /* Propagate operand changes into the duplicates.  */
    1767     14909092 :       for (i = 0; i < recog_data.n_dups; ++i)
    1768        86991 :         *recog_data.dup_loc[i]
    1769        86991 :           = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
    1770              : 
    1771              :       /* Force re-recognition of the instruction for validation.  */
    1772     14822101 :       INSN_CODE (insn) = -1;
    1773              :     }
    1774              : 
    1775     92898255 :   if (asm_noperands (PATTERN (insn)) >= 0)
    1776              :     {
    1777       107527 :       if (!check_asm_operands (PATTERN (insn)))
    1778              :         {
    1779           23 :           error_for_asm (insn, "impossible constraint in %<asm%>");
    1780              :           /* For asm goto, instead of fixing up all the edges
    1781              :              just clear the template and clear input and output operands
    1782              :              and strip away clobbers.  */
    1783           23 :           if (JUMP_P (insn))
    1784              :             {
    1785           14 :               rtx asm_op = extract_asm_operands (PATTERN (insn));
    1786           14 :               PATTERN (insn) = asm_op;
    1787           14 :               PUT_MODE (asm_op, VOIDmode);
    1788           14 :               ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
    1789           14 :               ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
    1790           14 :               ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
    1791           14 :               ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
    1792           14 :               ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
    1793              :             }
    1794              :           else
    1795            9 :             delete_insn (insn);
    1796              :         }
    1797              :     }
    1798              :   else
    1799              :     {
    1800     92790728 :       if (recog_memoized (insn) < 0)
    1801            0 :         fatal_insn_not_found (insn);
    1802              :     }
    1803              : }
    1804              : 
    1805              : /* Subroutine of instantiate_decls.  Given RTL representing a decl,
    1806              :    do any instantiation required.  */
    1807              : 
    1808              : void
    1809      9575516 : instantiate_decl_rtl (rtx x)
    1810              : {
    1811      9581432 :   rtx addr;
    1812              : 
    1813      9581432 :   if (x == 0)
    1814              :     return;
    1815              : 
    1816              :   /* If this is a CONCAT, recurse for the pieces.  */
    1817      9581432 :   if (GET_CODE (x) == CONCAT)
    1818              :     {
    1819         5916 :       instantiate_decl_rtl (XEXP (x, 0));
    1820         5916 :       instantiate_decl_rtl (XEXP (x, 1));
    1821         5916 :       return;
    1822              :     }
    1823              : 
    1824              :   /* If this is not a MEM, no need to do anything.  Similarly if the
    1825              :      address is a constant or a register that is not a virtual register.  */
    1826      9575516 :   if (!MEM_P (x))
    1827              :     return;
    1828              : 
    1829      3202157 :   addr = XEXP (x, 0);
    1830      3202157 :   if (CONSTANT_P (addr)
    1831      3202157 :       || (REG_P (addr)
    1832       302434 :           && !VIRTUAL_REGISTER_P (addr)))
    1833              :     return;
    1834              : 
    1835      2981446 :   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
    1836              : }
    1837              : 
    1838              : /* Helper for instantiate_decls called via walk_tree: Process all decls
    1839              :    in the given DECL_VALUE_EXPR.  */
    1840              : 
    1841              : static tree
    1842      1163604 : instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
    1843              : {
    1844      1163604 :   tree t = *tp;
    1845      1163604 :   if (! EXPR_P (t))
    1846              :     {
    1847       616312 :       *walk_subtrees = 0;
    1848       616312 :       if (DECL_P (t))
    1849              :         {
    1850       532581 :           if (DECL_RTL_SET_P (t))
    1851       169565 :             instantiate_decl_rtl (DECL_RTL (t));
    1852       134629 :           if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
    1853       643644 :               && DECL_INCOMING_RTL (t))
    1854       111063 :             instantiate_decl_rtl (DECL_INCOMING_RTL (t));
    1855       340886 :           if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
    1856       539998 :               && DECL_HAS_VALUE_EXPR_P (t))
    1857              :             {
    1858         7679 :               tree v = DECL_VALUE_EXPR (t);
    1859         7679 :               walk_tree (&v, instantiate_expr, NULL, NULL);
    1860              :             }
    1861              :         }
    1862              :     }
    1863      1163604 :   return NULL;
    1864              : }
    1865              : 
    1866              : /* Subroutine of instantiate_decls: Process all decls in the given
    1867              :    BLOCK node and all its subblocks.  */
    1868              : 
    1869              : static void
    1870     16098742 : instantiate_decls_1 (tree let)
    1871              : {
    1872     16098742 :   tree t;
    1873              : 
    1874     34631557 :   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    1875              :     {
    1876     18532815 :       if (DECL_RTL_SET_P (t))
    1877      2270099 :         instantiate_decl_rtl (DECL_RTL (t));
    1878     18532815 :       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
    1879              :         {
    1880       264204 :           tree v = DECL_VALUE_EXPR (t);
    1881       264204 :           walk_tree (&v, instantiate_expr, NULL, NULL);
    1882              :         }
    1883              :     }
    1884              : 
    1885              :   /* Process all subblocks.  */
    1886     30726127 :   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    1887     14627385 :     instantiate_decls_1 (t);
    1888     16098742 : }
    1889              : 
    1890              : /* Scan all decls in FNDECL (both variables and parameters) and instantiate
    1891              :    all virtual registers in their DECL_RTL's.  */
    1892              : 
    1893              : static void
    1894      1471357 : instantiate_decls (tree fndecl)
    1895              : {
    1896      1471357 :   tree decl;
    1897      1471357 :   unsigned ix;
    1898              : 
    1899              :   /* Process all parameters of the function.  */
    1900      4573626 :   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    1901              :     {
    1902      3102269 :       instantiate_decl_rtl (DECL_RTL (decl));
    1903      3102269 :       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
    1904      3102269 :       if (DECL_HAS_VALUE_EXPR_P (decl))
    1905              :         {
    1906          128 :           tree v = DECL_VALUE_EXPR (decl);
    1907          128 :           walk_tree (&v, instantiate_expr, NULL, NULL);
    1908              :         }
    1909              :     }
    1910              : 
    1911      1471357 :   if ((decl = DECL_RESULT (fndecl))
    1912      1471357 :       && TREE_CODE (decl) == RESULT_DECL)
    1913              :     {
    1914      1471357 :       if (DECL_RTL_SET_P (decl))
    1915       785667 :         instantiate_decl_rtl (DECL_RTL (decl));
    1916      1471357 :       if (DECL_HAS_VALUE_EXPR_P (decl))
    1917              :         {
    1918        69234 :           tree v = DECL_VALUE_EXPR (decl);
    1919        69234 :           walk_tree (&v, instantiate_expr, NULL, NULL);
    1920              :         }
    1921              :     }
    1922              : 
    1923              :   /* Process the saved static chain if it exists.  */
    1924      1471357 :   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
    1925      1471357 :   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    1926         3908 :     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
    1927              : 
    1928              :   /* Now process all variables defined in the function or its subblocks.  */
    1929      1471357 :   if (DECL_INITIAL (fndecl))
    1930      1471357 :     instantiate_decls_1 (DECL_INITIAL (fndecl));
    1931              : 
    1932      2769744 :   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    1933        64496 :     if (DECL_RTL_SET_P (decl))
    1934        24760 :       instantiate_decl_rtl (DECL_RTL (decl));
    1935      1471357 :   vec_free (cfun->local_decls);
    1936      1471357 : }
    1937              : 
    1938              : /* Return the value of STACK_DYNAMIC_OFFSET for the current function.
    1939              :    This is done through a function wrapper so that the macro sees a
    1940              :    predictable set of included files.  */
    1941              : 
    1942              : poly_int64
    1943      1471357 : get_stack_dynamic_offset ()
    1944              : {
    1945      1471357 :   return STACK_DYNAMIC_OFFSET (current_function_decl);
    1946              : }
    1947              : 
    1948              : /* Pass through the INSNS of function FNDECL and convert virtual register
    1949              :    references to hard register references.  */
    1950              : 
    1951              : static void
    1952      1471357 : instantiate_virtual_regs (void)
    1953              : {
    1954      1471357 :   rtx_insn *insn;
    1955              : 
    1956              :   /* Compute the offsets to use for this function.  */
    1957      1471357 :   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
    1958      1471357 :   var_offset = targetm.starting_frame_offset ();
    1959      1471357 :   dynamic_offset = get_stack_dynamic_offset ();
    1960      1471357 :   out_arg_offset = STACK_POINTER_OFFSET;
    1961              : #ifdef FRAME_POINTER_CFA_OFFSET
    1962              :   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
    1963              : #else
    1964      1471357 :   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
    1965              : #endif
    1966              : 
    1967              :   /* Initialize recognition, indicating that volatile is OK.  */
    1968      1471357 :   init_recog ();
    1969              : 
    1970              :   /* Scan through all the insns, instantiating every virtual register still
    1971              :      present.  */
    1972    172982900 :   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    1973    171511543 :     if (INSN_P (insn))
    1974              :       {
    1975              :         /* These patterns in the instruction stream can never be recognized.
    1976              :            Fortunately, they shouldn't contain virtual registers either.  */
    1977    154389385 :         if (GET_CODE (PATTERN (insn)) == USE
    1978    141662600 :             || GET_CODE (PATTERN (insn)) == CLOBBER
    1979    141497089 :             || GET_CODE (PATTERN (insn)) == ASM_INPUT
    1980    283917671 :             || DEBUG_MARKER_INSN_P (insn))
    1981     11966919 :           continue;
    1982    130455547 :         else if (DEBUG_BIND_INSN_P (insn))
    1983     37538611 :           instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
    1984              :         else
    1985     92916936 :           instantiate_virtual_regs_in_insn (insn);
    1986              : 
    1987    130455547 :         if (insn->deleted ())
    1988        18690 :           continue;
    1989              : 
    1990    130436857 :         instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
    1991              : 
    1992              :         /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
    1993    130436857 :         if (CALL_P (insn))
    1994      5937175 :           instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
    1995              :       }
    1996              : 
    1997              :   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
    1998      1471357 :   instantiate_decls (current_function_decl);
    1999              : 
    2000      1471357 :   targetm.instantiate_decls ();
    2001              : 
    2002              :   /* Indicate that, from now on, assign_stack_local should use
    2003              :      frame_pointer_rtx.  */
    2004      1471357 :   virtuals_instantiated = 1;
    2005      1471357 : }
    2006              : 
    2007              : namespace {
    2008              : 
    2009              : const pass_data pass_data_instantiate_virtual_regs =
    2010              : {
    2011              :   RTL_PASS, /* type */
    2012              :   "vregs", /* name */
    2013              :   OPTGROUP_NONE, /* optinfo_flags */
    2014              :   TV_NONE, /* tv_id */
    2015              :   0, /* properties_required */
    2016              :   0, /* properties_provided */
    2017              :   0, /* properties_destroyed */
    2018              :   0, /* todo_flags_start */
    2019              :   0, /* todo_flags_finish */
    2020              : };
    2021              : 
    2022              : class pass_instantiate_virtual_regs : public rtl_opt_pass
    2023              : {
    2024              : public:
    2025       285722 :   pass_instantiate_virtual_regs (gcc::context *ctxt)
    2026       571444 :     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
    2027              :   {}
    2028              : 
    2029              :   /* opt_pass methods: */
    2030      1471357 :   unsigned int execute (function *) final override
    2031              :     {
    2032      1471357 :       instantiate_virtual_regs ();
    2033      1471357 :       return 0;
    2034              :     }
    2035              : 
    2036              : }; // class pass_instantiate_virtual_regs
    2037              : 
    2038              : } // anon namespace
    2039              : 
    2040              : rtl_opt_pass *
    2041       285722 : make_pass_instantiate_virtual_regs (gcc::context *ctxt)
    2042              : {
    2043       285722 :   return new pass_instantiate_virtual_regs (ctxt);
    2044              : }
    2045              : 
    2046              : 
    2047              : /* Return true if EXP is an aggregate type (or a value with aggregate type).
    2048              :    This means a type for which function calls must pass an address to the
    2049              :    function or get an address back from the function.
    2050              :    EXP may be a type node or an expression (whose type is tested).  */
    2051              : 
    2052              : bool
    2053    157070400 : aggregate_value_p (const_tree exp, const_tree fntype)
    2054              : {
    2055    157083645 :   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
    2056    157083645 :   int i, regno, nregs;
    2057    157083645 :   rtx reg;
    2058              : 
    2059    157083645 :   if (fntype)
    2060    156765875 :     switch (TREE_CODE (fntype))
    2061              :       {
    2062     11731217 :       case CALL_EXPR:
    2063     11731217 :         {
    2064     11731217 :           tree fndecl = get_callee_fndecl (fntype);
    2065     11731217 :           if (fndecl)
    2066     10825541 :             fntype = TREE_TYPE (fndecl);
    2067       905676 :           else if (CALL_EXPR_FN (fntype))
    2068       324085 :             fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
    2069              :           else
    2070              :             /* For internal functions, assume nothing needs to be
    2071              :                returned in memory.  */
    2072              :             return false;
    2073              :         }
    2074              :         break;
    2075    135215513 :       case FUNCTION_DECL:
    2076    135215513 :         fntype = TREE_TYPE (fntype);
    2077    135215513 :         break;
    2078              :       case FUNCTION_TYPE:
    2079              :       case METHOD_TYPE:
    2080              :         break;
    2081              :       case IDENTIFIER_NODE:
    2082       330930 :         fntype = NULL_TREE;
    2083              :         break;
    2084            0 :       default:
    2085              :         /* We don't expect other tree types here.  */
    2086            0 :         gcc_unreachable ();
    2087              :       }
    2088              : 
    2089    156502054 :   if (VOID_TYPE_P (type))
    2090              :     return false;
    2091              : 
    2092    114065790 :   if (error_operand_p (fntype))
    2093              :     return false;
    2094              : 
    2095              :   /* If a record should be passed the same as its first (and only) member
    2096              :      don't pass it as an aggregate.  */
    2097    114065789 :   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    2098        13245 :     return aggregate_value_p (first_field (type), fntype);
    2099              : 
    2100              :   /* If the front end has decided that this needs to be passed by
    2101              :      reference, do so.  */
    2102    114052455 :   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
    2103    208455608 :       && DECL_BY_REFERENCE (exp))
    2104              :     return true;
    2105              : 
    2106              :   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
    2107    113902588 :   if (fntype && TREE_ADDRESSABLE (fntype))
    2108              :     return true;
    2109              : 
    2110              :   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
    2111              :      and thus can't be returned in registers.  */
    2112    113902588 :   if (TREE_ADDRESSABLE (type))
    2113              :     return true;
    2114              : 
    2115    112424962 :   if (TYPE_EMPTY_P (type))
    2116              :     return false;
    2117              : 
    2118    111554856 :   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    2119              :     return true;
    2120              : 
    2121    110808651 :   if (targetm.calls.return_in_memory (type, fntype))
    2122              :     return true;
    2123              : 
    2124              :   /* Make sure we have suitable call-clobbered regs to return
    2125              :      the value in; if not, we must return it in memory.  */
    2126    103661757 :   reg = hard_function_value (type, 0, fntype, 0);
    2127              : 
    2128              :   /* If we have something other than a REG (e.g. a PARALLEL), then assume
    2129              :      it is OK.  */
    2130    103661757 :   if (!REG_P (reg))
    2131              :     return false;
    2132              : 
    2133              :   /* Use the default ABI if the type of the function isn't known.
    2134              :      The scheme for handling interoperability between different ABIs
    2135              :      requires us to be able to tell when we're calling a function with
    2136              :      a nondefault ABI.  */
    2137    102816701 :   const predefined_function_abi &abi = (fntype
    2138    102816701 :                                         ? fntype_abi (fntype)
    2139       285048 :                                         : default_function_abi);
    2140    102816701 :   regno = REGNO (reg);
    2141    102816701 :   nregs = hard_regno_nregs (regno, TYPE_MODE (type));
    2142    208598994 :   for (i = 0; i < nregs; i++)
    2143    105782293 :     if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
    2144              :       return true;
    2145              : 
    2146              :   return false;
    2147              : }
    2148              : 
    2149              : /* Return true if we should assign DECL a pseudo register; false if it
    2150              :    should live on the local stack.  */
    2151              : 
    2152              : bool
    2153    162845200 : use_register_for_decl (const_tree decl)
    2154              : {
    2155    162845200 :   if (TREE_CODE (decl) == SSA_NAME)
    2156              :     {
    2157              :       /* We often try to use the SSA_NAME, instead of its underlying
    2158              :          decl, to get type information and guide decisions, to avoid
    2159              :          differences of behavior between anonymous and named
    2160              :          variables, but in this one case we have to go for the actual
    2161              :          variable if there is one.  The main reason is that, at least
    2162              :          at -O0, we want to place user variables on the stack, but we
    2163              :          don't mind using pseudos for anonymous or ignored temps.
    2164              :          Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
    2165              :          should go in pseudos, whereas their corresponding variables
    2166              :          might have to go on the stack.  So, disregarding the decl
    2167              :          here would negatively impact debug info at -O0, enable
    2168              :          coalescing between SSA_NAMEs that ought to get different
    2169              :          stack/pseudo assignments, and get the incoming argument
    2170              :          processing thoroughly confused by PARM_DECLs expected to live
    2171              :          in stack slots but assigned to pseudos.  */
    2172    144770159 :       if (!SSA_NAME_VAR (decl))
    2173     99669016 :         return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
    2174     99669016 :           && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
    2175              : 
    2176              :       decl = SSA_NAME_VAR (decl);
    2177              :     }
    2178              : 
    2179              :   /* Honor volatile.  */
    2180     63176184 :   if (TREE_SIDE_EFFECTS (decl))
    2181              :     return false;
    2182              : 
    2183              :   /* Honor addressability.  */
    2184     63058123 :   if (TREE_ADDRESSABLE (decl))
    2185              :     return false;
    2186              : 
    2187              :   /* RESULT_DECLs are a bit special in that they're assigned without
    2188              :      regard to use_register_for_decl, but we generally only store in
    2189              :      them.  If we coalesce their SSA NAMEs, we'd better return a
    2190              :      result that matches the assignment in expand_function_start.  */
    2191     58852415 :   if (TREE_CODE (decl) == RESULT_DECL)
    2192              :     {
    2193              :       /* If it's not an aggregate, we're going to use a REG or a
    2194              :          PARALLEL containing a REG.  */
    2195      3091808 :       if (!aggregate_value_p (decl, current_function_decl))
    2196              :         return true;
    2197              : 
    2198              :       /* If expand_function_start determines the return value, we'll
    2199              :          use MEM if it's not by reference.  */
    2200        31303 :       if (cfun->returns_pcc_struct
    2201        62606 :           || (targetm.calls.struct_value_rtx
    2202        31303 :               (TREE_TYPE (current_function_decl), 1)))
    2203            0 :         return DECL_BY_REFERENCE (decl);
    2204              : 
    2205              :       /* Otherwise, we're taking an extra all.function_result_decl
    2206              :          argument.  It's set up in assign_parms_augmented_arg_list,
    2207              :          under the (negated) conditions above, and then it's used to
    2208              :          set up the RESULT_DECL rtl in assign_params, after looping
    2209              :          over all parameters.  Now, if the RESULT_DECL is not by
    2210              :          reference, we'll use a MEM either way.  */
    2211        31303 :       if (!DECL_BY_REFERENCE (decl))
    2212              :         return false;
    2213              : 
    2214              :       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
    2215              :          the function_result_decl's assignment.  Since it's a pointer,
    2216              :          we can short-circuit a number of the tests below, and we must
    2217              :          duplicate them because we don't have the function_result_decl
    2218              :          to test.  */
    2219        31303 :       if (!targetm.calls.allocate_stack_slots_for_args ())
    2220              :         return true;
    2221              :       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
    2222        31303 :       if (optimize)
    2223              :         return true;
    2224              :       /* Needed for [[musttail]] which can operate even at -O0 */
    2225         3756 :       if (cfun->tail_call_marked)
    2226              :         return true;
    2227              :       /* We don't set DECL_REGISTER for the function_result_decl.  */
    2228              :       return false;
    2229              :     }
    2230              : 
    2231              :   /* Only register-like things go in registers.  */
    2232     55760607 :   if (DECL_MODE (decl) == BLKmode)
    2233              :     return false;
    2234              : 
    2235              :   /* If -ffloat-store specified, don't put explicit float variables
    2236              :      into registers.  */
    2237              :   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
    2238              :      propagates values across these stores, and it probably shouldn't.  */
    2239     53995771 :   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    2240              :     return false;
    2241              : 
    2242     53993941 :   if (!targetm.calls.allocate_stack_slots_for_args ())
    2243              :     return true;
    2244              : 
    2245              :   /* If we're not interested in tracking debugging information for
    2246              :      this decl, then we can certainly put it in a register.  */
    2247     53993625 :   if (DECL_IGNORED_P (decl))
    2248              :     return true;
    2249              : 
    2250     36707584 :   if (optimize)
    2251              :     return true;
    2252              : 
    2253              :   /* Thunks force a tail call even at -O0 so we need to avoid creating a
    2254              :      dangling reference in case the parameter is passed by reference.  */
    2255      7187123 :   if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
    2256              :     return true;
    2257              : 
    2258      7186528 :   if (!DECL_REGISTER (decl))
    2259              :     return false;
    2260              : 
    2261              :   /* When not optimizing, disregard register keyword for types that
    2262              :      could have methods, otherwise the methods won't be callable from
    2263              :      the debugger.  */
    2264        11290 :   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    2265              :     return false;
    2266              : 
    2267              :   return true;
    2268              : }
    2269              : 
    2270              : /* Structures to communicate between the subroutines of assign_parms.
    2271              :    The first holds data persistent across all parameters, the second
    2272              :    is cleared out for each parameter.  */
    2273              : 
    2274              : struct assign_parm_data_all
    2275              : {
    2276              :   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
    2277              :      should become a job of the target or otherwise encapsulated.  */
    2278              :   CUMULATIVE_ARGS args_so_far_v;
    2279              :   cumulative_args_t args_so_far;
    2280              :   struct args_size stack_args_size;
    2281              :   tree function_result_decl;
    2282              :   tree orig_fnargs;
    2283              :   rtx_insn *first_conversion_insn;
    2284              :   rtx_insn *last_conversion_insn;
    2285              :   HOST_WIDE_INT pretend_args_size;
    2286              :   HOST_WIDE_INT extra_pretend_bytes;
    2287              :   int reg_parm_stack_space;
    2288              : };
    2289              : 
    2290     18235384 : struct assign_parm_data_one
    2291              : {
    2292              :   tree nominal_type;
    2293              :   function_arg_info arg;
    2294              :   rtx entry_parm;
    2295              :   rtx stack_parm;
    2296              :   machine_mode nominal_mode;
    2297              :   machine_mode passed_mode;
    2298              :   struct locate_and_pad_arg_data locate;
    2299              :   int partial;
    2300              : };
    2301              : 
    2302              : /* A subroutine of assign_parms.  Initialize ALL.  */
    2303              : 
    2304              : static void
    2305      4341891 : assign_parms_initialize_all (struct assign_parm_data_all *all)
    2306              : {
    2307      4341891 :   tree fntype ATTRIBUTE_UNUSED;
    2308              : 
    2309      4341891 :   memset (all, 0, sizeof (*all));
    2310              : 
    2311      4341891 :   fntype = TREE_TYPE (current_function_decl);
    2312              : 
    2313              : #ifdef INIT_CUMULATIVE_INCOMING_ARGS
    2314              :   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
    2315              : #else
    2316      4341891 :   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
    2317              :                         current_function_decl, -1);
    2318              : #endif
    2319      4341891 :   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
    2320              : 
    2321              : #ifdef INCOMING_REG_PARM_STACK_SPACE
    2322      4341891 :   all->reg_parm_stack_space
    2323      4341891 :     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
    2324              : #endif
    2325      4341891 : }
    2326              : 
    2327              : /* If ARGS contains entries with complex types, split the entry into two
    2328              :    entries of the component type.  Return a new list of substitutions are
    2329              :    needed, else the old list.  */
    2330              : 
    2331              : static void
    2332            0 : split_complex_args (vec<tree> *args)
    2333              : {
    2334            0 :   unsigned i;
    2335            0 :   tree p;
    2336              : 
    2337            0 :   FOR_EACH_VEC_ELT (*args, i, p)
    2338              :     {
    2339            0 :       tree type = TREE_TYPE (p);
    2340            0 :       if (TREE_CODE (type) == COMPLEX_TYPE
    2341            0 :           && targetm.calls.split_complex_arg (type))
    2342              :         {
    2343            0 :           tree decl;
    2344            0 :           tree subtype = TREE_TYPE (type);
    2345            0 :           bool addressable = TREE_ADDRESSABLE (p);
    2346              : 
    2347              :           /* Rewrite the PARM_DECL's type with its component.  */
    2348            0 :           p = copy_node (p);
    2349            0 :           TREE_TYPE (p) = subtype;
    2350            0 :           DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
    2351            0 :           SET_DECL_MODE (p, VOIDmode);
    2352            0 :           DECL_SIZE (p) = NULL;
    2353            0 :           DECL_SIZE_UNIT (p) = NULL;
    2354              :           /* If this arg must go in memory, put it in a pseudo here.
    2355              :              We can't allow it to go in memory as per normal parms,
    2356              :              because the usual place might not have the imag part
    2357              :              adjacent to the real part.  */
    2358            0 :           DECL_ARTIFICIAL (p) = addressable;
    2359            0 :           DECL_IGNORED_P (p) = addressable;
    2360            0 :           TREE_ADDRESSABLE (p) = 0;
    2361            0 :           layout_decl (p, 0);
    2362            0 :           (*args)[i] = p;
    2363              : 
    2364              :           /* Build a second synthetic decl.  */
    2365            0 :           decl = build_decl (EXPR_LOCATION (p),
    2366              :                              PARM_DECL, NULL_TREE, subtype);
    2367            0 :           DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
    2368            0 :           DECL_ARTIFICIAL (decl) = addressable;
    2369            0 :           DECL_IGNORED_P (decl) = addressable;
    2370            0 :           layout_decl (decl, 0);
    2371            0 :           args->safe_insert (++i, decl);
    2372              :         }
    2373              :     }
    2374            0 : }
    2375              : 
    2376              : /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
    2377              :    the hidden struct return argument, and (abi willing) complex args.
    2378              :    Return the new parameter list.  */
    2379              : 
    2380              : static vec<tree>
    2381      4341891 : assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
    2382              : {
    2383      4341891 :   tree fndecl = current_function_decl;
    2384      4341891 :   tree fntype = TREE_TYPE (fndecl);
    2385      4341891 :   vec<tree> fnargs = vNULL;
    2386      4341891 :   tree arg;
    2387              : 
    2388     13265503 :   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    2389      8923612 :     fnargs.safe_push (arg);
    2390              : 
    2391      4341891 :   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
    2392              : 
    2393              :   /* If struct value address is treated as the first argument, make it so.  */
    2394      4341891 :   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
    2395       194080 :       && ! cfun->returns_pcc_struct
    2396      4535971 :       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    2397              :     {
    2398       194080 :       tree type = build_pointer_type (TREE_TYPE (fntype));
    2399       194080 :       tree decl;
    2400              : 
    2401       194080 :       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
    2402              :                          PARM_DECL, get_identifier (".result_ptr"), type);
    2403       194080 :       DECL_ARG_TYPE (decl) = type;
    2404       194080 :       DECL_ARTIFICIAL (decl) = 1;
    2405       194080 :       DECL_NAMELESS (decl) = 1;
    2406       194080 :       TREE_CONSTANT (decl) = 1;
    2407              :       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
    2408              :          changes, the end of the RESULT_DECL handling block in
    2409              :          use_register_for_decl must be adjusted to match.  */
    2410              : 
    2411       194080 :       DECL_CHAIN (decl) = all->orig_fnargs;
    2412       194080 :       all->orig_fnargs = decl;
    2413       194080 :       fnargs.safe_insert (0, decl);
    2414              : 
    2415       194080 :       all->function_result_decl = decl;
    2416              :     }
    2417              : 
    2418              :   /* If the target wants to split complex arguments into scalars, do so.  */
    2419      4341891 :   if (targetm.calls.split_complex_arg)
    2420            0 :     split_complex_args (&fnargs);
    2421              : 
    2422      4341891 :   return fnargs;
    2423              : }
    2424              : 
    2425              : /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
    2426              :    data for the parameter.  Incorporate ABI specifics such as pass-by-
    2427              :    reference and type promotion.  */
    2428              : 
    2429              : static void
    2430      9117692 : assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
    2431              :                              struct assign_parm_data_one *data)
    2432              : {
    2433      9117692 :   int unsignedp;
    2434              : 
    2435      9117692 :   *data = assign_parm_data_one ();
    2436              : 
    2437              :   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
    2438      9117692 :   if (!cfun->stdarg)
    2439      9035953 :     data->arg.named = 1;  /* No variadic parms.  */
    2440        81739 :   else if (DECL_CHAIN (parm))
    2441        38940 :     data->arg.named = 1;  /* Not the last non-variadic parm. */
    2442        42799 :   else if (targetm.calls.strict_argument_naming (all->args_so_far))
    2443        42799 :     data->arg.named = 1;  /* Only variadic ones are unnamed.  */
    2444              :   else
    2445            0 :     data->arg.named = 0;  /* Treat as variadic.  */
    2446              : 
    2447      9117692 :   data->nominal_type = TREE_TYPE (parm);
    2448      9117692 :   data->arg.type = DECL_ARG_TYPE (parm);
    2449              : 
    2450              :   /* Look out for errors propagating this far.  Also, if the parameter's
    2451              :      type is void then its value doesn't matter.  */
    2452      9117692 :   if (TREE_TYPE (parm) == error_mark_node
    2453              :       /* This can happen after weird syntax errors
    2454              :          or if an enum type is defined among the parms.  */
    2455      9117607 :       || TREE_CODE (parm) != PARM_DECL
    2456      9117607 :       || data->arg.type == NULL
    2457     18235299 :       || VOID_TYPE_P (data->nominal_type))
    2458              :     {
    2459           85 :       data->nominal_type = data->arg.type = void_type_node;
    2460           85 :       data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
    2461           85 :       return;
    2462              :     }
    2463              : 
    2464              :   /* Find mode of arg as it is passed, and mode of arg as it should be
    2465              :      during execution of this function.  */
    2466      9117607 :   data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
    2467      9117607 :   data->nominal_mode = TYPE_MODE (data->nominal_type);
    2468              : 
    2469              :   /* If the parm is to be passed as a transparent union or record, use the
    2470              :      type of the first field for the tests below.  We have already verified
    2471              :      that the modes are the same.  */
    2472      9117607 :   if (RECORD_OR_UNION_TYPE_P (data->arg.type)
    2473      9117607 :       && TYPE_TRANSPARENT_AGGR (data->arg.type))
    2474         1536 :     data->arg.type = TREE_TYPE (first_field (data->arg.type));
    2475              : 
    2476              :   /* See if this arg was passed by invisible reference.  */
    2477      9117607 :   if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
    2478              :     {
    2479         9868 :       data->nominal_type = data->arg.type;
    2480         9868 :       data->passed_mode = data->nominal_mode = data->arg.mode;
    2481              :     }
    2482              : 
    2483              :   /* Find mode as it is passed by the ABI.  */
    2484      9117607 :   unsignedp = TYPE_UNSIGNED (data->arg.type);
    2485      9117607 :   data->arg.mode
    2486      9117607 :     = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
    2487      9117607 :                              TREE_TYPE (current_function_decl), 0);
    2488              : }
    2489              : 
    2490              : /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
    2491              : 
    2492              : static void
    2493        21512 : assign_parms_setup_varargs (struct assign_parm_data_all *all,
    2494              :                             struct assign_parm_data_one *data, bool no_rtl)
    2495              : {
    2496        21512 :   int varargs_pretend_bytes = 0;
    2497              : 
    2498        21512 :   function_arg_info last_named_arg = data->arg;
    2499        21512 :   last_named_arg.named = true;
    2500        21512 :   targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
    2501              :                                         &varargs_pretend_bytes, no_rtl);
    2502              : 
    2503              :   /* If the back-end has requested extra stack space, record how much is
    2504              :      needed.  Do not change pretend_args_size otherwise since it may be
    2505              :      nonzero from an earlier partial argument.  */
    2506        21512 :   if (varargs_pretend_bytes > 0)
    2507            0 :     all->pretend_args_size = varargs_pretend_bytes;
    2508        21512 : }
    2509              : 
    2510              : /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
    2511              :    the incoming location of the current parameter.  */
    2512              : 
    2513              : static void
    2514      3172082 : assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
    2515              :                             struct assign_parm_data_one *data)
    2516              : {
    2517      3172082 :   HOST_WIDE_INT pretend_bytes = 0;
    2518      3172082 :   rtx entry_parm;
    2519      3172082 :   bool in_regs;
    2520              : 
    2521      3172082 :   if (data->arg.mode == VOIDmode)
    2522              :     {
    2523            0 :       data->entry_parm = data->stack_parm = const0_rtx;
    2524            0 :       return;
    2525              :     }
    2526              : 
    2527      3172082 :   targetm.calls.warn_parameter_passing_abi (all->args_so_far,
    2528              :                                             data->arg.type);
    2529              : 
    2530      6344164 :   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
    2531      3172082 :                                                     data->arg);
    2532      3172082 :   if (entry_parm == 0)
    2533      1068559 :     data->arg.mode = data->passed_mode;
    2534              : 
    2535              :   /* Determine parm's home in the stack, in case it arrives in the stack
    2536              :      or we should pretend it did.  Compute the stack position and rtx where
    2537              :      the argument arrives and its size.
    2538              : 
    2539              :      There is one complexity here:  If this was a parameter that would
    2540              :      have been passed in registers, but wasn't only because it is
    2541              :      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
    2542              :      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
    2543              :      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
    2544              :      as it was the previous time.  */
    2545      1068559 :   in_regs = (entry_parm != 0);
    2546              : #ifdef STACK_PARMS_IN_REG_PARM_AREA
    2547              :   in_regs = true;
    2548              : #endif
    2549      1068559 :   if (!in_regs && !data->arg.named)
    2550              :     {
    2551            0 :       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
    2552              :         {
    2553            0 :           rtx tem;
    2554            0 :           function_arg_info named_arg = data->arg;
    2555            0 :           named_arg.named = true;
    2556            0 :           tem = targetm.calls.function_incoming_arg (all->args_so_far,
    2557              :                                                      named_arg);
    2558            0 :           in_regs = tem != NULL;
    2559              :         }
    2560              :     }
    2561              : 
    2562              :   /* If this parameter was passed both in registers and in the stack, use
    2563              :      the copy on the stack.  */
    2564      3172082 :   if (targetm.calls.must_pass_in_stack (data->arg))
    2565              :     entry_parm = 0;
    2566              : 
    2567      3172082 :   if (entry_parm)
    2568              :     {
    2569      2103523 :       int partial;
    2570              : 
    2571      2103523 :       partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
    2572      2103523 :       data->partial = partial;
    2573              : 
    2574              :       /* The caller might already have allocated stack space for the
    2575              :          register parameters.  */
    2576      2103523 :       if (partial != 0 && all->reg_parm_stack_space == 0)
    2577              :         {
    2578              :           /* Part of this argument is passed in registers and part
    2579              :              is passed on the stack.  Ask the prologue code to extend
    2580              :              the stack part so that we can recreate the full value.
    2581              : 
    2582              :              PRETEND_BYTES is the size of the registers we need to store.
    2583              :              CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
    2584              :              stack space that the prologue should allocate.
    2585              : 
    2586              :              Internally, gcc assumes that the argument pointer is aligned
    2587              :              to STACK_BOUNDARY bits.  This is used both for alignment
    2588              :              optimizations (see init_emit) and to locate arguments that are
    2589              :              aligned to more than PARM_BOUNDARY bits.  We must preserve this
    2590              :              invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
    2591              :              a stack boundary.  */
    2592              : 
    2593              :           /* We assume at most one partial arg, and it must be the first
    2594              :              argument on the stack.  */
    2595            0 :           gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
    2596              : 
    2597            0 :           pretend_bytes = partial;
    2598            0 :           all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
    2599              : 
    2600              :           /* We want to align relative to the actual stack pointer, so
    2601              :              don't include this in the stack size until later.  */
    2602            0 :           all->extra_pretend_bytes = all->pretend_args_size;
    2603              :         }
    2604              :     }
    2605              : 
    2606      3172082 :   locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
    2607              :                        all->reg_parm_stack_space,
    2608              :                        entry_parm ? data->partial : 0, current_function_decl,
    2609              :                        &all->stack_args_size, &data->locate);
    2610              : 
    2611              :   /* Update parm_stack_boundary if this parameter is passed in the
    2612              :      stack.  */
    2613      3172082 :   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    2614       197516 :     crtl->parm_stack_boundary = data->locate.boundary;
    2615              : 
    2616              :   /* Adjust offsets to include the pretend args.  */
    2617      3172082 :   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
    2618      3172082 :   data->locate.slot_offset.constant += pretend_bytes;
    2619      3172082 :   data->locate.offset.constant += pretend_bytes;
    2620              : 
    2621      3172082 :   data->entry_parm = entry_parm;
    2622              : }
    2623              : 
    2624              : /* A subroutine of assign_parms.  If there is actually space on the stack
    2625              :    for this parm, count it in stack_args_size and return true.  */
    2626              : 
    2627              : static bool
    2628      3172082 : assign_parm_is_stack_parm (struct assign_parm_data_all *all,
    2629              :                            struct assign_parm_data_one *data)
    2630              : {
    2631              :   /* Trivially true if we've no incoming register.  */
    2632      3172082 :   if (data->entry_parm == NULL)
    2633              :     ;
    2634              :   /* Also true if we're partially in registers and partially not,
    2635              :      since we've arranged to drop the entire argument on the stack.  */
    2636      2103523 :   else if (data->partial != 0)
    2637              :     ;
    2638              :   /* Also true if the target says that it's passed in both registers
    2639              :      and on the stack.  */
    2640      2103523 :   else if (GET_CODE (data->entry_parm) == PARALLEL
    2641        53749 :            && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    2642              :     ;
    2643              :   /* Also true if the target says that there's stack allocated for
    2644              :      all register parameters.  */
    2645      2103523 :   else if (all->reg_parm_stack_space > 0)
    2646              :     ;
    2647              :   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
    2648              :   else
    2649              :     return false;
    2650              : 
    2651      1178752 :   all->stack_args_size.constant += data->locate.size.constant;
    2652      1178752 :   if (data->locate.size.var)
    2653            0 :     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
    2654              : 
    2655              :   return true;
    2656              : }
    2657              : 
    2658              : /* A subroutine of assign_parms.  Given that this parameter is allocated
    2659              :    stack space by the ABI, find it.  */
    2660              : 
    2661              : static void
    2662      1178752 : assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
    2663              : {
    2664      1178752 :   rtx offset_rtx, stack_parm;
    2665      1178752 :   unsigned int align, boundary;
    2666              : 
    2667              :   /* If we're passing this arg using a reg, make its stack home the
    2668              :      aligned stack slot.  */
    2669      1178752 :   if (data->entry_parm)
    2670       110193 :     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
    2671              :   else
    2672      1310908 :     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
    2673              : 
    2674      1178752 :   stack_parm = crtl->args.internal_arg_pointer;
    2675      1178752 :   if (offset_rtx != const0_rtx)
    2676      1090454 :     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
    2677      1178752 :   stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
    2678              : 
    2679      1178752 :   if (!data->arg.pass_by_reference)
    2680              :     {
    2681      1173858 :       set_mem_attributes (stack_parm, parm, 1);
    2682              :       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
    2683              :          while promoted mode's size is needed.  */
    2684      1173858 :       if (data->arg.mode != BLKmode
    2685      1173858 :           && data->arg.mode != DECL_MODE (parm))
    2686              :         {
    2687            0 :           set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
    2688            0 :           if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
    2689              :             {
    2690            0 :               poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
    2691            0 :                                                          data->arg.mode);
    2692            0 :               if (maybe_ne (offset, 0))
    2693            0 :                 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
    2694              :             }
    2695              :         }
    2696              :     }
    2697              : 
    2698      1178752 :   boundary = data->locate.boundary;
    2699      1178752 :   align = BITS_PER_UNIT;
    2700              : 
    2701              :   /* If we're padding upward, we know that the alignment of the slot
    2702              :      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
    2703              :      intentionally forcing upward padding.  Otherwise we have to come
    2704              :      up with a guess at the alignment based on OFFSET_RTX.  */
    2705      1178752 :   poly_int64 offset;
    2706      1178752 :   if (data->locate.where_pad == PAD_NONE || data->entry_parm)
    2707              :     align = boundary;
    2708      1068559 :   else if (data->locate.where_pad == PAD_UPWARD)
    2709              :     {
    2710      1068559 :       align = boundary;
    2711              :       /* If the argument offset is actually more aligned than the nominal
    2712              :          stack slot boundary, take advantage of that excess alignment.
    2713              :          Don't make any assumptions if STACK_POINTER_OFFSET is in use.  */
    2714      1068559 :       if (poly_int_rtx_p (offset_rtx, &offset)
    2715              :           && known_eq (STACK_POINTER_OFFSET, 0))
    2716              :         {
    2717      1068559 :           unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
    2718      1196766 :           if (offset_align == 0 || offset_align > STACK_BOUNDARY)
    2719       588851 :             offset_align = STACK_BOUNDARY;
    2720      1068559 :           align = MAX (align, offset_align);
    2721              :         }
    2722              :     }
    2723            0 :   else if (poly_int_rtx_p (offset_rtx, &offset))
    2724              :     {
    2725            0 :       align = least_bit_hwi (boundary);
    2726            0 :       unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
    2727            0 :       if (offset_align != 0)
    2728            0 :         align = MIN (align, offset_align);
    2729              :     }
    2730      1178752 :   set_mem_align (stack_parm, align);
    2731              : 
    2732      1178752 :   if (data->entry_parm)
    2733       110193 :     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
    2734              : 
    2735      1178752 :   data->stack_parm = stack_parm;
    2736      1178752 : }
    2737              : 
    2738              : /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
    2739              :    always valid and contiguous.  */
    2740              : 
    2741              : static void
    2742      1178752 : assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
    2743              : {
    2744      1178752 :   rtx entry_parm = data->entry_parm;
    2745      1178752 :   rtx stack_parm = data->stack_parm;
    2746              : 
    2747              :   /* If this parm was passed part in regs and part in memory, pretend it
    2748              :      arrived entirely in memory by pushing the register-part onto the stack.
    2749              :      In the special case of a DImode or DFmode that is split, we could put
    2750              :      it together in a pseudoreg directly, but for now that's not worth
    2751              :      bothering with.  */
    2752      1178752 :   if (data->partial != 0)
    2753              :     {
    2754              :       /* Handle calls that pass values in multiple non-contiguous
    2755              :          locations.  The Irix 6 ABI has examples of this.  */
    2756            0 :       if (GET_CODE (entry_parm) == PARALLEL)
    2757            0 :         emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
    2758            0 :                           data->arg.type, int_size_in_bytes (data->arg.type));
    2759              :       else
    2760              :         {
    2761            0 :           gcc_assert (data->partial % UNITS_PER_WORD == 0);
    2762            0 :           move_block_from_reg (REGNO (entry_parm),
    2763              :                                validize_mem (copy_rtx (stack_parm)),
    2764              :                                data->partial / UNITS_PER_WORD);
    2765              :         }
    2766              : 
    2767              :       entry_parm = stack_parm;
    2768              :     }
    2769              : 
    2770              :   /* If we didn't decide this parm came in a register, by default it came
    2771              :      on the stack.  */
    2772      1178752 :   else if (entry_parm == NULL)
    2773              :     entry_parm = stack_parm;
    2774              : 
    2775              :   /* When an argument is passed in multiple locations, we can't make use
    2776              :      of this information, but we can save some copying if the whole argument
    2777              :      is passed in a single register.  */
    2778       110193 :   else if (GET_CODE (entry_parm) == PARALLEL
    2779            0 :            && data->nominal_mode != BLKmode
    2780            0 :            && data->passed_mode != BLKmode)
    2781              :     {
    2782            0 :       size_t i, len = XVECLEN (entry_parm, 0);
    2783              : 
    2784            0 :       for (i = 0; i < len; i++)
    2785            0 :         if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
    2786            0 :             && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
    2787            0 :             && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
    2788            0 :                 == data->passed_mode)
    2789            0 :             && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
    2790              :           {
    2791              :             entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
    2792              :             break;
    2793              :           }
    2794              :     }
    2795              : 
    2796      1178752 :   data->entry_parm = entry_parm;
    2797      1178752 : }
    2798              : 
    2799              : /* A subroutine of assign_parms.  Reconstitute any values which were
    2800              :    passed in multiple registers and would fit in a single register.  */
    2801              : 
    2802              : static void
    2803      3098767 : assign_parm_remove_parallels (struct assign_parm_data_one *data)
    2804              : {
    2805      3098767 :   rtx entry_parm = data->entry_parm;
    2806              : 
    2807              :   /* Convert the PARALLEL to a REG of the same mode as the parallel.
    2808              :      This can be done with register operations rather than on the
    2809              :      stack, even if we will store the reconstituted parameter on the
    2810              :      stack later.  */
    2811      3098767 :   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    2812              :     {
    2813        49938 :       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
    2814        49938 :       emit_group_store (parmreg, entry_parm, data->arg.type,
    2815        99876 :                         GET_MODE_SIZE (GET_MODE (entry_parm)));
    2816        49938 :       entry_parm = parmreg;
    2817              :     }
    2818              : 
    2819      3098767 :   data->entry_parm = entry_parm;
    2820      3098767 : }
    2821              : 
    2822              : /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
    2823              :    always valid and properly aligned.  */
    2824              : 
    2825              : static void
    2826      3172082 : assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
    2827              : {
    2828      3172082 :   rtx stack_parm = data->stack_parm;
    2829              : 
    2830              :   /* If we can't trust the parm stack slot to be aligned enough for its
    2831              :      ultimate type, don't use that slot after entry.  We'll make another
    2832              :      stack slot, if we need one.  */
    2833      3172082 :   if (stack_parm
    2834      3172082 :       && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
    2835        24865 :            && ((optab_handler (movmisalign_optab, data->nominal_mode)
    2836              :                 != CODE_FOR_nothing)
    2837        49722 :                || targetm.slow_unaligned_access (data->nominal_mode,
    2838        24861 :                                                  MEM_ALIGN (stack_parm))))
    2839      1177351 :           || (data->nominal_type
    2840      2354702 :               && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
    2841         4485 :               && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    2842              :     stack_parm = NULL;
    2843              : 
    2844              :   /* If parm was passed in memory, and we need to convert it on entry,
    2845              :      don't store it back in that same slot.  */
    2846      3167593 :   else if (data->entry_parm == stack_parm
    2847      1062673 :            && data->nominal_mode != BLKmode
    2848       994800 :            && data->nominal_mode != data->passed_mode)
    2849              :     stack_parm = NULL;
    2850              : 
    2851              :   /* If stack protection is in effect for this function, don't leave any
    2852              :      pointers in their passed stack slots.  */
    2853      3167593 :   else if (crtl->stack_protect_guard
    2854          185 :            && (flag_stack_protect == SPCT_FLAG_ALL
    2855          148 :                || data->arg.pass_by_reference
    2856          148 :                || POINTER_TYPE_P (data->nominal_type)))
    2857         4625 :     stack_parm = NULL;
    2858              : 
    2859      3172082 :   data->stack_parm = stack_parm;
    2860      3172082 : }
    2861              : 
    2862              : /* A subroutine of assign_parms.  Return true if the current parameter
    2863              :    should be stored as a BLKmode in the current frame.  */
    2864              : 
    2865              : static bool
    2866      3172082 : assign_parm_setup_block_p (struct assign_parm_data_one *data)
    2867              : {
    2868            0 :   if (data->nominal_mode == BLKmode)
    2869              :     return true;
    2870      3098767 :   if (GET_MODE (data->entry_parm) == BLKmode)
    2871            0 :     return true;
    2872              : 
    2873              : #ifdef BLOCK_REG_PADDING
    2874              :   /* Only assign_parm_setup_block knows how to deal with register arguments
    2875              :      that are padded at the least significant end.  */
    2876              :   if (REG_P (data->entry_parm)
    2877              :       && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
    2878              :       && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
    2879              :           == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    2880              :     return true;
    2881              : #endif
    2882              : 
    2883              :   return false;
    2884              : }
    2885              : 
    2886              : /* A subroutine of assign_parms.  Arrange for the parameter to be
    2887              :    present and valid in DATA->STACK_RTL.  */
    2888              : 
    2889              : static void
    2890        73315 : assign_parm_setup_block (struct assign_parm_data_all *all,
    2891              :                          tree parm, struct assign_parm_data_one *data)
    2892              : {
    2893        73315 :   rtx entry_parm = data->entry_parm;
    2894        73315 :   rtx stack_parm = data->stack_parm;
    2895        73315 :   rtx target_reg = NULL_RTX;
    2896        73315 :   bool in_conversion_seq = false;
    2897        73315 :   HOST_WIDE_INT size;
    2898        73315 :   HOST_WIDE_INT size_stored;
    2899              : 
    2900        73315 :   if (GET_CODE (entry_parm) == PARALLEL)
    2901         3811 :     entry_parm = emit_group_move_into_temps (entry_parm);
    2902              : 
    2903              :   /* If we want the parameter in a pseudo, don't use a stack slot.  */
    2904        73315 :   if (is_gimple_reg (parm) && use_register_for_decl (parm))
    2905              :     {
    2906            0 :       tree def = ssa_default_def (cfun, parm);
    2907            0 :       gcc_assert (def);
    2908            0 :       machine_mode mode = promote_ssa_mode (def, NULL);
    2909            0 :       rtx reg = gen_reg_rtx (mode);
    2910            0 :       if (GET_CODE (reg) != CONCAT)
    2911              :         stack_parm = reg;
    2912              :       else
    2913              :         {
    2914            0 :           target_reg = reg;
    2915              :           /* Avoid allocating a stack slot, if there isn't one
    2916              :              preallocated by the ABI.  It might seem like we should
    2917              :              always prefer a pseudo, but converting between
    2918              :              floating-point and integer modes goes through the stack
    2919              :              on various machines, so it's better to use the reserved
    2920              :              stack slot than to risk wasting it and allocating more
    2921              :              for the conversion.  */
    2922            0 :           if (stack_parm == NULL_RTX)
    2923              :             {
    2924            0 :               int save = generating_concat_p;
    2925            0 :               generating_concat_p = 0;
    2926            0 :               stack_parm = gen_reg_rtx (mode);
    2927            0 :               generating_concat_p = save;
    2928              :             }
    2929              :         }
    2930            0 :       data->stack_parm = NULL;
    2931              :     }
    2932              : 
    2933        73315 :   size = int_size_in_bytes (data->arg.type);
    2934        85932 :   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
    2935        73315 :   if (stack_parm == 0)
    2936              :     {
    2937         5445 :       HOST_WIDE_INT parm_align
    2938              :         = ((STRICT_ALIGNMENT || BITS_PER_WORD <= MAX_SUPPORTED_STACK_ALIGNMENT)
    2939         5657 :            ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
    2940              : 
    2941         5445 :       SET_DECL_ALIGN (parm, parm_align);
    2942         5445 :       if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
    2943              :         {
    2944              :           rtx allocsize = gen_int_mode (size_stored, Pmode);
    2945              :           get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
    2946              :           stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
    2947              :                                            MAX_SUPPORTED_STACK_ALIGNMENT);
    2948              :           rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
    2949              :                                             DECL_ALIGN (parm));
    2950              :           mark_reg_pointer (addr, DECL_ALIGN (parm));
    2951              :           stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
    2952              :           MEM_NOTRAP_P (stack_parm) = 1;
    2953              :         }
    2954              :       else
    2955         5445 :         stack_parm = assign_stack_local (BLKmode, size_stored,
    2956         5445 :                                          DECL_ALIGN (parm));
    2957        10890 :       if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
    2958           77 :         PUT_MODE (stack_parm, GET_MODE (entry_parm));
    2959         5445 :       set_mem_attributes (stack_parm, parm, 1);
    2960              :     }
    2961              : 
    2962              :   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
    2963              :      calls that pass values in multiple non-contiguous locations.  */
    2964        73315 :   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    2965              :     {
    2966         5147 :       rtx mem;
    2967              : 
    2968              :       /* Note that we will be storing an integral number of words.
    2969              :          So we have to be careful to ensure that we allocate an
    2970              :          integral number of words.  We do this above when we call
    2971              :          assign_stack_local if space was not allocated in the argument
    2972              :          list.  If it was, this will not work if PARM_BOUNDARY is not
    2973              :          a multiple of BITS_PER_WORD.  It isn't clear how to fix this
    2974              :          if it becomes a problem.  Exception is when BLKmode arrives
    2975              :          with arguments not conforming to word_mode.  */
    2976              : 
    2977         5147 :       if (data->stack_parm == 0)
    2978              :         ;
    2979         5147 :       else if (GET_CODE (entry_parm) == PARALLEL)
    2980              :         ;
    2981              :       else
    2982         5147 :         gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
    2983              : 
    2984         5147 :       mem = validize_mem (copy_rtx (stack_parm));
    2985              : 
    2986              :       /* Handle values in multiple non-contiguous locations.  */
    2987         5147 :       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
    2988            0 :         emit_group_store (mem, entry_parm, data->arg.type, size);
    2989         5147 :       else if (GET_CODE (entry_parm) == PARALLEL)
    2990              :         {
    2991         3811 :           push_to_sequence2 (all->first_conversion_insn,
    2992              :                              all->last_conversion_insn);
    2993         3811 :           emit_group_store (mem, entry_parm, data->arg.type, size);
    2994         3811 :           all->first_conversion_insn = get_insns ();
    2995         3811 :           all->last_conversion_insn = get_last_insn ();
    2996         3811 :           end_sequence ();
    2997         3811 :           in_conversion_seq = true;
    2998              :         }
    2999              : 
    3000         1336 :       else if (size == 0)
    3001              :         ;
    3002              : 
    3003              :       /* If SIZE is that of a mode no bigger than a word, just use
    3004              :          that mode's store operation.  */
    3005         1320 :       else if (size <= UNITS_PER_WORD)
    3006              :         {
    3007         1316 :           unsigned int bits = size * BITS_PER_UNIT;
    3008         1316 :           machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
    3009              : 
    3010         1316 :           if (mode != BLKmode
    3011              : #ifdef BLOCK_REG_PADDING
    3012              :               && (size == UNITS_PER_WORD
    3013              :                   || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
    3014              :                       != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    3015              : #endif
    3016              :               )
    3017              :             {
    3018          102 :               rtx reg;
    3019              : 
    3020              :               /* We are really truncating a word_mode value containing
    3021              :                  SIZE bytes into a value of mode MODE.  If such an
    3022              :                  operation requires no actual instructions, we can refer
    3023              :                  to the value directly in mode MODE, otherwise we must
    3024              :                  start with the register in word_mode and explicitly
    3025              :                  convert it.  */
    3026          102 :               if (mode == word_mode
    3027          102 :                   || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
    3028          102 :                 reg = gen_rtx_REG (mode, REGNO (entry_parm));
    3029              :               else
    3030              :                 {
    3031            0 :                   reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
    3032            0 :                   reg = convert_to_mode (mode, copy_to_reg (reg), 1);
    3033              :                 }
    3034              : 
    3035              :               /* We use adjust_address to get a new MEM with the mode
    3036              :                  changed.  adjust_address is better than change_address
    3037              :                  for this purpose because adjust_address does not lose
    3038              :                  the MEM_EXPR associated with the MEM.
    3039              : 
    3040              :                  If the MEM_EXPR is lost, then optimizations like DSE
    3041              :                  assume the MEM escapes and thus is not subject to DSE.  */
    3042          102 :               emit_move_insn (adjust_address (mem, mode, 0), reg);
    3043              :             }
    3044              : 
    3045              : #ifdef BLOCK_REG_PADDING
    3046              :           /* Storing the register in memory as a full word, as
    3047              :              move_block_from_reg below would do, and then using the
    3048              :              MEM in a smaller mode, has the effect of shifting right
    3049              :              if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
    3050              :              shifting must be explicit.  */
    3051              :           else if (!MEM_P (mem))
    3052              :             {
    3053              :               rtx x;
    3054              : 
    3055              :               /* If the assert below fails, we should have taken the
    3056              :                  mode != BLKmode path above, unless we have downward
    3057              :                  padding of smaller-than-word arguments on a machine
    3058              :                  with little-endian bytes, which would likely require
    3059              :                  additional changes to work correctly.  */
    3060              :               gcc_checking_assert (BYTES_BIG_ENDIAN
    3061              :                                    && (BLOCK_REG_PADDING (mode,
    3062              :                                                           data->arg.type, 1)
    3063              :                                        == PAD_UPWARD));
    3064              : 
    3065              :               int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
    3066              : 
    3067              :               x = gen_rtx_REG (word_mode, REGNO (entry_parm));
    3068              :               x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
    3069              :                                 NULL_RTX, 1);
    3070              :               x = force_reg (word_mode, x);
    3071              :               x = gen_lowpart_SUBREG (GET_MODE (mem), x);
    3072              : 
    3073              :               emit_move_insn (mem, x);
    3074              :             }
    3075              : #endif
    3076              : 
    3077              :           /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
    3078              :              machine must be aligned to the left before storing
    3079              :              to memory.  Note that the previous test doesn't
    3080              :              handle all cases (e.g. SIZE == 3).  */
    3081         1214 :           else if (size != UNITS_PER_WORD
    3082              : #ifdef BLOCK_REG_PADDING
    3083              :                    && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
    3084              :                        == PAD_DOWNWARD)
    3085              : #else
    3086              :                    && BYTES_BIG_ENDIAN
    3087              : #endif
    3088              :                    )
    3089              :             {
    3090              :               rtx tem, x;
    3091              :               int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
    3092              :               rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
    3093              : 
    3094              :               x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
    3095              :               tem = change_address (mem, word_mode, 0);
    3096              :               emit_move_insn (tem, x);
    3097              :             }
    3098              :           else
    3099         2428 :             move_block_from_reg (REGNO (entry_parm), mem,
    3100         1214 :                                  size_stored / UNITS_PER_WORD);
    3101              :         }
    3102            2 :       else if (!MEM_P (mem))
    3103              :         {
    3104            0 :           gcc_checking_assert (size > UNITS_PER_WORD);
    3105              : #ifdef BLOCK_REG_PADDING
    3106              :           gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
    3107              :                                                   data->arg.type, 0)
    3108              :                                == PAD_UPWARD);
    3109              : #endif
    3110            0 :           emit_move_insn (mem, entry_parm);
    3111              :         }
    3112              :       else
    3113            2 :         move_block_from_reg (REGNO (entry_parm), mem,
    3114            2 :                              size_stored / UNITS_PER_WORD);
    3115              :     }
    3116        68168 :   else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
    3117              :     {
    3118          202 :       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
    3119          202 :       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
    3120              :                        BLOCK_OP_NORMAL);
    3121          202 :       all->first_conversion_insn = get_insns ();
    3122          202 :       all->last_conversion_insn = get_last_insn ();
    3123          202 :       end_sequence ();
    3124          202 :       in_conversion_seq = true;
    3125              :     }
    3126              : 
    3127        73315 :   if (target_reg)
    3128              :     {
    3129            0 :       if (!in_conversion_seq)
    3130            0 :         emit_move_insn (target_reg, stack_parm);
    3131              :       else
    3132              :         {
    3133            0 :           push_to_sequence2 (all->first_conversion_insn,
    3134              :                              all->last_conversion_insn);
    3135            0 :           emit_move_insn (target_reg, stack_parm);
    3136            0 :           all->first_conversion_insn = get_insns ();
    3137            0 :           all->last_conversion_insn = get_last_insn ();
    3138            0 :           end_sequence ();
    3139              :         }
    3140              :       stack_parm = target_reg;
    3141              :     }
    3142              : 
    3143        73315 :   data->stack_parm = stack_parm;
    3144        73315 :   set_parm_rtl (parm, stack_parm);
    3145        73315 : }
    3146              : 
    3147              : /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
    3148              :    parameter.  Get it there.  Perform all ABI specified conversions.  */
    3149              : 
    3150              : static void
    3151      2289774 : assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
    3152              :                        struct assign_parm_data_one *data)
    3153              : {
    3154      2289774 :   rtx parmreg, validated_mem;
    3155      2289774 :   rtx equiv_stack_parm;
    3156      2289774 :   machine_mode promoted_nominal_mode;
    3157      2289774 :   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
    3158      2289774 :   bool did_conversion = false;
    3159      2289774 :   bool need_conversion, moved;
    3160      2289774 :   enum insn_code icode;
    3161      2289774 :   rtx rtl;
    3162              : 
    3163              :   /* Store the parm in a pseudoregister during the function, but we may
    3164              :      need to do it in a wider mode.  Using 2 here makes the result
    3165              :      consistent with promote_decl_mode and thus expand_expr_real_1.  */
    3166      2289774 :   promoted_nominal_mode
    3167      4579548 :     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
    3168      2289774 :                              TREE_TYPE (current_function_decl), 2);
    3169              : 
    3170      2289774 :   parmreg = gen_reg_rtx (promoted_nominal_mode);
    3171      2289774 :   if (!DECL_ARTIFICIAL (parm))
    3172      2063853 :     mark_user_reg (parmreg);
    3173              : 
    3174              :   /* If this was an item that we received a pointer to,
    3175              :      set rtl appropriately.  */
    3176      2289774 :   if (data->arg.pass_by_reference)
    3177              :     {
    3178         4930 :       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
    3179         4930 :       set_mem_attributes (rtl, parm, 1);
    3180              :     }
    3181              :   else
    3182              :     rtl = parmreg;
    3183              : 
    3184      2289774 :   assign_parm_remove_parallels (data);
    3185              : 
    3186              :   /* Copy the value into the register, thus bridging between
    3187              :      assign_parm_find_data_types and expand_expr_real_1.  */
    3188              : 
    3189      2289774 :   equiv_stack_parm = data->stack_parm;
    3190      2289774 :   validated_mem = validize_mem (copy_rtx (data->entry_parm));
    3191              : 
    3192      2289774 :   need_conversion = (data->nominal_mode != data->passed_mode
    3193      2289774 :                      || promoted_nominal_mode != data->arg.mode);
    3194         4171 :   moved = false;
    3195              : 
    3196              :   if (need_conversion
    3197         4171 :       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
    3198         3660 :       && data->nominal_mode == data->passed_mode
    3199            0 :       && data->nominal_mode == GET_MODE (data->entry_parm))
    3200              :     {
    3201              :       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
    3202              :          mode, by the caller.  We now have to convert it to
    3203              :          NOMINAL_MODE, if different.  However, PARMREG may be in
    3204              :          a different mode than NOMINAL_MODE if it is being stored
    3205              :          promoted.
    3206              : 
    3207              :          If ENTRY_PARM is a hard register, it might be in a register
    3208              :          not valid for operating in its mode (e.g., an odd-numbered
    3209              :          register for a DFmode).  In that case, moves are the only
    3210              :          thing valid, so we can't do a convert from there.  This
    3211              :          occurs when the calling sequence allow such misaligned
    3212              :          usages.
    3213              : 
    3214              :          In addition, the conversion may involve a call, which could
    3215              :          clobber parameters which haven't been copied to pseudo
    3216              :          registers yet.
    3217              : 
    3218              :          First, we try to emit an insn which performs the necessary
    3219              :          conversion.  We verify that this insn does not clobber any
    3220              :          hard registers.  */
    3221              : 
    3222            0 :       rtx op0, op1;
    3223              : 
    3224            0 :       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
    3225              :                             unsignedp);
    3226              : 
    3227            0 :       op0 = parmreg;
    3228            0 :       op1 = validated_mem;
    3229            0 :       if (icode != CODE_FOR_nothing
    3230            0 :           && insn_operand_matches (icode, 0, op0)
    3231            0 :           && insn_operand_matches (icode, 1, op1))
    3232              :         {
    3233            0 :           enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
    3234            0 :           rtx_insn *insn, *insns;
    3235            0 :           rtx t = op1;
    3236            0 :           HARD_REG_SET hardregs;
    3237              : 
    3238            0 :           start_sequence ();
    3239              :           /* If op1 is a hard register that is likely spilled, first
    3240              :              force it into a pseudo, otherwise combiner might extend
    3241              :              its lifetime too much.  */
    3242            0 :           if (GET_CODE (t) == SUBREG)
    3243            0 :             t = SUBREG_REG (t);
    3244            0 :           if (REG_P (t)
    3245            0 :               && HARD_REGISTER_P (t)
    3246            0 :               && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
    3247            0 :               && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
    3248              :             {
    3249            0 :               t = gen_reg_rtx (GET_MODE (op1));
    3250            0 :               emit_move_insn (t, op1);
    3251              :             }
    3252              :           else
    3253              :             t = op1;
    3254            0 :           rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
    3255              :                                            data->passed_mode, unsignedp);
    3256            0 :           emit_insn (pat);
    3257            0 :           insns = get_insns ();
    3258              : 
    3259            0 :           moved = true;
    3260            0 :           CLEAR_HARD_REG_SET (hardregs);
    3261            0 :           for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
    3262              :             {
    3263            0 :               if (INSN_P (insn))
    3264            0 :                 note_stores (insn, record_hard_reg_sets, &hardregs);
    3265            0 :               if (!hard_reg_set_empty_p (hardregs))
    3266            0 :                 moved = false;
    3267              :             }
    3268              : 
    3269            0 :           end_sequence ();
    3270              : 
    3271            0 :           if (moved)
    3272              :             {
    3273            0 :               emit_insn (insns);
    3274            0 :               if (equiv_stack_parm != NULL_RTX)
    3275            0 :                 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
    3276              :                                                   equiv_stack_parm);
    3277              :             }
    3278              :         }
    3279              :     }
    3280              : 
    3281            0 :   if (moved)
    3282              :     /* Nothing to do.  */
    3283              :     ;
    3284      2289774 :   else if (need_conversion)
    3285              :     {
    3286              :       /* We did not have an insn to convert directly, or the sequence
    3287              :          generated appeared unsafe.  We must first copy the parm to a
    3288              :          pseudo reg, and save the conversion until after all
    3289              :          parameters have been moved.  */
    3290              : 
    3291         4171 :       int save_tree_used;
    3292         4171 :       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
    3293              : 
    3294         4171 :       emit_move_insn (tempreg, validated_mem);
    3295              : 
    3296         4171 :       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
    3297         4171 :       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
    3298              : 
    3299         4171 :       if (partial_subreg_p (tempreg)
    3300         3660 :           && GET_MODE (tempreg) == data->nominal_mode
    3301         3660 :           && REG_P (SUBREG_REG (tempreg))
    3302         3660 :           && data->nominal_mode == data->passed_mode
    3303         3660 :           && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
    3304              :         {
    3305              :           /* The argument is already sign/zero extended, so note it
    3306              :              into the subreg.  */
    3307            0 :           SUBREG_PROMOTED_VAR_P (tempreg) = 1;
    3308            0 :           SUBREG_PROMOTED_SET (tempreg, unsignedp);
    3309              :         }
    3310              : 
    3311              :       /* TREE_USED gets set erroneously during expand_assignment.  */
    3312         4171 :       save_tree_used = TREE_USED (parm);
    3313         4171 :       SET_DECL_RTL (parm, rtl);
    3314         4171 :       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
    3315         4171 :       SET_DECL_RTL (parm, NULL_RTX);
    3316         4171 :       TREE_USED (parm) = save_tree_used;
    3317         4171 :       all->first_conversion_insn = get_insns ();
    3318         4171 :       all->last_conversion_insn = get_last_insn ();
    3319         4171 :       end_sequence ();
    3320              : 
    3321         4171 :       did_conversion = true;
    3322              :     }
    3323      2285603 :   else if (MEM_P (data->entry_parm)
    3324       822044 :            && GET_MODE_ALIGNMENT (promoted_nominal_mode)
    3325       822102 :               > MEM_ALIGN (data->entry_parm)
    3326      2309459 :            && (((icode = optab_handler (movmisalign_optab,
    3327              :                                         promoted_nominal_mode))
    3328              :                 != CODE_FOR_nothing)
    3329        23852 :                || targetm.slow_unaligned_access (promoted_nominal_mode,
    3330        23910 :                                                  MEM_ALIGN (data->entry_parm))))
    3331              :     {
    3332            4 :       if (icode != CODE_FOR_nothing)
    3333            4 :         emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
    3334              :       else
    3335            0 :         rtl = parmreg = extract_bit_field (validated_mem,
    3336            0 :                         GET_MODE_BITSIZE (promoted_nominal_mode), 0,
    3337              :                         unsignedp, parmreg,
    3338              :                         promoted_nominal_mode, VOIDmode, false, NULL);
    3339              :     }
    3340              :   else
    3341      2285599 :     emit_move_insn (parmreg, validated_mem);
    3342              : 
    3343              :   /* If we were passed a pointer but the actual value can live in a register,
    3344              :      retrieve it and use it directly.  Note that we cannot use nominal_mode,
    3345              :      because it will have been set to Pmode above, we must use the actual mode
    3346              :      of the parameter instead.  */
    3347      2289774 :   if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    3348              :     {
    3349              :       /* Use a stack slot for debugging purposes if possible.  */
    3350          695 :       if (use_register_for_decl (parm))
    3351              :         {
    3352          365 :           parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
    3353          365 :           mark_user_reg (parmreg);
    3354              :         }
    3355              :       else
    3356              :         {
    3357          330 :           int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
    3358              :                                             TYPE_MODE (TREE_TYPE (parm)),
    3359              :                                             TYPE_ALIGN (TREE_TYPE (parm)));
    3360          330 :           parmreg
    3361          330 :             = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
    3362          660 :                                   GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
    3363              :                                   align);
    3364          330 :           set_mem_attributes (parmreg, parm, 1);
    3365              :         }
    3366              : 
    3367              :       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
    3368              :          the debug info in case it is not legitimate.  */
    3369          695 :       if (GET_MODE (parmreg) != GET_MODE (rtl))
    3370              :         {
    3371            0 :           rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
    3372            0 :           int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
    3373              : 
    3374            0 :           push_to_sequence2 (all->first_conversion_insn,
    3375              :                              all->last_conversion_insn);
    3376            0 :           emit_move_insn (tempreg, rtl);
    3377            0 :           tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
    3378            0 :           emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
    3379              :                           tempreg);
    3380            0 :           all->first_conversion_insn = get_insns ();
    3381            0 :           all->last_conversion_insn = get_last_insn ();
    3382            0 :           end_sequence ();
    3383              : 
    3384            0 :           did_conversion = true;
    3385              :         }
    3386              :       else
    3387          695 :         emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
    3388              : 
    3389          695 :       rtl = parmreg;
    3390              : 
    3391              :       /* STACK_PARM is the pointer, not the parm, and PARMREG is
    3392              :          now the parm.  */
    3393          695 :       data->stack_parm = NULL;
    3394              :     }
    3395              : 
    3396      2289774 :   set_parm_rtl (parm, rtl);
    3397              : 
    3398              :   /* Mark the register as eliminable if we did no conversion and it was
    3399              :      copied from memory at a fixed offset, and the arg pointer was not
    3400              :      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
    3401              :      offset formed an invalid address, such memory-equivalences as we
    3402              :      make here would screw up life analysis for it.  */
    3403      2289774 :   if (data->nominal_mode == data->passed_mode
    3404      2285603 :       && !did_conversion
    3405      2285603 :       && data->stack_parm != 0
    3406       877568 :       && MEM_P (data->stack_parm)
    3407       877568 :       && data->locate.offset.var == 0
    3408      3167342 :       && reg_mentioned_p (virtual_incoming_args_rtx,
    3409       877568 :                           XEXP (data->stack_parm, 0)))
    3410              :     {
    3411       877568 :       rtx_insn *linsn = get_last_insn ();
    3412       877568 :       rtx_insn *sinsn;
    3413       877568 :       rtx set;
    3414              : 
    3415              :       /* Mark complex types separately.  */
    3416       877568 :       if (GET_CODE (parmreg) == CONCAT)
    3417              :         {
    3418         1166 :           scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
    3419         1166 :           int regnor = REGNO (XEXP (parmreg, 0));
    3420         1166 :           int regnoi = REGNO (XEXP (parmreg, 1));
    3421         1166 :           rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
    3422         2332 :           rtx stacki = adjust_address_nv (data->stack_parm, submode,
    3423              :                                           GET_MODE_SIZE (submode));
    3424              : 
    3425              :           /* Scan backwards for the set of the real and
    3426              :              imaginary parts.  */
    3427         6072 :           for (sinsn = linsn; sinsn != 0;
    3428         4906 :                sinsn = prev_nonnote_insn (sinsn))
    3429              :             {
    3430         4906 :               set = single_set (sinsn);
    3431         4906 :               if (set == 0)
    3432            0 :                 continue;
    3433              : 
    3434         4906 :               if (SET_DEST (set) == regno_reg_rtx [regnoi])
    3435         1166 :                 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
    3436         3740 :               else if (SET_DEST (set) == regno_reg_rtx [regnor])
    3437         1166 :                 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
    3438              :             }
    3439              :         }
    3440              :       else
    3441       876402 :         set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    3442              :     }
    3443              : 
    3444              :   /* For pointer data type, suggest pointer register.  */
    3445      2289774 :   if (POINTER_TYPE_P (TREE_TYPE (parm)))
    3446       937640 :     mark_reg_pointer (parmreg,
    3447       937640 :                       TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
    3448      2289774 : }
    3449              : 
    3450              : /* A subroutine of assign_parms.  Allocate stack space to hold the current
    3451              :    parameter.  Get it there.  Perform all ABI specified conversions.  */
    3452              : 
    3453              : static void
    3454       808993 : assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
    3455              :                          struct assign_parm_data_one *data)
    3456              : {
    3457              :   /* Value must be stored in the stack slot STACK_PARM during function
    3458              :      execution.  */
    3459       808993 :   bool to_conversion = false;
    3460              : 
    3461       808993 :   assign_parm_remove_parallels (data);
    3462              : 
    3463       808993 :   if (data->arg.mode != data->nominal_mode)
    3464              :     {
    3465              :       /* Conversion is required.  */
    3466          835 :       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
    3467              : 
    3468          835 :       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
    3469              : 
    3470              :       /* Some ABIs require scalar floating point modes to be passed
    3471              :          in a wider scalar integer mode.  We need to explicitly
    3472              :          truncate to an integer mode of the correct precision before
    3473              :          using a SUBREG to reinterpret as a floating point value.  */
    3474          835 :       if (SCALAR_FLOAT_MODE_P (data->nominal_mode)
    3475          101 :           && SCALAR_INT_MODE_P (data->arg.mode)
    3476          835 :           && known_lt (GET_MODE_SIZE (data->nominal_mode),
    3477              :                        GET_MODE_SIZE (data->arg.mode)))
    3478            0 :         tempreg = convert_wider_int_to_float (data->nominal_mode,
    3479              :                                               data->arg.mode, tempreg);
    3480              : 
    3481          835 :       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
    3482          835 :       to_conversion = true;
    3483              : 
    3484         2505 :       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
    3485          835 :                                           TYPE_UNSIGNED (TREE_TYPE (parm)));
    3486              : 
    3487          835 :       if (data->stack_parm)
    3488              :         {
    3489            0 :           poly_int64 offset
    3490            0 :             = subreg_lowpart_offset (data->nominal_mode,
    3491            0 :                                      GET_MODE (data->stack_parm));
    3492              :           /* ??? This may need a big-endian conversion on sparc64.  */
    3493            0 :           data->stack_parm
    3494            0 :             = adjust_address (data->stack_parm, data->nominal_mode, 0);
    3495            0 :           if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
    3496            0 :             set_mem_offset (data->stack_parm,
    3497            0 :                             MEM_OFFSET (data->stack_parm) + offset);
    3498              :         }
    3499              :     }
    3500              : 
    3501       808993 :   if (data->entry_parm != data->stack_parm)
    3502              :     {
    3503       632031 :       rtx src, dest;
    3504              : 
    3505       632031 :       if (data->stack_parm == 0)
    3506              :         {
    3507       582273 :           int align = STACK_SLOT_ALIGNMENT (data->arg.type,
    3508              :                                             GET_MODE (data->entry_parm),
    3509              :                                             TYPE_ALIGN (data->arg.type));
    3510       582273 :           if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
    3511       584828 :               && ((optab_handler (movmisalign_optab,
    3512         2555 :                                   GET_MODE (data->entry_parm))
    3513              :                    != CODE_FOR_nothing)
    3514         2555 :                   || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
    3515              :                                                     align)))
    3516            0 :             align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
    3517       582273 :           data->stack_parm
    3518       582273 :             = assign_stack_local (GET_MODE (data->entry_parm),
    3519      1164546 :                                   GET_MODE_SIZE (GET_MODE (data->entry_parm)),
    3520              :                                   align);
    3521       582273 :           align = MEM_ALIGN (data->stack_parm);
    3522       582273 :           set_mem_attributes (data->stack_parm, parm, 1);
    3523       582273 :           set_mem_align (data->stack_parm, align);
    3524              :         }
    3525              : 
    3526       632031 :       dest = validize_mem (copy_rtx (data->stack_parm));
    3527       632031 :       src = validize_mem (copy_rtx (data->entry_parm));
    3528              : 
    3529       632031 :       if (TYPE_EMPTY_P (data->arg.type))
    3530              :         /* Empty types don't really need to be copied.  */;
    3531       630627 :       else if (MEM_P (src))
    3532              :         {
    3533              :           /* Use a block move to handle potentially misaligned entry_parm.  */
    3534           61 :           if (!to_conversion)
    3535           61 :             push_to_sequence2 (all->first_conversion_insn,
    3536              :                                all->last_conversion_insn);
    3537           61 :           to_conversion = true;
    3538              : 
    3539           61 :           emit_block_move (dest, src,
    3540           61 :                            GEN_INT (int_size_in_bytes (data->arg.type)),
    3541              :                            BLOCK_OP_NORMAL);
    3542              :         }
    3543              :       else
    3544              :         {
    3545       630566 :           if (!REG_P (src))
    3546         1531 :             src = force_reg (GET_MODE (src), src);
    3547       630566 :           emit_move_insn (dest, src);
    3548              :         }
    3549              :     }
    3550              : 
    3551       808993 :   if (to_conversion)
    3552              :     {
    3553          896 :       all->first_conversion_insn = get_insns ();
    3554          896 :       all->last_conversion_insn = get_last_insn ();
    3555          896 :       end_sequence ();
    3556              :     }
    3557              : 
    3558       808993 :   set_parm_rtl (parm, data->stack_parm);
    3559       808993 : }
    3560              : 
    3561              : /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
    3562              :    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
    3563              : 
    3564              : static void
    3565            0 : assign_parms_unsplit_complex (struct assign_parm_data_all *all,
    3566              :                               vec<tree> fnargs)
    3567              : {
    3568            0 :   tree parm;
    3569            0 :   tree orig_fnargs = all->orig_fnargs;
    3570            0 :   unsigned i = 0;
    3571              : 
    3572            0 :   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    3573              :     {
    3574            0 :       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
    3575            0 :           && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
    3576              :         {
    3577            0 :           rtx tmp, real, imag;
    3578            0 :           scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
    3579              : 
    3580            0 :           real = DECL_RTL (fnargs[i]);
    3581            0 :           imag = DECL_RTL (fnargs[i + 1]);
    3582            0 :           if (inner != GET_MODE (real))
    3583              :             {
    3584            0 :               real = gen_lowpart_SUBREG (inner, real);
    3585            0 :               imag = gen_lowpart_SUBREG (inner, imag);
    3586              :             }
    3587              : 
    3588            0 :           if (TREE_ADDRESSABLE (parm))
    3589              :             {
    3590            0 :               rtx rmem, imem;
    3591            0 :               HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
    3592            0 :               int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
    3593              :                                                 DECL_MODE (parm),
    3594              :                                                 TYPE_ALIGN (TREE_TYPE (parm)));
    3595              : 
    3596              :               /* split_complex_arg put the real and imag parts in
    3597              :                  pseudos.  Move them to memory.  */
    3598            0 :               tmp = assign_stack_local (DECL_MODE (parm), size, align);
    3599            0 :               set_mem_attributes (tmp, parm, 1);
    3600            0 :               rmem = adjust_address_nv (tmp, inner, 0);
    3601            0 :               imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
    3602            0 :               push_to_sequence2 (all->first_conversion_insn,
    3603              :                                  all->last_conversion_insn);
    3604            0 :               emit_move_insn (rmem, real);
    3605            0 :               emit_move_insn (imem, imag);
    3606            0 :               all->first_conversion_insn = get_insns ();
    3607            0 :               all->last_conversion_insn = get_last_insn ();
    3608            0 :               end_sequence ();
    3609              :             }
    3610              :           else
    3611            0 :             tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
    3612            0 :           set_parm_rtl (parm, tmp);
    3613              : 
    3614            0 :           real = DECL_INCOMING_RTL (fnargs[i]);
    3615            0 :           imag = DECL_INCOMING_RTL (fnargs[i + 1]);
    3616            0 :           if (inner != GET_MODE (real))
    3617              :             {
    3618            0 :               real = gen_lowpart_SUBREG (inner, real);
    3619            0 :               imag = gen_lowpart_SUBREG (inner, imag);
    3620              :             }
    3621            0 :           tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
    3622            0 :           set_decl_incoming_rtl (parm, tmp, false);
    3623            0 :           i++;
    3624              :         }
    3625              :     }
    3626            0 : }
    3627              : 
    3628              : /* Assign RTL expressions to the function's parameters.  This may involve
    3629              :    copying them into registers and using those registers as the DECL_RTL.  */
    3630              : 
    3631              : static void
    3632      1472141 : assign_parms (tree fndecl)
    3633              : {
    3634      1472141 :   struct assign_parm_data_all all;
    3635      1472141 :   tree parm;
    3636      1472141 :   vec<tree> fnargs;
    3637      1472141 :   unsigned i;
    3638              : 
    3639      1472141 :   crtl->args.internal_arg_pointer
    3640      1472141 :     = targetm.calls.internal_arg_pointer ();
    3641              : 
    3642      1472141 :   assign_parms_initialize_all (&all);
    3643      1472141 :   fnargs = assign_parms_augmented_arg_list (&all);
    3644              : 
    3645      1472141 :   if (TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (fndecl))
    3646      1472141 :       && fnargs.is_empty ())
    3647              :     {
    3648           96 :       struct assign_parm_data_one data = {};
    3649           96 :       assign_parms_setup_varargs (&all, &data, false);
    3650              :     }
    3651              : 
    3652      4644223 :   FOR_EACH_VEC_ELT (fnargs, i, parm)
    3653              :     {
    3654      3172082 :       struct assign_parm_data_one data;
    3655              : 
    3656              :       /* Extract the type of PARM; adjust it according to ABI.  */
    3657      3172082 :       assign_parm_find_data_types (&all, parm, &data);
    3658              : 
    3659              :       /* Early out for errors and void parameters.  */
    3660      3172082 :       if (data.passed_mode == VOIDmode)
    3661              :         {
    3662            0 :           SET_DECL_RTL (parm, const0_rtx);
    3663            0 :           DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
    3664            0 :           continue;
    3665              :         }
    3666              : 
    3667              :       /* Estimate stack alignment from parameter alignment.  */
    3668      3172082 :       if (SUPPORTS_STACK_ALIGNMENT)
    3669              :         {
    3670      3172082 :           unsigned int align
    3671      6344164 :             = targetm.calls.function_arg_boundary (data.arg.mode,
    3672      3172082 :                                                    data.arg.type);
    3673      3172082 :           align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
    3674      3172082 :           if (TYPE_ALIGN (data.nominal_type) > align)
    3675         4714 :             align = MINIMUM_ALIGNMENT (data.nominal_type,
    3676              :                                        TYPE_MODE (data.nominal_type),
    3677              :                                        TYPE_ALIGN (data.nominal_type));
    3678      3172082 :           if (crtl->stack_alignment_estimated < align)
    3679              :             {
    3680       355438 :               gcc_assert (!crtl->stack_realign_processed);
    3681       355438 :               crtl->stack_alignment_estimated = align;
    3682              :             }
    3683              :         }
    3684              : 
    3685              :       /* Find out where the parameter arrives in this function.  */
    3686      3172082 :       assign_parm_find_entry_rtl (&all, &data);
    3687              : 
    3688              :       /* Find out where stack space for this parameter might be.  */
    3689      3172082 :       if (assign_parm_is_stack_parm (&all, &data))
    3690              :         {
    3691      1178752 :           assign_parm_find_stack_rtl (parm, &data);
    3692      1178752 :           assign_parm_adjust_entry_rtl (&data);
    3693              :           /* For arguments that occupy no space in the parameter
    3694              :              passing area, have non-zero size and have address taken,
    3695              :              force creation of a stack slot so that they have distinct
    3696              :              address from other parameters.  */
    3697      1178752 :           if (TYPE_EMPTY_P (data.arg.type)
    3698         6608 :               && TREE_ADDRESSABLE (parm)
    3699         1563 :               && data.entry_parm == data.stack_parm
    3700         1563 :               && MEM_P (data.entry_parm)
    3701      1180315 :               && int_size_in_bytes (data.arg.type))
    3702         1397 :             data.stack_parm = NULL_RTX;
    3703              :         }
    3704              :       /* Record permanently how this parm was passed.  */
    3705      3172082 :       if (data.arg.pass_by_reference)
    3706              :         {
    3707         4930 :           rtx incoming_rtl
    3708         4930 :             = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
    3709              :                            data.entry_parm);
    3710         4930 :           set_decl_incoming_rtl (parm, incoming_rtl, true);
    3711              :         }
    3712              :       else
    3713      3167152 :         set_decl_incoming_rtl (parm, data.entry_parm, false);
    3714              : 
    3715      3172082 :       assign_parm_adjust_stack_rtl (&data);
    3716              : 
    3717      3172082 :       if (assign_parm_setup_block_p (&data))
    3718        73315 :         assign_parm_setup_block (&all, parm, &data);
    3719      3098767 :       else if (data.arg.pass_by_reference || use_register_for_decl (parm))
    3720      2289774 :         assign_parm_setup_reg (&all, parm, &data);
    3721              :       else
    3722       808993 :         assign_parm_setup_stack (&all, parm, &data);
    3723              : 
    3724      3172082 :       if (cfun->stdarg && !DECL_CHAIN (parm))
    3725        21416 :         assign_parms_setup_varargs (&all, &data, false);
    3726              : 
    3727              :       /* Update info on where next arg arrives in registers.  */
    3728      3172082 :       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
    3729              :     }
    3730              : 
    3731      1472141 :   if (targetm.calls.split_complex_arg)
    3732            0 :     assign_parms_unsplit_complex (&all, fnargs);
    3733              : 
    3734      1472141 :   fnargs.release ();
    3735              : 
    3736              :   /* Output all parameter conversion instructions (possibly including calls)
    3737              :      now that all parameters have been copied out of hard registers.  */
    3738      1472141 :   emit_insn (all.first_conversion_insn);
    3739              : 
    3740      1472141 :   do_pending_stack_adjust ();
    3741              : 
    3742              :   /* Estimate reload stack alignment from scalar return mode.  */
    3743      1472141 :   if (SUPPORTS_STACK_ALIGNMENT)
    3744              :     {
    3745      1472141 :       if (DECL_RESULT (fndecl))
    3746              :         {
    3747      1472141 :           tree type = TREE_TYPE (DECL_RESULT (fndecl));
    3748      1472141 :           machine_mode mode = TYPE_MODE (type);
    3749              : 
    3750      1472141 :           if (mode != BLKmode
    3751      1421293 :               && mode != VOIDmode
    3752       735019 :               && !AGGREGATE_TYPE_P (type))
    3753              :             {
    3754       674090 :               unsigned int align = GET_MODE_ALIGNMENT (mode);
    3755       674090 :               if (crtl->stack_alignment_estimated < align)
    3756              :                 {
    3757           10 :                   gcc_assert (!crtl->stack_realign_processed);
    3758           10 :                   crtl->stack_alignment_estimated = align;
    3759              :                 }
    3760              :             }
    3761              :         }
    3762              :     }
    3763              : 
    3764              :   /* If we are receiving a struct value address as the first argument, set up
    3765              :      the RTL for the function result. As this might require code to convert
    3766              :      the transmitted address to Pmode, we do this here to ensure that possible
    3767              :      preliminary conversions of the address have been emitted already.  */
    3768      1472141 :   if (all.function_result_decl)
    3769              :     {
    3770        69241 :       tree result = DECL_RESULT (current_function_decl);
    3771        69241 :       rtx addr = DECL_RTL (all.function_result_decl);
    3772        69241 :       rtx x;
    3773              : 
    3774        69241 :       if (DECL_BY_REFERENCE (result))
    3775              :         {
    3776         8758 :           SET_DECL_VALUE_EXPR (result, all.function_result_decl);
    3777         8758 :           x = addr;
    3778              :         }
    3779              :       else
    3780              :         {
    3781        60483 :           SET_DECL_VALUE_EXPR (result,
    3782              :                                build1 (INDIRECT_REF, TREE_TYPE (result),
    3783              :                                        all.function_result_decl));
    3784        60483 :           addr = convert_memory_address (Pmode, addr);
    3785        60483 :           x = gen_rtx_MEM (DECL_MODE (result), addr);
    3786        60483 :           set_mem_attributes (x, result, 1);
    3787              :         }
    3788              : 
    3789        69241 :       DECL_HAS_VALUE_EXPR_P (result) = 1;
    3790              : 
    3791        69241 :       set_parm_rtl (result, x);
    3792              :     }
    3793              : 
    3794              :   /* We have aligned all the args, so add space for the pretend args.  */
    3795      1472141 :   crtl->args.pretend_args_size = all.pretend_args_size;
    3796      1472141 :   all.stack_args_size.constant += all.extra_pretend_bytes;
    3797      1472141 :   crtl->args.size = all.stack_args_size.constant;
    3798              : 
    3799              :   /* Adjust function incoming argument size for alignment and
    3800              :      minimum length.  */
    3801              : 
    3802      1472141 :   crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
    3803      2944282 :   crtl->args.size = aligned_upper_bound (crtl->args.size,
    3804      1472141 :                                          PARM_BOUNDARY / BITS_PER_UNIT);
    3805              : 
    3806      1472141 :   if (ARGS_GROW_DOWNWARD)
    3807              :     {
    3808              :       crtl->args.arg_offset_rtx
    3809              :         = (all.stack_args_size.var == 0
    3810              :            ? gen_int_mode (-all.stack_args_size.constant, Pmode)
    3811              :            : expand_expr (size_diffop (all.stack_args_size.var,
    3812              :                                        size_int (-all.stack_args_size.constant)),
    3813              :                           NULL_RTX, VOIDmode, EXPAND_NORMAL));
    3814              :     }
    3815              :   else
    3816      1598529 :     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
    3817              : 
    3818              :   /* See how many bytes, if any, of its args a function should try to pop
    3819              :      on return.  */
    3820              : 
    3821      1472141 :   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
    3822      1472141 :                                                          TREE_TYPE (fndecl),
    3823              :                                                          crtl->args.size);
    3824              : 
    3825              :   /* For stdarg.h function, save info about
    3826              :      regs and stack space used by the named args.  */
    3827              : 
    3828      1472141 :   crtl->args.info = all.args_so_far_v;
    3829              : 
    3830              :   /* Set the rtx used for the function return value.  Put this in its
    3831              :      own variable so any optimizers that need this information don't have
    3832              :      to include tree.h.  Do this here so it gets done when an inlined
    3833              :      function gets output.  */
    3834              : 
    3835      1472141 :   crtl->return_rtx
    3836      1472141 :     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
    3837      2258008 :        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
    3838              : 
    3839              :   /* If scalar return value was computed in a pseudo-reg, or was a named
    3840              :      return value that got dumped to the stack, copy that to the hard
    3841              :      return register.  */
    3842      1472141 :   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    3843              :     {
    3844       785867 :       tree decl_result = DECL_RESULT (fndecl);
    3845       785867 :       rtx decl_rtl = DECL_RTL (decl_result);
    3846              : 
    3847       785867 :       if (REG_P (decl_rtl)
    3848       785867 :           ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
    3849        67145 :           : DECL_REGISTER (decl_result))
    3850              :         {
    3851       723509 :           rtx real_decl_rtl;
    3852              : 
    3853              :           /* Unless the psABI says not to.  */
    3854       723509 :           if (TYPE_EMPTY_P (TREE_TYPE (decl_result)))
    3855              :             real_decl_rtl = NULL_RTX;
    3856              :           else
    3857              :             {
    3858       718891 :               real_decl_rtl
    3859       718891 :                 = targetm.calls.function_value (TREE_TYPE (decl_result),
    3860              :                                                 fndecl, true);
    3861       718891 :               REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
    3862              :             }
    3863              :           /* The delay slot scheduler assumes that crtl->return_rtx
    3864              :              holds the hard register containing the return value, not a
    3865              :              temporary pseudo.  */
    3866       723509 :           crtl->return_rtx = real_decl_rtl;
    3867              :         }
    3868              :     }
    3869      1472141 : }
    3870              : 
    3871              : /* Gimplify the parameter list for current_function_decl.  This involves
    3872              :    evaluating SAVE_EXPRs of variable sized parameters and generating code
    3873              :    to implement callee-copies reference parameters.  Returns a sequence of
    3874              :    statements to add to the beginning of the function.  */
    3875              : 
    3876              : gimple_seq
    3877      2869750 : gimplify_parameters (gimple_seq *cleanup)
    3878              : {
    3879      2869750 :   struct assign_parm_data_all all;
    3880      2869750 :   tree parm;
    3881      2869750 :   gimple_seq stmts = NULL;
    3882      2869750 :   vec<tree> fnargs;
    3883      2869750 :   unsigned i;
    3884              : 
    3885      2869750 :   assign_parms_initialize_all (&all);
    3886      2869750 :   fnargs = assign_parms_augmented_arg_list (&all);
    3887              : 
    3888      8815360 :   FOR_EACH_VEC_ELT (fnargs, i, parm)
    3889              :     {
    3890      5945610 :       struct assign_parm_data_one data;
    3891              : 
    3892              :       /* Extract the type of PARM; adjust it according to ABI.  */
    3893      5945610 :       assign_parm_find_data_types (&all, parm, &data);
    3894              : 
    3895              :       /* Early out for errors and void parameters.  */
    3896      5945610 :       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
    3897           87 :         continue;
    3898              : 
    3899              :       /* Update info on where next arg arrives in registers.  */
    3900      5945523 :       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
    3901              : 
    3902              :       /* ??? Once upon a time variable_size stuffed parameter list
    3903              :          SAVE_EXPRs (amongst others) onto a pending sizes list.  This
    3904              :          turned out to be less than manageable in the gimple world.
    3905              :          Now we have to hunt them down ourselves.  */
    3906      5945523 :       gimplify_type_sizes (TREE_TYPE (parm), &stmts);
    3907              : 
    3908      5945523 :       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
    3909              :         {
    3910           41 :           gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
    3911           41 :           gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
    3912              :         }
    3913              : 
    3914      5945523 :       if (data.arg.pass_by_reference)
    3915              :         {
    3916         4938 :           tree type = TREE_TYPE (data.arg.type);
    3917         4938 :           function_arg_info orig_arg (type, data.arg.named);
    3918         4938 :           if (reference_callee_copied (&all.args_so_far_v, orig_arg))
    3919              :             {
    3920            0 :               tree local, t;
    3921              : 
    3922              :               /* For constant-sized objects, this is trivial; for
    3923              :                  variable-sized objects, we have to play games.  */
    3924            0 :               if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
    3925            0 :                   && !(flag_stack_check == GENERIC_STACK_CHECK
    3926            0 :                        && compare_tree_int (DECL_SIZE_UNIT (parm),
    3927              :                                             STACK_CHECK_MAX_VAR_SIZE) > 0))
    3928              :                 {
    3929            0 :                   local = create_tmp_var (type, get_name (parm));
    3930            0 :                   DECL_IGNORED_P (local) = 0;
    3931              :                   /* If PARM was addressable, move that flag over
    3932              :                      to the local copy, as its address will be taken,
    3933              :                      not the PARMs.  Keep the parms address taken
    3934              :                      as we'll query that flag during gimplification.  */
    3935            0 :                   if (TREE_ADDRESSABLE (parm))
    3936            0 :                     TREE_ADDRESSABLE (local) = 1;
    3937            0 :                   if (DECL_NOT_GIMPLE_REG_P (parm))
    3938            0 :                     DECL_NOT_GIMPLE_REG_P (local) = 1;
    3939              : 
    3940            0 :                   if (!is_gimple_reg (local)
    3941            0 :                       && flag_stack_reuse != SR_NONE)
    3942              :                     {
    3943            0 :                       tree clobber = build_clobber (type);
    3944            0 :                       gimple *clobber_stmt;
    3945            0 :                       clobber_stmt = gimple_build_assign (local, clobber);
    3946            0 :                       gimple_seq_add_stmt (cleanup, clobber_stmt);
    3947              :                     }
    3948              :                 }
    3949              :               else
    3950              :                 {
    3951            0 :                   tree ptr_type, addr;
    3952              : 
    3953            0 :                   ptr_type = build_pointer_type (type);
    3954            0 :                   addr = create_tmp_reg (ptr_type, get_name (parm));
    3955            0 :                   DECL_IGNORED_P (addr) = 0;
    3956            0 :                   local = build_fold_indirect_ref (addr);
    3957              : 
    3958            0 :                   t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
    3959            0 :                                               DECL_ALIGN (parm),
    3960              :                                               max_int_size_in_bytes (type));
    3961              :                   /* The call has been built for a variable-sized object.  */
    3962            0 :                   CALL_ALLOCA_FOR_VAR_P (t) = 1;
    3963            0 :                   t = fold_convert (ptr_type, t);
    3964            0 :                   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
    3965            0 :                   gimplify_and_add (t, &stmts);
    3966              :                 }
    3967              : 
    3968            0 :               gimplify_assign (local, parm, &stmts);
    3969              : 
    3970            0 :               SET_DECL_VALUE_EXPR (parm, local);
    3971            0 :               DECL_HAS_VALUE_EXPR_P (parm) = 1;
    3972              :             }
    3973              :         }
    3974              :     }
    3975              : 
    3976      2869750 :   fnargs.release ();
    3977              : 
    3978      2869750 :   return stmts;
    3979              : }
    3980              : 
    3981              : /* Compute the size and offset from the start of the stacked arguments for a
    3982              :    parm passed in mode PASSED_MODE and with type TYPE.
    3983              : 
    3984              :    INITIAL_OFFSET_PTR points to the current offset into the stacked
    3985              :    arguments.
    3986              : 
    3987              :    The starting offset and size for this parm are returned in
    3988              :    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
    3989              :    nonzero, the offset is that of stack slot, which is returned in
    3990              :    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
    3991              :    padding required from the initial offset ptr to the stack slot.
    3992              : 
    3993              :    IN_REGS is nonzero if the argument will be passed in registers.  It will
    3994              :    never be set if REG_PARM_STACK_SPACE is not defined.
    3995              : 
    3996              :    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
    3997              :    for arguments which are passed in registers.
    3998              : 
    3999              :    FNDECL is the function in which the argument was defined.
    4000              : 
    4001              :    There are two types of rounding that are done.  The first, controlled by
    4002              :    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
    4003              :    argument list to be aligned to the specific boundary (in bits).  This
    4004              :    rounding affects the initial and starting offsets, but not the argument
    4005              :    size.
    4006              : 
    4007              :    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
    4008              :    optionally rounds the size of the parm to PARM_BOUNDARY.  The
    4009              :    initial offset is not affected by this rounding, while the size always
    4010              :    is and the starting offset may be.  */
    4011              : 
    4012              : /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    4013              :     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    4014              :     callers pass in the total size of args so far as
    4015              :     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
    4016              : 
    4017              : void
    4018      5417683 : locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
    4019              :                      int reg_parm_stack_space, int partial,
    4020              :                      tree fndecl ATTRIBUTE_UNUSED,
    4021              :                      struct args_size *initial_offset_ptr,
    4022              :                      struct locate_and_pad_arg_data *locate)
    4023              : {
    4024      5417683 :   tree sizetree;
    4025      5417683 :   pad_direction where_pad;
    4026      5417683 :   unsigned int boundary, round_boundary;
    4027      5417683 :   int part_size_in_regs;
    4028              : 
    4029              :   /* If we have found a stack parm before we reach the end of the
    4030              :      area reserved for registers, skip that area.  */
    4031      5417683 :   if (! in_regs)
    4032              :     {
    4033      3248063 :       if (reg_parm_stack_space > 0)
    4034              :         {
    4035        83755 :           if (initial_offset_ptr->var
    4036        83755 :               || !ordered_p (initial_offset_ptr->constant,
    4037              :                              reg_parm_stack_space))
    4038              :             {
    4039            0 :               initial_offset_ptr->var
    4040            0 :                 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
    4041              :                               ssize_int (reg_parm_stack_space));
    4042            0 :               initial_offset_ptr->constant = 0;
    4043              :             }
    4044              :           else
    4045        83755 :             initial_offset_ptr->constant
    4046        83755 :               = ordered_max (initial_offset_ptr->constant,
    4047              :                              reg_parm_stack_space);
    4048              :         }
    4049              :     }
    4050              : 
    4051      5417683 :   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
    4052              : 
    4053      5417683 :   sizetree = (type
    4054      5417683 :               ? arg_size_in_bytes (type)
    4055        37841 :               : size_int (GET_MODE_SIZE (passed_mode)));
    4056      5417683 :   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
    4057      5417683 :   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
    4058      5417683 :   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
    4059              :                                                               type);
    4060      5417683 :   locate->where_pad = where_pad;
    4061              : 
    4062              :   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
    4063      5417683 :   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    4064              :     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
    4065              : 
    4066      5417683 :   locate->boundary = boundary;
    4067              : 
    4068      5417683 :   if (SUPPORTS_STACK_ALIGNMENT)
    4069              :     {
    4070              :       /* stack_alignment_estimated can't change after stack has been
    4071              :          realigned.  */
    4072      5417683 :       if (crtl->stack_alignment_estimated < boundary)
    4073              :         {
    4074         5036 :           if (!crtl->stack_realign_processed)
    4075         5036 :             crtl->stack_alignment_estimated = boundary;
    4076              :           else
    4077              :             {
    4078              :               /* If stack is realigned and stack alignment value
    4079              :                  hasn't been finalized, it is OK not to increase
    4080              :                  stack_alignment_estimated.  The bigger alignment
    4081              :                  requirement is recorded in stack_alignment_needed
    4082              :                  below.  */
    4083            0 :               gcc_assert (!crtl->stack_realign_finalized
    4084              :                           && crtl->stack_realign_needed);
    4085              :             }
    4086              :         }
    4087              :     }
    4088              : 
    4089      5417683 :   if (ARGS_GROW_DOWNWARD)
    4090              :     {
    4091              :       locate->slot_offset.constant = -initial_offset_ptr->constant;
    4092              :       if (initial_offset_ptr->var)
    4093              :         locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
    4094              :                                               initial_offset_ptr->var);
    4095              : 
    4096              :       {
    4097              :         tree s2 = sizetree;
    4098              :         if (where_pad != PAD_NONE
    4099              :             && (!tree_fits_uhwi_p (sizetree)
    4100              :                 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    4101              :           s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    4102              :         SUB_PARM_SIZE (locate->slot_offset, s2);
    4103              :       }
    4104              : 
    4105              :       locate->slot_offset.constant += part_size_in_regs;
    4106              : 
    4107              :       if (!in_regs || reg_parm_stack_space > 0)
    4108              :         pad_to_arg_alignment (&locate->slot_offset, boundary,
    4109              :                               &locate->alignment_pad);
    4110              : 
    4111              :       locate->size.constant = (-initial_offset_ptr->constant
    4112              :                                - locate->slot_offset.constant);
    4113              :       if (initial_offset_ptr->var)
    4114              :         locate->size.var = size_binop (MINUS_EXPR,
    4115              :                                        size_binop (MINUS_EXPR,
    4116              :                                                    ssize_int (0),
    4117              :                                                    initial_offset_ptr->var),
    4118              :                                        locate->slot_offset.var);
    4119              : 
    4120              :       /* Pad_below needs the pre-rounded size to know how much to pad
    4121              :          below.  */
    4122              :       locate->offset = locate->slot_offset;
    4123              :       if (where_pad == PAD_DOWNWARD)
    4124              :         pad_below (&locate->offset, passed_mode, sizetree);
    4125              : 
    4126              :     }
    4127              :   else
    4128              :     {
    4129      5417683 :       if (!in_regs || reg_parm_stack_space > 0)
    4130      3424353 :         pad_to_arg_alignment (initial_offset_ptr, boundary,
    4131              :                               &locate->alignment_pad);
    4132      5417683 :       locate->slot_offset = *initial_offset_ptr;
    4133              : 
    4134              : #ifdef PUSH_ROUNDING
    4135      5417683 :       if (passed_mode != BLKmode)
    4136      5074061 :         sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
    4137              : #endif
    4138              : 
    4139              :       /* Pad_below needs the pre-rounded size to know how much to pad below
    4140              :          so this must be done before rounding up.  */
    4141      5417683 :       locate->offset = locate->slot_offset;
    4142      5417683 :       if (where_pad == PAD_DOWNWARD)
    4143            0 :         pad_below (&locate->offset, passed_mode, sizetree);
    4144              : 
    4145      5417683 :       if (where_pad != PAD_NONE
    4146      5417683 :           && (!tree_fits_uhwi_p (sizetree)
    4147      5417683 :               || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    4148        18561 :         sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
    4149              : 
    4150      5417683 :       ADD_PARM_SIZE (locate->size, sizetree);
    4151              : 
    4152      5417683 :       locate->size.constant -= part_size_in_regs;
    4153              :     }
    4154              : 
    4155      5417683 :   locate->offset.constant
    4156      5417683 :     += targetm.calls.function_arg_offset (passed_mode, type);
    4157      5417683 : }
    4158              : 
    4159              : /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
    4160              :    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
    4161              : 
    4162              : static void
    4163      3424353 : pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
    4164              :                       struct args_size *alignment_pad)
    4165              : {
    4166      3424353 :   tree save_var = NULL_TREE;
    4167      3424353 :   poly_int64 save_constant = 0;
    4168      3424353 :   int boundary_in_bytes = boundary / BITS_PER_UNIT;
    4169      3424353 :   poly_int64 sp_offset = STACK_POINTER_OFFSET;
    4170              : 
    4171              : #ifdef SPARC_STACK_BOUNDARY_HACK
    4172              :   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
    4173              :      the real alignment of %sp.  However, when it does this, the
    4174              :      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
    4175              :   if (SPARC_STACK_BOUNDARY_HACK)
    4176              :     sp_offset = 0;
    4177              : #endif
    4178              : 
    4179      5182105 :   if (boundary > PARM_BOUNDARY)
    4180              :     {
    4181       140252 :       save_var = offset_ptr->var;
    4182       140252 :       save_constant = offset_ptr->constant;
    4183              :     }
    4184              : 
    4185      3424353 :   alignment_pad->var = NULL_TREE;
    4186      3424353 :   alignment_pad->constant = 0;
    4187              : 
    4188      3424353 :   if (boundary > BITS_PER_UNIT)
    4189              :     {
    4190      3424353 :       int misalign;
    4191      3424353 :       if (offset_ptr->var
    4192      3424353 :           || !known_misalignment (offset_ptr->constant + sp_offset,
    4193              :                                   boundary_in_bytes, &misalign))
    4194              :         {
    4195            0 :           tree sp_offset_tree = ssize_int (sp_offset);
    4196            0 :           tree offset = size_binop (PLUS_EXPR,
    4197              :                                     ARGS_SIZE_TREE (*offset_ptr),
    4198              :                                     sp_offset_tree);
    4199            0 :           tree rounded;
    4200            0 :           if (ARGS_GROW_DOWNWARD)
    4201              :             rounded = round_down (offset, boundary / BITS_PER_UNIT);
    4202              :           else
    4203            0 :             rounded = round_up   (offset, boundary / BITS_PER_UNIT);
    4204              : 
    4205            0 :           offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
    4206              :           /* ARGS_SIZE_TREE includes constant term.  */
    4207            0 :           offset_ptr->constant = 0;
    4208            0 :           if (boundary > PARM_BOUNDARY)
    4209            0 :             alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
    4210              :                                              save_var);
    4211              :         }
    4212              :       else
    4213              :         {
    4214      3424353 :           if (ARGS_GROW_DOWNWARD)
    4215              :             offset_ptr->constant -= misalign;
    4216              :           else
    4217      3424353 :             offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
    4218              : 
    4219      3424353 :           if (boundary > PARM_BOUNDARY)
    4220       140252 :             alignment_pad->constant = offset_ptr->constant - save_constant;
    4221              :         }
    4222              :     }
    4223      3424353 : }
    4224              : 
    4225              : static void
    4226            0 : pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
    4227              : {
    4228            0 :   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
    4229            0 :   int misalign;
    4230            0 :   if (passed_mode != BLKmode
    4231            0 :       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
    4232            0 :     offset_ptr->constant += -misalign & (align - 1);
    4233              :   else
    4234              :     {
    4235            0 :       if (TREE_CODE (sizetree) != INTEGER_CST
    4236            0 :           || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
    4237              :         {
    4238              :           /* Round the size up to multiple of PARM_BOUNDARY bits.  */
    4239            0 :           tree s2 = round_up (sizetree, align);
    4240              :           /* Add it in.  */
    4241            0 :           ADD_PARM_SIZE (*offset_ptr, s2);
    4242            0 :           SUB_PARM_SIZE (*offset_ptr, sizetree);
    4243              :         }
    4244              :     }
    4245            0 : }
    4246              : 
    4247              : 
    4248              : /* True if register REGNO was alive at a place where `setjmp' was
    4249              :    called and was set more than once or is an argument.  Such regs may
    4250              :    be clobbered by `longjmp'.  */
    4251              : 
    4252              : static bool
    4253           44 : regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
    4254              : {
    4255              :   /* There appear to be cases where some local vars never reach the
    4256              :      backend but have bogus regnos.  */
    4257           44 :   if (regno >= max_reg_num ())
    4258              :     return false;
    4259              : 
    4260           44 :   return ((REG_N_SETS (regno) > 1
    4261           42 :            || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
    4262              :                                regno))
    4263           44 :           && REGNO_REG_SET_P (setjmp_crosses, regno));
    4264              : }
    4265              : 
    4266              : /* Walk the tree of blocks describing the binding levels within a
    4267              :    function and warn about variables the might be killed by setjmp or
    4268              :    vfork.  This is done after calling flow_analysis before register
    4269              :    allocation since that will clobber the pseudo-regs to hard
    4270              :    regs.  */
    4271              : 
    4272              : static void
    4273           78 : setjmp_vars_warning (bitmap setjmp_crosses, tree block)
    4274              : {
    4275           78 :   tree decl, sub;
    4276              : 
    4277          197 :   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    4278              :     {
    4279          119 :       if (VAR_P (decl)
    4280          119 :           && DECL_RTL_SET_P (decl)
    4281           25 :           && REG_P (DECL_RTL (decl))
    4282          137 :           && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
    4283            1 :         warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
    4284              :                  " %<longjmp%> or %<vfork%>", decl);
    4285              :     }
    4286              : 
    4287          134 :   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    4288           56 :     setjmp_vars_warning (setjmp_crosses, sub);
    4289           78 : }
    4290              : 
    4291              : /* Do the appropriate part of setjmp_vars_warning
    4292              :    but for arguments instead of local variables.  */
    4293              : 
    4294              : static void
    4295           22 : setjmp_args_warning (bitmap setjmp_crosses)
    4296              : {
    4297           22 :   tree decl;
    4298           22 :   for (decl = DECL_ARGUMENTS (current_function_decl);
    4299           48 :        decl; decl = DECL_CHAIN (decl))
    4300           26 :     if (DECL_RTL (decl) != 0
    4301           26 :         && REG_P (DECL_RTL (decl))
    4302           52 :         && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
    4303            0 :       warning (OPT_Wclobbered,
    4304              :                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
    4305              :                decl);
    4306           22 : }
    4307              : 
    4308              : /* Generate warning messages for variables live across setjmp.  */
    4309              : 
    4310              : void
    4311       134888 : generate_setjmp_warnings (void)
    4312              : {
    4313       134888 :   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
    4314              : 
    4315       134888 :   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
    4316       134888 :       || bitmap_empty_p (setjmp_crosses))
    4317              :     return;
    4318              : 
    4319           22 :   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
    4320           22 :   setjmp_args_warning (setjmp_crosses);
    4321              : }
    4322              : 
    4323              : 
    4324              : /* Reverse the order of elements in the fragment chain T of blocks,
    4325              :    and return the new head of the chain (old last element).
    4326              :    In addition to that clear BLOCK_SAME_RANGE flags when needed
    4327              :    and adjust BLOCK_SUPERCONTEXT from the super fragment to
    4328              :    its super fragment origin.  */
    4329              : 
    4330              : static tree
    4331      5533848 : block_fragments_nreverse (tree t)
    4332              : {
    4333      5533848 :   tree prev = 0, block, next, prev_super = 0;
    4334      5533848 :   tree super = BLOCK_SUPERCONTEXT (t);
    4335      5533848 :   if (BLOCK_FRAGMENT_ORIGIN (super))
    4336      4565054 :     super = BLOCK_FRAGMENT_ORIGIN (super);
    4337     16438432 :   for (block = t; block; block = next)
    4338              :     {
    4339     10904584 :       next = BLOCK_FRAGMENT_CHAIN (block);
    4340     10904584 :       BLOCK_FRAGMENT_CHAIN (block) = prev;
    4341      5370736 :       if ((prev && !BLOCK_SAME_RANGE (prev))
    4342     13974281 :           || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
    4343              :               != prev_super))
    4344      3183042 :         BLOCK_SAME_RANGE (block) = 0;
    4345     10904584 :       prev_super = BLOCK_SUPERCONTEXT (block);
    4346     10904584 :       BLOCK_SUPERCONTEXT (block) = super;
    4347     10904584 :       prev = block;
    4348              :     }
    4349      5533848 :   t = BLOCK_FRAGMENT_ORIGIN (t);
    4350      5533848 :   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
    4351              :       != prev_super)
    4352      1802171 :     BLOCK_SAME_RANGE (t) = 0;
    4353      5533848 :   BLOCK_SUPERCONTEXT (t) = super;
    4354      5533848 :   return prev;
    4355              : }
    4356              : 
    4357              : /* Reverse the order of elements in the chain T of blocks,
    4358              :    and return the new head of the chain (old last element).
    4359              :    Also do the same on subblocks and reverse the order of elements
    4360              :    in BLOCK_FRAGMENT_CHAIN as well.  */
    4361              : 
    4362              : static tree
    4363     23461801 : blocks_nreverse_all (tree t)
    4364              : {
    4365     23461801 :   tree prev = 0, block, next;
    4366     46350278 :   for (block = t; block; block = next)
    4367              :     {
    4368     22888477 :       next = BLOCK_CHAIN (block);
    4369     22888477 :       BLOCK_CHAIN (block) = prev;
    4370     22888477 :       if (BLOCK_FRAGMENT_CHAIN (block)
    4371     22888477 :           && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
    4372              :         {
    4373     11067696 :           BLOCK_FRAGMENT_CHAIN (block)
    4374      5533848 :             = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
    4375      5533848 :           if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
    4376      2656701 :             BLOCK_SAME_RANGE (block) = 0;
    4377              :         }
    4378     22888477 :       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
    4379     22888477 :       prev = block;
    4380              :     }
    4381     23461801 :   return prev;
    4382              : }
    4383              : 
    4384              : 
    4385              : /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
    4386              :    and create duplicate blocks.  */
    4387              : /* ??? Need an option to either create block fragments or to create
    4388              :    abstract origin duplicates of a source block.  It really depends
    4389              :    on what optimization has been performed.  */
    4390              : 
    4391              : void
    4392       573324 : reorder_blocks (void)
    4393              : {
    4394       573324 :   tree block = DECL_INITIAL (current_function_decl);
    4395              : 
    4396       573324 :   if (block == NULL_TREE)
    4397            0 :     return;
    4398              : 
    4399       573324 :   auto_vec<tree, 10> block_stack;
    4400              : 
    4401              :   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
    4402       573324 :   clear_block_marks (block);
    4403              : 
    4404              :   /* Prune the old trees away, so that they don't get in the way.  */
    4405       573324 :   BLOCK_SUBBLOCKS (block) = NULL_TREE;
    4406       573324 :   BLOCK_CHAIN (block) = NULL_TREE;
    4407              : 
    4408              :   /* Recreate the block tree from the note nesting.  */
    4409       573324 :   reorder_blocks_1 (get_insns (), block, &block_stack);
    4410       573324 :   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
    4411       573324 : }
    4412              : 
    4413              : /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
    4414              : 
    4415              : void
    4416     24208998 : clear_block_marks (tree block)
    4417              : {
    4418     44975456 :   while (block)
    4419              :     {
    4420     20766458 :       TREE_ASM_WRITTEN (block) = 0;
    4421     20766458 :       clear_block_marks (BLOCK_SUBBLOCKS (block));
    4422     20766458 :       block = BLOCK_CHAIN (block);
    4423              :     }
    4424     24208998 : }
    4425              : 
    4426              : static void
    4427       573324 : reorder_blocks_1 (rtx_insn *insns, tree current_block,
    4428              :                   vec<tree> *p_block_stack)
    4429              : {
    4430       573324 :   rtx_insn *insn;
    4431       573324 :   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
    4432              : 
    4433    191490002 :   for (insn = insns; insn; insn = NEXT_INSN (insn))
    4434              :     {
    4435    190916678 :       if (NOTE_P (insn))
    4436              :         {
    4437    138073738 :           if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
    4438              :             {
    4439     22888477 :               tree block = NOTE_BLOCK (insn);
    4440     22888477 :               tree origin;
    4441              : 
    4442     22888477 :               gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
    4443     22888477 :               origin = block;
    4444              : 
    4445     22888477 :               if (prev_end)
    4446      1402237 :                 BLOCK_SAME_RANGE (prev_end) = 0;
    4447     22888477 :               prev_end = NULL_TREE;
    4448              : 
    4449              :               /* If we have seen this block before, that means it now
    4450              :                  spans multiple address regions.  Create a new fragment.  */
    4451     22888477 :               if (TREE_ASM_WRITTEN (block))
    4452              :                 {
    4453     10904584 :                   tree new_block = copy_node (block);
    4454              : 
    4455     10904584 :                   BLOCK_SAME_RANGE (new_block) = 0;
    4456     10904584 :                   BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
    4457     10904584 :                   BLOCK_FRAGMENT_CHAIN (new_block)
    4458     10904584 :                     = BLOCK_FRAGMENT_CHAIN (origin);
    4459     10904584 :                   BLOCK_FRAGMENT_CHAIN (origin) = new_block;
    4460              : 
    4461     10904584 :                   NOTE_BLOCK (insn) = new_block;
    4462     10904584 :                   block = new_block;
    4463              :                 }
    4464              : 
    4465     22888477 :               if (prev_beg == current_block && prev_beg)
    4466     14841083 :                 BLOCK_SAME_RANGE (block) = 1;
    4467              : 
    4468     22888477 :               prev_beg = origin;
    4469              : 
    4470     22888477 :               BLOCK_SUBBLOCKS (block) = 0;
    4471     22888477 :               TREE_ASM_WRITTEN (block) = 1;
    4472              :               /* When there's only one block for the entire function,
    4473              :                  current_block == block and we mustn't do this, it
    4474              :                  will cause infinite recursion.  */
    4475     22888477 :               if (block != current_block)
    4476              :                 {
    4477     22888477 :                   tree super;
    4478     22888477 :                   if (block != origin)
    4479     10904584 :                     gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
    4480              :                                 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
    4481              :                                                                       (origin))
    4482              :                                    == current_block);
    4483     22888477 :                   if (p_block_stack->is_empty ())
    4484              :                     super = current_block;
    4485              :                   else
    4486              :                     {
    4487     20556897 :                       super = p_block_stack->last ();
    4488     31653908 :                       gcc_assert (super == current_block
    4489              :                                   || BLOCK_FRAGMENT_ORIGIN (super)
    4490              :                                      == current_block);
    4491              :                     }
    4492     22888477 :                   BLOCK_SUPERCONTEXT (block) = super;
    4493     22888477 :                   BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
    4494     22888477 :                   BLOCK_SUBBLOCKS (current_block) = block;
    4495     22888477 :                   current_block = origin;
    4496              :                 }
    4497     22888477 :               p_block_stack->safe_push (block);
    4498              :             }
    4499    115185261 :           else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
    4500              :             {
    4501     22888477 :               NOTE_BLOCK (insn) = p_block_stack->pop ();
    4502     22888477 :               current_block = BLOCK_SUPERCONTEXT (current_block);
    4503     22888477 :               if (BLOCK_FRAGMENT_ORIGIN (current_block))
    4504      3761936 :                 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
    4505     22888477 :               prev_beg = NULL_TREE;
    4506     37729560 :               prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
    4507     22888477 :                          ? NOTE_BLOCK (insn) : NULL_TREE;
    4508              :             }
    4509              :         }
    4510              :       else
    4511              :         {
    4512     52842940 :           prev_beg = NULL_TREE;
    4513     52842940 :           if (prev_end)
    4514       752714 :             BLOCK_SAME_RANGE (prev_end) = 0;
    4515              :           prev_end = NULL_TREE;
    4516              :         }
    4517              :     }
    4518       573324 : }
    4519              : 
    4520              : /* Reverse the order of elements in the chain T of blocks,
    4521              :    and return the new head of the chain (old last element).  */
    4522              : 
    4523              : tree
    4524     29830817 : blocks_nreverse (tree t)
    4525              : {
    4526     29830817 :   tree prev = 0, block, next;
    4527     52270293 :   for (block = t; block; block = next)
    4528              :     {
    4529     22439476 :       next = BLOCK_CHAIN (block);
    4530     22439476 :       BLOCK_CHAIN (block) = prev;
    4531     22439476 :       prev = block;
    4532              :     }
    4533     29830817 :   return prev;
    4534              : }
    4535              : 
    4536              : /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
    4537              :    by modifying the last node in chain 1 to point to chain 2.  */
    4538              : 
    4539              : tree
    4540     91515539 : block_chainon (tree op1, tree op2)
    4541              : {
    4542     91515539 :   tree t1;
    4543              : 
    4544     91515539 :   if (!op1)
    4545              :     return op2;
    4546      4394154 :   if (!op2)
    4547              :     return op1;
    4548              : 
    4549     26103089 :   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    4550     21708935 :     continue;
    4551      4394154 :   BLOCK_CHAIN (t1) = op2;
    4552              : 
    4553              : #ifdef ENABLE_TREE_CHECKING
    4554      4394154 :   {
    4555      4394154 :     tree t2;
    4556      8804354 :     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
    4557      4410200 :       gcc_assert (t2 != t1);
    4558              :   }
    4559              : #endif
    4560              : 
    4561              :   return op1;
    4562     21708935 : }
    4563              : 
    4564              : /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
    4565              :    non-NULL, list them all into VECTOR, in a depth-first preorder
    4566              :    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
    4567              :    blocks.  */
    4568              : 
    4569              : static int
    4570    119178824 : all_blocks (tree block, tree *vector)
    4571              : {
    4572    119178824 :   int n_blocks = 0;
    4573              : 
    4574    234089780 :   while (block)
    4575              :     {
    4576    114910956 :       TREE_ASM_WRITTEN (block) = 0;
    4577              : 
    4578              :       /* Record this block.  */
    4579    114910956 :       if (vector)
    4580     57455478 :         vector[n_blocks] = block;
    4581              : 
    4582    114910956 :       ++n_blocks;
    4583              : 
    4584              :       /* Record the subblocks, and their subblocks...  */
    4585    172366434 :       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
    4586     57455478 :                               vector ? vector + n_blocks : 0);
    4587    114910956 :       block = BLOCK_CHAIN (block);
    4588              :     }
    4589              : 
    4590    119178824 :   return n_blocks;
    4591              : }
    4592              : 
    4593              : /* Return a vector containing all the blocks rooted at BLOCK.  The
    4594              :    number of elements in the vector is stored in N_BLOCKS_P.  The
    4595              :    vector is dynamically allocated; it is the caller's responsibility
    4596              :    to call `free' on the pointer returned.  */
    4597              : 
    4598              : static tree *
    4599      2133934 : get_block_vector (tree block, int *n_blocks_p)
    4600              : {
    4601      2133934 :   tree *block_vector;
    4602              : 
    4603      2133934 :   *n_blocks_p = all_blocks (block, NULL);
    4604      2133934 :   block_vector = XNEWVEC (tree, *n_blocks_p);
    4605      2133934 :   all_blocks (block, block_vector);
    4606              : 
    4607      2133934 :   return block_vector;
    4608              : }
    4609              : 
    4610              : static GTY(()) int next_block_index = 2;
    4611              : 
    4612              : /* Set BLOCK_NUMBER for all the blocks in FN.  */
    4613              : 
    4614              : void
    4615      2133934 : number_blocks (tree fn)
    4616              : {
    4617      2133934 :   int i;
    4618      2133934 :   int n_blocks;
    4619      2133934 :   tree *block_vector;
    4620              : 
    4621      2133934 :   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
    4622              : 
    4623              :   /* The top-level BLOCK isn't numbered at all.  */
    4624     59589412 :   for (i = 1; i < n_blocks; ++i)
    4625              :     /* We number the blocks from two.  */
    4626     55321544 :     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
    4627              : 
    4628      2133934 :   free (block_vector);
    4629              : 
    4630      2133934 :   return;
    4631              : }
    4632              : 
    4633              : /* If VAR is present in a subblock of BLOCK, return the subblock.  */
    4634              : 
    4635              : DEBUG_FUNCTION tree
    4636            0 : debug_find_var_in_block_tree (tree var, tree block)
    4637              : {
    4638            0 :   tree t;
    4639              : 
    4640            0 :   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    4641            0 :     if (t == var)
    4642              :       return block;
    4643              : 
    4644            0 :   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    4645              :     {
    4646            0 :       tree ret = debug_find_var_in_block_tree (var, t);
    4647            0 :       if (ret)
    4648              :         return ret;
    4649              :     }
    4650              : 
    4651              :   return NULL_TREE;
    4652              : }
    4653              : 
    4654              : /* Keep track of whether we're in a dummy function context.  If we are,
    4655              :    we don't want to invoke the set_current_function hook, because we'll
    4656              :    get into trouble if the hook calls target_reinit () recursively or
    4657              :    when the initial initialization is not yet complete.  */
    4658              : 
    4659              : static bool in_dummy_function;
    4660              : 
    4661              : /* Invoke the target hook when setting cfun.  Update the optimization options
    4662              :    if the function uses different options than the default.  */
    4663              : 
    4664              : static void
    4665    851804960 : invoke_set_current_function_hook (tree fndecl)
    4666              : {
    4667    851804960 :   if (!in_dummy_function)
    4668              :     {
    4669    851378070 :       tree opts = ((fndecl)
    4670    851378070 :                    ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    4671    851378070 :                    : optimization_default_node);
    4672              : 
    4673    851378070 :       if (!opts)
    4674    396790829 :         opts = optimization_default_node;
    4675              : 
    4676              :       /* Change optimization options if needed.  */
    4677    851378070 :       if (optimization_current_node != opts)
    4678              :         {
    4679      3232006 :           optimization_current_node = opts;
    4680      3232006 :           cl_optimization_restore (&global_options, &global_options_set,
    4681      3232006 :                                    TREE_OPTIMIZATION (opts));
    4682              :         }
    4683              : 
    4684    851378070 :       targetm.set_current_function (fndecl);
    4685    851378070 :       this_fn_optabs = this_target_optabs;
    4686              : 
    4687              :       /* Initialize global alignment variables after op.  */
    4688    851378070 :       parse_alignment_opts ();
    4689              : 
    4690    851378070 :       if (opts != optimization_default_node)
    4691              :         {
    4692      1650796 :           init_tree_optimization_optabs (opts);
    4693      1650796 :           if (TREE_OPTIMIZATION_OPTABS (opts))
    4694       112055 :             this_fn_optabs = (struct target_optabs *)
    4695       112055 :               TREE_OPTIMIZATION_OPTABS (opts);
    4696              :         }
    4697              :     }
    4698    851804960 : }
    4699              : 
    4700              : /* Set cfun to NEW_CFUN and switch to the optimization and target options
    4701              :    associated with NEW_FNDECL.
    4702              : 
    4703              :    FORCE says whether we should do the switch even if NEW_CFUN is the current
    4704              :    function, e.g. because there has been a change in optimization or target
    4705              :    options.  */
    4706              : 
    4707              : static void
    4708   1891931028 : set_function_decl (function *new_cfun, tree new_fndecl, bool force)
    4709              : {
    4710   1891931028 :   if (cfun != new_cfun || force)
    4711              :     {
    4712    648109348 :       cfun = new_cfun;
    4713    648109348 :       invoke_set_current_function_hook (new_fndecl);
    4714    648109348 :       redirect_edge_var_map_empty ();
    4715              :     }
    4716   1891931028 : }
    4717              : 
    4718              : /* cfun should never be set directly; use this function.  */
    4719              : 
    4720              : void
    4721   1170728054 : set_cfun (struct function *new_cfun, bool force)
    4722              : {
    4723   1170728054 :   set_function_decl (new_cfun, new_cfun ? new_cfun->decl : NULL_TREE, force);
    4724   1170728054 : }
    4725              : 
    4726              : /* Initialized with NOGC, making this poisonous to the garbage collector.  */
    4727              : 
    4728              : static vec<function *> cfun_stack;
    4729              : 
    4730              : /* Push the current cfun onto the stack, then switch to function NEW_CFUN
    4731              :    and FUNCTION_DECL NEW_FNDECL.  FORCE is as for set_function_decl.  */
    4732              : 
    4733              : static void
    4734    721202974 : push_function_decl (function *new_cfun, tree new_fndecl, bool force)
    4735              : {
    4736    721202974 :   gcc_assert ((!cfun && !current_function_decl)
    4737              :               || (cfun && current_function_decl == cfun->decl));
    4738    721202974 :   cfun_stack.safe_push (cfun);
    4739    721202974 :   current_function_decl = new_fndecl;
    4740    721202974 :   set_function_decl (new_cfun, new_fndecl, force);
    4741    721202974 : }
    4742              : 
    4743              : /* Push the current cfun onto the stack and switch to function declaration
    4744              :    NEW_FNDECL, which might or might not have a function body.  FORCE is as for
    4745              :    set_function_decl.  */
    4746              : 
    4747              : void
    4748            0 : push_function_decl (tree new_fndecl, bool force)
    4749              : {
    4750            0 :   force |= current_function_decl != new_fndecl;
    4751            0 :   push_function_decl (DECL_STRUCT_FUNCTION (new_fndecl), new_fndecl, force);
    4752            0 : }
    4753              : 
    4754              : /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
    4755              :    current_function_decl accordingly.  */
    4756              : 
    4757              : void
    4758    721202974 : push_cfun (struct function *new_cfun)
    4759              : {
    4760    721202974 :   push_function_decl (new_cfun, new_cfun ? new_cfun->decl : NULL_TREE, false);
    4761    721202974 : }
    4762              : 
    4763              : /* A common subroutine for pop_cfun and pop_function_decl.  FORCE is as
    4764              :    for set_function_decl.  */
    4765              : 
    4766              : static void
    4767    722054951 : pop_cfun_1 (bool force)
    4768              : {
    4769    722054951 :   struct function *new_cfun = cfun_stack.pop ();
    4770              :   /* When in_dummy_function, we do have a cfun but current_function_decl is
    4771              :      NULL.  We also allow pushing NULL cfun and subsequently changing
    4772              :      current_function_decl to something else and have both restored by
    4773              :      pop_cfun.  */
    4774    722054951 :   gcc_checking_assert (in_dummy_function
    4775              :                        || !cfun
    4776              :                        || current_function_decl == cfun->decl);
    4777    722054951 :   set_cfun (new_cfun, force);
    4778    722054951 :   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
    4779    722054951 : }
    4780              : 
    4781              : /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
    4782              : 
    4783              : void
    4784    722054951 : pop_cfun (void)
    4785              : {
    4786    722054951 :   pop_cfun_1 (false);
    4787    722054951 : }
    4788              : 
    4789              : /* Undo push_function_decl.  */
    4790              : 
    4791              : void
    4792            0 : pop_function_decl (void)
    4793              : {
    4794              :   /* If the previous cfun was null, the options should be reset to the
    4795              :      global set.  Checking the current cfun against the new (popped) cfun
    4796              :      wouldn't catch this if the current function decl has no function
    4797              :      struct.  */
    4798            0 :   pop_cfun_1 (!cfun_stack.last ());
    4799            0 : }
    4800              : 
    4801              : /* Return value of funcdef and increase it.  */
    4802              : int
    4803    203482152 : get_next_funcdef_no (void)
    4804              : {
    4805    203482152 :   return funcdef_no++;
    4806              : }
    4807              : 
    4808              : /* Return value of funcdef.  */
    4809              : int
    4810            0 : get_last_funcdef_no (void)
    4811              : {
    4812            0 :   return funcdef_no;
    4813              : }
    4814              : 
    4815              : /* Allocate and initialize the stack usage info data structure for the
    4816              :    current function.  */
    4817              : static void
    4818          690 : allocate_stack_usage_info (void)
    4819              : {
    4820          690 :   gcc_assert (!cfun->su);
    4821          690 :   cfun->su = ggc_cleared_alloc<stack_usage> ();
    4822          690 :   cfun->su->static_stack_size = -1;
    4823          690 : }
    4824              : 
    4825              : /* Allocate a function structure for FNDECL and set its contents
    4826              :    to the defaults.  Set cfun to the newly-allocated object.
    4827              :    Some of the helper functions invoked during initialization assume
    4828              :    that cfun has already been set.  Therefore, assign the new object
    4829              :    directly into cfun and invoke the back end hook explicitly at the
    4830              :    very end, rather than initializing a temporary and calling set_cfun
    4831              :    on it.
    4832              : 
    4833              :    ABSTRACT_P is true if this is a function that will never be seen by
    4834              :    the middle-end.  Such functions are front-end concepts (like C++
    4835              :    function templates) that do not correspond directly to functions
    4836              :    placed in object files.  */
    4837              : 
    4838              : void
    4839    203695612 : allocate_struct_function (tree fndecl, bool abstract_p)
    4840              : {
    4841    203695612 :   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
    4842              : 
    4843    203695612 :   cfun = ggc_cleared_alloc<function> ();
    4844              : 
    4845    203695612 :   init_eh_for_function ();
    4846              : 
    4847    203695612 :   if (init_machine_status)
    4848    203695612 :     cfun->machine = (*init_machine_status) ();
    4849              : 
    4850              : #ifdef OVERRIDE_ABI_FORMAT
    4851    203695612 :   OVERRIDE_ABI_FORMAT (fndecl);
    4852              : #endif
    4853              : 
    4854    203695612 :   if (fndecl != NULL_TREE)
    4855              :     {
    4856    203482152 :       DECL_STRUCT_FUNCTION (fndecl) = cfun;
    4857    203482152 :       cfun->decl = fndecl;
    4858    203482152 :       current_function_funcdef_no = get_next_funcdef_no ();
    4859              :     }
    4860              : 
    4861    203695612 :   invoke_set_current_function_hook (fndecl);
    4862              : 
    4863    203695612 :   if (fndecl != NULL_TREE)
    4864              :     {
    4865    203482152 :       tree result = DECL_RESULT (fndecl);
    4866              : 
    4867    203482152 :       if (!abstract_p)
    4868              :         {
    4869              :           /* Now that we have activated any function-specific attributes
    4870              :              that might affect layout, particularly vector modes, relayout
    4871              :              each of the parameters and the result.  */
    4872    110963189 :           relayout_decl (result);
    4873    333899514 :           for (tree parm = DECL_ARGUMENTS (fndecl); parm;
    4874    222936325 :                parm = DECL_CHAIN (parm))
    4875    222936325 :             relayout_decl (parm);
    4876              : 
    4877              :           /* Similarly relayout the function decl.  */
    4878    110963189 :           targetm.target_option.relayout_function (fndecl);
    4879              :         }
    4880              : 
    4881    110963189 :       if (!abstract_p && aggregate_value_p (result, fndecl))
    4882              :         {
    4883              : #ifdef PCC_STATIC_STRUCT_RETURN
    4884              :           cfun->returns_pcc_struct = 1;
    4885              : #endif
    4886      4272012 :           cfun->returns_struct = 1;
    4887              :         }
    4888              : 
    4889    203482152 :       cfun->stdarg = stdarg_p (fntype);
    4890              : 
    4891              :       /* Assume all registers in stdarg functions need to be saved.  */
    4892    203482152 :       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
    4893    203482152 :       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    4894              : 
    4895              :       /* ??? This could be set on a per-function basis by the front-end
    4896              :          but is this worth the hassle?  */
    4897    203482152 :       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    4898    203482152 :       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
    4899              : 
    4900    203482152 :       if (!profile_flag && !flag_instrument_function_entry_exit)
    4901    203481709 :         DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    4902              : 
    4903    203482152 :       if (flag_callgraph_info)
    4904            1 :         allocate_stack_usage_info ();
    4905              :     }
    4906              : 
    4907              :   /* Don't enable begin stmt markers if var-tracking at assignments is
    4908              :      disabled.  The markers make little sense without the variable
    4909              :      binding annotations among them.  */
    4910    407391224 :   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
    4911    203695612 :     && MAY_HAVE_DEBUG_MARKER_STMTS;
    4912    203695612 : }
    4913              : 
    4914              : /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
    4915              :    instead of just setting it.  */
    4916              : 
    4917              : void
    4918       852002 : push_struct_function (tree fndecl, bool abstract_p)
    4919              : {
    4920              :   /* When in_dummy_function we might be in the middle of a pop_cfun and
    4921              :      current_function_decl and cfun may not match.  */
    4922       852002 :   gcc_assert (in_dummy_function
    4923              :               || (!cfun && !current_function_decl)
    4924              :               || (cfun && current_function_decl == cfun->decl));
    4925       852002 :   cfun_stack.safe_push (cfun);
    4926       852002 :   current_function_decl = fndecl;
    4927       852002 :   allocate_struct_function (fndecl, abstract_p);
    4928       852002 : }
    4929              : 
    4930              : /* Reset crtl and other non-struct-function variables to defaults as
    4931              :    appropriate for emitting rtl at the start of a function.  */
    4932              : 
    4933              : static void
    4934      1691107 : prepare_function_start (void)
    4935              : {
    4936      1691107 :   gcc_assert (!get_last_insn ());
    4937              : 
    4938      1691107 :   if (in_dummy_function)
    4939       213440 :     crtl->abi = &default_function_abi;
    4940              :   else
    4941      1477667 :     crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
    4942              : 
    4943      1691107 :   init_temp_slots ();
    4944      1691107 :   init_emit ();
    4945      1691107 :   init_varasm_status ();
    4946      1691107 :   init_expr ();
    4947      1691107 :   default_rtl_profile ();
    4948              : 
    4949      1691107 :   if (flag_stack_usage_info && !flag_callgraph_info)
    4950          689 :     allocate_stack_usage_info ();
    4951              : 
    4952      1691107 :   cse_not_expected = ! optimize;
    4953              : 
    4954              :   /* Caller save not needed yet.  */
    4955      1691107 :   caller_save_needed = 0;
    4956              : 
    4957              :   /* We haven't done register allocation yet.  */
    4958      1691107 :   reg_renumber = 0;
    4959              : 
    4960              :   /* Indicate that we have not instantiated virtual registers yet.  */
    4961      1691107 :   virtuals_instantiated = 0;
    4962              : 
    4963              :   /* Indicate that we want CONCATs now.  */
    4964      1691107 :   generating_concat_p = 1;
    4965              : 
    4966              :   /* Indicate we have no need of a frame pointer yet.  */
    4967      1691107 :   frame_pointer_needed = 0;
    4968              : 
    4969              :   /* Reset the cache of the "extended" flag in the target's
    4970              :      _BitInt info struct.  */
    4971      1691107 :   bitint_extended = -1;
    4972      1691107 : }
    4973              : 
    4974              : void
    4975       213445 : push_dummy_function (bool with_decl)
    4976              : {
    4977       213445 :   tree fn_decl, fn_type, fn_result_decl;
    4978              : 
    4979       213445 :   gcc_assert (!in_dummy_function);
    4980       213445 :   in_dummy_function = true;
    4981              : 
    4982       213445 :   if (with_decl)
    4983              :     {
    4984            5 :       fn_type = build_function_type_list (void_type_node, NULL_TREE);
    4985            5 :       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
    4986              :                             fn_type);
    4987            5 :       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
    4988              :                                          NULL_TREE, void_type_node);
    4989            5 :       DECL_RESULT (fn_decl) = fn_result_decl;
    4990            5 :       DECL_ARTIFICIAL (fn_decl) = 1;
    4991            5 :       tree fn_name = get_identifier (" ");
    4992            5 :       SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
    4993              :     }
    4994              :   else
    4995              :     fn_decl = NULL_TREE;
    4996              : 
    4997       213445 :   push_struct_function (fn_decl);
    4998       213445 : }
    4999              : 
    5000              : /* Initialize the rtl expansion mechanism so that we can do simple things
    5001              :    like generate sequences.  This is used to provide a context during global
    5002              :    initialization of some passes.  You must call expand_dummy_function_end
    5003              :    to exit this context.  */
    5004              : 
    5005              : void
    5006       213440 : init_dummy_function_start (void)
    5007              : {
    5008       213440 :   push_dummy_function (false);
    5009       213440 :   prepare_function_start ();
    5010       213440 : }
    5011              : 
    5012              : /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
    5013              :    and initialize static variables for generating RTL for the statements
    5014              :    of the function.  */
    5015              : 
    5016              : void
    5017      1477667 : init_function_start (tree subr)
    5018              : {
    5019              :   /* Initialize backend, if needed.  */
    5020      1477667 :   initialize_rtl ();
    5021              : 
    5022      1477667 :   prepare_function_start ();
    5023      1477667 :   decide_function_section (subr);
    5024              : 
    5025              :   /* Warn if this value is an aggregate type,
    5026              :      regardless of which calling convention we are using for it.  */
    5027      1477667 :   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    5028       107669 :     warning_at (DECL_SOURCE_LOCATION (DECL_RESULT (subr)),
    5029       107669 :                 OPT_Waggregate_return, "function returns an aggregate");
    5030      1477667 : }
    5031              : 
    5032              : /* Expand code to verify the stack_protect_guard.  This is invoked at
    5033              :    the end of a function to be protected.  */
    5034              : 
    5035              : void
    5036          264 : stack_protect_epilogue (void)
    5037              : {
    5038          264 :   tree guard_decl = crtl->stack_protect_guard_decl;
    5039          264 :   rtx_code_label *label = gen_label_rtx ();
    5040          264 :   rtx x, y;
    5041          264 :   rtx_insn *seq = NULL;
    5042              : 
    5043          264 :   x = expand_normal (crtl->stack_protect_guard);
    5044              : 
    5045          264 :   if (targetm.have_stack_protect_combined_test () && guard_decl)
    5046              :     {
    5047            0 :       gcc_assert (DECL_P (guard_decl));
    5048            0 :       y = DECL_RTL (guard_decl);
    5049              :       /* Allow the target to compute address of Y and compare it with X without
    5050              :          leaking Y into a register.  This combined address + compare pattern
    5051              :          allows the target to prevent spilling of any intermediate results by
    5052              :          splitting it after register allocator.  */
    5053            0 :       seq = targetm.gen_stack_protect_combined_test (x, y, label);
    5054              :     }
    5055              :   else
    5056              :     {
    5057          264 :       if (guard_decl)
    5058          264 :         y = expand_normal (guard_decl);
    5059              :       else
    5060            0 :         y = const0_rtx;
    5061              : 
    5062              :       /* Allow the target to compare Y with X without leaking either into
    5063              :          a register.  */
    5064          264 :       if (targetm.have_stack_protect_test ())
    5065          264 :         seq = targetm.gen_stack_protect_test (x, y, label);
    5066              :     }
    5067              : 
    5068          264 :   if (seq)
    5069          264 :     emit_insn (seq);
    5070              :   else
    5071            0 :     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
    5072              : 
    5073              :   /* The noreturn predictor has been moved to the tree level.  The rtl-level
    5074              :      predictors estimate this branch about 20%, which isn't enough to get
    5075              :      things moved out of line.  Since this is the only extant case of adding
    5076              :      a noreturn function at the rtl level, it doesn't seem worth doing ought
    5077              :      except adding the prediction by hand.  */
    5078          264 :   rtx_insn *tmp = get_last_insn ();
    5079          264 :   if (JUMP_P (tmp))
    5080          264 :     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
    5081              : 
    5082          264 :   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
    5083          264 :   free_temp_slots ();
    5084          264 :   emit_label (label);
    5085          264 : }
    5086              : 
    5087              : /* Start the RTL for a new function, and set variables used for
    5088              :    emitting RTL.
    5089              :    SUBR is the FUNCTION_DECL node.
    5090              :    PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
    5091              :    the function's parameters, which must be run at any return statement.  */
    5092              : 
    5093              : bool currently_expanding_function_start;
    5094              : void
    5095      1472141 : expand_function_start (tree subr)
    5096              : {
    5097      1472141 :   currently_expanding_function_start = true;
    5098              : 
    5099              :   /* Make sure volatile mem refs aren't considered
    5100              :      valid operands of arithmetic insns.  */
    5101      1472141 :   init_recog_no_volatile ();
    5102              : 
    5103      1472141 :   crtl->profile
    5104      2944282 :     = (profile_flag
    5105      1472462 :        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
    5106              : 
    5107      1472141 :   crtl->limit_stack
    5108      1472141 :     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
    5109              : 
    5110              :   /* Make the label for return statements to jump to.  Do not special
    5111              :      case machines with special return instructions -- they will be
    5112              :      handled later during jump, ifcvt, or epilogue creation.  */
    5113      1472141 :   return_label = gen_label_rtx ();
    5114              : 
    5115              :   /* Initialize rtx used to return the value.  */
    5116              :   /* Do this before assign_parms so that we copy the struct value address
    5117              :      before any library calls that assign parms might generate.  */
    5118              : 
    5119              :   /* Decide whether to return the value in memory or in a register.  */
    5120      1472141 :   tree res = DECL_RESULT (subr);
    5121      1472141 :   if (aggregate_value_p (res, subr))
    5122              :     {
    5123              :       /* Returning something that won't go in a register.  */
    5124        69241 :       rtx value_address = 0;
    5125              : 
    5126              : #ifdef PCC_STATIC_STRUCT_RETURN
    5127              :       if (cfun->returns_pcc_struct)
    5128              :         {
    5129              :           int size = int_size_in_bytes (TREE_TYPE (res));
    5130              :           value_address = assemble_static_space (size);
    5131              :         }
    5132              :       else
    5133              : #endif
    5134        69241 :         {
    5135        69241 :           rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
    5136              :           /* Expect to be passed the address of a place to store the value.
    5137              :              If it is passed as an argument, assign_parms will take care of
    5138              :              it.  */
    5139        69241 :           if (sv)
    5140              :             {
    5141            0 :               value_address = gen_reg_rtx (Pmode);
    5142            0 :               emit_move_insn (value_address, sv);
    5143              :             }
    5144              :         }
    5145            0 :       if (value_address)
    5146              :         {
    5147            0 :           rtx x = value_address;
    5148            0 :           if (!DECL_BY_REFERENCE (res))
    5149              :             {
    5150            0 :               x = gen_rtx_MEM (DECL_MODE (res), x);
    5151            0 :               set_mem_attributes (x, res, 1);
    5152              :             }
    5153            0 :           set_parm_rtl (res, x);
    5154              :         }
    5155              :     }
    5156      1402900 :   else if (DECL_MODE (res) == VOIDmode)
    5157              :     /* If return mode is void, this decl rtl should not be used.  */
    5158       686274 :     set_parm_rtl (res, NULL_RTX);
    5159              :   else
    5160              :     {
    5161              :       /* Compute the return values into a pseudo reg, which we will copy
    5162              :          into the true return register after the cleanups are done.  */
    5163       716626 :       tree return_type = TREE_TYPE (res);
    5164              : 
    5165              :       /* If we may coalesce this result, make sure it has the expected mode
    5166              :          in case it was promoted.  But we need not bother about BLKmode.  */
    5167       716626 :       machine_mode promoted_mode
    5168       533249 :         = flag_tree_coalesce_vars && is_gimple_reg (res)
    5169      1212172 :           ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
    5170              :           : BLKmode;
    5171              : 
    5172       495546 :       if (promoted_mode != BLKmode)
    5173       495545 :         set_parm_rtl (res, gen_reg_rtx (promoted_mode));
    5174       221081 :       else if (TYPE_MODE (return_type) != BLKmode
    5175       221081 :                && targetm.calls.return_in_msb (return_type))
    5176              :         /* expand_function_end will insert the appropriate padding in
    5177              :            this case.  Use the return value's natural (unpadded) mode
    5178              :            within the function proper.  */
    5179            0 :         set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
    5180              :       else
    5181              :         {
    5182              :           /* In order to figure out what mode to use for the pseudo, we
    5183              :              figure out what the mode of the eventual return register will
    5184              :              actually be, and use that.  */
    5185       221081 :           rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
    5186              : 
    5187              :           /* Structures that are returned in registers are not
    5188              :              aggregate_value_p, so we may see a PARALLEL or a REG.  */
    5189       221081 :           if (REG_P (hard_reg))
    5190       218069 :             set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
    5191              :           else
    5192              :             {
    5193         3012 :               gcc_assert (GET_CODE (hard_reg) == PARALLEL);
    5194         3012 :               set_parm_rtl (res, gen_group_rtx (hard_reg));
    5195              :             }
    5196              :         }
    5197              : 
    5198              :       /* Set DECL_REGISTER flag so that expand_function_end will copy the
    5199              :          result to the real return register(s).  */
    5200       716626 :       DECL_REGISTER (res) = 1;
    5201              :     }
    5202              : 
    5203              :   /* Initialize rtx for parameters and local variables.
    5204              :      In some cases this requires emitting insns.  */
    5205      1472141 :   assign_parms (subr);
    5206              : 
    5207              :   /* If function gets a static chain arg, store it.  */
    5208      1472141 :   if (cfun->static_chain_decl)
    5209              :     {
    5210        19318 :       tree parm = cfun->static_chain_decl;
    5211        19318 :       rtx local, chain;
    5212        19318 :       rtx_insn *insn;
    5213        19318 :       int unsignedp;
    5214              : 
    5215        19318 :       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
    5216        19318 :       chain = targetm.calls.static_chain (current_function_decl, true);
    5217              : 
    5218        19318 :       set_decl_incoming_rtl (parm, chain, false);
    5219        19318 :       set_parm_rtl (parm, local);
    5220        19318 :       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
    5221              : 
    5222        19318 :       if (GET_MODE (local) != GET_MODE (chain))
    5223              :         {
    5224            1 :           convert_move (local, chain, unsignedp);
    5225            1 :           insn = get_last_insn ();
    5226              :         }
    5227              :       else
    5228        19317 :         insn = emit_move_insn (local, chain);
    5229              : 
    5230              :       /* Mark the register as eliminable, similar to parameters.  */
    5231        19318 :       if (MEM_P (chain)
    5232        19318 :           && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
    5233            0 :         set_dst_reg_note (insn, REG_EQUIV, chain, local);
    5234              : 
    5235              :       /* If we aren't optimizing, save the static chain onto the stack.  */
    5236        19318 :       if (!optimize)
    5237              :         {
    5238         3909 :           tree saved_static_chain_decl
    5239         3909 :             = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
    5240         3909 :                           DECL_NAME (parm), TREE_TYPE (parm));
    5241         3909 :           rtx saved_static_chain_rtx
    5242         7818 :             = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
    5243         3909 :           SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
    5244         3909 :           emit_move_insn (saved_static_chain_rtx, chain);
    5245         3909 :           SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
    5246         3909 :           DECL_HAS_VALUE_EXPR_P (parm) = 1;
    5247              :         }
    5248              :     }
    5249              : 
    5250              :   /* The following was moved from init_function_start.
    5251              :      The move was supposed to make sdb output more accurate.  */
    5252              :   /* Indicate the beginning of the function body,
    5253              :      as opposed to parm setup.  */
    5254      1472141 :   emit_note (NOTE_INSN_FUNCTION_BEG);
    5255              : 
    5256      1472141 :   gcc_assert (NOTE_P (get_last_insn ()));
    5257              : 
    5258      1472141 :   function_beg_insn = parm_birth_insn = get_last_insn ();
    5259              : 
    5260              :   /* If the function receives a non-local goto, then store the
    5261              :      bits we need to restore the frame pointer.  */
    5262      1472141 :   if (cfun->nonlocal_goto_save_area)
    5263              :     {
    5264          393 :       tree t_save;
    5265          393 :       rtx r_save;
    5266              : 
    5267          393 :       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
    5268          393 :       gcc_assert (DECL_RTL_SET_P (var));
    5269              : 
    5270          393 :       t_save = build4 (ARRAY_REF,
    5271          393 :                        TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
    5272              :                        cfun->nonlocal_goto_save_area,
    5273              :                        integer_zero_node, NULL_TREE, NULL_TREE);
    5274          393 :       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
    5275          393 :       gcc_assert (GET_MODE (r_save) == Pmode);
    5276              : 
    5277          393 :       emit_move_insn (r_save, hard_frame_pointer_rtx);
    5278          393 :       update_nonlocal_goto_save_area ();
    5279              :     }
    5280              : 
    5281      1472141 :   if (crtl->profile)
    5282              :     {
    5283              : #ifdef PROFILE_HOOK
    5284              :       PROFILE_HOOK (current_function_funcdef_no);
    5285              : #endif
    5286              :     }
    5287              : 
    5288              :   /* If we are doing generic stack checking, the probe should go here.  */
    5289      1472141 :   if (flag_stack_check == GENERIC_STACK_CHECK)
    5290           49 :     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
    5291              : 
    5292      1472141 :   currently_expanding_function_start = false;
    5293      1472141 : }
    5294              : 
    5295              : void
    5296       213445 : pop_dummy_function (void)
    5297              : {
    5298       213445 :   pop_cfun ();
    5299       213445 :   in_dummy_function = false;
    5300       213445 : }
    5301              : 
    5302              : /* Undo the effects of init_dummy_function_start.  */
    5303              : void
    5304       213440 : expand_dummy_function_end (void)
    5305              : {
    5306       213440 :   gcc_assert (in_dummy_function);
    5307              : 
    5308              :   /* End any sequences that failed to be closed due to syntax errors.  */
    5309       213440 :   while (in_sequence_p ())
    5310            0 :     end_sequence ();
    5311              : 
    5312              :   /* Outside function body, can't compute type's actual size
    5313              :      until next function's body starts.  */
    5314              : 
    5315       213440 :   free_after_parsing (cfun);
    5316       213440 :   free_after_compilation (cfun);
    5317       213440 :   pop_dummy_function ();
    5318       213440 : }
    5319              : 
    5320              : /* Helper for diddle_return_value.  */
    5321              : 
    5322              : void
    5323     20098329 : diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
    5324              : {
    5325     20098329 :   if (! outgoing)
    5326              :     return;
    5327              : 
    5328     10512524 :   if (REG_P (outgoing))
    5329     10457554 :     (*doit) (outgoing, arg);
    5330        54970 :   else if (GET_CODE (outgoing) == PARALLEL)
    5331              :     {
    5332              :       int i;
    5333              : 
    5334       129271 :       for (i = 0; i < XVECLEN (outgoing, 0); i++)
    5335              :         {
    5336        76680 :           rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
    5337              : 
    5338        76680 :           if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    5339        76680 :             (*doit) (x, arg);
    5340              :         }
    5341              :     }
    5342              : }
    5343              : 
    5344              : /* Call DOIT for each hard register used as a return value from
    5345              :    the current function.  */
    5346              : 
    5347              : void
    5348     20098329 : diddle_return_value (void (*doit) (rtx, void *), void *arg)
    5349              : {
    5350     20098329 :   diddle_return_value_1 (doit, arg, crtl->return_rtx);
    5351     20098329 : }
    5352              : 
    5353              : static void
    5354        12651 : do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
    5355              : {
    5356         5174 :   emit_clobber (reg);
    5357         7477 : }
    5358              : 
    5359              : void
    5360       619930 : clobber_return_register (void)
    5361              : {
    5362       619930 :   diddle_return_value (do_clobber_return_reg, NULL);
    5363              : 
    5364              :   /* In case we do use pseudo to return value, clobber it too.  */
    5365       619930 :   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    5366              :     {
    5367         7507 :       tree decl_result = DECL_RESULT (current_function_decl);
    5368         7507 :       rtx decl_rtl = DECL_RTL (decl_result);
    5369         7507 :       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
    5370              :         {
    5371         7477 :           do_clobber_return_reg (decl_rtl, NULL);
    5372              :         }
    5373              :     }
    5374       619930 : }
    5375              : 
    5376              : static void
    5377       782892 : do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
    5378              : {
    5379       782892 :   emit_use (reg);
    5380       782892 : }
    5381              : 
    5382              : static void
    5383      1472140 : use_return_register (void)
    5384              : {
    5385            0 :   diddle_return_value (do_use_return_reg, NULL);
    5386            0 : }
    5387              : 
    5388              : /* Generate RTL for the end of the current function.  */
    5389              : 
    5390              : void
    5391      1472140 : expand_function_end (void)
    5392              : {
    5393              :   /* If arg_pointer_save_area was referenced only from a nested
    5394              :      function, we will not have initialized it yet.  Do that now.  */
    5395      1472140 :   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    5396            0 :     get_arg_pointer_save_area ();
    5397              : 
    5398              :   /* If we are doing generic stack checking and this function makes calls,
    5399              :      do a stack probe at the start of the function to ensure we have enough
    5400              :      space for another stack frame.  */
    5401      1472140 :   if (flag_stack_check == GENERIC_STACK_CHECK)
    5402              :     {
    5403           49 :       rtx_insn *insn, *seq;
    5404              : 
    5405          650 :       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    5406          633 :         if (CALL_P (insn))
    5407              :           {
    5408           32 :             rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
    5409           32 :             start_sequence ();
    5410           32 :             if (STACK_CHECK_MOVING_SP)
    5411           32 :               anti_adjust_stack_and_probe (max_frame_size, true);
    5412              :             else
    5413              :               probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
    5414           32 :             seq = end_sequence ();
    5415           32 :             set_insn_locations (seq, prologue_location);
    5416           32 :             emit_insn_before (seq, stack_check_probe_note);
    5417           32 :             break;
    5418              :           }
    5419              :     }
    5420              : 
    5421              :   /* End any sequences that failed to be closed due to syntax errors.  */
    5422      1472140 :   while (in_sequence_p ())
    5423            0 :     end_sequence ();
    5424              : 
    5425      1472140 :   clear_pending_stack_adjust ();
    5426      1472140 :   do_pending_stack_adjust ();
    5427              : 
    5428              :   /* Output a linenumber for the end of the function.
    5429              :      SDB depended on this.  */
    5430      1472140 :   set_curr_insn_location (input_location);
    5431              : 
    5432              :   /* Before the return label (if any), clobber the return
    5433              :      registers so that they are not propagated live to the rest of
    5434              :      the function.  This can only happen with functions that drop
    5435              :      through; if there had been a return statement, there would
    5436              :      have either been a return rtx, or a jump to the return label.
    5437              : 
    5438              :      We delay actual code generation after the current_function_value_rtx
    5439              :      is computed.  */
    5440      1472140 :   rtx_insn *clobber_after = get_last_insn ();
    5441              : 
    5442              :   /* Output the label for the actual return from the function.  */
    5443      1472140 :   emit_label (return_label);
    5444              : 
    5445      1472140 :   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    5446              :     {
    5447              :       /* Let except.cc know where it should emit the call to unregister
    5448              :          the function context for sjlj exceptions.  */
    5449            0 :       if (flag_exceptions)
    5450            0 :         sjlj_emit_function_exit_after (get_last_insn ());
    5451              :     }
    5452              : 
    5453              :   /* If this is an implementation of throw, do what's necessary to
    5454              :      communicate between __builtin_eh_return and the epilogue.  */
    5455      1472140 :   expand_eh_return ();
    5456              : 
    5457              :   /* If stack protection is enabled for this function, check the guard.  */
    5458      1472140 :   if (crtl->stack_protect_guard
    5459          239 :       && targetm.stack_protect_runtime_enabled_p ()
    5460      1472378 :       && naked_return_label == NULL_RTX)
    5461          238 :     stack_protect_epilogue ();
    5462              : 
    5463              :   /* If scalar return value was computed in a pseudo-reg, or was a named
    5464              :      return value that got dumped to the stack, copy that to the hard
    5465              :      return register.  */
    5466      1472140 :   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    5467              :     {
    5468       785866 :       tree decl_result = DECL_RESULT (current_function_decl);
    5469       785866 :       rtx decl_rtl = DECL_RTL (decl_result);
    5470              : 
    5471       785866 :       if ((REG_P (decl_rtl)
    5472       785866 :            ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
    5473        67145 :            : DECL_REGISTER (decl_result))
    5474              :           /* Unless the psABI says not to.  */
    5475       785866 :           && !TYPE_EMPTY_P (TREE_TYPE (decl_result)))
    5476              :         {
    5477       718891 :           rtx real_decl_rtl = crtl->return_rtx;
    5478       718891 :           complex_mode cmode;
    5479              : 
    5480              :           /* This should be set in assign_parms.  */
    5481       718891 :           gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
    5482              : 
    5483              :           /* If this is a BLKmode structure being returned in registers,
    5484              :              then use the mode computed in expand_return.  Note that if
    5485              :              decl_rtl is memory, then its mode may have been changed,
    5486              :              but that crtl->return_rtx has not.  */
    5487       718891 :           if (GET_MODE (real_decl_rtl) == BLKmode)
    5488         2683 :             PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
    5489              : 
    5490              :           /* If a non-BLKmode return value should be padded at the least
    5491              :              significant end of the register, shift it left by the appropriate
    5492              :              amount.  BLKmode results are handled using the group load/store
    5493              :              machinery.  */
    5494       718891 :           if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
    5495       716205 :               && REG_P (real_decl_rtl)
    5496      1432455 :               && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
    5497              :             {
    5498            0 :               emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
    5499              :                                            REGNO (real_decl_rtl)),
    5500              :                               decl_rtl);
    5501            0 :               shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
    5502              :             }
    5503       718891 :           else if (GET_CODE (real_decl_rtl) == PARALLEL)
    5504              :             {
    5505              :               /* If expand_function_start has created a PARALLEL for decl_rtl,
    5506              :                  move the result to the real return registers.  Otherwise, do
    5507              :                  a group load from decl_rtl for a named return.  */
    5508         4100 :               if (GET_CODE (decl_rtl) == PARALLEL)
    5509         3012 :                 emit_group_move (real_decl_rtl, decl_rtl);
    5510              :               else
    5511         1088 :                 emit_group_load (real_decl_rtl, decl_rtl,
    5512         1088 :                                  TREE_TYPE (decl_result),
    5513         1088 :                                  int_size_in_bytes (TREE_TYPE (decl_result)));
    5514              :             }
    5515              :           /* In the case of complex integer modes smaller than a word, we'll
    5516              :              need to generate some non-trivial bitfield insertions.  Do that
    5517              :              on a pseudo and not the hard register.  */
    5518       714791 :           else if (GET_CODE (decl_rtl) == CONCAT
    5519          687 :                    && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
    5520       714915 :                    && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
    5521              :             {
    5522           86 :               int old_generating_concat_p;
    5523           86 :               rtx tmp;
    5524              : 
    5525           86 :               old_generating_concat_p = generating_concat_p;
    5526           86 :               generating_concat_p = 0;
    5527           86 :               tmp = gen_reg_rtx (GET_MODE (decl_rtl));
    5528           86 :               generating_concat_p = old_generating_concat_p;
    5529              : 
    5530           86 :               emit_move_insn (tmp, decl_rtl);
    5531           86 :               emit_move_insn (real_decl_rtl, tmp);
    5532              :             }
    5533              :           /* If a named return value dumped decl_return to memory, then
    5534              :              we may need to re-do the PROMOTE_MODE signed/unsigned
    5535              :              extension.  */
    5536       714705 :           else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
    5537              :             {
    5538            0 :               int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
    5539            0 :               promote_function_mode (TREE_TYPE (decl_result),
    5540              :                                      GET_MODE (decl_rtl), &unsignedp,
    5541            0 :                                      TREE_TYPE (current_function_decl), 1);
    5542              : 
    5543            0 :               convert_move (real_decl_rtl, decl_rtl, unsignedp);
    5544              :             }
    5545              :           else
    5546       714705 :             emit_move_insn (real_decl_rtl, decl_rtl);
    5547              :         }
    5548              :     }
    5549              : 
    5550              :   /* If returning a structure, arrange to return the address of the value
    5551              :      in a place where debuggers expect to find it.
    5552              : 
    5553              :      If returning a structure PCC style,
    5554              :      the caller also depends on this value.
    5555              :      And cfun->returns_pcc_struct is not necessarily set.  */
    5556      1472140 :   if ((cfun->returns_struct || cfun->returns_pcc_struct)
    5557        69036 :       && !targetm.calls.omit_struct_return_reg)
    5558              :     {
    5559        69036 :       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
    5560        69036 :       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
    5561        69036 :       rtx outgoing;
    5562              : 
    5563        69036 :       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    5564         8758 :         type = TREE_TYPE (type);
    5565              :       else
    5566        60278 :         value_address = XEXP (value_address, 0);
    5567              : 
    5568        69036 :       outgoing = targetm.calls.function_value (build_pointer_type (type),
    5569              :                                                current_function_decl, true);
    5570              : 
    5571              :       /* Mark this as a function return value so integrate will delete the
    5572              :          assignment and USE below when inlining this function.  */
    5573        69036 :       REG_FUNCTION_VALUE_P (outgoing) = 1;
    5574              : 
    5575              :       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
    5576        69036 :       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
    5577        69036 :       value_address = convert_memory_address (mode, value_address);
    5578              : 
    5579        69036 :       emit_move_insn (outgoing, value_address);
    5580              : 
    5581              :       /* Show return register used to hold result (in this case the address
    5582              :          of the result.  */
    5583        69036 :       crtl->return_rtx = outgoing;
    5584              :     }
    5585              : 
    5586              :   /* Emit the actual code to clobber return register.  Don't emit
    5587              :      it if clobber_after is a barrier, then the previous basic block
    5588              :      certainly doesn't fall thru into the exit block.  */
    5589      1472140 :   if (!BARRIER_P (clobber_after))
    5590              :     {
    5591       551097 :       start_sequence ();
    5592       551097 :       clobber_return_register ();
    5593       551097 :       rtx_insn *seq = end_sequence ();
    5594              : 
    5595       551097 :       emit_insn_after (seq, clobber_after);
    5596              :     }
    5597              : 
    5598              :   /* Output the label for the naked return from the function.  */
    5599      1472140 :   if (naked_return_label)
    5600          379 :     emit_label (naked_return_label);
    5601              : 
    5602              :   /* @@@ This is a kludge.  We want to ensure that instructions that
    5603              :      may trap are not moved into the epilogue by scheduling, because
    5604              :      we don't always emit unwind information for the epilogue.  */
    5605      1472140 :   if (cfun->can_throw_non_call_exceptions
    5606      1472140 :       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    5607       262869 :     emit_insn (gen_blockage ());
    5608              : 
    5609              :   /* If stack protection is enabled for this function, check the guard.  */
    5610      1472140 :   if (crtl->stack_protect_guard
    5611          239 :       && targetm.stack_protect_runtime_enabled_p ()
    5612      1472378 :       && naked_return_label)
    5613            0 :     stack_protect_epilogue ();
    5614              : 
    5615              :   /* If we had calls to alloca, and this machine needs
    5616              :      an accurate stack pointer to exit the function,
    5617              :      insert some code to save and restore the stack pointer.  */
    5618      1472140 :   if (! EXIT_IGNORE_STACK
    5619              :       && cfun->calls_alloca)
    5620              :     {
    5621              :       rtx tem = 0;
    5622              : 
    5623              :       start_sequence ();
    5624              :       emit_stack_save (SAVE_FUNCTION, &tem);
    5625              :       rtx_insn *seq = end_sequence ();
    5626              :       emit_insn_before (seq, parm_birth_insn);
    5627              : 
    5628              :       emit_stack_restore (SAVE_FUNCTION, tem);
    5629              :     }
    5630              : 
    5631              :   /* ??? This should no longer be necessary since stupid is no longer with
    5632              :      us, but there are some parts of the compiler (eg reload_combine, and
    5633              :      sh mach_dep_reorg) that still try and compute their own lifetime info
    5634              :      instead of using the general framework.  */
    5635      1472140 :   use_return_register ();
    5636      1472140 : }
    5637              : 
    5638              : rtx
    5639            0 : get_arg_pointer_save_area (void)
    5640              : {
    5641            0 :   rtx ret = arg_pointer_save_area;
    5642              : 
    5643            0 :   if (! ret)
    5644              :     {
    5645            0 :       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
    5646            0 :       arg_pointer_save_area = ret;
    5647              :     }
    5648              : 
    5649            0 :   if (! crtl->arg_pointer_save_area_init)
    5650              :     {
    5651              :       /* Save the arg pointer at the beginning of the function.  The
    5652              :          generated stack slot may not be a valid memory address, so we
    5653              :          have to check it and fix it if necessary.  */
    5654            0 :       start_sequence ();
    5655            0 :       emit_move_insn (validize_mem (copy_rtx (ret)),
    5656              :                       crtl->args.internal_arg_pointer);
    5657            0 :       rtx_insn *seq = end_sequence ();
    5658              : 
    5659            0 :       push_topmost_sequence ();
    5660            0 :       emit_insn_after (seq, entry_of_function ());
    5661            0 :       pop_topmost_sequence ();
    5662              : 
    5663            0 :       crtl->arg_pointer_save_area_init = true;
    5664              :     }
    5665              : 
    5666            0 :   return ret;
    5667              : }
    5668              : 
    5669              : 
    5670              : /* If debugging dumps are requested, dump information about how the
    5671              :    target handled -fstack-check=clash for the prologue.
    5672              : 
    5673              :    PROBES describes what if any probes were emitted.
    5674              : 
    5675              :    RESIDUALS indicates if the prologue had any residual allocation
    5676              :    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
    5677              : 
    5678              : void
    5679          126 : dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
    5680              : {
    5681          126 :   if (!dump_file)
    5682              :     return;
    5683              : 
    5684           17 :   switch (probes)
    5685              :     {
    5686            1 :     case NO_PROBE_NO_FRAME:
    5687            1 :       fprintf (dump_file,
    5688              :                "Stack clash no probe no stack adjustment in prologue.\n");
    5689            1 :       break;
    5690           11 :     case NO_PROBE_SMALL_FRAME:
    5691           11 :       fprintf (dump_file,
    5692              :                "Stack clash no probe small stack adjustment in prologue.\n");
    5693           11 :       break;
    5694            3 :     case PROBE_INLINE:
    5695            3 :       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
    5696            3 :       break;
    5697            2 :     case PROBE_LOOP:
    5698            2 :       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
    5699            2 :       break;
    5700              :     }
    5701              : 
    5702           17 :   if (residuals)
    5703           16 :     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
    5704              :   else
    5705            1 :     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
    5706              : 
    5707           17 :   if (frame_pointer_needed)
    5708            0 :     fprintf (dump_file, "Stack clash frame pointer needed.\n");
    5709              :   else
    5710           17 :     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
    5711              : 
    5712           17 :   if (TREE_THIS_VOLATILE (cfun->decl))
    5713            1 :     fprintf (dump_file,
    5714              :              "Stack clash noreturn prologue, assuming no implicit"
    5715              :              " probes in caller.\n");
    5716              :   else
    5717           16 :     fprintf (dump_file,
    5718              :              "Stack clash not noreturn prologue.\n");
    5719              : }
    5720              : 
    5721              : /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
    5722              :    for the first time.  */
    5723              : 
    5724              : static void
    5725      3684221 : record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
    5726              : {
    5727      3684221 :   rtx_insn *tmp;
    5728      3684221 :   hash_table<insn_cache_hasher> *hash = *hashp;
    5729              : 
    5730      3684221 :   if (hash == NULL)
    5731      2942726 :     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
    5732              : 
    5733     14245911 :   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    5734              :     {
    5735     10561690 :       rtx *slot = hash->find_slot (tmp, INSERT);
    5736     10561690 :       gcc_assert (*slot == NULL);
    5737     10561690 :       *slot = tmp;
    5738              :     }
    5739      3684221 : }
    5740              : 
    5741              : /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
    5742              :    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
    5743              :    insn, then record COPY as well.  */
    5744              : 
    5745              : void
    5746      3792597 : maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
    5747              : {
    5748      3792597 :   hash_table<insn_cache_hasher> *hash;
    5749      3792597 :   rtx *slot;
    5750              : 
    5751      3792597 :   hash = epilogue_insn_hash;
    5752      3792597 :   if (!hash || !hash->find (insn))
    5753              :     {
    5754      3278424 :       hash = prologue_insn_hash;
    5755      3278424 :       if (!hash || !hash->find (insn))
    5756      3202379 :         return;
    5757              :     }
    5758              : 
    5759       590218 :   slot = hash->find_slot (copy, INSERT);
    5760       590218 :   gcc_assert (*slot == NULL);
    5761       590218 :   *slot = copy;
    5762              : }
    5763              : 
    5764              : /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
    5765              :    we can be running after reorg, SEQUENCE rtl is possible.  */
    5766              : 
    5767              : static bool
    5768    285651006 : contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
    5769              : {
    5770    285651006 :   if (hash == NULL)
    5771              :     return false;
    5772              : 
    5773    285597416 :   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    5774              :     {
    5775            0 :       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
    5776            0 :       int i;
    5777            0 :       for (i = seq->len () - 1; i >= 0; i--)
    5778            0 :         if (hash->find (seq->element (i)))
    5779              :           return true;
    5780              :       return false;
    5781              :     }
    5782              : 
    5783    285597416 :   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
    5784              : }
    5785              : 
    5786              : bool
    5787    108664605 : prologue_contains (const rtx_insn *insn)
    5788              : {
    5789    108664605 :   return contains (insn, prologue_insn_hash);
    5790              : }
    5791              : 
    5792              : bool
    5793    108664605 : epilogue_contains (const rtx_insn *insn)
    5794              : {
    5795    108664605 :   return contains (insn, epilogue_insn_hash);
    5796              : }
    5797              : 
    5798              : bool
    5799         4479 : prologue_epilogue_contains (const rtx_insn *insn)
    5800              : {
    5801         4479 :   if (contains (insn, prologue_insn_hash))
    5802              :     return true;
    5803         4289 :   if (contains (insn, epilogue_insn_hash))
    5804              :     return true;
    5805              :   return false;
    5806              : }
    5807              : 
    5808              : void
    5809       175865 : record_prologue_seq (rtx_insn *seq)
    5810              : {
    5811       175865 :   record_insns (seq, NULL, &prologue_insn_hash);
    5812       175865 : }
    5813              : 
    5814              : void
    5815       156089 : record_epilogue_seq (rtx_insn *seq)
    5816              : {
    5817       156089 :   record_insns (seq, NULL, &epilogue_insn_hash);
    5818       156089 : }
    5819              : 
    5820              : /* Set JUMP_LABEL for a return insn.  */
    5821              : 
    5822              : void
    5823      1517790 : set_return_jump_label (rtx_insn *returnjump)
    5824              : {
    5825      1517790 :   rtx pat = PATTERN (returnjump);
    5826      1517790 :   if (GET_CODE (pat) == PARALLEL)
    5827        27139 :     pat = XVECEXP (pat, 0, 0);
    5828      1517790 :   if (ANY_RETURN_P (pat))
    5829      1517790 :     JUMP_LABEL (returnjump) = pat;
    5830              :   else
    5831            0 :     JUMP_LABEL (returnjump) = ret_rtx;
    5832      1517790 : }
    5833              : 
    5834              : /* Return a sequence to be used as the split prologue for the current
    5835              :    function, or NULL.  */
    5836              : 
    5837              : static rtx_insn *
    5838      1517128 : make_split_prologue_seq (void)
    5839              : {
    5840      1517128 :   if (!flag_split_stack
    5841      1517128 :       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    5842      1257196 :     return NULL;
    5843              : 
    5844       259932 :   start_sequence ();
    5845       259932 :   emit_insn (targetm.gen_split_stack_prologue ());
    5846       259932 :   rtx_insn *seq = end_sequence ();
    5847              : 
    5848       259932 :   record_insns (seq, NULL, &prologue_insn_hash);
    5849       259932 :   set_insn_locations (seq, prologue_location);
    5850              : 
    5851       259932 :   return seq;
    5852              : }
    5853              : 
    5854              : /* Return a sequence to be used as the prologue for the current function,
    5855              :    or NULL.  */
    5856              : 
    5857              : static rtx_insn *
    5858      1517128 : make_prologue_seq (void)
    5859              : {
    5860      1517128 :   if (!targetm.have_prologue ())
    5861              :     return NULL;
    5862              : 
    5863      1517128 :   start_sequence ();
    5864      1517128 :   rtx_insn *seq = targetm.gen_prologue ();
    5865      1517128 :   emit_insn (seq);
    5866              : 
    5867              :   /* Insert an explicit USE for the frame pointer
    5868              :      if the profiling is on and the frame pointer is required.  */
    5869      1517128 :   if (crtl->profile && frame_pointer_needed)
    5870          280 :     emit_use (hard_frame_pointer_rtx);
    5871              : 
    5872              :   /* Retain a map of the prologue insns.  */
    5873      1517128 :   record_insns (seq, NULL, &prologue_insn_hash);
    5874      1517128 :   emit_note (NOTE_INSN_PROLOGUE_END);
    5875              : 
    5876              :   /* Ensure that instructions are not moved into the prologue when
    5877              :      profiling is on.  The call to the profiling routine can be
    5878              :      emitted within the live range of a call-clobbered register.  */
    5879      1517128 :   if (!targetm.profile_before_prologue () && crtl->profile)
    5880           12 :     emit_insn (gen_blockage ());
    5881              : 
    5882      1517128 :   seq = end_sequence ();
    5883      1517128 :   set_insn_locations (seq, prologue_location);
    5884              : 
    5885      1517128 :   return seq;
    5886              : }
    5887              : 
    5888              : /* Emit a sequence of insns to zero the call-used registers before RET
    5889              :    according to ZERO_REGS_TYPE.  */
    5890              : 
    5891              : static void
    5892          176 : gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
    5893              : {
    5894          176 :   bool only_gpr = true;
    5895          176 :   bool only_used = true;
    5896          176 :   bool only_arg = true;
    5897              : 
    5898              :   /* No need to zero call-used-regs in main ().  */
    5899          176 :   if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    5900           46 :     return;
    5901              : 
    5902              :   /* No need to zero call-used-regs if __builtin_eh_return is called
    5903              :      since it isn't a normal function return.  */
    5904          134 :   if (crtl->calls_eh_return)
    5905              :     return;
    5906              : 
    5907              :   /* If only_gpr is true, only zero call-used registers that are
    5908              :      general-purpose registers; if only_used is true, only zero
    5909              :      call-used registers that are used in the current function;
    5910              :      if only_arg is true, only zero call-used registers that pass
    5911              :      parameters defined by the flatform's calling conversion.  */
    5912              : 
    5913          134 :   using namespace zero_regs_flags;
    5914              : 
    5915          134 :   only_gpr = zero_regs_type & ONLY_GPR;
    5916          134 :   only_used = zero_regs_type & ONLY_USED;
    5917          134 :   only_arg = zero_regs_type & ONLY_ARG;
    5918              : 
    5919          134 :   if ((zero_regs_type & LEAFY_MODE) && leaf_function_p ())
    5920              :     only_used = true;
    5921              : 
    5922              :   /* For each of the hard registers, we should zero it if:
    5923              :             1. it is a call-used register;
    5924              :         and 2. it is not a fixed register;
    5925              :         and 3. it is not live at the return of the routine;
    5926              :         and 4. it is general registor if only_gpr is true;
    5927              :         and 5. it is used in the routine if only_used is true;
    5928              :         and 6. it is a register that passes parameter if only_arg is true.  */
    5929              : 
    5930              :   /* First, prepare the data flow information.  */
    5931          134 :   basic_block bb = BLOCK_FOR_INSN (ret);
    5932          134 :   auto_bitmap live_out;
    5933          134 :   bitmap_copy (live_out, df_get_live_out (bb));
    5934          134 :   df_simulate_initialize_backwards (bb, live_out);
    5935          134 :   df_simulate_one_insn_backwards (bb, ret, live_out);
    5936              : 
    5937          134 :   HARD_REG_SET selected_hardregs;
    5938          134 :   HARD_REG_SET all_call_used_regs;
    5939          536 :   CLEAR_HARD_REG_SET (selected_hardregs);
    5940        12462 :   CLEAR_HARD_REG_SET (all_call_used_regs);
    5941        12462 :   for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    5942              :     {
    5943        12328 :       if (!crtl->abi->clobbers_full_reg_p (regno))
    5944         1206 :         continue;
    5945        11122 :       if (fixed_regs[regno])
    5946         5612 :         continue;
    5947         5510 :       if (REGNO_REG_SET_P (live_out, regno))
    5948          124 :         continue;
    5949              : #ifdef LEAF_REG_REMAP
    5950              :       if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
    5951              :         continue;
    5952              : #endif
    5953              :       /* This is a call used register that is dead at return.  */
    5954         5386 :       SET_HARD_REG_BIT (all_call_used_regs, regno);
    5955              : 
    5956         7306 :       if (only_gpr
    5957         5386 :           && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
    5958         1920 :         continue;
    5959         3466 :       if (only_used && !df_regs_ever_live_p (regno))
    5960         1806 :         continue;
    5961         1660 :       if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
    5962          364 :         continue;
    5963              : 
    5964              :       /* Now this is a register that we might want to zero.  */
    5965         1296 :       SET_HARD_REG_BIT (selected_hardregs, regno);
    5966              :     }
    5967              : 
    5968          134 :   if (hard_reg_set_empty_p (selected_hardregs))
    5969            4 :     return;
    5970              : 
    5971              :   /* Now that we have a hard register set that needs to be zeroed, pass it to
    5972              :      target to generate zeroing sequence.  */
    5973          130 :   HARD_REG_SET zeroed_hardregs;
    5974          130 :   start_sequence ();
    5975          130 :   zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
    5976              : 
    5977              :   /* For most targets, the returned set of registers is a subset of
    5978              :      selected_hardregs, however, for some of the targets (for example MIPS),
    5979              :      clearing some registers that are in selected_hardregs requires clearing
    5980              :      other call used registers that are not in the selected_hardregs, under
    5981              :      such situation, the returned set of registers must be a subset of
    5982              :      all call used registers.  */
    5983          260 :   gcc_assert (hard_reg_set_subset_p (zeroed_hardregs, all_call_used_regs));
    5984              : 
    5985          130 :   rtx_insn *seq = end_sequence ();
    5986          130 :   if (seq)
    5987              :     {
    5988              :       /* Emit the memory blockage and register clobber asm volatile before
    5989              :          the whole sequence.  */
    5990          130 :       start_sequence ();
    5991          130 :       expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
    5992          130 :       rtx_insn *seq_barrier = end_sequence ();
    5993              : 
    5994          130 :       emit_insn_before (seq_barrier, ret);
    5995          130 :       emit_insn_before (seq, ret);
    5996              : 
    5997              :       /* Update the data flow information.  */
    5998          130 :       crtl->must_be_zero_on_return |= zeroed_hardregs;
    5999          130 :       df_update_exit_block_uses ();
    6000              :     }
    6001          134 : }
    6002              : 
    6003              : 
    6004              : /* Return a sequence to be used as the epilogue for the current function,
    6005              :    or NULL.  */
    6006              : 
    6007              : static rtx_insn *
    6008      1517128 : make_epilogue_seq (void)
    6009              : {
    6010      1517128 :   if (!targetm.have_epilogue ())
    6011              :     return NULL;
    6012              : 
    6013      1517128 :   start_sequence ();
    6014      1517128 :   emit_note (NOTE_INSN_EPILOGUE_BEG);
    6015      1517128 :   rtx_insn *seq = targetm.gen_epilogue ();
    6016      1517128 :   if (seq)
    6017      1517128 :     emit_jump_insn (seq);
    6018              : 
    6019              :   /* Retain a map of the epilogue insns.  */
    6020      1517128 :   record_insns (seq, NULL, &epilogue_insn_hash);
    6021      1517128 :   set_insn_locations (seq, epilogue_location);
    6022              : 
    6023      1517128 :   seq = get_insns ();
    6024      1517128 :   rtx_insn *returnjump = get_last_insn ();
    6025      1517128 :   end_sequence ();
    6026              : 
    6027      1517128 :   if (JUMP_P (returnjump))
    6028      1517054 :     set_return_jump_label (returnjump);
    6029              : 
    6030              :   return seq;
    6031              : }
    6032              : 
    6033              : 
    6034              : /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
    6035              :    this into place with notes indicating where the prologue ends and where
    6036              :    the epilogue begins.  Update the basic block information when possible.
    6037              : 
    6038              :    Notes on epilogue placement:
    6039              :    There are several kinds of edges to the exit block:
    6040              :    * a single fallthru edge from LAST_BB
    6041              :    * possibly, edges from blocks containing sibcalls
    6042              :    * possibly, fake edges from infinite loops
    6043              : 
    6044              :    The epilogue is always emitted on the fallthru edge from the last basic
    6045              :    block in the function, LAST_BB, into the exit block.
    6046              : 
    6047              :    If LAST_BB is empty except for a label, it is the target of every
    6048              :    other basic block in the function that ends in a return.  If a
    6049              :    target has a return or simple_return pattern (possibly with
    6050              :    conditional variants), these basic blocks can be changed so that a
    6051              :    return insn is emitted into them, and their target is adjusted to
    6052              :    the real exit block.
    6053              : 
    6054              :    Notes on shrink wrapping: We implement a fairly conservative
    6055              :    version of shrink-wrapping rather than the textbook one.  We only
    6056              :    generate a single prologue and a single epilogue.  This is
    6057              :    sufficient to catch a number of interesting cases involving early
    6058              :    exits.
    6059              : 
    6060              :    First, we identify the blocks that require the prologue to occur before
    6061              :    them.  These are the ones that modify a call-saved register, or reference
    6062              :    any of the stack or frame pointer registers.  To simplify things, we then
    6063              :    mark everything reachable from these blocks as also requiring a prologue.
    6064              :    This takes care of loops automatically, and avoids the need to examine
    6065              :    whether MEMs reference the frame, since it is sufficient to check for
    6066              :    occurrences of the stack or frame pointer.
    6067              : 
    6068              :    We then compute the set of blocks for which the need for a prologue
    6069              :    is anticipatable (borrowing terminology from the shrink-wrapping
    6070              :    description in Muchnick's book).  These are the blocks which either
    6071              :    require a prologue themselves, or those that have only successors
    6072              :    where the prologue is anticipatable.  The prologue needs to be
    6073              :    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
    6074              :    is not.  For the moment, we ensure that only one such edge exists.
    6075              : 
    6076              :    The epilogue is placed as described above, but we make a
    6077              :    distinction between inserting return and simple_return patterns
    6078              :    when modifying other blocks that end in a return.  Blocks that end
    6079              :    in a sibcall omit the sibcall_epilogue if the block is not in
    6080              :    ANTIC.  */
    6081              : 
    6082              : void
    6083      1471363 : thread_prologue_and_epilogue_insns (void)
    6084              : {
    6085      1471363 :   df_analyze ();
    6086              : 
    6087              :   /* Can't deal with multiple successors of the entry block at the
    6088              :      moment.  Function should always have at least one entry
    6089              :      point.  */
    6090      1471363 :   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
    6091              : 
    6092      1471363 :   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
    6093      1471363 :   edge orig_entry_edge = entry_edge;
    6094              : 
    6095      1471363 :   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
    6096      1471363 :   rtx_insn *prologue_seq = make_prologue_seq ();
    6097      1471363 :   rtx_insn *epilogue_seq = make_epilogue_seq ();
    6098              : 
    6099              :   /* Try to perform a kind of shrink-wrapping, making sure the
    6100              :      prologue/epilogue is emitted only around those parts of the
    6101              :      function that require it.  */
    6102      1471363 :   try_shrink_wrapping (&entry_edge, prologue_seq);
    6103              : 
    6104              :   /* If the target can handle splitting the prologue/epilogue into separate
    6105              :      components, try to shrink-wrap these components separately.  */
    6106      1471363 :   try_shrink_wrapping_separate (entry_edge->dest);
    6107              : 
    6108              :   /* If that did anything for any component we now need the generate the
    6109              :      "main" prologue again.  Because some targets require some of these
    6110              :      to be called in a specific order (i386 requires the split prologue
    6111              :      to be first, for example), we create all three sequences again here.
    6112              :      If this does not work for some target, that target should not enable
    6113              :      separate shrink-wrapping.  */
    6114      1471363 :   if (crtl->shrink_wrapped_separate)
    6115              :     {
    6116        45765 :       split_prologue_seq = make_split_prologue_seq ();
    6117        45765 :       prologue_seq = make_prologue_seq ();
    6118        45765 :       epilogue_seq = make_epilogue_seq ();
    6119              :     }
    6120              : 
    6121      1471363 :   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
    6122              : 
    6123              :   /* A small fib -- epilogue is not yet completed, but we wish to re-use
    6124              :      this marker for the splits of EH_RETURN patterns, and nothing else
    6125              :      uses the flag in the meantime.  */
    6126      1471363 :   epilogue_completed = 1;
    6127              : 
    6128              :   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
    6129              :      some targets, these get split to a special version of the epilogue
    6130              :      code.  In order to be able to properly annotate these with unwind
    6131              :      info, try to split them now.  If we get a valid split, drop an
    6132              :      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
    6133      1471363 :   edge e;
    6134      1471363 :   edge_iterator ei;
    6135      2998427 :   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6136              :     {
    6137      1527064 :       rtx_insn *prev, *last, *trial;
    6138              : 
    6139      1527064 :       if (e->flags & EDGE_FALLTHRU)
    6140      1342164 :         continue;
    6141       184900 :       last = BB_END (e->src);
    6142       184900 :       if (!eh_returnjump_p (last))
    6143       184871 :         continue;
    6144              : 
    6145           29 :       prev = PREV_INSN (last);
    6146           29 :       trial = try_split (PATTERN (last), last, 1);
    6147           29 :       if (trial == last)
    6148            0 :         continue;
    6149              : 
    6150           29 :       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
    6151           29 :       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    6152              :     }
    6153              : 
    6154      1471363 :   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
    6155              : 
    6156      1471363 :   if (exit_fallthru_edge)
    6157              :     {
    6158      1342164 :       if (epilogue_seq)
    6159              :         {
    6160      1342164 :           insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
    6161      1342164 :           commit_edge_insertions ();
    6162              : 
    6163              :           /* The epilogue insns we inserted may cause the exit edge to no longer
    6164              :              be fallthru.  */
    6165      2766458 :           FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6166              :             {
    6167      1424294 :               if (((e->flags & EDGE_FALLTHRU) != 0)
    6168      1424294 :                   && returnjump_p (BB_END (e->src)))
    6169            0 :                 e->flags &= ~EDGE_FALLTHRU;
    6170              :             }
    6171              : 
    6172      1342164 :           find_sub_basic_blocks (BLOCK_FOR_INSN (epilogue_seq));
    6173              :         }
    6174            0 :       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
    6175              :         {
    6176              :           /* We have a fall-through edge to the exit block, the source is not
    6177              :              at the end of the function, and there will be an assembler epilogue
    6178              :              at the end of the function.
    6179              :              We can't use force_nonfallthru here, because that would try to
    6180              :              use return.  Inserting a jump 'by hand' is extremely messy, so
    6181              :              we take advantage of cfg_layout_finalize using
    6182              :              fixup_fallthru_exit_predecessor.  */
    6183            0 :           cfg_layout_initialize (0);
    6184            0 :           basic_block cur_bb;
    6185            0 :           FOR_EACH_BB_FN (cur_bb, cfun)
    6186            0 :             if (cur_bb->index >= NUM_FIXED_BLOCKS
    6187            0 :                 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
    6188            0 :               cur_bb->aux = cur_bb->next_bb;
    6189            0 :           cfg_layout_finalize ();
    6190              :         }
    6191              :     }
    6192              : 
    6193              :   /* Insert the prologue.  */
    6194              : 
    6195      1471363 :   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
    6196              : 
    6197      1471363 :   if (split_prologue_seq || prologue_seq)
    6198              :     {
    6199      1471363 :       rtx_insn *split_prologue_insn = split_prologue_seq;
    6200      1471363 :       if (split_prologue_seq)
    6201              :         {
    6202       259928 :           while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
    6203            0 :             split_prologue_insn = NEXT_INSN (split_prologue_insn);
    6204       259928 :           insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
    6205              :         }
    6206              : 
    6207      1471363 :       rtx_insn *prologue_insn = prologue_seq;
    6208      1471363 :       if (prologue_seq)
    6209              :         {
    6210      1861713 :           while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
    6211       390350 :             prologue_insn = NEXT_INSN (prologue_insn);
    6212      1471363 :           insert_insn_on_edge (prologue_seq, entry_edge);
    6213              :         }
    6214              : 
    6215      1471363 :       commit_edge_insertions ();
    6216              : 
    6217              :       /* Look for basic blocks within the prologue insns.  */
    6218      1471363 :       if (split_prologue_insn
    6219      1471363 :           && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
    6220              :         split_prologue_insn = NULL;
    6221      1471363 :       if (prologue_insn
    6222      1471363 :           && BLOCK_FOR_INSN (prologue_insn) == NULL)
    6223              :         prologue_insn = NULL;
    6224      1471363 :       if (split_prologue_insn || prologue_insn)
    6225              :         {
    6226      1104656 :           auto_sbitmap blocks (last_basic_block_for_fn (cfun));
    6227      1104656 :           bitmap_clear (blocks);
    6228      1104656 :           if (split_prologue_insn)
    6229       259928 :             bitmap_set_bit (blocks,
    6230       259928 :                             BLOCK_FOR_INSN (split_prologue_insn)->index);
    6231      1104656 :           if (prologue_insn)
    6232      1081013 :             bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
    6233      1104656 :           find_many_sub_basic_blocks (blocks);
    6234      1104656 :         }
    6235              :     }
    6236              : 
    6237      1471363 :   default_rtl_profile ();
    6238              : 
    6239              :   /* Emit sibling epilogues before any sibling call sites.  */
    6240      1471363 :   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
    6241      2998427 :        (e = ei_safe_edge (ei));
    6242      1527064 :        ei_next (&ei))
    6243              :     {
    6244              :       /* Skip those already handled, the ones that run without prologue.  */
    6245      1527064 :       if (e->flags & EDGE_IGNORE)
    6246              :         {
    6247         4983 :           e->flags &= ~EDGE_IGNORE;
    6248         4983 :           continue;
    6249              :         }
    6250              : 
    6251      1522081 :       rtx_insn *insn = BB_END (e->src);
    6252              : 
    6253      1522081 :       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
    6254      1396722 :         continue;
    6255              : 
    6256       125359 :       rtx_insn *ep_seq;
    6257       125359 :       if (targetm.emit_epilogue_for_sibcall)
    6258              :         {
    6259            0 :           start_sequence ();
    6260            0 :           targetm.emit_epilogue_for_sibcall (as_a<rtx_call_insn *> (insn));
    6261            0 :           ep_seq = end_sequence ();
    6262              :         }
    6263              :       else
    6264       125359 :         ep_seq = targetm.gen_sibcall_epilogue ();
    6265       125359 :       if (ep_seq)
    6266              :         {
    6267        58050 :           start_sequence ();
    6268        58050 :           emit_note (NOTE_INSN_EPILOGUE_BEG);
    6269        58050 :           emit_insn (ep_seq);
    6270        58050 :           rtx_insn *seq = end_sequence ();
    6271              : 
    6272              :           /* Retain a map of the epilogue insns.  Used in life analysis to
    6273              :              avoid getting rid of sibcall epilogue insns.  Do this before we
    6274              :              actually emit the sequence.  */
    6275        58050 :           record_insns (seq, NULL, &epilogue_insn_hash);
    6276        58050 :           set_insn_locations (seq, epilogue_location);
    6277              : 
    6278        58050 :           emit_insn_before (seq, insn);
    6279              : 
    6280        58050 :           find_sub_basic_blocks (BLOCK_FOR_INSN (insn));
    6281              :         }
    6282              :     }
    6283              : 
    6284      1471363 :   if (epilogue_seq)
    6285              :     {
    6286              :       rtx_insn *insn, *next;
    6287              : 
    6288              :       /* Similarly, move any line notes that appear after the epilogue.
    6289              :          There is no need, however, to be quite so anal about the existence
    6290              :          of such a note.  Also possibly move
    6291              :          NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
    6292              :          info generation.  */
    6293     11553811 :       for (insn = epilogue_seq; insn; insn = next)
    6294              :         {
    6295     10082448 :           next = NEXT_INSN (insn);
    6296     10082448 :           if (NOTE_P (insn)
    6297      3083261 :               && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
    6298            0 :             reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
    6299              :         }
    6300              :     }
    6301              : 
    6302              :   /* Threading the prologue and epilogue changes the artificial refs in the
    6303              :      entry and exit blocks, and may invalidate DF info for tail calls.
    6304              :      This is also needed for [[musttail]] conversion even when not
    6305              :      optimizing.  */
    6306      1471363 :   if (optimize
    6307       427678 :       || cfun->tail_call_marked
    6308       427518 :       || flag_optimize_sibling_calls
    6309       427477 :       || flag_ipa_icf_functions
    6310       427420 :       || in_lto_p)
    6311      1052374 :     df_update_entry_exit_and_calls ();
    6312              :   else
    6313              :     {
    6314       418989 :       df_update_entry_block_defs ();
    6315       418989 :       df_update_exit_block_uses ();
    6316              :     }
    6317      1471363 : }
    6318              : 
    6319              : /* Reposition the prologue-end and epilogue-begin notes after
    6320              :    instruction scheduling.  */
    6321              : 
    6322              : void
    6323       963984 : reposition_prologue_and_epilogue_notes (void)
    6324              : {
    6325       963984 :   if (!targetm.have_prologue ()
    6326            0 :       && !targetm.have_epilogue ()
    6327            0 :       && !targetm.have_sibcall_epilogue ()
    6328       963984 :       && !targetm.emit_epilogue_for_sibcall)
    6329              :     return;
    6330              : 
    6331              :   /* Since the hash table is created on demand, the fact that it is
    6332              :      non-null is a signal that it is non-empty.  */
    6333       963984 :   if (prologue_insn_hash != NULL)
    6334              :     {
    6335       963984 :       size_t len = prologue_insn_hash->elements ();
    6336       963984 :       rtx_insn *insn, *last = NULL, *note = NULL;
    6337              : 
    6338              :       /* Scan from the beginning until we reach the last prologue insn.  */
    6339              :       /* ??? While we do have the CFG intact, there are two problems:
    6340              :          (1) The prologue can contain loops (typically probing the stack),
    6341              :              which means that the end of the prologue isn't in the first bb.
    6342              :          (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
    6343     69171434 :       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    6344              :         {
    6345     68635253 :           if (NOTE_P (insn))
    6346              :             {
    6347     12882102 :               if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
    6348     68207450 :                 note = insn;
    6349              :             }
    6350     55753151 :           else if (contains (insn, prologue_insn_hash))
    6351              :             {
    6352      3455900 :               last = insn;
    6353      3455900 :               if (--len == 0)
    6354              :                 break;
    6355              :             }
    6356              :         }
    6357              : 
    6358       963984 :       if (last)
    6359              :         {
    6360       625924 :           if (note == NULL)
    6361              :             {
    6362              :               /* Scan forward looking for the PROLOGUE_END note.  It should
    6363              :                  be right at the beginning of the block, possibly with other
    6364              :                  insn notes that got moved there.  */
    6365         1397 :               for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
    6366              :                 {
    6367         1397 :                   if (NOTE_P (note)
    6368         1283 :                       && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
    6369              :                     break;
    6370              :                 }
    6371              :             }
    6372              : 
    6373              :           /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
    6374       625924 :           if (LABEL_P (last))
    6375          229 :             last = NEXT_INSN (last);
    6376       625924 :           reorder_insns (note, note, last);
    6377              :         }
    6378              :     }
    6379              : 
    6380       963984 :   if (epilogue_insn_hash != NULL)
    6381              :     {
    6382       963984 :       edge_iterator ei;
    6383       963984 :       edge e;
    6384              : 
    6385      2193705 :       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6386              :         {
    6387      1229721 :           rtx_insn *insn, *first = NULL, *note = NULL;
    6388      1229721 :           basic_block bb = e->src;
    6389              : 
    6390              :           /* Scan from the beginning until we reach the first epilogue insn. */
    6391     16689064 :           FOR_BB_INSNS (bb, insn)
    6392              :             {
    6393     16537264 :               if (NOTE_P (insn))
    6394              :                 {
    6395      3936829 :                   if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
    6396              :                     {
    6397      1077921 :                       note = insn;
    6398      1077921 :                       if (first != NULL)
    6399              :                         break;
    6400              :                     }
    6401              :                 }
    6402     12600435 :               else if (first == NULL && contains (insn, epilogue_insn_hash))
    6403              :                 {
    6404      1077925 :                   first = insn;
    6405      1077925 :                   if (note != NULL)
    6406              :                     break;
    6407              :                 }
    6408              :             }
    6409              : 
    6410      1229721 :           if (note)
    6411              :             {
    6412              :               /* If the function has a single basic block, and no real
    6413              :                  epilogue insns (e.g. sibcall with no cleanup), the
    6414              :                  epilogue note can get scheduled before the prologue
    6415              :                  note.  If we have frame related prologue insns, having
    6416              :                  them scanned during the epilogue will result in a crash.
    6417              :                  In this case re-order the epilogue note to just before
    6418              :                  the last insn in the block.  */
    6419      1077921 :               if (first == NULL)
    6420            0 :                 first = BB_END (bb);
    6421              : 
    6422      1077921 :               if (PREV_INSN (first) != note)
    6423        30990 :                 reorder_insns (note, note, PREV_INSN (first));
    6424              :             }
    6425              :         }
    6426              :     }
    6427              : }
    6428              : 
    6429              : /* Returns the name of function declared by FNDECL.  */
    6430              : const char *
    6431       119310 : fndecl_name (tree fndecl)
    6432              : {
    6433       119310 :   if (fndecl == NULL)
    6434              :     return "(nofn)";
    6435       119295 :   return lang_hooks.decl_printable_name (fndecl, 1);
    6436              : }
    6437              : 
    6438              : /* Returns the name of function FN.  */
    6439              : const char *
    6440       119257 : function_name (const function *fn)
    6441              : {
    6442       119257 :   tree fndecl = (fn == NULL) ? NULL : fn->decl;
    6443       119257 :   return fndecl_name (fndecl);
    6444              : }
    6445              : 
    6446              : /* Returns the name of the current function.  */
    6447              : const char *
    6448         8725 : current_function_name (void)
    6449              : {
    6450         8725 :   return function_name (cfun);
    6451              : }
    6452              : 
    6453              : 
    6454              : static void
    6455            0 : rest_of_handle_check_leaf_regs (void)
    6456              : {
    6457              : #ifdef LEAF_REGISTERS
    6458              :   crtl->uses_only_leaf_regs
    6459              :     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
    6460              : #endif
    6461            0 : }
    6462              : 
    6463              : /* Insert a TYPE into the used types hash table of CFUN.  */
    6464              : 
    6465              : static void
    6466     66388451 : used_types_insert_helper (tree type, struct function *func)
    6467              : {
    6468     66388451 :   if (type != NULL && func != NULL)
    6469              :     {
    6470     66388451 :       if (func->used_types_hash == NULL)
    6471     19092070 :         func->used_types_hash = hash_set<tree>::create_ggc (37);
    6472              : 
    6473     66388451 :       func->used_types_hash->add (type);
    6474              :     }
    6475     66388451 : }
    6476              : 
    6477              : /* Given a type, insert it into the used hash table in cfun.  */
    6478              : void
    6479    218975303 : used_types_insert (tree t)
    6480              : {
    6481    230216692 :   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    6482     11672272 :     if (TYPE_NAME (t))
    6483              :       break;
    6484              :     else
    6485     11241389 :       t = TREE_TYPE (t);
    6486    218975303 :   if (TREE_CODE (t) == ERROR_MARK)
    6487              :     return;
    6488    218975298 :   if (TYPE_NAME (t) == NULL_TREE
    6489    218975298 :       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    6490     87610245 :     t = TYPE_MAIN_VARIANT (t);
    6491    218975298 :   if (debug_info_level > DINFO_LEVEL_NONE)
    6492              :     {
    6493     95114902 :       if (cfun)
    6494     66388451 :         used_types_insert_helper (t, cfun);
    6495              :       else
    6496              :         {
    6497              :           /* So this might be a type referenced by a global variable.
    6498              :              Record that type so that we can later decide to emit its
    6499              :              debug information.  */
    6500     28726451 :           vec_safe_push (types_used_by_cur_var_decl, t);
    6501              :         }
    6502              :     }
    6503              : }
    6504              : 
    6505              : /* Helper to Hash a struct types_used_by_vars_entry.  */
    6506              : 
    6507              : static hashval_t
    6508    193081925 : hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
    6509              : {
    6510    193081925 :   gcc_assert (entry && entry->var_decl && entry->type);
    6511              : 
    6512    193081925 :   return iterative_hash_object (entry->type,
    6513              :                                 iterative_hash_object (entry->var_decl, 0));
    6514              : }
    6515              : 
    6516              : /* Hash function of the types_used_by_vars_entry hash table.  */
    6517              : 
    6518              : hashval_t
    6519    193081925 : used_type_hasher::hash (types_used_by_vars_entry *entry)
    6520              : {
    6521    193081925 :   return hash_types_used_by_vars_entry (entry);
    6522              : }
    6523              : 
    6524              : /*Equality function of the types_used_by_vars_entry hash table.  */
    6525              : 
    6526              : bool
    6527    205090515 : used_type_hasher::equal (types_used_by_vars_entry *e1,
    6528              :                          types_used_by_vars_entry *e2)
    6529              : {
    6530    205090515 :   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
    6531              : }
    6532              : 
    6533              : /* Inserts an entry into the types_used_by_vars_hash hash table. */
    6534              : 
    6535              : void
    6536     28655592 : types_used_by_var_decl_insert (tree type, tree var_decl)
    6537              : {
    6538     28655592 :   if (type != NULL && var_decl != NULL)
    6539              :     {
    6540     28655592 :       types_used_by_vars_entry **slot;
    6541     28655592 :       struct types_used_by_vars_entry e;
    6542     28655592 :       e.var_decl = var_decl;
    6543     28655592 :       e.type = type;
    6544     28655592 :       if (types_used_by_vars_hash == NULL)
    6545        14377 :         types_used_by_vars_hash
    6546        14377 :           = hash_table<used_type_hasher>::create_ggc (37);
    6547              : 
    6548     28655592 :       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
    6549     28655592 :       if (*slot == NULL)
    6550              :         {
    6551      8930441 :           struct types_used_by_vars_entry *entry;
    6552      8930441 :           entry = ggc_alloc<types_used_by_vars_entry> ();
    6553      8930441 :           entry->type = type;
    6554      8930441 :           entry->var_decl = var_decl;
    6555      8930441 :           *slot = entry;
    6556              :         }
    6557              :     }
    6558     28655592 : }
    6559              : 
    6560              : namespace {
    6561              : 
    6562              : const pass_data pass_data_leaf_regs =
    6563              : {
    6564              :   RTL_PASS, /* type */
    6565              :   "*leaf_regs", /* name */
    6566              :   OPTGROUP_NONE, /* optinfo_flags */
    6567              :   TV_NONE, /* tv_id */
    6568              :   0, /* properties_required */
    6569              :   0, /* properties_provided */
    6570              :   0, /* properties_destroyed */
    6571              :   0, /* todo_flags_start */
    6572              :   0, /* todo_flags_finish */
    6573              : };
    6574              : 
    6575              : class pass_leaf_regs : public rtl_opt_pass
    6576              : {
    6577              : public:
    6578       285722 :   pass_leaf_regs (gcc::context *ctxt)
    6579       571444 :     : rtl_opt_pass (pass_data_leaf_regs, ctxt)
    6580              :   {}
    6581              : 
    6582              :   /* opt_pass methods: */
    6583      1471363 :   unsigned int execute (function *) final override
    6584              :     {
    6585      1471363 :       rest_of_handle_check_leaf_regs ();
    6586      1471363 :       return 0;
    6587              :     }
    6588              : 
    6589              : }; // class pass_leaf_regs
    6590              : 
    6591              : } // anon namespace
    6592              : 
    6593              : rtl_opt_pass *
    6594       285722 : make_pass_leaf_regs (gcc::context *ctxt)
    6595              : {
    6596       285722 :   return new pass_leaf_regs (ctxt);
    6597              : }
    6598              : 
    6599              : static void
    6600      1471363 : rest_of_handle_thread_prologue_and_epilogue (function *fun)
    6601              : {
    6602              :   /* prepare_shrink_wrap is sensitive to the block structure of the control
    6603              :      flow graph, so clean it up first.  */
    6604      1471363 :   if (cfun->tail_call_marked || optimize)
    6605      1043845 :     cleanup_cfg (0);
    6606              : 
    6607              :   /* On some machines, the prologue and epilogue code, or parts thereof,
    6608              :      can be represented as RTL.  Doing so lets us schedule insns between
    6609              :      it and the rest of the code and also allows delayed branch
    6610              :      scheduling to operate in the epilogue.  */
    6611      1471363 :   thread_prologue_and_epilogue_insns ();
    6612              : 
    6613              :   /* Some non-cold blocks may now be only reachable from cold blocks.
    6614              :      Fix that up.  */
    6615      1471363 :   fixup_partitions ();
    6616              : 
    6617              :   /* After prologue and epilogue generation, the judgement on whether
    6618              :      one memory access onto stack frame may trap or not could change,
    6619              :      since we get more exact stack information by now.  So try to
    6620              :      remove any EH edges here, see PR90259.  */
    6621      1471363 :   if (fun->can_throw_non_call_exceptions)
    6622       262866 :     purge_all_dead_edges ();
    6623              : 
    6624              :   /* Shrink-wrapping can result in unreachable edges in the epilogue,
    6625              :      see PR57320.  */
    6626      1899041 :   cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
    6627              : 
    6628              :   /* The stack usage info is finalized during prologue expansion.  */
    6629      1471363 :   if (flag_stack_usage_info || flag_callgraph_info)
    6630          356 :     output_stack_usage ();
    6631      1471363 : }
    6632              : 
    6633              : /* Record a final call to CALLEE at LOCATION.  */
    6634              : 
    6635              : void
    6636            0 : record_final_call (tree callee, location_t location)
    6637              : {
    6638            0 :   struct callinfo_callee datum = { location, callee };
    6639            0 :   vec_safe_push (cfun->su->callees, datum);
    6640            0 : }
    6641              : 
    6642              : /* Record a dynamic allocation made for DECL_OR_EXP.  */
    6643              : 
    6644              : void
    6645            0 : record_dynamic_alloc (tree decl_or_exp)
    6646              : {
    6647            0 :   struct callinfo_dalloc datum;
    6648              : 
    6649            0 :   if (DECL_P (decl_or_exp))
    6650              :     {
    6651            0 :       datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
    6652            0 :       const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
    6653            0 :       const char *dot = strrchr (name, '.');
    6654            0 :       if (dot)
    6655            0 :         name = dot + 1;
    6656            0 :       datum.name = ggc_strdup (name);
    6657              :     }
    6658              :   else
    6659              :     {
    6660            0 :       datum.location = EXPR_LOCATION (decl_or_exp);
    6661            0 :       datum.name = NULL;
    6662              :     }
    6663              : 
    6664            0 :   vec_safe_push (cfun->su->dallocs, datum);
    6665            0 : }
    6666              : 
    6667              : namespace {
    6668              : 
    6669              : const pass_data pass_data_thread_prologue_and_epilogue =
    6670              : {
    6671              :   RTL_PASS, /* type */
    6672              :   "pro_and_epilogue", /* name */
    6673              :   OPTGROUP_NONE, /* optinfo_flags */
    6674              :   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
    6675              :   0, /* properties_required */
    6676              :   0, /* properties_provided */
    6677              :   0, /* properties_destroyed */
    6678              :   0, /* todo_flags_start */
    6679              :   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
    6680              : };
    6681              : 
    6682              : class pass_thread_prologue_and_epilogue : public rtl_opt_pass
    6683              : {
    6684              : public:
    6685       285722 :   pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    6686       571444 :     : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
    6687              :   {}
    6688              : 
    6689              :   /* opt_pass methods: */
    6690      1471370 :   bool gate (function *) final override
    6691              :     {
    6692      1471370 :       return !targetm.use_late_prologue_epilogue ();
    6693              :     }
    6694              : 
    6695      1471363 :   unsigned int execute (function * fun) final override
    6696              :     {
    6697      1471363 :       rest_of_handle_thread_prologue_and_epilogue (fun);
    6698      1471363 :       return 0;
    6699              :     }
    6700              : 
    6701              : }; // class pass_thread_prologue_and_epilogue
    6702              : 
    6703              : const pass_data pass_data_late_thread_prologue_and_epilogue =
    6704              : {
    6705              :   RTL_PASS, /* type */
    6706              :   "late_pro_and_epilogue", /* name */
    6707              :   OPTGROUP_NONE, /* optinfo_flags */
    6708              :   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
    6709              :   0, /* properties_required */
    6710              :   0, /* properties_provided */
    6711              :   0, /* properties_destroyed */
    6712              :   0, /* todo_flags_start */
    6713              :   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
    6714              : };
    6715              : 
    6716              : class pass_late_thread_prologue_and_epilogue : public rtl_opt_pass
    6717              : {
    6718              : public:
    6719       285722 :   pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
    6720       571444 :     : rtl_opt_pass (pass_data_late_thread_prologue_and_epilogue, ctxt)
    6721              :   {}
    6722              : 
    6723              :   /* opt_pass methods: */
    6724      1471370 :   bool gate (function *) final override
    6725              :     {
    6726      1471370 :       return targetm.use_late_prologue_epilogue ();
    6727              :     }
    6728              : 
    6729            0 :   unsigned int execute (function *fn) final override
    6730              :     {
    6731              :       /* It's not currently possible to have both delay slots and
    6732              :          late prologue/epilogue, since the latter has to run before
    6733              :          the former, and the former won't honor whatever restrictions
    6734              :          the latter is trying to enforce.  */
    6735            0 :       gcc_assert (!DELAY_SLOTS);
    6736            0 :       rest_of_handle_thread_prologue_and_epilogue (fn);
    6737            0 :       return 0;
    6738              :     }
    6739              : }; // class pass_late_thread_prologue_and_epilogue
    6740              : 
    6741              : } // anon namespace
    6742              : 
    6743              : rtl_opt_pass *
    6744       285722 : make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    6745              : {
    6746       285722 :   return new pass_thread_prologue_and_epilogue (ctxt);
    6747              : }
    6748              : 
    6749              : rtl_opt_pass *
    6750       285722 : make_pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
    6751              : {
    6752       285722 :   return new pass_late_thread_prologue_and_epilogue (ctxt);
    6753              : }
    6754              : 
    6755              : namespace {
    6756              : 
    6757              : const pass_data pass_data_zero_call_used_regs =
    6758              : {
    6759              :   RTL_PASS, /* type */
    6760              :   "zero_call_used_regs", /* name */
    6761              :   OPTGROUP_NONE, /* optinfo_flags */
    6762              :   TV_NONE, /* tv_id */
    6763              :   0, /* properties_required */
    6764              :   0, /* properties_provided */
    6765              :   0, /* properties_destroyed */
    6766              :   0, /* todo_flags_start */
    6767              :   0, /* todo_flags_finish */
    6768              : };
    6769              : 
    6770              : class pass_zero_call_used_regs: public rtl_opt_pass
    6771              : {
    6772              : public:
    6773       285722 :   pass_zero_call_used_regs (gcc::context *ctxt)
    6774       571444 :     : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
    6775              :   {}
    6776              : 
    6777              :   /* opt_pass methods: */
    6778              :   unsigned int execute (function *) final override;
    6779              : 
    6780              : }; // class pass_zero_call_used_regs
    6781              : 
    6782              : unsigned int
    6783      1471363 : pass_zero_call_used_regs::execute (function *fun)
    6784              : {
    6785      1471363 :   using namespace zero_regs_flags;
    6786      1471363 :   unsigned int zero_regs_type = UNSET;
    6787              : 
    6788      1471363 :   tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
    6789      1471363 :                                           DECL_ATTRIBUTES (fun->decl));
    6790              : 
    6791              :   /* Get the type of zero_call_used_regs from function attribute.
    6792              :      We have filtered out invalid attribute values already at this point.  */
    6793      1471363 :   if (attr_zero_regs)
    6794              :     {
    6795              :       /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
    6796              :          is the attribute argument's value.  */
    6797           88 :       attr_zero_regs = TREE_VALUE (attr_zero_regs);
    6798           88 :       gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
    6799           88 :       attr_zero_regs = TREE_VALUE (attr_zero_regs);
    6800           88 :       gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
    6801              : 
    6802          496 :       for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
    6803          496 :         if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
    6804          496 :                      zero_call_used_regs_opts[i].name) == 0)
    6805              :           {
    6806           88 :             zero_regs_type = zero_call_used_regs_opts[i].flag;
    6807           88 :             break;
    6808              :           }
    6809              :     }
    6810              : 
    6811           88 :   if (!zero_regs_type)
    6812      1471275 :     zero_regs_type = flag_zero_call_used_regs;
    6813              : 
    6814              :   /* No need to zero call-used-regs when no user request is present.  */
    6815      1471363 :   if (!(zero_regs_type & ENABLED))
    6816              :     return 0;
    6817              : 
    6818          182 :   edge_iterator ei;
    6819          182 :   edge e;
    6820              : 
    6821              :   /* This pass needs data flow information.  */
    6822          182 :   df_analyze ();
    6823              : 
    6824              :   /* Iterate over the function's return instructions and insert any
    6825              :      register zeroing required by the -fzero-call-used-regs command-line
    6826              :      option or the "zero_call_used_regs" function attribute.  */
    6827          365 :   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6828              :     {
    6829          183 :       rtx_insn *insn = BB_END (e->src);
    6830          183 :       if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
    6831          176 :         gen_call_used_regs_seq (insn, zero_regs_type);
    6832              :     }
    6833              : 
    6834              :   return 0;
    6835              : }
    6836              : 
    6837              : } // anon namespace
    6838              : 
    6839              : rtl_opt_pass *
    6840       285722 : make_pass_zero_call_used_regs (gcc::context *ctxt)
    6841              : {
    6842       285722 :   return new pass_zero_call_used_regs (ctxt);
    6843              : }
    6844              : 
    6845              : /* If CONSTRAINT is a matching constraint, then return its number.
    6846              :    Otherwise, return -1.  */
    6847              : 
    6848              : static int
    6849        42412 : matching_constraint_num (const char *constraint)
    6850              : {
    6851        42412 :   if (*constraint == '%')
    6852         1140 :     constraint++;
    6853              : 
    6854        42412 :   if (IN_RANGE (*constraint, '0', '9'))
    6855        31261 :     return strtoul (constraint, NULL, 10);
    6856              : 
    6857              :   return -1;
    6858              : }
    6859              : 
    6860              : /* This mini-pass fixes fall-out from SSA in asm statements that have
    6861              :    in-out constraints.  Say you start with
    6862              : 
    6863              :      orig = inout;
    6864              :      asm ("": "+mr" (inout));
    6865              :      use (orig);
    6866              : 
    6867              :    which is transformed very early to use explicit output and match operands:
    6868              : 
    6869              :      orig = inout;
    6870              :      asm ("": "=mr" (inout) : "0" (inout));
    6871              :      use (orig);
    6872              : 
    6873              :    Or, after SSA and copyprop,
    6874              : 
    6875              :      asm ("": "=mr" (inout_2) : "0" (inout_1));
    6876              :      use (inout_1);
    6877              : 
    6878              :    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
    6879              :    they represent two separate values, so they will get different pseudo
    6880              :    registers during expansion.  Then, since the two operands need to match
    6881              :    per the constraints, but use different pseudo registers, reload can
    6882              :    only register a reload for these operands.  But reloads can only be
    6883              :    satisfied by hardregs, not by memory, so we need a register for this
    6884              :    reload, just because we are presented with non-matching operands.
    6885              :    So, even though we allow memory for this operand, no memory can be
    6886              :    used for it, just because the two operands don't match.  This can
    6887              :    cause reload failures on register-starved targets.
    6888              : 
    6889              :    So it's a symptom of reload not being able to use memory for reloads
    6890              :    or, alternatively it's also a symptom of both operands not coming into
    6891              :    reload as matching (in which case the pseudo could go to memory just
    6892              :    fine, as the alternative allows it, and no reload would be necessary).
    6893              :    We fix the latter problem here, by transforming
    6894              : 
    6895              :      asm ("": "=mr" (inout_2) : "0" (inout_1));
    6896              : 
    6897              :    back to
    6898              : 
    6899              :      inout_2 = inout_1;
    6900              :      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
    6901              : 
    6902              : static void
    6903        34062 : match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
    6904              : {
    6905        34062 :   int i;
    6906        34062 :   bool changed = false;
    6907        34062 :   rtx op = SET_SRC (p_sets[0]);
    6908        34062 :   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
    6909        34062 :   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
    6910        34062 :   bool *output_matched = XALLOCAVEC (bool, noutputs);
    6911              : 
    6912        34062 :   memset (output_matched, 0, noutputs * sizeof (bool));
    6913        75545 :   for (i = 0; i < ninputs; i++)
    6914              :     {
    6915        41483 :       rtx input, output;
    6916        41483 :       rtx_insn *insns;
    6917        41483 :       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
    6918        41483 :       int match, j;
    6919              : 
    6920        41483 :       match = matching_constraint_num (constraint);
    6921        41483 :       if (match < 0)
    6922        11147 :         continue;
    6923              : 
    6924        30336 :       gcc_assert (match < noutputs);
    6925        30336 :       output = SET_DEST (p_sets[match]);
    6926        30336 :       input = RTVEC_ELT (inputs, i);
    6927              :       /* Only do the transformation for pseudos.  */
    6928        31309 :       if (! REG_P (output)
    6929        30159 :           || rtx_equal_p (output, input)
    6930        29502 :           || !(REG_P (input) || SUBREG_P (input)
    6931         3160 :                || MEM_P (input) || CONSTANT_P (input))
    6932        59837 :           || !general_operand (input, GET_MODE (output)))
    6933          973 :         continue;
    6934              : 
    6935              :       /* We can't do anything if the output is also used as input,
    6936              :          as we're going to overwrite it.  */
    6937        80005 :       for (j = 0; j < ninputs; j++)
    6938        50642 :         if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
    6939              :           break;
    6940        29363 :       if (j != ninputs)
    6941            0 :         continue;
    6942              : 
    6943              :       /* Avoid changing the same input several times.  For
    6944              :          asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
    6945              :          only change it once (to out1), rather than changing it
    6946              :          first to out1 and afterwards to out2.  */
    6947        29363 :       if (i > 0)
    6948              :         {
    6949        39938 :           for (j = 0; j < noutputs; j++)
    6950        32615 :             if (output_matched[j] && input == SET_DEST (p_sets[j]))
    6951              :               break;
    6952         7394 :           if (j != noutputs)
    6953           71 :             continue;
    6954              :         }
    6955        29292 :       output_matched[match] = true;
    6956              : 
    6957        29292 :       start_sequence ();
    6958        29292 :       emit_move_insn (output, copy_rtx (input));
    6959        29292 :       insns = end_sequence ();
    6960        29292 :       emit_insn_before (insns, insn);
    6961              : 
    6962        29292 :       constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT(SET_SRC(p_sets[match]));
    6963        29292 :       bool early_clobber_p = strchr (constraint, '&') != NULL;
    6964              : 
    6965              :       /* Now replace all mentions of the input with output.  We can't
    6966              :          just replace the occurrence in inputs[i], as the register might
    6967              :          also be used in some other input (or even in an address of an
    6968              :          output), which would mean possibly increasing the number of
    6969              :          inputs by one (namely 'output' in addition), which might pose
    6970              :          a too complicated problem for reload to solve.  E.g. this situation:
    6971              : 
    6972              :            asm ("" : "=r" (output), "=m" (input) : "0" (input))
    6973              : 
    6974              :          Here 'input' is used in two occurrences as input (once for the
    6975              :          input operand, once for the address in the second output operand).
    6976              :          If we would replace only the occurrence of the input operand (to
    6977              :          make the matching) we would be left with this:
    6978              : 
    6979              :            output = input
    6980              :            asm ("" : "=r" (output), "=m" (input) : "0" (output))
    6981              : 
    6982              :          Now we suddenly have two different input values (containing the same
    6983              :          value, but different pseudos) where we formerly had only one.
    6984              :          With more complicated asms this might lead to reload failures
    6985              :          which wouldn't have happen without this pass.  So, iterate over
    6986              :          all operands and replace all occurrences of the register used.
    6987              : 
    6988              :          However, if one or more of the 'input' uses have a non-matching
    6989              :          constraint and the matched output operand is an early clobber
    6990              :          operand, then do not replace the input operand, since by definition
    6991              :          it conflicts with the output operand and cannot share the same
    6992              :          register.  See PR89313 for details.  */
    6993              : 
    6994       144275 :       for (j = 0; j < noutputs; j++)
    6995       114983 :         if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
    6996       114983 :             && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
    6997           49 :           SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
    6998              :                                               input, output);
    6999        79646 :       for (j = 0; j < ninputs; j++)
    7000        50354 :         if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
    7001              :           {
    7002        26913 :             if (!early_clobber_p
    7003        27842 :                 || match == matching_constraint_num
    7004          929 :                               (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
    7005        26909 :               RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
    7006              :                                                    input, output);
    7007              :           }
    7008              : 
    7009              :       changed = true;
    7010              :     }
    7011              : 
    7012        34062 :   if (changed)
    7013        22314 :     df_insn_rescan (insn);
    7014        34062 : }
    7015              : 
    7016              : /* It is expected and desired that optimizations coalesce multiple pseudos into
    7017              :    one whenever possible.  However, in case of hard register constraints we may
    7018              :    have to undo this and introduce copies since otherwise we could constraint a
    7019              :    single pseudo to different hard registers.  For example, during register
    7020              :    allocation the following insn would be unsatisfiable since pseudo 60 is
    7021              :    constrained to hard register r5 and r6 at the same time.
    7022              : 
    7023              :    (insn 7 5 0 2 (asm_operands/v ("foo") ("") 0 [
    7024              :                (reg:DI 60) repeated x2
    7025              :            ]
    7026              :             [
    7027              :                (asm_input:DI ("{r5}") t.c:4)
    7028              :                (asm_input:DI ("{r6}") t.c:4)
    7029              :            ]
    7030              :             [] t.c:4) "t.c":4:3 -1
    7031              :         (expr_list:REG_DEAD (reg:DI 60)
    7032              :            (nil)))
    7033              : 
    7034              :    Therefore, introduce a copy of pseudo 60 and transform it into
    7035              : 
    7036              :    (insn 10 5 7 2 (set (reg:DI 62)
    7037              :            (reg:DI 60)) "t.c":4:3 1503 {*movdi_64}
    7038              :         (nil))
    7039              :    (insn 7 10 11 2 (asm_operands/v ("foo") ("") 0 [
    7040              :                (reg:DI 60)
    7041              :                (reg:DI 62)
    7042              :            ]
    7043              :             [
    7044              :                (asm_input:DI ("{r5}") t.c:4)
    7045              :                (asm_input:DI ("{r6}") t.c:4)
    7046              :            ]
    7047              :             [] t.c:4) "t.c":4:3 -1
    7048              :         (expr_list:REG_DEAD (reg:DI 62)
    7049              :            (expr_list:REG_DEAD (reg:DI 60)
    7050              :                (nil))))
    7051              : 
    7052              :    Now, LRA can assign pseudo 60 to r5, and pseudo 62 to r6.
    7053              : 
    7054              :    TODO: The current implementation is conservative and we could do a bit
    7055              :    better in case of alternatives.  For example
    7056              : 
    7057              :    (insn 7 5 0 2 (asm_operands/v ("foo") ("") 0 [
    7058              :                (reg:DI 60) repeated x2
    7059              :            ]
    7060              :             [
    7061              :                (asm_input:DI ("r,{r5}") t.c:4)
    7062              :                (asm_input:DI ("{r6},r") t.c:4)
    7063              :            ]
    7064              :             [] t.c:4) "t.c":4:3 -1
    7065              :         (expr_list:REG_DEAD (reg:DI 60)
    7066              :            (nil)))
    7067              : 
    7068              :    For this insn we wouldn't need to come up with a copy of pseudo 60 since in
    7069              :    each alternative pseudo 60 is constrained exactly one time.  */
    7070              : 
    7071              : static void
    7072      3395167 : match_asm_constraints_2 (rtx_insn *insn, rtx pat)
    7073              : {
    7074      3395167 :   rtx op;
    7075      3395167 :   if (GET_CODE (pat) == SET && GET_CODE (SET_SRC (pat)) == ASM_OPERANDS)
    7076              :     op = SET_SRC (pat);
    7077      3320123 :   else if (GET_CODE (pat) == ASM_OPERANDS)
    7078              :     op = pat;
    7079              :   else
    7080      3247657 :     return;
    7081       147510 :   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
    7082       147510 :   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
    7083       147510 :   bool changed = false;
    7084       147510 :   auto_bitmap constrained_regs;
    7085              : 
    7086       265390 :   for (int i = 0; i < ninputs; ++i)
    7087              :     {
    7088       117880 :       rtx input = RTVEC_ELT (inputs, i);
    7089       117880 :       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
    7090        22323 :       if ((!REG_P (input) && !SUBREG_P (input))
    7091        95674 :           || (REG_P (input) && HARD_REGISTER_P (input))
    7092       212474 :           || strchr (constraint, '{') == nullptr)
    7093       117832 :         continue;
    7094           48 :       int regno;
    7095           48 :       if (SUBREG_P (input))
    7096              :         {
    7097            0 :           if (REG_P (SUBREG_REG (input)))
    7098            0 :             regno = REGNO (SUBREG_REG (input));
    7099              :           else
    7100            0 :             continue;
    7101              :         }
    7102              :       else
    7103           48 :         regno = REGNO (input);
    7104              :       /* Keep the first usage of a constrained pseudo as is and only
    7105              :          introduce copies for subsequent usages.  */
    7106           48 :       if (! bitmap_bit_p (constrained_regs, regno))
    7107              :         {
    7108           48 :           bitmap_set_bit (constrained_regs, regno);
    7109           48 :           continue;
    7110              :         }
    7111            0 :       rtx tmp = gen_reg_rtx (GET_MODE (input));
    7112            0 :       start_sequence ();
    7113            0 :       emit_move_insn (tmp, input);
    7114            0 :       rtx_insn *insns = get_insns ();
    7115            0 :       end_sequence ();
    7116            0 :       emit_insn_before (insns, insn);
    7117            0 :       RTVEC_ELT (inputs, i) = tmp;
    7118            0 :       changed = true;
    7119              :     }
    7120              : 
    7121       147510 :   if (changed)
    7122            0 :     df_insn_rescan (insn);
    7123       147510 : }
    7124              : 
    7125              : /* Add the decl D to the local_decls list of FUN.  */
    7126              : 
    7127              : void
    7128     37392904 : add_local_decl (struct function *fun, tree d)
    7129              : {
    7130     37392904 :   gcc_assert (VAR_P (d));
    7131     37392904 :   vec_safe_push (fun->local_decls, d);
    7132     37392904 : }
    7133              : 
    7134              : namespace {
    7135              : 
    7136              : const pass_data pass_data_match_asm_constraints =
    7137              : {
    7138              :   RTL_PASS, /* type */
    7139              :   "asmcons", /* name */
    7140              :   OPTGROUP_NONE, /* optinfo_flags */
    7141              :   TV_NONE, /* tv_id */
    7142              :   0, /* properties_required */
    7143              :   0, /* properties_provided */
    7144              :   0, /* properties_destroyed */
    7145              :   0, /* todo_flags_start */
    7146              :   0, /* todo_flags_finish */
    7147              : };
    7148              : 
    7149              : class pass_match_asm_constraints : public rtl_opt_pass
    7150              : {
    7151              : public:
    7152       285722 :   pass_match_asm_constraints (gcc::context *ctxt)
    7153       571444 :     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
    7154              :   {}
    7155              : 
    7156              :   /* opt_pass methods: */
    7157              :   unsigned int execute (function *) final override;
    7158              : 
    7159              : }; // class pass_match_asm_constraints
    7160              : 
    7161              : unsigned
    7162      1471361 : pass_match_asm_constraints::execute (function *fun)
    7163              : {
    7164      1471361 :   basic_block bb;
    7165      1471361 :   rtx_insn *insn;
    7166      1471361 :   rtx pat, *p_sets;
    7167      1471361 :   int noutputs;
    7168              : 
    7169      1471361 :   if (!crtl->has_asm_statement)
    7170              :     return 0;
    7171              : 
    7172        34128 :   df_set_flags (DF_DEFER_INSN_RESCAN);
    7173       327492 :   FOR_EACH_BB_FN (bb, fun)
    7174              :     {
    7175      3759845 :       FOR_BB_INSNS (bb, insn)
    7176              :         {
    7177      3466481 :           if (!INSN_P (insn))
    7178       644409 :             continue;
    7179              : 
    7180      2822072 :           pat = PATTERN (insn);
    7181              : 
    7182      2822072 :           if (GET_CODE (pat) == PARALLEL)
    7183      1462311 :             for (int i = XVECLEN (pat, 0) - 1; i >= 0; --i)
    7184      1017703 :               match_asm_constraints_2 (insn, XVECEXP (pat, 0, i));
    7185              :           else
    7186      2377464 :             match_asm_constraints_2 (insn, pat);
    7187              : 
    7188      2822072 :           if (GET_CODE (pat) == PARALLEL)
    7189       444608 :             p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
    7190      2377464 :           else if (GET_CODE (pat) == SET)
    7191      1693562 :             p_sets = &PATTERN (insn), noutputs = 1;
    7192              :           else
    7193       683902 :             continue;
    7194              : 
    7195      2138170 :           if (GET_CODE (*p_sets) == SET
    7196      2061910 :               && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
    7197        34062 :             match_asm_constraints_1 (insn, p_sets, noutputs);
    7198              :          }
    7199              :     }
    7200              : 
    7201              :   return TODO_df_finish;
    7202              : }
    7203              : 
    7204              : } // anon namespace
    7205              : 
    7206              : rtl_opt_pass *
    7207       285722 : make_pass_match_asm_constraints (gcc::context *ctxt)
    7208              : {
    7209       285722 :   return new pass_match_asm_constraints (ctxt);
    7210              : }
    7211              : 
    7212              : 
    7213              : #include "gt-function.h"
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.