LCOV - code coverage report
Current view: top level - gcc - function.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 83.3 % 2830 2358
Test Date: 2026-03-28 14:25:54 Functions: 90.3 % 165 149
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Expands front end tree to back end RTL for GCC.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : /* This file handles the generation of rtl code from tree structure
      21              :    at the level of the function as a whole.
      22              :    It creates the rtl expressions for parameters and auto variables
      23              :    and has full responsibility for allocating stack slots.
      24              : 
      25              :    `expand_function_start' is called at the beginning of a function,
      26              :    before the function body is parsed, and `expand_function_end' is
      27              :    called after parsing the body.
      28              : 
      29              :    Call `assign_stack_local' to allocate a stack slot for a local variable.
      30              :    This is usually done during the RTL generation for the function body,
      31              :    but it can also be done in the reload pass when a pseudo-register does
      32              :    not get a hard register.  */
      33              : 
      34              : #include "config.h"
      35              : #include "system.h"
      36              : #include "coretypes.h"
      37              : #include "backend.h"
      38              : #include "target.h"
      39              : #include "rtl.h"
      40              : #include "tree.h"
      41              : #include "gimple-expr.h"
      42              : #include "cfghooks.h"
      43              : #include "df.h"
      44              : #include "memmodel.h"
      45              : #include "tm_p.h"
      46              : #include "stringpool.h"
      47              : #include "expmed.h"
      48              : #include "optabs.h"
      49              : #include "opts.h"
      50              : #include "regs.h"
      51              : #include "emit-rtl.h"
      52              : #include "recog.h"
      53              : #include "rtl-error.h"
      54              : #include "hard-reg-set.h"
      55              : #include "alias.h"
      56              : #include "fold-const.h"
      57              : #include "stor-layout.h"
      58              : #include "varasm.h"
      59              : #include "except.h"
      60              : #include "dojump.h"
      61              : #include "explow.h"
      62              : #include "calls.h"
      63              : #include "expr.h"
      64              : #include "optabs-tree.h"
      65              : #include "output.h"
      66              : #include "langhooks.h"
      67              : #include "common/common-target.h"
      68              : #include "gimplify.h"
      69              : #include "tree-pass.h"
      70              : #include "cfgrtl.h"
      71              : #include "cfganal.h"
      72              : #include "cfgbuild.h"
      73              : #include "cfgcleanup.h"
      74              : #include "cfgexpand.h"
      75              : #include "shrink-wrap.h"
      76              : #include "toplev.h"
      77              : #include "rtl-iter.h"
      78              : #include "tree-dfa.h"
      79              : #include "tree-ssa.h"
      80              : #include "stringpool.h"
      81              : #include "attribs.h"
      82              : #include "gimple.h"
      83              : #include "options.h"
      84              : #include "function-abi.h"
      85              : #include "value-range.h"
      86              : #include "gimple-range.h"
      87              : #include "insn-attr.h"
      88              : #include "hierarchical_discriminator.h"
      89              : 
      90              : /* So we can assign to cfun in this file.  */
      91              : #undef cfun
      92              : 
      93              : #ifndef STACK_ALIGNMENT_NEEDED
      94              : #define STACK_ALIGNMENT_NEEDED 1
      95              : #endif
      96              : 
      97              : #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
      98              : 
      99              : /* Round a value to the lowest integer less than it that is a multiple of
     100              :    the required alignment.  Avoid using division in case the value is
     101              :    negative.  Assume the alignment is a power of two.  */
     102              : #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
     103              : 
     104              : /* Similar, but round to the next highest integer that meets the
     105              :    alignment.  */
     106              : #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
     107              : 
     108              : /* Nonzero once virtual register instantiation has been done.
     109              :    assign_stack_local uses frame_pointer_rtx when this is nonzero.
     110              :    calls.cc:emit_library_call_value_1 uses it to set up
     111              :    post-instantiation libcalls.  */
     112              : int virtuals_instantiated;
     113              : 
     114              : /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
     115              : static GTY(()) int funcdef_no;
     116              : 
     117              : /* These variables hold pointers to functions to create and destroy
     118              :    target specific, per-function data structures.  */
     119              : struct machine_function * (*init_machine_status) (void);
     120              : 
     121              : /* The currently compiled function.  */
     122              : struct function *cfun = 0;
     123              : 
     124              : /* These hashes record the prologue and epilogue insns.  */
     125              : 
     126              : struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
     127              : {
     128   1138601490 :   static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
     129              :   static bool equal (rtx a, rtx b) { return a == b; }
     130              : };
     131              : 
     132              : static GTY((cache))
     133              :   hash_table<insn_cache_hasher> *prologue_insn_hash;
     134              : static GTY((cache))
     135              :   hash_table<insn_cache_hasher> *epilogue_insn_hash;
     136              : 
     137              : 
     138              : hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
     139              : vec<tree, va_gc> *types_used_by_cur_var_decl;
     140              : 
     141              : /* Forward declarations.  */
     142              : 
     143              : static class temp_slot *find_temp_slot_from_address (rtx);
     144              : static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
     145              : static void pad_below (struct args_size *, machine_mode, tree);
     146              : static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
     147              : static int all_blocks (tree, tree *);
     148              : static tree *get_block_vector (tree, int *);
     149              : extern tree debug_find_var_in_block_tree (tree, tree);
     150              : /* We always define `record_insns' even if it's not used so that we
     151              :    can always export `prologue_epilogue_contains'.  */
     152              : static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     153              :      ATTRIBUTE_UNUSED;
     154              : static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
     155              : static void prepare_function_start (void);
     156              : static void do_clobber_return_reg (rtx, void *);
     157              : static void do_use_return_reg (rtx, void *);
     158              : 
     159              : 
     160              : /* Stack of nested functions.  */
     161              : /* Keep track of the cfun stack.  */
     162              : 
     163              : static vec<function *> function_context_stack;
     164              : 
     165              : /* Save the current context for compilation of a nested function.
     166              :    This is called from language-specific code.  */
     167              : 
     168              : void
     169    121036987 : push_function_context (void)
     170              : {
     171    121036987 :   if (cfun == 0)
     172           20 :     allocate_struct_function (NULL, false);
     173              : 
     174    121036987 :   function_context_stack.safe_push (cfun);
     175    121036987 :   set_cfun (NULL);
     176    121036987 : }
     177              : 
     178              : /* Restore the last saved context, at the end of a nested function.
     179              :    This function is called from language-specific code.  */
     180              : 
     181              : void
     182    121036966 : pop_function_context (void)
     183              : {
     184    121036966 :   struct function *p = function_context_stack.pop ();
     185    121036966 :   set_cfun (p);
     186    121036966 :   current_function_decl = p->decl;
     187              : 
     188              :   /* Reset variables that have known state during rtx generation.  */
     189    121036966 :   virtuals_instantiated = 0;
     190    121036966 :   generating_concat_p = 1;
     191    121036966 : }
     192              : 
     193              : /* Clear out all parts of the state in F that can safely be discarded
     194              :    after the function has been parsed, but not compiled, to let
     195              :    garbage collection reclaim the memory.  */
     196              : 
     197              : void
     198      1697478 : free_after_parsing (struct function *f)
     199              : {
     200      1697478 :   f->language = 0;
     201      1697478 : }
     202              : 
     203              : /* Clear out all parts of the state in F that can safely be discarded
     204              :    after the function has been compiled, to let garbage collection
     205              :    reclaim the memory.  */
     206              : 
     207              : void
     208      1702902 : free_after_compilation (struct function *f)
     209              : {
     210      1702902 :   prologue_insn_hash = NULL;
     211      1702902 :   epilogue_insn_hash = NULL;
     212              : 
     213      1702902 :   free (crtl->emit.regno_pointer_align);
     214              : 
     215      1702902 :   memset (crtl, 0, sizeof (struct rtl_data));
     216      1702902 :   f->eh = NULL;
     217      1702902 :   f->machine = NULL;
     218      1702902 :   f->cfg = NULL;
     219      1702902 :   f->curr_properties &= ~PROP_cfg;
     220      1703040 :   delete f->cond_uids;
     221      1702902 :   free_copyid_allocator (f);
     222              : 
     223      1702902 :   regno_reg_rtx = NULL;
     224      1702902 : }
     225              : 
     226              : /* Return size needed for stack frame based on slots so far allocated.
     227              :    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
     228              :    the caller may have to do that.  */
     229              : 
     230              : poly_int64
     231    149098458 : get_frame_size (void)
     232              : {
     233    149098458 :   if (FRAME_GROWS_DOWNWARD)
     234    149098458 :     return -frame_offset;
     235              :   else
     236              :     return frame_offset;
     237              : }
     238              : 
     239              : /* Issue an error message and return TRUE if frame OFFSET overflows in
     240              :    the signed target pointer arithmetics for function FUNC.  Otherwise
     241              :    return FALSE.  */
     242              : 
     243              : bool
     244      3975416 : frame_offset_overflow (poly_int64 offset, tree func)
     245              : {
     246      3975416 :   poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
     247      3975416 :   unsigned HOST_WIDE_INT limit
     248      3975416 :     = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
     249              :        /* Leave room for the fixed part of the frame.  */
     250      3975416 :        - 64 * UNITS_PER_WORD);
     251              : 
     252      7950832 :   if (!coeffs_in_range_p (size, 0U, limit))
     253              :     {
     254            0 :       unsigned HOST_WIDE_INT hwisize;
     255            0 :       if (size.is_constant (&hwisize))
     256            0 :         error_at (DECL_SOURCE_LOCATION (func),
     257              :                   "total size of local objects %wu exceeds maximum %wu",
     258              :                   hwisize, limit);
     259              :       else
     260              :         error_at (DECL_SOURCE_LOCATION (func),
     261              :                   "total size of local objects exceeds maximum %wu",
     262              :                   limit);
     263            0 :       return true;
     264              :     }
     265              : 
     266              :   return false;
     267              : }
     268              : 
     269              : /* Return the minimum spill slot alignment for a register of mode MODE.  */
     270              : 
     271              : unsigned int
     272      1426772 : spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
     273              : {
     274      1426772 :   return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
     275              : }
     276              : 
     277              : /* Return stack slot alignment in bits for TYPE and MODE.  */
     278              : 
     279              : static unsigned int
     280       154469 : get_stack_local_alignment (tree type, machine_mode mode)
     281              : {
     282       154469 :   unsigned int alignment;
     283              : 
     284       154469 :   if (mode == BLKmode)
     285        22158 :     alignment = BIGGEST_ALIGNMENT;
     286              :   else
     287       132311 :     alignment = GET_MODE_ALIGNMENT (mode);
     288              : 
     289              :   /* Allow the frond-end to (possibly) increase the alignment of this
     290              :      stack slot.  */
     291       154469 :   if (! type)
     292        56644 :     type = lang_hooks.types.type_for_mode (mode, 0);
     293              : 
     294       154469 :   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
     295              : }
     296              : 
     297              : /* Determine whether it is possible to fit a stack slot of size SIZE and
     298              :    alignment ALIGNMENT into an area in the stack frame that starts at
     299              :    frame offset START and has a length of LENGTH.  If so, store the frame
     300              :    offset to be used for the stack slot in *POFFSET and return true;
     301              :    return false otherwise.  This function will extend the frame size when
     302              :    given a start/length pair that lies at the end of the frame.  */
     303              : 
     304              : static bool
     305      2385467 : try_fit_stack_local (poly_int64 start, poly_int64 length,
     306              :                      poly_int64 size, unsigned int alignment,
     307              :                      poly_int64 *poffset)
     308              : {
     309      2385467 :   poly_int64 this_frame_offset;
     310      2385467 :   int frame_off, frame_alignment, frame_phase;
     311              : 
     312              :   /* Calculate how many bytes the start of local variables is off from
     313              :      stack alignment.  */
     314      2385467 :   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
     315      2385467 :   frame_off = targetm.starting_frame_offset () % frame_alignment;
     316      2385467 :   frame_phase = frame_off ? frame_alignment - frame_off : 0;
     317              : 
     318              :   /* Round the frame offset to the specified alignment.  */
     319              : 
     320      2385467 :   if (FRAME_GROWS_DOWNWARD)
     321      2385467 :     this_frame_offset
     322      2385467 :       = (aligned_lower_bound (start + length - size - frame_phase, alignment)
     323      2385467 :          + frame_phase);
     324              :   else
     325              :     this_frame_offset
     326              :       = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
     327              : 
     328              :   /* See if it fits.  If this space is at the edge of the frame,
     329              :      consider extending the frame to make it fit.  Our caller relies on
     330              :      this when allocating a new slot.  */
     331      2385467 :   if (maybe_lt (this_frame_offset, start))
     332              :     {
     333       495603 :       if (known_eq (frame_offset, start))
     334       367375 :         frame_offset = this_frame_offset;
     335              :       else
     336              :         return false;
     337              :     }
     338      1889864 :   else if (maybe_gt (this_frame_offset + size, start + length))
     339              :     {
     340            0 :       if (known_eq (frame_offset, start + length))
     341            0 :         frame_offset = this_frame_offset + size;
     342              :       else
     343              :         return false;
     344              :     }
     345              : 
     346      2257239 :   *poffset = this_frame_offset;
     347      2257239 :   return true;
     348              : }
     349              : 
     350              : /* Create a new frame_space structure describing free space in the stack
     351              :    frame beginning at START and ending at END, and chain it into the
     352              :    function's frame_space_list.  */
     353              : 
     354              : static void
     355       395310 : add_frame_space (poly_int64 start, poly_int64 end)
     356              : {
     357       395310 :   class frame_space *space = ggc_alloc<frame_space> ();
     358       395310 :   space->next = crtl->frame_space_list;
     359       395310 :   crtl->frame_space_list = space;
     360       395310 :   space->start = start;
     361       395310 :   space->length = end - start;
     362       395310 : }
     363              : 
     364              : /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
     365              :    with machine mode MODE.
     366              : 
     367              :    ALIGN controls the amount of alignment for the address of the slot:
     368              :    0 means according to MODE,
     369              :    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
     370              :    -2 means use BITS_PER_UNIT,
     371              :    positive specifies alignment boundary in bits.
     372              : 
     373              :    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
     374              :    alignment and ASLK_RECORD_PAD bit set if we should remember
     375              :    extra space we allocated for alignment purposes.  When we are
     376              :    called from assign_stack_temp_for_type, it is not set so we don't
     377              :    track the same stack slot in two independent lists.
     378              : 
     379              :    We do not round to stack_boundary here.  */
     380              : 
     381              : rtx
     382      2257239 : assign_stack_local_1 (machine_mode mode, poly_int64 size,
     383              :                       int align, int kind)
     384              : {
     385      2257239 :   rtx x, addr;
     386      2257239 :   poly_int64 bigend_correction = 0;
     387      2257239 :   poly_int64 slot_offset = 0, old_frame_offset;
     388      2257239 :   unsigned int alignment, alignment_in_bits;
     389              : 
     390      2257239 :   if (align == 0)
     391              :     {
     392         7007 :       alignment = get_stack_local_alignment (NULL, mode);
     393         7007 :       alignment /= BITS_PER_UNIT;
     394              :     }
     395      2250232 :   else if (align == -1)
     396              :     {
     397          947 :       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
     398          947 :       size = aligned_upper_bound (size, alignment);
     399              :     }
     400      2249285 :   else if (align == -2)
     401              :     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
     402              :   else
     403      2249285 :     alignment = align / BITS_PER_UNIT;
     404              : 
     405      2257239 :   alignment_in_bits = alignment * BITS_PER_UNIT;
     406              : 
     407              :   /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
     408      2257239 :   if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
     409              :     {
     410            0 :       alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
     411            0 :       alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
     412              :     }
     413              : 
     414      2257239 :   if (SUPPORTS_STACK_ALIGNMENT)
     415              :     {
     416      2257239 :       if (crtl->stack_alignment_estimated < alignment_in_bits)
     417              :         {
     418         4379 :           if (!crtl->stack_realign_processed)
     419         4356 :             crtl->stack_alignment_estimated = alignment_in_bits;
     420              :           else
     421              :             {
     422              :               /* If stack is realigned and stack alignment value
     423              :                  hasn't been finalized, it is OK not to increase
     424              :                  stack_alignment_estimated.  The bigger alignment
     425              :                  requirement is recorded in stack_alignment_needed
     426              :                  below.  */
     427           23 :               gcc_assert (!crtl->stack_realign_finalized);
     428           23 :               if (!crtl->stack_realign_needed)
     429              :                 {
     430              :                   /* It is OK to reduce the alignment as long as the
     431              :                      requested size is 0 or the estimated stack
     432              :                      alignment >= mode alignment.  */
     433           23 :                   gcc_assert ((kind & ASLK_REDUCE_ALIGN)
     434              :                               || known_eq (size, 0)
     435              :                               || (crtl->stack_alignment_estimated
     436              :                                   >= GET_MODE_ALIGNMENT (mode)));
     437           23 :                   alignment_in_bits = crtl->stack_alignment_estimated;
     438           23 :                   alignment = alignment_in_bits / BITS_PER_UNIT;
     439              :                 }
     440              :             }
     441              :         }
     442              :     }
     443              : 
     444      2257239 :   if (crtl->stack_alignment_needed < alignment_in_bits)
     445        18328 :     crtl->stack_alignment_needed = alignment_in_bits;
     446      2257239 :   if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
     447       290979 :     crtl->max_used_stack_slot_alignment = alignment_in_bits;
     448              : 
     449      2257239 :   if (mode != BLKmode || maybe_ne (size, 0))
     450              :     {
     451      1460082 :       if (kind & ASLK_RECORD_PAD)
     452              :         {
     453              :           class frame_space **psp;
     454              : 
     455      1504442 :           for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
     456              :             {
     457       190234 :               class frame_space *space = *psp;
     458       190234 :               if (!try_fit_stack_local (space->start, space->length, size,
     459              :                                         alignment, &slot_offset))
     460       128228 :                 continue;
     461        62006 :               *psp = space->next;
     462        62006 :               if (known_gt (slot_offset, space->start))
     463        25842 :                 add_frame_space (space->start, slot_offset);
     464        62006 :               if (known_lt (slot_offset + size, space->start + space->length))
     465        10905 :                 add_frame_space (slot_offset + size,
     466        10905 :                                  space->start + space->length);
     467        62006 :               goto found_space;
     468              :             }
     469              :         }
     470              :     }
     471              :   else if (!STACK_ALIGNMENT_NEEDED)
     472              :     {
     473              :       slot_offset = frame_offset;
     474              :       goto found_space;
     475              :     }
     476              : 
     477      2195233 :   old_frame_offset = frame_offset;
     478              : 
     479      2195233 :   if (FRAME_GROWS_DOWNWARD)
     480              :     {
     481      2195233 :       frame_offset -= size;
     482      2195233 :       try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
     483              : 
     484      2195233 :       if (kind & ASLK_RECORD_PAD)
     485              :         {
     486      2111365 :           if (known_gt (slot_offset, frame_offset))
     487            0 :             add_frame_space (frame_offset, slot_offset);
     488      2111365 :           if (known_lt (slot_offset + size, old_frame_offset))
     489       358563 :             add_frame_space (slot_offset + size, old_frame_offset);
     490              :         }
     491              :     }
     492              :   else
     493              :     {
     494              :       frame_offset += size;
     495              :       try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
     496              : 
     497              :       if (kind & ASLK_RECORD_PAD)
     498              :         {
     499              :           if (known_gt (slot_offset, old_frame_offset))
     500              :             add_frame_space (old_frame_offset, slot_offset);
     501              :           if (known_lt (slot_offset + size, frame_offset))
     502              :             add_frame_space (slot_offset + size, frame_offset);
     503              :         }
     504              :     }
     505              : 
     506      2257239 :  found_space:
     507              :   /* On a big-endian machine, if we are allocating more space than we will use,
     508              :      use the least significant bytes of those that are allocated.  */
     509      2257239 :   if (mode != BLKmode)
     510              :     {
     511              :       /* The slot size can sometimes be smaller than the mode size;
     512              :          e.g. the rs6000 port allocates slots with a vector mode
     513              :          that have the size of only one element.  However, the slot
     514              :          size must always be ordered wrt to the mode size, in the
     515              :          same way as for a subreg.  */
     516       683590 :       gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
     517              :       if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
     518              :         bigend_correction = size - GET_MODE_SIZE (mode);
     519              :     }
     520              : 
     521              :   /* If we have already instantiated virtual registers, return the actual
     522              :      address relative to the frame pointer.  */
     523      2257239 :   if (virtuals_instantiated)
     524      1849125 :     addr = plus_constant (Pmode, frame_pointer_rtx,
     525              :                           trunc_int_for_mode
     526      1559942 :                           (slot_offset + bigend_correction
     527      1849125 :                            + targetm.starting_frame_offset (), Pmode));
     528              :   else
     529       729825 :     addr = plus_constant (Pmode, virtual_stack_vars_rtx,
     530              :                           trunc_int_for_mode
     531              :                           (slot_offset + bigend_correction,
     532       697297 :                            Pmode));
     533              : 
     534      2257239 :   x = gen_rtx_MEM (mode, addr);
     535      2257239 :   set_mem_align (x, alignment_in_bits);
     536      2257239 :   MEM_NOTRAP_P (x) = 1;
     537              : 
     538      2257239 :   vec_safe_push (stack_slot_list, x);
     539              : 
     540      2257239 :   if (frame_offset_overflow (frame_offset, current_function_decl))
     541            0 :     frame_offset = 0;
     542              : 
     543      2257239 :   return x;
     544              : }
     545              : 
     546              : /* Wrap up assign_stack_local_1 with last parameter as false.  */
     547              : 
     548              : rtx
     549      2173371 : assign_stack_local (machine_mode mode, poly_int64 size, int align)
     550              : {
     551      2173371 :   return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
     552              : }
     553              : 
     554              : /* In order to evaluate some expressions, such as function calls returning
     555              :    structures in memory, we need to temporarily allocate stack locations.
     556              :    We record each allocated temporary in the following structure.
     557              : 
     558              :    Associated with each temporary slot is a nesting level.  When we pop up
     559              :    one level, all temporaries associated with the previous level are freed.
     560              :    Normally, all temporaries are freed after the execution of the statement
     561              :    in which they were created.  However, if we are inside a ({...}) grouping,
     562              :    the result may be in a temporary and hence must be preserved.  If the
     563              :    result could be in a temporary, we preserve it if we can determine which
     564              :    one it is in.  If we cannot determine which temporary may contain the
     565              :    result, all temporaries are preserved.  A temporary is preserved by
     566              :    pretending it was allocated at the previous nesting level.  */
     567              : 
     568              : class GTY(()) temp_slot {
     569              : public:
     570              :   /* Points to next temporary slot.  */
     571              :   class temp_slot *next;
     572              :   /* Points to previous temporary slot.  */
     573              :   class temp_slot *prev;
     574              :   /* The rtx to used to reference the slot.  */
     575              :   rtx slot;
     576              :   /* The size, in units, of the slot.  */
     577              :   poly_int64 size;
     578              :   /* The type of the object in the slot, or zero if it doesn't correspond
     579              :      to a type.  We use this to determine whether a slot can be reused.
     580              :      It can be reused if objects of the type of the new slot will always
     581              :      conflict with objects of the type of the old slot.  */
     582              :   tree type;
     583              :   /* The alignment (in bits) of the slot.  */
     584              :   unsigned int align;
     585              :   /* True if this temporary is currently in use.  */
     586              :   bool in_use;
     587              :   /* Nesting level at which this slot is being used.  */
     588              :   int level;
     589              :   /* The offset of the slot from the frame_pointer, including extra space
     590              :      for alignment.  This info is for combine_temp_slots.  */
     591              :   poly_int64 base_offset;
     592              :   /* The size of the slot, including extra space for alignment.  This
     593              :      info is for combine_temp_slots.  */
     594              :   poly_int64 full_size;
     595              : };
     596              : 
     597              : /* Entry for the below hash table.  */
     598              : struct GTY((for_user)) temp_slot_address_entry {
     599              :   hashval_t hash;
     600              :   rtx address;
     601              :   class temp_slot *temp_slot;
     602              : };
     603              : 
     604              : struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
     605              : {
     606              :   static hashval_t hash (temp_slot_address_entry *);
     607              :   static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
     608              : };
     609              : 
     610              : /* A table of addresses that represent a stack slot.  The table is a mapping
     611              :    from address RTXen to a temp slot.  */
     612              : static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
     613              : static size_t n_temp_slots_in_use;
     614              : 
     615              : /* Removes temporary slot TEMP from LIST.  */
     616              : 
     617              : static void
     618       212441 : cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
     619              : {
     620            0 :   if (temp->next)
     621        27998 :     temp->next->prev = temp->prev;
     622       212441 :   if (temp->prev)
     623         7960 :     temp->prev->next = temp->next;
     624              :   else
     625       204481 :     *list = temp->next;
     626              : 
     627       212441 :   temp->prev = temp->next = NULL;
     628          697 : }
     629              : 
     630              : /* Inserts temporary slot TEMP to LIST.  */
     631              : 
     632              : static void
     633       296233 : insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
     634              : {
     635       296233 :   temp->next = *list;
     636            0 :   if (*list)
     637        80558 :     (*list)->prev = temp;
     638       296233 :   temp->prev = NULL;
     639       296233 :   *list = temp;
     640            0 : }
     641              : 
     642              : /* Returns the list of used temp slots at LEVEL.  */
     643              : 
     644              : static class temp_slot **
     645     66118491 : temp_slots_at_level (int level)
     646              : {
     647    130823656 :   if (level >= (int) vec_safe_length (used_temp_slots))
     648      1895232 :     vec_safe_grow_cleared (used_temp_slots, level + 1, true);
     649              : 
     650     66118491 :   return &(*used_temp_slots)[level];
     651              : }
     652              : 
     653              : /* Returns the maximal temporary slot level.  */
     654              : 
     655              : static int
     656      1346959 : max_slot_level (void)
     657              : {
     658            0 :   if (!used_temp_slots)
     659              :     return -1;
     660              : 
     661      1292330 :   return used_temp_slots->length () - 1;
     662              : }
     663              : 
     664              : /* Moves temporary slot TEMP to LEVEL.  */
     665              : 
     666              : static void
     667         1232 : move_slot_to_level (class temp_slot *temp, int level)
     668              : {
     669         1232 :   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
     670         1232 :   insert_slot_to_list (temp, temp_slots_at_level (level));
     671         1232 :   temp->level = level;
     672         1232 : }
     673              : 
     674              : /* Make temporary slot TEMP available.  */
     675              : 
     676              : static void
     677       146918 : make_slot_available (class temp_slot *temp)
     678              : {
     679       146918 :   cut_slot_from_list (temp, temp_slots_at_level (temp->level));
     680       146918 :   insert_slot_to_list (temp, &avail_temp_slots);
     681       146918 :   temp->in_use = false;
     682       146918 :   temp->level = -1;
     683       146918 :   n_temp_slots_in_use--;
     684       146918 : }
     685              : 
     686              : /* Compute the hash value for an address -> temp slot mapping.
     687              :    The value is cached on the mapping entry.  */
     688              : static hashval_t
     689      8903153 : temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
     690              : {
     691      8903153 :   int do_not_record = 0;
     692      8903153 :   return hash_rtx (t->address, GET_MODE (t->address),
     693      8903153 :                    &do_not_record, NULL, false);
     694              : }
     695              : 
     696              : /* Return the hash value for an address -> temp slot mapping.  */
     697              : hashval_t
     698        30598 : temp_address_hasher::hash (temp_slot_address_entry *t)
     699              : {
     700        30598 :   return t->hash;
     701              : }
     702              : 
     703              : /* Compare two address -> temp slot mapping entries.  */
     704              : bool
     705        31366 : temp_address_hasher::equal (temp_slot_address_entry *t1,
     706              :                             temp_slot_address_entry *t2)
     707              : {
     708        31366 :   return exp_equiv_p (t1->address, t2->address, 0, true);
     709              : }
     710              : 
     711              : /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping.  */
     712              : static void
     713       147499 : insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
     714              : {
     715       147499 :   struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
     716       147499 :   t->address = copy_rtx (address);
     717       147499 :   t->temp_slot = temp_slot;
     718       147499 :   t->hash = temp_slot_address_compute_hash (t);
     719       147499 :   *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
     720       147499 : }
     721              : 
     722              : /* Remove an address -> temp slot mapping entry if the temp slot is
     723              :    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
     724              : int
     725         1055 : remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
     726              : {
     727         1055 :   const struct temp_slot_address_entry *t = *slot;
     728         1055 :   if (! t->temp_slot->in_use)
     729          604 :     temp_slot_address_table->clear_slot (slot);
     730         1055 :   return 1;
     731              : }
     732              : 
     733              : /* Remove all mappings of addresses to unused temp slots.  */
     734              : static void
     735       139497 : remove_unused_temp_slot_addresses (void)
     736              : {
     737              :   /* Use quicker clearing if there aren't any active temp slots.  */
     738       139497 :   if (n_temp_slots_in_use)
     739          434 :     temp_slot_address_table->traverse
     740         1489 :       <void *, remove_unused_temp_slot_addresses_1> (NULL);
     741              :   else
     742       139063 :     temp_slot_address_table->empty ();
     743       139497 : }
     744              : 
     745              : /* Find the temp slot corresponding to the object at address X.  */
     746              : 
     747              : static class temp_slot *
     748      8755654 : find_temp_slot_from_address (rtx x)
     749              : {
     750      8755654 :   class temp_slot *p;
     751      8755654 :   struct temp_slot_address_entry tmp, *t;
     752              : 
     753              :   /* First try the easy way:
     754              :      See if X exists in the address -> temp slot mapping.  */
     755      8755654 :   tmp.address = x;
     756      8755654 :   tmp.temp_slot = NULL;
     757      8755654 :   tmp.hash = temp_slot_address_compute_hash (&tmp);
     758      8755654 :   t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
     759      8755654 :   if (t)
     760         1263 :     return t->temp_slot;
     761              : 
     762              :   /* If we have a sum involving a register, see if it points to a temp
     763              :      slot.  */
     764      1538309 :   if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
     765      9942363 :       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
     766              :     return p;
     767      1538309 :   else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
     768      8971756 :            && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
     769              :     return p;
     770              : 
     771              :   /* Last resort: Address is a virtual stack var address.  */
     772      8754391 :   poly_int64 offset;
     773      8754391 :   if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
     774              :     {
     775      1346959 :       int i;
     776      5885083 :       for (i = max_slot_level (); i >= 0; i--)
     777      3247319 :         for (p = *temp_slots_at_level (i); p; p = p->next)
     778         3050 :           if (known_in_range_p (offset, p->base_offset, p->full_size))
     779              :             return p;
     780              :     }
     781              : 
     782              :   return NULL;
     783              : }
     784              : 
     785              : /* Allocate a temporary stack slot and record it for possible later
     786              :    reuse.
     787              : 
     788              :    MODE is the machine mode to be given to the returned rtx.
     789              : 
     790              :    SIZE is the size in units of the space required.  We do no rounding here
     791              :    since assign_stack_local will do any required rounding.
     792              : 
     793              :    TYPE is the type that will be used for the stack slot.  */
     794              : 
     795              : rtx
     796       147462 : assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
     797              : {
     798       147462 :   unsigned int align;
     799       147462 :   class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
     800       147462 :   rtx slot;
     801              : 
     802       147462 :   gcc_assert (known_size_p (size));
     803              : 
     804       147462 :   align = get_stack_local_alignment (type, mode);
     805              : 
     806              :   /* Try to find an available, already-allocated temporary of the proper
     807              :      mode which meets the size and alignment requirements.  Choose the
     808              :      smallest one with the closest alignment.
     809              : 
     810              :      If assign_stack_temp is called outside of the tree->rtl expansion,
     811              :      we cannot reuse the stack slots (that may still refer to
     812              :      VIRTUAL_STACK_VARS_REGNUM).  */
     813       147462 :   if (!virtuals_instantiated)
     814              :     {
     815     11106100 :       for (p = avail_temp_slots; p; p = p->next)
     816              :         {
     817     11009107 :           if (p->align >= align
     818     10449344 :               && known_ge (p->size, size)
     819     10432409 :               && GET_MODE (p->slot) == mode
     820     10295326 :               && objects_must_conflict_p (p->type, type)
     821     11072929 :               && (best_p == 0
     822          228 :                   || (known_eq (best_p->size, p->size)
     823          126 :                       ? best_p->align > p->align
     824          102 :                       : known_ge (best_p->size, p->size))))
     825              :             {
     826        63676 :               if (p->align == align && known_eq (p->size, size))
     827              :                 {
     828        50469 :                   selected = p;
     829        50469 :                   cut_slot_from_list (selected, &avail_temp_slots);
     830        50469 :                   best_p = 0;
     831        50469 :                   break;
     832              :                 }
     833              :               best_p = p;
     834              :             }
     835              :         }
     836              :     }
     837              : 
     838              :   /* Make our best, if any, the one to use.  */
     839       147462 :   if (best_p)
     840              :     {
     841        13125 :       selected = best_p;
     842        13125 :       cut_slot_from_list (selected, &avail_temp_slots);
     843              : 
     844              :       /* If there are enough aligned bytes left over, make them into a new
     845              :          temp_slot so that the extra bytes don't get wasted.  Do this only
     846              :          for BLKmode slots, so that we can be sure of the alignment.  */
     847        13125 :       if (GET_MODE (best_p->slot) == BLKmode)
     848              :         {
     849        10804 :           int alignment = best_p->align / BITS_PER_UNIT;
     850        10804 :           poly_int64 rounded_size = aligned_upper_bound (size, alignment);
     851              : 
     852        10804 :           if (known_ge (best_p->size - rounded_size, alignment))
     853              :             {
     854          621 :               p = ggc_alloc<temp_slot> ();
     855          621 :               p->in_use = false;
     856          621 :               p->size = best_p->size - rounded_size;
     857          621 :               p->base_offset = best_p->base_offset + rounded_size;
     858          621 :               p->full_size = best_p->full_size - rounded_size;
     859          621 :               p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
     860          621 :               p->align = best_p->align;
     861          621 :               p->type = best_p->type;
     862          621 :               insert_slot_to_list (p, &avail_temp_slots);
     863              : 
     864          621 :               vec_safe_push (stack_slot_list, p->slot);
     865              : 
     866          621 :               best_p->size = rounded_size;
     867          621 :               best_p->full_size = rounded_size;
     868              :             }
     869              :         }
     870              :     }
     871              : 
     872              :   /* If we still didn't find one, make a new temporary.  */
     873       145141 :   if (selected == 0)
     874              :     {
     875        83868 :       poly_int64 frame_offset_old = frame_offset;
     876              : 
     877        83868 :       p = ggc_alloc<temp_slot> ();
     878              : 
     879              :       /* We are passing an explicit alignment request to assign_stack_local.
     880              :          One side effect of that is assign_stack_local will not round SIZE
     881              :          to ensure the frame offset remains suitably aligned.
     882              : 
     883              :          So for requests which depended on the rounding of SIZE, we go ahead
     884              :          and round it now.  We also make sure ALIGNMENT is at least
     885              :          BIGGEST_ALIGNMENT.  */
     886        93693 :       gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
     887        83868 :       p->slot = assign_stack_local_1 (mode,
     888              :                                       (mode == BLKmode
     889         9918 :                                        ? aligned_upper_bound (size,
     890              :                                                               (int) align
     891              :                                                               / BITS_PER_UNIT)
     892              :                                        : size),
     893              :                                       align, 0);
     894              : 
     895        83868 :       p->align = align;
     896              : 
     897              :       /* The following slot size computation is necessary because we don't
     898              :          know the actual size of the temporary slot until assign_stack_local
     899              :          has performed all the frame alignment and size rounding for the
     900              :          requested temporary.  Note that extra space added for alignment
     901              :          can be either above or below this stack slot depending on which
     902              :          way the frame grows.  We include the extra space if and only if it
     903              :          is above this slot.  */
     904        83868 :       if (FRAME_GROWS_DOWNWARD)
     905        83868 :         p->size = frame_offset_old - frame_offset;
     906              :       else
     907              :         p->size = size;
     908              : 
     909              :       /* Now define the fields used by combine_temp_slots.  */
     910        83868 :       if (FRAME_GROWS_DOWNWARD)
     911              :         {
     912        83868 :           p->base_offset = frame_offset;
     913        83868 :           p->full_size = frame_offset_old - frame_offset;
     914              :         }
     915              :       else
     916              :         {
     917              :           p->base_offset = frame_offset_old;
     918              :           p->full_size = frame_offset - frame_offset_old;
     919              :         }
     920              : 
     921        83868 :       selected = p;
     922              :     }
     923              : 
     924       147462 :   p = selected;
     925       147462 :   p->in_use = true;
     926       147462 :   p->type = type;
     927       147462 :   p->level = temp_slot_level;
     928       147462 :   n_temp_slots_in_use++;
     929              : 
     930       147462 :   pp = temp_slots_at_level (p->level);
     931       147462 :   insert_slot_to_list (p, pp);
     932       147462 :   insert_temp_slot_address (XEXP (p->slot, 0), p);
     933              : 
     934              :   /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
     935       147462 :   slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
     936       147462 :   vec_safe_push (stack_slot_list, slot);
     937              : 
     938              :   /* If we know the alias set for the memory that will be used, use
     939              :      it.  If there's no TYPE, then we don't know anything about the
     940              :      alias set for the memory.  */
     941       147462 :   set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
     942       147462 :   set_mem_align (slot, align);
     943              : 
     944              :   /* If a type is specified, set the relevant flags.  */
     945       147462 :   if (type != 0)
     946        97825 :     MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
     947       147462 :   MEM_NOTRAP_P (slot) = 1;
     948              : 
     949       147462 :   return slot;
     950              : }
     951              : 
     952              : /* Allocate a temporary stack slot and record it for possible later
     953              :    reuse.  First two arguments are same as in preceding function.  */
     954              : 
     955              : rtx
     956        49637 : assign_stack_temp (machine_mode mode, poly_int64 size)
     957              : {
     958        49637 :   return assign_stack_temp_for_type (mode, size, NULL_TREE);
     959              : }
     960              : 
     961              : /* Assign a temporary.
     962              :    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
     963              :    and so that should be used in error messages.  In either case, we
     964              :    allocate of the given type.
     965              :    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
     966              :    it is 0 if a register is OK.
     967              :    DONT_PROMOTE is 1 if we should not promote values in register
     968              :    to wider modes.  */
     969              : 
     970              : rtx
     971       997668 : assign_temp (tree type_or_decl, int memory_required,
     972              :              int dont_promote ATTRIBUTE_UNUSED)
     973              : {
     974       997668 :   tree type, decl;
     975       997668 :   machine_mode mode;
     976              : #ifdef PROMOTE_MODE
     977       997668 :   int unsignedp;
     978              : #endif
     979              : 
     980       997668 :   if (DECL_P (type_or_decl))
     981            0 :     decl = type_or_decl, type = TREE_TYPE (decl);
     982              :   else
     983              :     decl = NULL, type = type_or_decl;
     984              : 
     985       997668 :   mode = TYPE_MODE (type);
     986              : #ifdef PROMOTE_MODE
     987       997668 :   unsignedp = TYPE_UNSIGNED (type);
     988              : #endif
     989              : 
     990              :   /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     991              :      end.  See also create_tmp_var for the gimplification-time check.  */
     992       997668 :   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
     993              : 
     994       997668 :   if (mode == BLKmode || memory_required)
     995              :     {
     996        92244 :       poly_int64 size;
     997        92244 :       rtx tmp;
     998              : 
     999              :       /* Unfortunately, we don't yet know how to allocate variable-sized
    1000              :          temporaries.  However, sometimes we can find a fixed upper limit on
    1001              :          the size, so try that instead.  */
    1002        92244 :       if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
    1003            0 :         size = max_int_size_in_bytes (type);
    1004              : 
    1005              :       /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
    1006              :          problems with allocating the stack space.  */
    1007        92244 :       if (known_eq (size, 0))
    1008            0 :         size = 1;
    1009              : 
    1010              :       /* The size of the temporary may be too large to fit into an integer.  */
    1011              :       /* ??? Not sure this should happen except for user silliness, so limit
    1012              :          this to things that aren't compiler-generated temporaries.  The
    1013              :          rest of the time we'll die in assign_stack_temp_for_type.  */
    1014        92244 :       if (decl
    1015            0 :           && !known_size_p (size)
    1016        92244 :           && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
    1017              :         {
    1018            0 :           error ("size of variable %q+D is too large", decl);
    1019            0 :           size = 1;
    1020              :         }
    1021              : 
    1022        92244 :       tmp = assign_stack_temp_for_type (mode, size, type);
    1023        92244 :       return tmp;
    1024              :     }
    1025              : 
    1026              : #ifdef PROMOTE_MODE
    1027       905424 :   if (! dont_promote)
    1028            0 :     mode = promote_mode (type, mode, &unsignedp);
    1029              : #endif
    1030              : 
    1031       905424 :   return gen_reg_rtx (mode);
    1032              : }
    1033              : 
    1034              : /* Combine temporary stack slots which are adjacent on the stack.
    1035              : 
    1036              :    This allows for better use of already allocated stack space.  This is only
    1037              :    done for BLKmode slots because we can be sure that we won't have alignment
    1038              :    problems in this case.  */
    1039              : 
    1040              : static void
    1041       139497 : combine_temp_slots (void)
    1042              : {
    1043       139497 :   class temp_slot *p, *q, *next, *next_q;
    1044       139497 :   int num_slots;
    1045              : 
    1046              :   /* We can't combine slots, because the information about which slot
    1047              :      is in which alias set will be lost.  */
    1048       139497 :   if (flag_strict_aliasing)
    1049              :     return;
    1050              : 
    1051              :   /* If there are a lot of temp slots, don't do anything unless
    1052              :      high levels of optimization.  */
    1053        97206 :   if (! flag_expensive_optimizations)
    1054       428102 :     for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
    1055       418974 :       if (num_slots > 100 || (num_slots > 10 && optimize == 0))
    1056              :         return;
    1057              : 
    1058       271961 :   for (p = avail_temp_slots; p; p = next)
    1059              :     {
    1060       187283 :       int delete_p = 0;
    1061              : 
    1062       187283 :       next = p->next;
    1063              : 
    1064       187283 :       if (GET_MODE (p->slot) != BLKmode)
    1065       163652 :         continue;
    1066              : 
    1067        29631 :       for (q = p->next; q; q = next_q)
    1068              :         {
    1069         6010 :           int delete_q = 0;
    1070              : 
    1071         6010 :           next_q = q->next;
    1072              : 
    1073         6010 :           if (GET_MODE (q->slot) != BLKmode)
    1074         5250 :             continue;
    1075              : 
    1076          760 :           if (known_eq (p->base_offset + p->full_size, q->base_offset))
    1077              :             {
    1078              :               /* Q comes after P; combine Q into P.  */
    1079          687 :               p->size += q->size;
    1080        29631 :               p->full_size += q->full_size;
    1081              :               delete_q = 1;
    1082              :             }
    1083           73 :           else if (known_eq (q->base_offset + q->full_size, p->base_offset))
    1084              :             {
    1085              :               /* P comes after Q; combine P into Q.  */
    1086           10 :               q->size += p->size;
    1087           10 :               q->full_size += p->full_size;
    1088              :               delete_p = 1;
    1089              :               break;
    1090              :             }
    1091          687 :           if (delete_q)
    1092         1374 :             cut_slot_from_list (q, &avail_temp_slots);
    1093              :         }
    1094              : 
    1095              :       /* Either delete P or advance past it.  */
    1096        23631 :       if (delete_p)
    1097           20 :         cut_slot_from_list (p, &avail_temp_slots);
    1098              :     }
    1099              : }
    1100              : 
    1101              : /* Indicate that NEW_RTX is an alternate way of referring to the temp
    1102              :    slot that previously was known by OLD_RTX.  */
    1103              : 
    1104              : void
    1105     16379974 : update_temp_slot_address (rtx old_rtx, rtx new_rtx)
    1106              : {
    1107     17217894 :   class temp_slot *p;
    1108              : 
    1109     17217894 :   if (rtx_equal_p (old_rtx, new_rtx))
    1110              :     return;
    1111              : 
    1112      4919970 :   p = find_temp_slot_from_address (old_rtx);
    1113              : 
    1114              :   /* If we didn't find one, see if both OLD_RTX is a PLUS.  If so, and
    1115              :      NEW_RTX is a register, see if one operand of the PLUS is a
    1116              :      temporary location.  If so, NEW_RTX points into it.  Otherwise,
    1117              :      if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
    1118              :      in common between them.  If so, try a recursive call on those
    1119              :      values.  */
    1120      4919970 :   if (p == 0)
    1121              :     {
    1122      4919933 :       if (GET_CODE (old_rtx) != PLUS)
    1123              :         return;
    1124              : 
    1125       922956 :       if (REG_P (new_rtx))
    1126              :         {
    1127       289514 :           update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
    1128       289514 :           update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
    1129       289514 :           return;
    1130              :         }
    1131       633442 :       else if (GET_CODE (new_rtx) != PLUS)
    1132              :         return;
    1133              : 
    1134       633442 :       if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
    1135       328770 :         update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
    1136       304672 :       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
    1137            0 :         update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
    1138       304672 :       else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
    1139        32342 :         update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
    1140       272330 :       else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
    1141       187294 :         update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
    1142              : 
    1143              :       return;
    1144              :     }
    1145              : 
    1146              :   /* Otherwise add an alias for the temp's address.  */
    1147           37 :   insert_temp_slot_address (new_rtx, p);
    1148              : }
    1149              : 
    1150              : /* If X could be a reference to a temporary slot, mark that slot as
    1151              :    belonging to the to one level higher than the current level.  If X
    1152              :    matched one of our slots, just mark that one.  Otherwise, we can't
    1153              :    easily predict which it is, so upgrade all of them.
    1154              : 
    1155              :    This is called when an ({...}) construct occurs and a statement
    1156              :    returns a value in memory.  */
    1157              : 
    1158              : void
    1159     24007160 : preserve_temp_slots (rtx x)
    1160              : {
    1161     24007160 :   class temp_slot *p = 0, *next;
    1162              : 
    1163     24007160 :   if (x == 0)
    1164              :     return;
    1165              : 
    1166              :   /* If X is a register that is being used as a pointer, see if we have
    1167              :      a temporary slot we know it points to.  */
    1168     10698825 :   if (REG_P (x) && REG_POINTER (x))
    1169      1756887 :     p = find_temp_slot_from_address (x);
    1170              : 
    1171              :   /* If X is not in memory or is at a constant address, it cannot be in
    1172              :      a temporary slot.  */
    1173     10698825 :   if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    1174              :     return;
    1175              : 
    1176              :   /* First see if we can find a match.  */
    1177       673460 :   if (p == 0)
    1178       673460 :     p = find_temp_slot_from_address (XEXP (x, 0));
    1179              : 
    1180       673460 :   if (p != 0)
    1181              :     {
    1182         1226 :       if (p->level == temp_slot_level)
    1183         1226 :         move_slot_to_level (p, temp_slot_level - 1);
    1184         1226 :       return;
    1185              :     }
    1186              : 
    1187              :   /* Otherwise, preserve all non-kept slots at this level.  */
    1188       672240 :   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    1189              :     {
    1190            6 :       next = p->next;
    1191            6 :       move_slot_to_level (p, temp_slot_level - 1);
    1192              :     }
    1193              : }
    1194              : 
    1195              : /* Free all temporaries used so far.  This is normally called at the
    1196              :    end of generating code for a statement.  */
    1197              : 
    1198              : void
    1199     61903619 : free_temp_slots (void)
    1200              : {
    1201     61903619 :   class temp_slot *p, *next;
    1202     61903619 :   bool some_available = false;
    1203              : 
    1204     62050537 :   for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    1205              :     {
    1206       146918 :       next = p->next;
    1207       146918 :       make_slot_available (p);
    1208       146918 :       some_available = true;
    1209              :     }
    1210              : 
    1211     61903619 :   if (some_available)
    1212              :     {
    1213       139497 :       remove_unused_temp_slot_addresses ();
    1214       139497 :       combine_temp_slots ();
    1215              :     }
    1216     61903619 : }
    1217              : 
    1218              : /* Push deeper into the nesting level for stack temporaries.  */
    1219              : 
    1220              : void
    1221     30310496 : push_temp_slots (void)
    1222              : {
    1223     30310496 :   temp_slot_level++;
    1224     30310496 : }
    1225              : 
    1226              : /* Pop a temporary nesting level.  All slots in use in the current level
    1227              :    are freed.  */
    1228              : 
    1229              : void
    1230     30310494 : pop_temp_slots (void)
    1231              : {
    1232     30310494 :   free_temp_slots ();
    1233     30310494 :   temp_slot_level--;
    1234     30310494 : }
    1235              : 
    1236              : /* Initialize temporary slots.  */
    1237              : 
    1238              : void
    1239      3184726 : init_temp_slots (void)
    1240              : {
    1241              :   /* We have not allocated any temporaries yet.  */
    1242      3184726 :   avail_temp_slots = 0;
    1243      3184726 :   vec_alloc (used_temp_slots, 0);
    1244      3184726 :   temp_slot_level = 0;
    1245      3184726 :   n_temp_slots_in_use = 0;
    1246              : 
    1247              :   /* Set up the table to map addresses to temp slots.  */
    1248      3184726 :   if (! temp_slot_address_table)
    1249       211466 :     temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
    1250              :   else
    1251      2973260 :     temp_slot_address_table->empty ();
    1252      3184726 : }
    1253              : 
    1254              : /* Functions and data structures to keep track of the values hard regs
    1255              :    had at the start of the function.  */
    1256              : 
    1257              : /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
    1258              :    and has_hard_reg_initial_val..  */
    1259              : struct GTY(()) initial_value_pair {
    1260              :   rtx hard_reg;
    1261              :   rtx pseudo;
    1262              : };
    1263              : /* ???  This could be a VEC but there is currently no way to define an
    1264              :    opaque VEC type.  This could be worked around by defining struct
    1265              :    initial_value_pair in function.h.  */
    1266              : struct GTY(()) initial_value_struct {
    1267              :   int num_entries;
    1268              :   int max_entries;
    1269              :   initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
    1270              : };
    1271              : 
    1272              : /* If a pseudo represents an initial hard reg (or expression), return
    1273              :    it, else return NULL_RTX.  */
    1274              : 
    1275              : rtx
    1276            0 : get_hard_reg_initial_reg (rtx reg)
    1277              : {
    1278            0 :   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
    1279            0 :   int i;
    1280              : 
    1281            0 :   if (ivs == 0)
    1282              :     return NULL_RTX;
    1283              : 
    1284            0 :   for (i = 0; i < ivs->num_entries; i++)
    1285            0 :     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
    1286            0 :       return ivs->entries[i].hard_reg;
    1287              : 
    1288              :   return NULL_RTX;
    1289              : }
    1290              : 
    1291              : /* Make sure that there's a pseudo register of mode MODE that stores the
    1292              :    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
    1293              : 
    1294              : rtx
    1295            0 : get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
    1296              : {
    1297            0 :   struct initial_value_struct *ivs;
    1298            0 :   rtx rv;
    1299              : 
    1300            0 :   rv = has_hard_reg_initial_val (mode, regno);
    1301            0 :   if (rv)
    1302              :     return rv;
    1303              : 
    1304            0 :   ivs = crtl->hard_reg_initial_vals;
    1305            0 :   if (ivs == 0)
    1306              :     {
    1307            0 :       ivs = ggc_alloc<initial_value_struct> ();
    1308            0 :       ivs->num_entries = 0;
    1309            0 :       ivs->max_entries = 5;
    1310            0 :       ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
    1311            0 :       crtl->hard_reg_initial_vals = ivs;
    1312              :     }
    1313              : 
    1314            0 :   if (ivs->num_entries >= ivs->max_entries)
    1315              :     {
    1316            0 :       ivs->max_entries += 5;
    1317            0 :       ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
    1318              :                                     ivs->max_entries);
    1319              :     }
    1320              : 
    1321            0 :   ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
    1322            0 :   ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
    1323              : 
    1324            0 :   return ivs->entries[ivs->num_entries++].pseudo;
    1325              : }
    1326              : 
    1327              : /* See if get_hard_reg_initial_val has been used to create a pseudo
    1328              :    for the initial value of hard register REGNO in mode MODE.  Return
    1329              :    the associated pseudo if so, otherwise return NULL.  */
    1330              : 
    1331              : rtx
    1332            0 : has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
    1333              : {
    1334            0 :   struct initial_value_struct *ivs;
    1335            0 :   int i;
    1336              : 
    1337            0 :   ivs = crtl->hard_reg_initial_vals;
    1338            0 :   if (ivs != 0)
    1339            0 :     for (i = 0; i < ivs->num_entries; i++)
    1340            0 :       if (GET_MODE (ivs->entries[i].hard_reg) == mode
    1341            0 :           && REGNO (ivs->entries[i].hard_reg) == regno)
    1342            0 :         return ivs->entries[i].pseudo;
    1343              : 
    1344              :   return NULL_RTX;
    1345              : }
    1346              : 
    1347              : void
    1348      1481725 : emit_initial_value_sets (void)
    1349              : {
    1350      1481725 :   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
    1351      1481725 :   int i;
    1352      1481725 :   rtx_insn *seq;
    1353              : 
    1354      1481725 :   if (ivs == 0)
    1355              :     return;
    1356              : 
    1357            0 :   start_sequence ();
    1358            0 :   for (i = 0; i < ivs->num_entries; i++)
    1359            0 :     emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
    1360            0 :   seq = end_sequence ();
    1361              : 
    1362            0 :   emit_insn_at_entry (seq);
    1363              : }
    1364              : 
    1365              : /* Return the hardreg-pseudoreg initial values pair entry I and
    1366              :    TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
    1367              : bool
    1368            0 : initial_value_entry (int i, rtx *hreg, rtx *preg)
    1369              : {
    1370            0 :   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
    1371            0 :   if (!ivs || i >= ivs->num_entries)
    1372              :     return false;
    1373              : 
    1374            0 :   *hreg = ivs->entries[i].hard_reg;
    1375            0 :   *preg = ivs->entries[i].pseudo;
    1376            0 :   return true;
    1377              : }
    1378              : 
    1379              : /* These routines are responsible for converting virtual register references
    1380              :    to the actual hard register references once RTL generation is complete.
    1381              : 
    1382              :    The following four variables are used for communication between the
    1383              :    routines.  They contain the offsets of the virtual registers from their
    1384              :    respective hard registers.  */
    1385              : 
    1386              : static poly_int64 in_arg_offset;
    1387              : static poly_int64 var_offset;
    1388              : static poly_int64 dynamic_offset;
    1389              : static poly_int64 out_arg_offset;
    1390              : static poly_int64 cfa_offset;
    1391              : 
    1392              : /* In most machines, the stack pointer register is equivalent to the bottom
    1393              :    of the stack.  */
    1394              : 
    1395              : #ifndef STACK_POINTER_OFFSET
    1396              : #define STACK_POINTER_OFFSET    0
    1397              : #endif
    1398              : 
    1399              : #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
    1400              : #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
    1401              : #endif
    1402              : 
    1403              : /* If not defined, pick an appropriate default for the offset of dynamically
    1404              :    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
    1405              :    INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
    1406              : 
    1407              : #ifndef STACK_DYNAMIC_OFFSET
    1408              : 
    1409              : /* The bottom of the stack points to the actual arguments.  If
    1410              :    REG_PARM_STACK_SPACE is defined, this includes the space for the register
    1411              :    parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
    1412              :    stack space for register parameters is not pushed by the caller, but
    1413              :    rather part of the fixed stack areas and hence not included in
    1414              :    `crtl->outgoing_args_size'.  Nevertheless, we must allow
    1415              :    for it when allocating stack dynamic objects.  */
    1416              : 
    1417              : #ifdef INCOMING_REG_PARM_STACK_SPACE
    1418              : #define STACK_DYNAMIC_OFFSET(FNDECL)    \
    1419              : ((ACCUMULATE_OUTGOING_ARGS                                                    \
    1420              :   ? (crtl->outgoing_args_size                                      \
    1421              :      + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
    1422              :                                                : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
    1423              :   : 0) + (STACK_POINTER_OFFSET))
    1424              : #else
    1425              : #define STACK_DYNAMIC_OFFSET(FNDECL)    \
    1426              :   ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
    1427              :  + (STACK_POINTER_OFFSET))
    1428              : #endif
    1429              : #endif
    1430              : 
    1431              : 
    1432              : /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
    1433              :    is a virtual register, return the equivalent hard register and set the
    1434              :    offset indirectly through the pointer.  Otherwise, return 0.  */
    1435              : 
    1436              : static rtx
    1437    365216749 : instantiate_new_reg (rtx x, poly_int64 *poffset)
    1438              : {
    1439    365216749 :   rtx new_rtx;
    1440    365216749 :   poly_int64 offset;
    1441              : 
    1442    365216749 :   if (x == virtual_incoming_args_rtx)
    1443              :     {
    1444      3747248 :       if (stack_realign_drap)
    1445              :         {
    1446              :           /* Replace virtual_incoming_args_rtx with internal arg
    1447              :              pointer if DRAP is used to realign stack.  */
    1448        16465 :           new_rtx = crtl->args.internal_arg_pointer;
    1449        16465 :           offset = 0;
    1450              :         }
    1451              :       else
    1452      3730783 :         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    1453              :     }
    1454    361469501 :   else if (x == virtual_stack_vars_rtx)
    1455     18204395 :     new_rtx = frame_pointer_rtx, offset = var_offset;
    1456    343265106 :   else if (x == virtual_stack_dynamic_rtx)
    1457        53765 :     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
    1458    343211341 :   else if (x == virtual_outgoing_args_rtx)
    1459      1565650 :     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
    1460    341645691 :   else if (x == virtual_cfa_rtx)
    1461              :     {
    1462              : #ifdef FRAME_POINTER_CFA_OFFSET
    1463              :       new_rtx = frame_pointer_rtx;
    1464              : #else
    1465         1802 :       new_rtx = arg_pointer_rtx;
    1466              : #endif
    1467         1802 :       offset = cfa_offset;
    1468              :     }
    1469    341643889 :   else if (x == virtual_preferred_stack_boundary_rtx)
    1470              :     {
    1471       113992 :       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
    1472       113992 :       offset = 0;
    1473              :     }
    1474              :   else
    1475              :     return NULL_RTX;
    1476              : 
    1477     23686852 :   *poffset = offset;
    1478     23686852 :   return new_rtx;
    1479              : }
    1480              : 
    1481              : /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
    1482              :    registers present inside of *LOC.  The expression is simplified,
    1483              :    as much as possible, but is not to be considered "valid" in any sense
    1484              :    implied by the target.  Return true if any change is made.  */
    1485              : 
    1486              : static bool
    1487    205760173 : instantiate_virtual_regs_in_rtx (rtx *loc)
    1488              : {
    1489    205760173 :   if (!*loc)
    1490              :     return false;
    1491     92756351 :   bool changed = false;
    1492     92756351 :   subrtx_ptr_iterator::array_type array;
    1493    346686448 :   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    1494              :     {
    1495    253930097 :       rtx *loc = *iter;
    1496    253930097 :       if (rtx x = *loc)
    1497              :         {
    1498    230338777 :           rtx new_rtx;
    1499    230338777 :           poly_int64 offset;
    1500    230338777 :           switch (GET_CODE (x))
    1501              :             {
    1502     35606893 :             case REG:
    1503     35606893 :               new_rtx = instantiate_new_reg (x, &offset);
    1504     35606893 :               if (new_rtx)
    1505              :                 {
    1506      1656469 :                   *loc = plus_constant (GET_MODE (x), new_rtx, offset);
    1507      1656469 :                   changed = true;
    1508              :                 }
    1509     35606893 :               iter.skip_subrtxes ();
    1510     35606893 :               break;
    1511              : 
    1512     27862897 :             case PLUS:
    1513     27862897 :               new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
    1514     27862897 :               if (new_rtx)
    1515              :                 {
    1516     18946150 :                   XEXP (x, 0) = new_rtx;
    1517     18946150 :                   *loc = plus_constant (GET_MODE (x), x, offset, true);
    1518     18946150 :                   changed = true;
    1519     18946150 :                   iter.skip_subrtxes ();
    1520     18946150 :                   break;
    1521              :                 }
    1522              : 
    1523              :               /* FIXME -- from old code */
    1524              :               /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
    1525              :                  we can commute the PLUS and SUBREG because pointers into the
    1526              :                  frame are well-behaved.  */
    1527              :               break;
    1528              : 
    1529              :             default:
    1530              :               break;
    1531              :             }
    1532              :         }
    1533              :     }
    1534     92756351 :   return changed;
    1535     92756351 : }
    1536              : 
    1537              : /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
    1538              :    matches the predicate for insn CODE operand OPERAND.  */
    1539              : 
    1540              : static bool
    1541     29792968 : safe_insn_predicate (int code, int operand, rtx x)
    1542              : {
    1543     29792968 :   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
    1544              : }
    1545              : 
    1546              : /* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
    1547              :    registers present inside of insn.  The result will be a valid insn.  */
    1548              : 
    1549              : static void
    1550     93236213 : instantiate_virtual_regs_in_insn (rtx_insn *insn)
    1551              : {
    1552     93236213 :   poly_int64 offset;
    1553     93236213 :   int insn_code, i;
    1554     93236213 :   bool any_change = false;
    1555     93236213 :   rtx set, new_rtx, x;
    1556     93236213 :   rtx_insn *seq;
    1557              : 
    1558              :   /* There are some special cases to be handled first.  */
    1559     93236213 :   set = single_set (insn);
    1560     93236213 :   if (set)
    1561              :     {
    1562              :       /* We're allowed to assign to a virtual register.  This is interpreted
    1563              :          to mean that the underlying register gets assigned the inverse
    1564              :          transformation.  This is used, for example, in the handling of
    1565              :          non-local gotos.  */
    1566     89039404 :       new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
    1567     89039404 :       if (new_rtx)
    1568              :         {
    1569            0 :           start_sequence ();
    1570              : 
    1571            0 :           instantiate_virtual_regs_in_rtx (&SET_SRC (set));
    1572            0 :           x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
    1573            0 :                                    gen_int_mode (-offset, GET_MODE (new_rtx)));
    1574            0 :           x = force_operand (x, new_rtx);
    1575            0 :           if (x != new_rtx)
    1576            0 :             emit_move_insn (new_rtx, x);
    1577              : 
    1578            0 :           seq = end_sequence ();
    1579              : 
    1580            0 :           emit_insn_before (seq, insn);
    1581            0 :           delete_insn (insn);
    1582        18702 :           return;
    1583              :         }
    1584              : 
    1585              :       /* Handle a straight copy from a virtual register by generating a
    1586              :          new add insn.  The difference between this and falling through
    1587              :          to the generic case is avoiding a new pseudo and eliminating a
    1588              :          move insn in the initial rtl stream.  */
    1589     89039404 :       new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
    1590     89039404 :       if (new_rtx
    1591       322036 :           && maybe_ne (offset, 0)
    1592         3841 :           && REG_P (SET_DEST (set))
    1593     89043245 :           && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
    1594              :         {
    1595         3841 :           start_sequence ();
    1596              : 
    1597         3841 :           x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
    1598              :                                    gen_int_mode (offset,
    1599         3841 :                                                  GET_MODE (SET_DEST (set))),
    1600              :                                    SET_DEST (set), 1, OPTAB_LIB_WIDEN);
    1601         3841 :           if (x != SET_DEST (set))
    1602            0 :             emit_move_insn (SET_DEST (set), x);
    1603              : 
    1604         3841 :           seq = end_sequence ();
    1605              : 
    1606         3841 :           emit_insn_before (seq, insn);
    1607         3841 :           delete_insn (insn);
    1608         3841 :           return;
    1609              :         }
    1610              : 
    1611     89035563 :       extract_insn (insn);
    1612     89035563 :       insn_code = INSN_CODE (insn);
    1613              : 
    1614              :       /* Handle a plus involving a virtual register by determining if the
    1615              :          operands remain valid if they're modified in place.  */
    1616     89035563 :       poly_int64 delta;
    1617     89035563 :       if (GET_CODE (SET_SRC (set)) == PLUS
    1618     10099151 :           && recog_data.n_operands >= 3
    1619     10034302 :           && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
    1620     10033747 :           && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
    1621     10033747 :           && poly_int_rtx_p (recog_data.operand[2], &delta)
    1622     96944944 :           && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
    1623              :         {
    1624      2345865 :           offset += delta;
    1625              : 
    1626              :           /* If the sum is zero, then replace with a plain move.  */
    1627      2345865 :           if (known_eq (offset, 0)
    1628        14861 :               && REG_P (SET_DEST (set))
    1629      2360726 :               && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
    1630              :             {
    1631        14861 :               start_sequence ();
    1632        14861 :               emit_move_insn (SET_DEST (set), new_rtx);
    1633        14861 :               seq = end_sequence ();
    1634              : 
    1635        14861 :               emit_insn_before (seq, insn);
    1636        14861 :               delete_insn (insn);
    1637        14861 :               return;
    1638              :             }
    1639              : 
    1640      2331004 :           x = gen_int_mode (offset, recog_data.operand_mode[2]);
    1641              : 
    1642              :           /* Using validate_change and apply_change_group here leaves
    1643              :              recog_data in an invalid state.  Since we know exactly what
    1644              :              we want to check, do those two by hand.  */
    1645      2331004 :           if (safe_insn_predicate (insn_code, 1, new_rtx)
    1646      2331004 :               && safe_insn_predicate (insn_code, 2, x))
    1647              :             {
    1648      2302506 :               *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
    1649      2302506 :               *recog_data.operand_loc[2] = recog_data.operand[2] = x;
    1650      2302506 :               any_change = true;
    1651              : 
    1652              :               /* Fall through into the regular operand fixup loop in
    1653              :                  order to take care of operands other than 1 and 2.  */
    1654              :             }
    1655              :         }
    1656              :     }
    1657              :   else
    1658              :     {
    1659      4196809 :       extract_insn (insn);
    1660      4196809 :       insn_code = INSN_CODE (insn);
    1661              :     }
    1662              : 
    1663              :   /* In the general case, we expect virtual registers to appear only in
    1664              :      operands, and then only as either bare registers or inside memories.  */
    1665    295281605 :   for (i = 0; i < recog_data.n_operands; ++i)
    1666              :     {
    1667    202064094 :       x = recog_data.operand[i];
    1668    202064094 :       switch (GET_CODE (x))
    1669              :         {
    1670     29098739 :         case MEM:
    1671     29098739 :           {
    1672     29098739 :             rtx addr = XEXP (x, 0);
    1673              : 
    1674     29098739 :             if (!instantiate_virtual_regs_in_rtx (&addr))
    1675     16727176 :               continue;
    1676              : 
    1677     12371563 :             start_sequence ();
    1678     12371563 :             x = replace_equiv_address (x, addr, true);
    1679              :             /* It may happen that the address with the virtual reg
    1680              :                was valid (e.g. based on the virtual stack reg, which might
    1681              :                be acceptable to the predicates with all offsets), whereas
    1682              :                the address now isn't anymore, for instance when the address
    1683              :                is still offsetted, but the base reg isn't virtual-stack-reg
    1684              :                anymore.  Below we would do a force_reg on the whole operand,
    1685              :                but this insn might actually only accept memory.  Hence,
    1686              :                before doing that last resort, try to reload the address into
    1687              :                a register, so this operand stays a MEM.  */
    1688     12371563 :             if (!safe_insn_predicate (insn_code, i, x))
    1689              :               {
    1690            0 :                 addr = force_reg (GET_MODE (addr), addr);
    1691            0 :                 x = replace_equiv_address (x, addr, true);
    1692              :               }
    1693     12371563 :             seq = end_sequence ();
    1694     12371563 :             if (seq)
    1695            0 :               emit_insn_before (seq, insn);
    1696              :           }
    1697     12371563 :           break;
    1698              : 
    1699    113002689 :         case REG:
    1700    113002689 :           new_rtx = instantiate_new_reg (x, &offset);
    1701    113002689 :           if (new_rtx == NULL)
    1702    112586363 :             continue;
    1703       416326 :           if (known_eq (offset, 0))
    1704              :             x = new_rtx;
    1705              :           else
    1706              :             {
    1707            0 :               start_sequence ();
    1708              : 
    1709              :               /* Careful, special mode predicates may have stuff in
    1710              :                  insn_data[insn_code].operand[i].mode that isn't useful
    1711              :                  to us for computing a new value.  */
    1712              :               /* ??? Recognize address_operand and/or "p" constraints
    1713              :                  to see if (plus new offset) is a valid before we put
    1714              :                  this through expand_simple_binop.  */
    1715            0 :               x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
    1716            0 :                                        gen_int_mode (offset, GET_MODE (x)),
    1717              :                                        NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1718            0 :               seq = end_sequence ();
    1719            0 :               emit_insn_before (seq, insn);
    1720              :             }
    1721              :           break;
    1722              : 
    1723      2756081 :         case SUBREG:
    1724      2756081 :           new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
    1725      2756081 :           if (new_rtx == NULL)
    1726      2756075 :             continue;
    1727            6 :           start_sequence ();
    1728            6 :           if (maybe_ne (offset, 0))
    1729            0 :             new_rtx = expand_simple_binop
    1730            0 :               (GET_MODE (new_rtx), PLUS, new_rtx,
    1731            0 :                gen_int_mode (offset, GET_MODE (new_rtx)),
    1732              :                NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1733           12 :           x = force_subreg (recog_data.operand_mode[i], new_rtx,
    1734            6 :                             GET_MODE (new_rtx), SUBREG_BYTE (x));
    1735            6 :           gcc_assert (x);
    1736            6 :           seq = end_sequence ();
    1737            6 :           emit_insn_before (seq, insn);
    1738            6 :           break;
    1739              : 
    1740     57206585 :         default:
    1741     57206585 :           continue;
    1742     57206585 :         }
    1743              : 
    1744              :       /* At this point, X contains the new value for the operand.
    1745              :          Validate the new value vs the insn predicate.  Note that
    1746              :          asm insns will have insn_code -1 here.  */
    1747     12787895 :       if (!safe_insn_predicate (insn_code, i, x))
    1748              :         {
    1749        56996 :           start_sequence ();
    1750        56996 :           if (REG_P (x))
    1751              :             {
    1752            0 :               gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
    1753            0 :               x = copy_to_reg (x);
    1754              :             }
    1755              :           else
    1756        56996 :             x = force_reg (insn_data[insn_code].operand[i].mode, x);
    1757        56996 :           seq = end_sequence ();
    1758        56996 :           if (seq)
    1759        56996 :             emit_insn_before (seq, insn);
    1760              :         }
    1761              : 
    1762     12787895 :       *recog_data.operand_loc[i] = recog_data.operand[i] = x;
    1763     12787895 :       any_change = true;
    1764              :     }
    1765              : 
    1766     93217511 :   if (any_change)
    1767              :     {
    1768              :       /* Propagate operand changes into the duplicates.  */
    1769     15003829 :       for (i = 0; i < recog_data.n_dups; ++i)
    1770        87718 :         *recog_data.dup_loc[i]
    1771        87718 :           = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
    1772              : 
    1773              :       /* Force re-recognition of the instruction for validation.  */
    1774     14916111 :       INSN_CODE (insn) = -1;
    1775              :     }
    1776              : 
    1777     93217511 :   if (asm_noperands (PATTERN (insn)) >= 0)
    1778              :     {
    1779       107991 :       if (!check_asm_operands (PATTERN (insn)))
    1780              :         {
    1781           23 :           error_for_asm (insn, "impossible constraint in %<asm%>");
    1782              :           /* For asm goto, instead of fixing up all the edges
    1783              :              just clear the template and clear input and output operands
    1784              :              and strip away clobbers.  */
    1785           23 :           if (JUMP_P (insn))
    1786              :             {
    1787           14 :               rtx asm_op = extract_asm_operands (PATTERN (insn));
    1788           14 :               PATTERN (insn) = asm_op;
    1789           14 :               PUT_MODE (asm_op, VOIDmode);
    1790           14 :               ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
    1791           14 :               ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
    1792           14 :               ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
    1793           14 :               ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
    1794           14 :               ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
    1795              :             }
    1796              :           else
    1797            9 :             delete_insn (insn);
    1798              :         }
    1799              :     }
    1800              :   else
    1801              :     {
    1802     93109520 :       if (recog_memoized (insn) < 0)
    1803            0 :         fatal_insn_not_found (insn);
    1804              :     }
    1805              : }
    1806              : 
    1807              : /* Subroutine of instantiate_decls.  Given RTL representing a decl,
    1808              :    do any instantiation required.  */
    1809              : 
    1810              : void
    1811      9631372 : instantiate_decl_rtl (rtx x)
    1812              : {
    1813      9637289 :   rtx addr;
    1814              : 
    1815      9637289 :   if (x == 0)
    1816              :     return;
    1817              : 
    1818              :   /* If this is a CONCAT, recurse for the pieces.  */
    1819      9637289 :   if (GET_CODE (x) == CONCAT)
    1820              :     {
    1821         5917 :       instantiate_decl_rtl (XEXP (x, 0));
    1822         5917 :       instantiate_decl_rtl (XEXP (x, 1));
    1823         5917 :       return;
    1824              :     }
    1825              : 
    1826              :   /* If this is not a MEM, no need to do anything.  Similarly if the
    1827              :      address is a constant or a register that is not a virtual register.  */
    1828      9631372 :   if (!MEM_P (x))
    1829              :     return;
    1830              : 
    1831      3231949 :   addr = XEXP (x, 0);
    1832      3231949 :   if (CONSTANT_P (addr)
    1833      3231949 :       || (REG_P (addr)
    1834       302640 :           && !VIRTUAL_REGISTER_P (addr)))
    1835              :     return;
    1836              : 
    1837      3011174 :   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
    1838              : }
    1839              : 
    1840              : /* Helper for instantiate_decls called via walk_tree: Process all decls
    1841              :    in the given DECL_VALUE_EXPR.  */
    1842              : 
    1843              : static tree
    1844      1277964 : instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
    1845              : {
    1846      1277964 :   tree t = *tp;
    1847      1277964 :   if (! EXPR_P (t))
    1848              :     {
    1849       684734 :       *walk_subtrees = 0;
    1850       684734 :       if (DECL_P (t))
    1851              :         {
    1852       578011 :           if (DECL_RTL_SET_P (t))
    1853       169465 :             instantiate_decl_rtl (DECL_RTL (t));
    1854       134538 :           if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
    1855       688979 :               && DECL_INCOMING_RTL (t))
    1856       110968 :             instantiate_decl_rtl (DECL_INCOMING_RTL (t));
    1857       353554 :           if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
    1858       585374 :               && DECL_HAS_VALUE_EXPR_P (t))
    1859              :             {
    1860         7625 :               tree v = DECL_VALUE_EXPR (t);
    1861         7625 :               walk_tree (&v, instantiate_expr, NULL, NULL);
    1862              :             }
    1863              :         }
    1864              :     }
    1865      1277964 :   return NULL;
    1866              : }
    1867              : 
    1868              : /* Subroutine of instantiate_decls: Process all decls in the given
    1869              :    BLOCK node and all its subblocks.  */
    1870              : 
    1871              : static void
    1872     16067816 : instantiate_decls_1 (tree let)
    1873              : {
    1874     16067816 :   tree t;
    1875              : 
    1876     34688064 :   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    1877              :     {
    1878     18620248 :       if (DECL_RTL_SET_P (t))
    1879      2298570 :         instantiate_decl_rtl (DECL_RTL (t));
    1880     18620248 :       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
    1881              :         {
    1882       300433 :           tree v = DECL_VALUE_EXPR (t);
    1883       300433 :           walk_tree (&v, instantiate_expr, NULL, NULL);
    1884              :         }
    1885              :     }
    1886              : 
    1887              :   /* Process all subblocks.  */
    1888     30654690 :   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    1889     14586874 :     instantiate_decls_1 (t);
    1890     16067816 : }
    1891              : 
    1892              : /* Scan all decls in FNDECL (both variables and parameters) and instantiate
    1893              :    all virtual registers in their DECL_RTL's.  */
    1894              : 
    1895              : static void
    1896      1480942 : instantiate_decls (tree fndecl)
    1897              : {
    1898      1480942 :   tree decl;
    1899      1480942 :   unsigned ix;
    1900              : 
    1901              :   /* Process all parameters of the function.  */
    1902      4597265 :   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    1903              :     {
    1904      3116323 :       instantiate_decl_rtl (DECL_RTL (decl));
    1905      3116323 :       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
    1906      3116323 :       if (DECL_HAS_VALUE_EXPR_P (decl))
    1907              :         {
    1908          128 :           tree v = DECL_VALUE_EXPR (decl);
    1909          128 :           walk_tree (&v, instantiate_expr, NULL, NULL);
    1910              :         }
    1911              :     }
    1912              : 
    1913      1480942 :   if ((decl = DECL_RESULT (fndecl))
    1914      1480942 :       && TREE_CODE (decl) == RESULT_DECL)
    1915              :     {
    1916      1480942 :       if (DECL_RTL_SET_P (decl))
    1917       785016 :         instantiate_decl_rtl (DECL_RTL (decl));
    1918      1480942 :       if (DECL_HAS_VALUE_EXPR_P (decl))
    1919              :         {
    1920        69180 :           tree v = DECL_VALUE_EXPR (decl);
    1921        69180 :           walk_tree (&v, instantiate_expr, NULL, NULL);
    1922              :         }
    1923              :     }
    1924              : 
    1925              :   /* Process the saved static chain if it exists.  */
    1926      1480942 :   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
    1927      1480942 :   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    1928         3912 :     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
    1929              : 
    1930              :   /* Now process all variables defined in the function or its subblocks.  */
    1931      1480942 :   if (DECL_INITIAL (fndecl))
    1932      1480942 :     instantiate_decls_1 (DECL_INITIAL (fndecl));
    1933              : 
    1934      2785638 :   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    1935        65360 :     if (DECL_RTL_SET_P (decl))
    1936        24878 :       instantiate_decl_rtl (DECL_RTL (decl));
    1937      1480942 :   vec_free (cfun->local_decls);
    1938      1480942 : }
    1939              : 
    1940              : /* Return the value of STACK_DYNAMIC_OFFSET for the current function.
    1941              :    This is done through a function wrapper so that the macro sees a
    1942              :    predictable set of included files.  */
    1943              : 
    1944              : poly_int64
    1945      1480942 : get_stack_dynamic_offset ()
    1946              : {
    1947      1480942 :   return STACK_DYNAMIC_OFFSET (current_function_decl);
    1948              : }
    1949              : 
    1950              : /* Pass through the INSNS of function FNDECL and convert virtual register
    1951              :    references to hard register references.  */
    1952              : 
    1953              : static void
    1954      1480942 : instantiate_virtual_regs (void)
    1955              : {
    1956      1480942 :   rtx_insn *insn;
    1957              : 
    1958              :   /* Compute the offsets to use for this function.  */
    1959      1480942 :   in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
    1960      1480942 :   var_offset = targetm.starting_frame_offset ();
    1961      1480942 :   dynamic_offset = get_stack_dynamic_offset ();
    1962      1480942 :   out_arg_offset = STACK_POINTER_OFFSET;
    1963              : #ifdef FRAME_POINTER_CFA_OFFSET
    1964              :   cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
    1965              : #else
    1966      1480942 :   cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
    1967              : #endif
    1968              : 
    1969              :   /* Initialize recognition, indicating that volatile is OK.  */
    1970      1480942 :   init_recog ();
    1971              : 
    1972              :   /* Scan through all the insns, instantiating every virtual register still
    1973              :      present.  */
    1974    172963581 :   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    1975    171482639 :     if (INSN_P (insn))
    1976              :       {
    1977              :         /* These patterns in the instruction stream can never be recognized.
    1978              :            Fortunately, they shouldn't contain virtual registers either.  */
    1979    154231619 :         if (GET_CODE (PATTERN (insn)) == USE
    1980    141595035 :             || GET_CODE (PATTERN (insn)) == CLOBBER
    1981    141423050 :             || GET_CODE (PATTERN (insn)) == ASM_INPUT
    1982    283775715 :             || DEBUG_MARKER_INSN_P (insn))
    1983     11877070 :           continue;
    1984    130477479 :         else if (DEBUG_BIND_INSN_P (insn))
    1985     37241266 :           instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
    1986              :         else
    1987     93236213 :           instantiate_virtual_regs_in_insn (insn);
    1988              : 
    1989    130477479 :         if (insn->deleted ())
    1990        18711 :           continue;
    1991              : 
    1992    130458768 :         instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
    1993              : 
    1994              :         /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
    1995    130458768 :         if (CALL_P (insn))
    1996      5950226 :           instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
    1997              :       }
    1998              : 
    1999              :   /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
    2000      1480942 :   instantiate_decls (current_function_decl);
    2001              : 
    2002      1480942 :   targetm.instantiate_decls ();
    2003              : 
    2004              :   /* Indicate that, from now on, assign_stack_local should use
    2005              :      frame_pointer_rtx.  */
    2006      1480942 :   virtuals_instantiated = 1;
    2007      1480942 : }
    2008              : 
    2009              : namespace {
    2010              : 
    2011              : const pass_data pass_data_instantiate_virtual_regs =
    2012              : {
    2013              :   RTL_PASS, /* type */
    2014              :   "vregs", /* name */
    2015              :   OPTGROUP_NONE, /* optinfo_flags */
    2016              :   TV_NONE, /* tv_id */
    2017              :   0, /* properties_required */
    2018              :   0, /* properties_provided */
    2019              :   0, /* properties_destroyed */
    2020              :   0, /* todo_flags_start */
    2021              :   0, /* todo_flags_finish */
    2022              : };
    2023              : 
    2024              : class pass_instantiate_virtual_regs : public rtl_opt_pass
    2025              : {
    2026              : public:
    2027       287872 :   pass_instantiate_virtual_regs (gcc::context *ctxt)
    2028       575744 :     : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
    2029              :   {}
    2030              : 
    2031              :   /* opt_pass methods: */
    2032      1480942 :   unsigned int execute (function *) final override
    2033              :     {
    2034      1480942 :       instantiate_virtual_regs ();
    2035      1480942 :       return 0;
    2036              :     }
    2037              : 
    2038              : }; // class pass_instantiate_virtual_regs
    2039              : 
    2040              : } // anon namespace
    2041              : 
    2042              : rtl_opt_pass *
    2043       287872 : make_pass_instantiate_virtual_regs (gcc::context *ctxt)
    2044              : {
    2045       287872 :   return new pass_instantiate_virtual_regs (ctxt);
    2046              : }
    2047              : 
    2048              : 
    2049              : /* Return true if EXP is an aggregate type (or a value with aggregate type).
    2050              :    This means a type for which function calls must pass an address to the
    2051              :    function or get an address back from the function.
    2052              :    EXP may be a type node or an expression (whose type is tested).  */
    2053              : 
    2054              : bool
    2055    156761256 : aggregate_value_p (const_tree exp, const_tree fntype)
    2056              : {
    2057    156774501 :   const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
    2058    156774501 :   int i, regno, nregs;
    2059    156774501 :   rtx reg;
    2060              : 
    2061    156774501 :   if (fntype)
    2062    156456699 :     switch (TREE_CODE (fntype))
    2063              :       {
    2064     11740180 :       case CALL_EXPR:
    2065     11740180 :         {
    2066     11740180 :           tree fndecl = get_callee_fndecl (fntype);
    2067     11740180 :           if (fndecl)
    2068     10824948 :             fntype = TREE_TYPE (fndecl);
    2069       915232 :           else if (CALL_EXPR_FN (fntype))
    2070       322431 :             fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
    2071              :           else
    2072              :             /* For internal functions, assume nothing needs to be
    2073              :                returned in memory.  */
    2074              :             return false;
    2075              :         }
    2076              :         break;
    2077    134884916 :       case FUNCTION_DECL:
    2078    134884916 :         fntype = TREE_TYPE (fntype);
    2079    134884916 :         break;
    2080              :       case FUNCTION_TYPE:
    2081              :       case METHOD_TYPE:
    2082              :         break;
    2083              :       case IDENTIFIER_NODE:
    2084       330966 :         fntype = NULL_TREE;
    2085              :         break;
    2086            0 :       default:
    2087              :         /* We don't expect other tree types here.  */
    2088            0 :         gcc_unreachable ();
    2089              :       }
    2090              : 
    2091    156181700 :   if (VOID_TYPE_P (type))
    2092              :     return false;
    2093              : 
    2094    113912200 :   if (error_operand_p (fntype))
    2095              :     return false;
    2096              : 
    2097              :   /* If a record should be passed the same as its first (and only) member
    2098              :      don't pass it as an aggregate.  */
    2099    113912199 :   if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    2100        13245 :     return aggregate_value_p (first_field (type), fntype);
    2101              : 
    2102              :   /* If the front end has decided that this needs to be passed by
    2103              :      reference, do so.  */
    2104    113898865 :   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
    2105    208152976 :       && DECL_BY_REFERENCE (exp))
    2106              :     return true;
    2107              : 
    2108              :   /* Function types that are TREE_ADDRESSABLE force return in memory.  */
    2109    113750695 :   if (fntype && TREE_ADDRESSABLE (fntype))
    2110              :     return true;
    2111              : 
    2112              :   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
    2113              :      and thus can't be returned in registers.  */
    2114    113750695 :   if (TREE_ADDRESSABLE (type))
    2115              :     return true;
    2116              : 
    2117    112280003 :   if (TYPE_EMPTY_P (type))
    2118              :     return false;
    2119              : 
    2120    111406144 :   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    2121              :     return true;
    2122              : 
    2123    110659994 :   if (targetm.calls.return_in_memory (type, fntype))
    2124              :     return true;
    2125              : 
    2126              :   /* Make sure we have suitable call-clobbered regs to return
    2127              :      the value in; if not, we must return it in memory.  */
    2128    103482931 :   reg = hard_function_value (type, 0, fntype, 0);
    2129              : 
    2130              :   /* If we have something other than a REG (e.g. a PARALLEL), then assume
    2131              :      it is OK.  */
    2132    103482931 :   if (!REG_P (reg))
    2133              :     return false;
    2134              : 
    2135              :   /* Use the default ABI if the type of the function isn't known.
    2136              :      The scheme for handling interoperability between different ABIs
    2137              :      requires us to be able to tell when we're calling a function with
    2138              :      a nondefault ABI.  */
    2139    102638206 :   const predefined_function_abi &abi = (fntype
    2140    102638206 :                                         ? fntype_abi (fntype)
    2141       285084 :                                         : default_function_abi);
    2142    102638206 :   regno = REGNO (reg);
    2143    102638206 :   nregs = hard_regno_nregs (regno, TYPE_MODE (type));
    2144    208166538 :   for (i = 0; i < nregs; i++)
    2145    105528332 :     if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
    2146              :       return true;
    2147              : 
    2148              :   return false;
    2149              : }
    2150              : 
    2151              : /* Return true if we should assign DECL a pseudo register; false if it
    2152              :    should live on the local stack.  */
    2153              : 
    2154              : bool
    2155    163352067 : use_register_for_decl (const_tree decl)
    2156              : {
    2157    163352067 :   if (TREE_CODE (decl) == SSA_NAME)
    2158              :     {
    2159              :       /* We often try to use the SSA_NAME, instead of its underlying
    2160              :          decl, to get type information and guide decisions, to avoid
    2161              :          differences of behavior between anonymous and named
    2162              :          variables, but in this one case we have to go for the actual
    2163              :          variable if there is one.  The main reason is that, at least
    2164              :          at -O0, we want to place user variables on the stack, but we
    2165              :          don't mind using pseudos for anonymous or ignored temps.
    2166              :          Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
    2167              :          should go in pseudos, whereas their corresponding variables
    2168              :          might have to go on the stack.  So, disregarding the decl
    2169              :          here would negatively impact debug info at -O0, enable
    2170              :          coalescing between SSA_NAMEs that ought to get different
    2171              :          stack/pseudo assignments, and get the incoming argument
    2172              :          processing thoroughly confused by PARM_DECLs expected to live
    2173              :          in stack slots but assigned to pseudos.  */
    2174    145169123 :       if (!SSA_NAME_VAR (decl))
    2175     99949567 :         return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
    2176     99949567 :           && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
    2177              : 
    2178              :       decl = SSA_NAME_VAR (decl);
    2179              :     }
    2180              : 
    2181              :   /* Honor volatile.  */
    2182     63402500 :   if (TREE_SIDE_EFFECTS (decl))
    2183              :     return false;
    2184              : 
    2185              :   /* Honor addressability.  */
    2186     63284376 :   if (TREE_ADDRESSABLE (decl))
    2187              :     return false;
    2188              : 
    2189              :   /* RESULT_DECLs are a bit special in that they're assigned without
    2190              :      regard to use_register_for_decl, but we generally only store in
    2191              :      them.  If we coalesce their SSA NAMEs, we'd better return a
    2192              :      result that matches the assignment in expand_function_start.  */
    2193     59060368 :   if (TREE_CODE (decl) == RESULT_DECL)
    2194              :     {
    2195              :       /* If it's not an aggregate, we're going to use a REG or a
    2196              :          PARALLEL containing a REG.  */
    2197      3084946 :       if (!aggregate_value_p (decl, current_function_decl))
    2198              :         return true;
    2199              : 
    2200              :       /* If expand_function_start determines the return value, we'll
    2201              :          use MEM if it's not by reference.  */
    2202        30913 :       if (cfun->returns_pcc_struct
    2203        61826 :           || (targetm.calls.struct_value_rtx
    2204        30913 :               (TREE_TYPE (current_function_decl), 1)))
    2205            0 :         return DECL_BY_REFERENCE (decl);
    2206              : 
    2207              :       /* Otherwise, we're taking an extra all.function_result_decl
    2208              :          argument.  It's set up in assign_parms_augmented_arg_list,
    2209              :          under the (negated) conditions above, and then it's used to
    2210              :          set up the RESULT_DECL rtl in assign_params, after looping
    2211              :          over all parameters.  Now, if the RESULT_DECL is not by
    2212              :          reference, we'll use a MEM either way.  */
    2213        30913 :       if (!DECL_BY_REFERENCE (decl))
    2214              :         return false;
    2215              : 
    2216              :       /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
    2217              :          the function_result_decl's assignment.  Since it's a pointer,
    2218              :          we can short-circuit a number of the tests below, and we must
    2219              :          duplicate them because we don't have the function_result_decl
    2220              :          to test.  */
    2221        30913 :       if (!targetm.calls.allocate_stack_slots_for_args ())
    2222              :         return true;
    2223              :       /* We don't set DECL_IGNORED_P for the function_result_decl.  */
    2224        30913 :       if (optimize)
    2225              :         return true;
    2226              :       /* Needed for [[musttail]] which can operate even at -O0 */
    2227         3766 :       if (cfun->tail_call_marked)
    2228              :         return true;
    2229              :       /* We don't set DECL_REGISTER for the function_result_decl.  */
    2230              :       return false;
    2231              :     }
    2232              : 
    2233              :   /* Only register-like things go in registers.  */
    2234     55975422 :   if (DECL_MODE (decl) == BLKmode)
    2235              :     return false;
    2236              : 
    2237              :   /* If -ffloat-store specified, don't put explicit float variables
    2238              :      into registers.  */
    2239              :   /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
    2240              :      propagates values across these stores, and it probably shouldn't.  */
    2241     54206668 :   if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    2242              :     return false;
    2243              : 
    2244     54204868 :   if (!targetm.calls.allocate_stack_slots_for_args ())
    2245              :     return true;
    2246              : 
    2247              :   /* If we're not interested in tracking debugging information for
    2248              :      this decl, then we can certainly put it in a register.  */
    2249     54204552 :   if (DECL_IGNORED_P (decl))
    2250              :     return true;
    2251              : 
    2252     36788347 :   if (optimize)
    2253              :     return true;
    2254              : 
    2255              :   /* Thunks force a tail call even at -O0 so we need to avoid creating a
    2256              :      dangling reference in case the parameter is passed by reference.  */
    2257      7329891 :   if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
    2258              :     return true;
    2259              : 
    2260      7329296 :   if (!DECL_REGISTER (decl))
    2261              :     return false;
    2262              : 
    2263              :   /* When not optimizing, disregard register keyword for types that
    2264              :      could have methods, otherwise the methods won't be callable from
    2265              :      the debugger.  */
    2266        11290 :   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    2267              :     return false;
    2268              : 
    2269              :   return true;
    2270              : }
    2271              : 
    2272              : /* Structures to communicate between the subroutines of assign_parms.
    2273              :    The first holds data persistent across all parameters, the second
    2274              :    is cleared out for each parameter.  */
    2275              : 
    2276              : struct assign_parm_data_all
    2277              : {
    2278              :   /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
    2279              :      should become a job of the target or otherwise encapsulated.  */
    2280              :   CUMULATIVE_ARGS args_so_far_v;
    2281              :   cumulative_args_t args_so_far;
    2282              :   struct args_size stack_args_size;
    2283              :   tree function_result_decl;
    2284              :   tree orig_fnargs;
    2285              :   rtx_insn *first_conversion_insn;
    2286              :   rtx_insn *last_conversion_insn;
    2287              :   HOST_WIDE_INT pretend_args_size;
    2288              :   HOST_WIDE_INT extra_pretend_bytes;
    2289              :   int reg_parm_stack_space;
    2290              : };
    2291              : 
    2292     18271652 : struct assign_parm_data_one
    2293              : {
    2294              :   tree nominal_type;
    2295              :   function_arg_info arg;
    2296              :   rtx entry_parm;
    2297              :   rtx stack_parm;
    2298              :   machine_mode nominal_mode;
    2299              :   machine_mode passed_mode;
    2300              :   struct locate_and_pad_arg_data locate;
    2301              :   int partial;
    2302              : };
    2303              : 
    2304              : /* A subroutine of assign_parms.  Initialize ALL.  */
    2305              : 
    2306              : static void
    2307      4356077 : assign_parms_initialize_all (struct assign_parm_data_all *all)
    2308              : {
    2309      4356077 :   tree fntype ATTRIBUTE_UNUSED;
    2310              : 
    2311      4356077 :   memset (all, 0, sizeof (*all));
    2312              : 
    2313      4356077 :   fntype = TREE_TYPE (current_function_decl);
    2314              : 
    2315              : #ifdef INIT_CUMULATIVE_INCOMING_ARGS
    2316              :   INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
    2317              : #else
    2318      4356077 :   INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
    2319              :                         current_function_decl, -1);
    2320              : #endif
    2321      4356077 :   all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
    2322              : 
    2323              : #ifdef INCOMING_REG_PARM_STACK_SPACE
    2324      4356077 :   all->reg_parm_stack_space
    2325      4356077 :     = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
    2326              : #endif
    2327      4356077 : }
    2328              : 
    2329              : /* If ARGS contains entries with complex types, split the entry into two
    2330              :    entries of the component type.  Return a new list of substitutions are
    2331              :    needed, else the old list.  */
    2332              : 
    2333              : static void
    2334            0 : split_complex_args (vec<tree> *args)
    2335              : {
    2336            0 :   unsigned i;
    2337            0 :   tree p;
    2338              : 
    2339            0 :   FOR_EACH_VEC_ELT (*args, i, p)
    2340              :     {
    2341            0 :       tree type = TREE_TYPE (p);
    2342            0 :       if (TREE_CODE (type) == COMPLEX_TYPE
    2343            0 :           && targetm.calls.split_complex_arg (type))
    2344              :         {
    2345            0 :           tree decl;
    2346            0 :           tree subtype = TREE_TYPE (type);
    2347            0 :           bool addressable = TREE_ADDRESSABLE (p);
    2348              : 
    2349              :           /* Rewrite the PARM_DECL's type with its component.  */
    2350            0 :           p = copy_node (p);
    2351            0 :           TREE_TYPE (p) = subtype;
    2352            0 :           DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
    2353            0 :           SET_DECL_MODE (p, VOIDmode);
    2354            0 :           DECL_SIZE (p) = NULL;
    2355            0 :           DECL_SIZE_UNIT (p) = NULL;
    2356              :           /* If this arg must go in memory, put it in a pseudo here.
    2357              :              We can't allow it to go in memory as per normal parms,
    2358              :              because the usual place might not have the imag part
    2359              :              adjacent to the real part.  */
    2360            0 :           DECL_ARTIFICIAL (p) = addressable;
    2361            0 :           DECL_IGNORED_P (p) = addressable;
    2362            0 :           TREE_ADDRESSABLE (p) = 0;
    2363            0 :           layout_decl (p, 0);
    2364            0 :           (*args)[i] = p;
    2365              : 
    2366              :           /* Build a second synthetic decl.  */
    2367            0 :           decl = build_decl (EXPR_LOCATION (p),
    2368              :                              PARM_DECL, NULL_TREE, subtype);
    2369            0 :           DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
    2370            0 :           DECL_ARTIFICIAL (decl) = addressable;
    2371            0 :           DECL_IGNORED_P (decl) = addressable;
    2372            0 :           layout_decl (decl, 0);
    2373            0 :           args->safe_insert (++i, decl);
    2374              :         }
    2375              :     }
    2376            0 : }
    2377              : 
    2378              : /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
    2379              :    the hidden struct return argument, and (abi willing) complex args.
    2380              :    Return the new parameter list.  */
    2381              : 
    2382              : static vec<tree>
    2383      4356077 : assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
    2384              : {
    2385      4356077 :   tree fndecl = current_function_decl;
    2386      4356077 :   tree fntype = TREE_TYPE (fndecl);
    2387      4356077 :   vec<tree> fnargs = vNULL;
    2388      4356077 :   tree arg;
    2389              : 
    2390     13297991 :   for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    2391      8941914 :     fnargs.safe_push (arg);
    2392              : 
    2393      4356077 :   all->orig_fnargs = DECL_ARGUMENTS (fndecl);
    2394              : 
    2395              :   /* If struct value address is treated as the first argument, make it so.  */
    2396      4356077 :   if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
    2397       193912 :       && ! cfun->returns_pcc_struct
    2398      4549989 :       && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    2399              :     {
    2400       193912 :       tree type = build_pointer_type (TREE_TYPE (fntype));
    2401       193912 :       tree decl;
    2402              : 
    2403       193912 :       decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
    2404              :                          PARM_DECL, get_identifier (".result_ptr"), type);
    2405       193912 :       DECL_ARG_TYPE (decl) = type;
    2406       193912 :       DECL_ARTIFICIAL (decl) = 1;
    2407       193912 :       DECL_NAMELESS (decl) = 1;
    2408       193912 :       TREE_CONSTANT (decl) = 1;
    2409              :       /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
    2410              :          changes, the end of the RESULT_DECL handling block in
    2411              :          use_register_for_decl must be adjusted to match.  */
    2412              : 
    2413       193912 :       DECL_CHAIN (decl) = all->orig_fnargs;
    2414       193912 :       all->orig_fnargs = decl;
    2415       193912 :       fnargs.safe_insert (0, decl);
    2416              : 
    2417       193912 :       all->function_result_decl = decl;
    2418              :     }
    2419              : 
    2420              :   /* If the target wants to split complex arguments into scalars, do so.  */
    2421      4356077 :   if (targetm.calls.split_complex_arg)
    2422            0 :     split_complex_args (&fnargs);
    2423              : 
    2424      4356077 :   return fnargs;
    2425              : }
    2426              : 
    2427              : /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
    2428              :    data for the parameter.  Incorporate ABI specifics such as pass-by-
    2429              :    reference and type promotion.  */
    2430              : 
    2431              : static void
    2432      9135826 : assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
    2433              :                              struct assign_parm_data_one *data)
    2434              : {
    2435      9135826 :   int unsignedp;
    2436              : 
    2437      9135826 :   *data = assign_parm_data_one ();
    2438              : 
    2439              :   /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
    2440      9135826 :   if (!cfun->stdarg)
    2441      9054029 :     data->arg.named = 1;  /* No variadic parms.  */
    2442        81797 :   else if (DECL_CHAIN (parm))
    2443        38948 :     data->arg.named = 1;  /* Not the last non-variadic parm. */
    2444        42849 :   else if (targetm.calls.strict_argument_naming (all->args_so_far))
    2445        42849 :     data->arg.named = 1;  /* Only variadic ones are unnamed.  */
    2446              :   else
    2447            0 :     data->arg.named = 0;  /* Treat as variadic.  */
    2448              : 
    2449      9135826 :   data->nominal_type = TREE_TYPE (parm);
    2450      9135826 :   data->arg.type = DECL_ARG_TYPE (parm);
    2451              : 
    2452              :   /* Look out for errors propagating this far.  Also, if the parameter's
    2453              :      type is void then its value doesn't matter.  */
    2454      9135826 :   if (TREE_TYPE (parm) == error_mark_node
    2455              :       /* This can happen after weird syntax errors
    2456              :          or if an enum type is defined among the parms.  */
    2457      9135741 :       || TREE_CODE (parm) != PARM_DECL
    2458      9135741 :       || data->arg.type == NULL
    2459     18271567 :       || VOID_TYPE_P (data->nominal_type))
    2460              :     {
    2461           85 :       data->nominal_type = data->arg.type = void_type_node;
    2462           85 :       data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
    2463           85 :       return;
    2464              :     }
    2465              : 
    2466              :   /* Find mode of arg as it is passed, and mode of arg as it should be
    2467              :      during execution of this function.  */
    2468      9135741 :   data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
    2469      9135741 :   data->nominal_mode = TYPE_MODE (data->nominal_type);
    2470              : 
    2471              :   /* If the parm is to be passed as a transparent union or record, use the
    2472              :      type of the first field for the tests below.  We have already verified
    2473              :      that the modes are the same.  */
    2474      9135741 :   if (RECORD_OR_UNION_TYPE_P (data->arg.type)
    2475      9135741 :       && TYPE_TRANSPARENT_AGGR (data->arg.type))
    2476         1536 :     data->arg.type = TREE_TYPE (first_field (data->arg.type));
    2477              : 
    2478              :   /* See if this arg was passed by invisible reference.  */
    2479      9135741 :   if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
    2480              :     {
    2481         9868 :       data->nominal_type = data->arg.type;
    2482         9868 :       data->passed_mode = data->nominal_mode = data->arg.mode;
    2483              :     }
    2484              : 
    2485              :   /* Find mode as it is passed by the ABI.  */
    2486      9135741 :   unsignedp = TYPE_UNSIGNED (data->arg.type);
    2487      9135741 :   data->arg.mode
    2488      9135741 :     = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
    2489      9135741 :                              TREE_TYPE (current_function_decl), 0);
    2490              : }
    2491              : 
    2492              : /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
    2493              : 
    2494              : static void
    2495        21534 : assign_parms_setup_varargs (struct assign_parm_data_all *all,
    2496              :                             struct assign_parm_data_one *data, bool no_rtl)
    2497              : {
    2498        21534 :   int varargs_pretend_bytes = 0;
    2499              : 
    2500        21534 :   function_arg_info last_named_arg = data->arg;
    2501        21534 :   last_named_arg.named = true;
    2502        21534 :   targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
    2503              :                                         &varargs_pretend_bytes, no_rtl);
    2504              : 
    2505              :   /* If the back-end has requested extra stack space, record how much is
    2506              :      needed.  Do not change pretend_args_size otherwise since it may be
    2507              :      nonzero from an earlier partial argument.  */
    2508        21534 :   if (varargs_pretend_bytes > 0)
    2509            0 :     all->pretend_args_size = varargs_pretend_bytes;
    2510        21534 : }
    2511              : 
    2512              : /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
    2513              :    the incoming location of the current parameter.  */
    2514              : 
    2515              : static void
    2516      3186082 : assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
    2517              :                             struct assign_parm_data_one *data)
    2518              : {
    2519      3186082 :   HOST_WIDE_INT pretend_bytes = 0;
    2520      3186082 :   rtx entry_parm;
    2521      3186082 :   bool in_regs;
    2522              : 
    2523      3186082 :   if (data->arg.mode == VOIDmode)
    2524              :     {
    2525            0 :       data->entry_parm = data->stack_parm = const0_rtx;
    2526            0 :       return;
    2527              :     }
    2528              : 
    2529      3186082 :   targetm.calls.warn_parameter_passing_abi (all->args_so_far,
    2530              :                                             data->arg.type);
    2531              : 
    2532      6372164 :   entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
    2533      3186082 :                                                     data->arg);
    2534      3186082 :   if (entry_parm == 0)
    2535      1069407 :     data->arg.mode = data->passed_mode;
    2536              : 
    2537              :   /* Determine parm's home in the stack, in case it arrives in the stack
    2538              :      or we should pretend it did.  Compute the stack position and rtx where
    2539              :      the argument arrives and its size.
    2540              : 
    2541              :      There is one complexity here:  If this was a parameter that would
    2542              :      have been passed in registers, but wasn't only because it is
    2543              :      __builtin_va_alist, we want locate_and_pad_parm to treat it as if
    2544              :      it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
    2545              :      In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
    2546              :      as it was the previous time.  */
    2547      1069407 :   in_regs = (entry_parm != 0);
    2548              : #ifdef STACK_PARMS_IN_REG_PARM_AREA
    2549              :   in_regs = true;
    2550              : #endif
    2551      1069407 :   if (!in_regs && !data->arg.named)
    2552              :     {
    2553            0 :       if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
    2554              :         {
    2555            0 :           rtx tem;
    2556            0 :           function_arg_info named_arg = data->arg;
    2557            0 :           named_arg.named = true;
    2558            0 :           tem = targetm.calls.function_incoming_arg (all->args_so_far,
    2559              :                                                      named_arg);
    2560            0 :           in_regs = tem != NULL;
    2561              :         }
    2562              :     }
    2563              : 
    2564              :   /* If this parameter was passed both in registers and in the stack, use
    2565              :      the copy on the stack.  */
    2566      3186082 :   if (targetm.calls.must_pass_in_stack (data->arg))
    2567              :     entry_parm = 0;
    2568              : 
    2569      3186082 :   if (entry_parm)
    2570              :     {
    2571      2116675 :       int partial;
    2572              : 
    2573      2116675 :       partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
    2574      2116675 :       data->partial = partial;
    2575              : 
    2576              :       /* The caller might already have allocated stack space for the
    2577              :          register parameters.  */
    2578      2116675 :       if (partial != 0 && all->reg_parm_stack_space == 0)
    2579              :         {
    2580              :           /* Part of this argument is passed in registers and part
    2581              :              is passed on the stack.  Ask the prologue code to extend
    2582              :              the stack part so that we can recreate the full value.
    2583              : 
    2584              :              PRETEND_BYTES is the size of the registers we need to store.
    2585              :              CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
    2586              :              stack space that the prologue should allocate.
    2587              : 
    2588              :              Internally, gcc assumes that the argument pointer is aligned
    2589              :              to STACK_BOUNDARY bits.  This is used both for alignment
    2590              :              optimizations (see init_emit) and to locate arguments that are
    2591              :              aligned to more than PARM_BOUNDARY bits.  We must preserve this
    2592              :              invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
    2593              :              a stack boundary.  */
    2594              : 
    2595              :           /* We assume at most one partial arg, and it must be the first
    2596              :              argument on the stack.  */
    2597            0 :           gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
    2598              : 
    2599            0 :           pretend_bytes = partial;
    2600            0 :           all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
    2601              : 
    2602              :           /* We want to align relative to the actual stack pointer, so
    2603              :              don't include this in the stack size until later.  */
    2604            0 :           all->extra_pretend_bytes = all->pretend_args_size;
    2605              :         }
    2606              :     }
    2607              : 
    2608      3186082 :   locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
    2609              :                        all->reg_parm_stack_space,
    2610              :                        entry_parm ? data->partial : 0, current_function_decl,
    2611              :                        &all->stack_args_size, &data->locate);
    2612              : 
    2613              :   /* Update parm_stack_boundary if this parameter is passed in the
    2614              :      stack.  */
    2615      3186082 :   if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    2616       197611 :     crtl->parm_stack_boundary = data->locate.boundary;
    2617              : 
    2618              :   /* Adjust offsets to include the pretend args.  */
    2619      3186082 :   pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
    2620      3186082 :   data->locate.slot_offset.constant += pretend_bytes;
    2621      3186082 :   data->locate.offset.constant += pretend_bytes;
    2622              : 
    2623      3186082 :   data->entry_parm = entry_parm;
    2624              : }
    2625              : 
    2626              : /* A subroutine of assign_parms.  If there is actually space on the stack
    2627              :    for this parm, count it in stack_args_size and return true.  */
    2628              : 
    2629              : static bool
    2630      3186082 : assign_parm_is_stack_parm (struct assign_parm_data_all *all,
    2631              :                            struct assign_parm_data_one *data)
    2632              : {
    2633              :   /* Trivially true if we've no incoming register.  */
    2634      3186082 :   if (data->entry_parm == NULL)
    2635              :     ;
    2636              :   /* Also true if we're partially in registers and partially not,
    2637              :      since we've arranged to drop the entire argument on the stack.  */
    2638      2116675 :   else if (data->partial != 0)
    2639              :     ;
    2640              :   /* Also true if the target says that it's passed in both registers
    2641              :      and on the stack.  */
    2642      2116675 :   else if (GET_CODE (data->entry_parm) == PARALLEL
    2643        53986 :            && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    2644              :     ;
    2645              :   /* Also true if the target says that there's stack allocated for
    2646              :      all register parameters.  */
    2647      2116675 :   else if (all->reg_parm_stack_space > 0)
    2648              :     ;
    2649              :   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
    2650              :   else
    2651              :     return false;
    2652              : 
    2653      1179600 :   all->stack_args_size.constant += data->locate.size.constant;
    2654      1179600 :   if (data->locate.size.var)
    2655            0 :     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
    2656              : 
    2657              :   return true;
    2658              : }
    2659              : 
    2660              : /* A subroutine of assign_parms.  Given that this parameter is allocated
    2661              :    stack space by the ABI, find it.  */
    2662              : 
    2663              : static void
    2664      1179600 : assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
    2665              : {
    2666      1179600 :   rtx offset_rtx, stack_parm;
    2667      1179600 :   unsigned int align, boundary;
    2668              : 
    2669              :   /* If we're passing this arg using a reg, make its stack home the
    2670              :      aligned stack slot.  */
    2671      1179600 :   if (data->entry_parm)
    2672       110193 :     offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
    2673              :   else
    2674      1311712 :     offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
    2675              : 
    2676      1179600 :   stack_parm = crtl->args.internal_arg_pointer;
    2677      1179600 :   if (offset_rtx != const0_rtx)
    2678      1091190 :     stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
    2679      1179600 :   stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
    2680              : 
    2681      1179600 :   if (!data->arg.pass_by_reference)
    2682              :     {
    2683      1174706 :       set_mem_attributes (stack_parm, parm, 1);
    2684              :       /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
    2685              :          while promoted mode's size is needed.  */
    2686      1174706 :       if (data->arg.mode != BLKmode
    2687      1174706 :           && data->arg.mode != DECL_MODE (parm))
    2688              :         {
    2689            0 :           set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
    2690            0 :           if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
    2691              :             {
    2692            0 :               poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
    2693            0 :                                                          data->arg.mode);
    2694            0 :               if (maybe_ne (offset, 0))
    2695            0 :                 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
    2696              :             }
    2697              :         }
    2698              :     }
    2699              : 
    2700      1179600 :   boundary = data->locate.boundary;
    2701      1179600 :   align = BITS_PER_UNIT;
    2702              : 
    2703              :   /* If we're padding upward, we know that the alignment of the slot
    2704              :      is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
    2705              :      intentionally forcing upward padding.  Otherwise we have to come
    2706              :      up with a guess at the alignment based on OFFSET_RTX.  */
    2707      1179600 :   poly_int64 offset;
    2708      1179600 :   if (data->locate.where_pad == PAD_NONE || data->entry_parm)
    2709              :     align = boundary;
    2710      1069407 :   else if (data->locate.where_pad == PAD_UPWARD)
    2711              :     {
    2712      1069407 :       align = boundary;
    2713              :       /* If the argument offset is actually more aligned than the nominal
    2714              :          stack slot boundary, take advantage of that excess alignment.
    2715              :          Don't make any assumptions if STACK_POINTER_OFFSET is in use.  */
    2716      1069407 :       if (poly_int_rtx_p (offset_rtx, &offset)
    2717              :           && known_eq (STACK_POINTER_OFFSET, 0))
    2718              :         {
    2719      1069407 :           unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
    2720      1197573 :           if (offset_align == 0 || offset_align > STACK_BOUNDARY)
    2721       589387 :             offset_align = STACK_BOUNDARY;
    2722      1069407 :           align = MAX (align, offset_align);
    2723              :         }
    2724              :     }
    2725            0 :   else if (poly_int_rtx_p (offset_rtx, &offset))
    2726              :     {
    2727            0 :       align = least_bit_hwi (boundary);
    2728            0 :       unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
    2729            0 :       if (offset_align != 0)
    2730            0 :         align = MIN (align, offset_align);
    2731              :     }
    2732      1179600 :   set_mem_align (stack_parm, align);
    2733              : 
    2734      1179600 :   if (data->entry_parm)
    2735       110193 :     set_reg_attrs_for_parm (data->entry_parm, stack_parm);
    2736              : 
    2737      1179600 :   data->stack_parm = stack_parm;
    2738      1179600 : }
    2739              : 
    2740              : /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
    2741              :    always valid and contiguous.  */
    2742              : 
    2743              : static void
    2744      1179600 : assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
    2745              : {
    2746      1179600 :   rtx entry_parm = data->entry_parm;
    2747      1179600 :   rtx stack_parm = data->stack_parm;
    2748              : 
    2749              :   /* If this parm was passed part in regs and part in memory, pretend it
    2750              :      arrived entirely in memory by pushing the register-part onto the stack.
    2751              :      In the special case of a DImode or DFmode that is split, we could put
    2752              :      it together in a pseudoreg directly, but for now that's not worth
    2753              :      bothering with.  */
    2754      1179600 :   if (data->partial != 0)
    2755              :     {
    2756              :       /* Handle calls that pass values in multiple non-contiguous
    2757              :          locations.  The Irix 6 ABI has examples of this.  */
    2758            0 :       if (GET_CODE (entry_parm) == PARALLEL)
    2759            0 :         emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
    2760            0 :                           data->arg.type, int_size_in_bytes (data->arg.type));
    2761              :       else
    2762              :         {
    2763            0 :           gcc_assert (data->partial % UNITS_PER_WORD == 0);
    2764            0 :           move_block_from_reg (REGNO (entry_parm),
    2765              :                                validize_mem (copy_rtx (stack_parm)),
    2766              :                                data->partial / UNITS_PER_WORD);
    2767              :         }
    2768              : 
    2769              :       entry_parm = stack_parm;
    2770              :     }
    2771              : 
    2772              :   /* If we didn't decide this parm came in a register, by default it came
    2773              :      on the stack.  */
    2774      1179600 :   else if (entry_parm == NULL)
    2775              :     entry_parm = stack_parm;
    2776              : 
    2777              :   /* When an argument is passed in multiple locations, we can't make use
    2778              :      of this information, but we can save some copying if the whole argument
    2779              :      is passed in a single register.  */
    2780       110193 :   else if (GET_CODE (entry_parm) == PARALLEL
    2781            0 :            && data->nominal_mode != BLKmode
    2782            0 :            && data->passed_mode != BLKmode)
    2783              :     {
    2784            0 :       size_t i, len = XVECLEN (entry_parm, 0);
    2785              : 
    2786            0 :       for (i = 0; i < len; i++)
    2787            0 :         if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
    2788            0 :             && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
    2789            0 :             && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
    2790            0 :                 == data->passed_mode)
    2791            0 :             && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
    2792              :           {
    2793              :             entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
    2794              :             break;
    2795              :           }
    2796              :     }
    2797              : 
    2798      1179600 :   data->entry_parm = entry_parm;
    2799      1179600 : }
    2800              : 
    2801              : /* A subroutine of assign_parms.  Reconstitute any values which were
    2802              :    passed in multiple registers and would fit in a single register.  */
    2803              : 
    2804              : static void
    2805      3112357 : assign_parm_remove_parallels (struct assign_parm_data_one *data)
    2806              : {
    2807      3112357 :   rtx entry_parm = data->entry_parm;
    2808              : 
    2809              :   /* Convert the PARALLEL to a REG of the same mode as the parallel.
    2810              :      This can be done with register operations rather than on the
    2811              :      stack, even if we will store the reconstituted parameter on the
    2812              :      stack later.  */
    2813      3112357 :   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    2814              :     {
    2815        50167 :       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
    2816        50167 :       emit_group_store (parmreg, entry_parm, data->arg.type,
    2817       100334 :                         GET_MODE_SIZE (GET_MODE (entry_parm)));
    2818        50167 :       entry_parm = parmreg;
    2819              :     }
    2820              : 
    2821      3112357 :   data->entry_parm = entry_parm;
    2822      3112357 : }
    2823              : 
    2824              : /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
    2825              :    always valid and properly aligned.  */
    2826              : 
    2827              : static void
    2828      3186082 : assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
    2829              : {
    2830      3186082 :   rtx stack_parm = data->stack_parm;
    2831              : 
    2832              :   /* If we can't trust the parm stack slot to be aligned enough for its
    2833              :      ultimate type, don't use that slot after entry.  We'll make another
    2834              :      stack slot, if we need one.  */
    2835      3186082 :   if (stack_parm
    2836      3186082 :       && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
    2837        24865 :            && ((optab_handler (movmisalign_optab, data->nominal_mode)
    2838              :                 != CODE_FOR_nothing)
    2839        49722 :                || targetm.slow_unaligned_access (data->nominal_mode,
    2840        24861 :                                                  MEM_ALIGN (stack_parm))))
    2841      1178189 :           || (data->nominal_type
    2842      2356378 :               && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
    2843         4485 :               && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    2844              :     stack_parm = NULL;
    2845              : 
    2846              :   /* If parm was passed in memory, and we need to convert it on entry,
    2847              :      don't store it back in that same slot.  */
    2848      3181593 :   else if (data->entry_parm == stack_parm
    2849      1063511 :            && data->nominal_mode != BLKmode
    2850       995236 :            && data->nominal_mode != data->passed_mode)
    2851              :     stack_parm = NULL;
    2852              : 
    2853              :   /* If stack protection is in effect for this function, don't leave any
    2854              :      pointers in their passed stack slots.  */
    2855      3181593 :   else if (crtl->stack_protect_guard
    2856          188 :            && (flag_stack_protect == SPCT_FLAG_ALL
    2857          151 :                || data->arg.pass_by_reference
    2858          151 :                || POINTER_TYPE_P (data->nominal_type)))
    2859         4623 :     stack_parm = NULL;
    2860              : 
    2861      3186082 :   data->stack_parm = stack_parm;
    2862      3186082 : }
    2863              : 
    2864              : /* A subroutine of assign_parms.  Return true if the current parameter
    2865              :    should be stored as a BLKmode in the current frame.  */
    2866              : 
    2867              : static bool
    2868      3186082 : assign_parm_setup_block_p (struct assign_parm_data_one *data)
    2869              : {
    2870            0 :   if (data->nominal_mode == BLKmode)
    2871              :     return true;
    2872      3112357 :   if (GET_MODE (data->entry_parm) == BLKmode)
    2873            0 :     return true;
    2874              : 
    2875              : #ifdef BLOCK_REG_PADDING
    2876              :   /* Only assign_parm_setup_block knows how to deal with register arguments
    2877              :      that are padded at the least significant end.  */
    2878              :   if (REG_P (data->entry_parm)
    2879              :       && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
    2880              :       && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
    2881              :           == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    2882              :     return true;
    2883              : #endif
    2884              : 
    2885              :   return false;
    2886              : }
    2887              : 
    2888              : /* A subroutine of assign_parms.  Arrange for the parameter to be
    2889              :    present and valid in DATA->STACK_RTL.  */
    2890              : 
    2891              : static void
    2892        73725 : assign_parm_setup_block (struct assign_parm_data_all *all,
    2893              :                          tree parm, struct assign_parm_data_one *data)
    2894              : {
    2895        73725 :   rtx entry_parm = data->entry_parm;
    2896        73725 :   rtx stack_parm = data->stack_parm;
    2897        73725 :   rtx target_reg = NULL_RTX;
    2898        73725 :   bool in_conversion_seq = false;
    2899        73725 :   HOST_WIDE_INT size;
    2900        73725 :   HOST_WIDE_INT size_stored;
    2901              : 
    2902        73725 :   if (GET_CODE (entry_parm) == PARALLEL)
    2903         3819 :     entry_parm = emit_group_move_into_temps (entry_parm);
    2904              : 
    2905              :   /* If we want the parameter in a pseudo, don't use a stack slot.  */
    2906        73725 :   if (is_gimple_reg (parm) && use_register_for_decl (parm))
    2907              :     {
    2908            0 :       tree def = ssa_default_def (cfun, parm);
    2909            0 :       gcc_assert (def);
    2910            0 :       machine_mode mode = promote_ssa_mode (def, NULL);
    2911            0 :       rtx reg = gen_reg_rtx (mode);
    2912            0 :       if (GET_CODE (reg) != CONCAT)
    2913              :         stack_parm = reg;
    2914              :       else
    2915              :         {
    2916            0 :           target_reg = reg;
    2917              :           /* Avoid allocating a stack slot, if there isn't one
    2918              :              preallocated by the ABI.  It might seem like we should
    2919              :              always prefer a pseudo, but converting between
    2920              :              floating-point and integer modes goes through the stack
    2921              :              on various machines, so it's better to use the reserved
    2922              :              stack slot than to risk wasting it and allocating more
    2923              :              for the conversion.  */
    2924            0 :           if (stack_parm == NULL_RTX)
    2925              :             {
    2926            0 :               int save = generating_concat_p;
    2927            0 :               generating_concat_p = 0;
    2928            0 :               stack_parm = gen_reg_rtx (mode);
    2929            0 :               generating_concat_p = save;
    2930              :             }
    2931              :         }
    2932            0 :       data->stack_parm = NULL;
    2933              :     }
    2934              : 
    2935        73725 :   size = int_size_in_bytes (data->arg.type);
    2936        86342 :   size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
    2937        73725 :   if (stack_parm == 0)
    2938              :     {
    2939         5453 :       HOST_WIDE_INT parm_align
    2940              :         = ((STRICT_ALIGNMENT || BITS_PER_WORD <= MAX_SUPPORTED_STACK_ALIGNMENT)
    2941         5665 :            ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
    2942              : 
    2943         5453 :       SET_DECL_ALIGN (parm, parm_align);
    2944         5453 :       if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
    2945              :         {
    2946              :           rtx allocsize = gen_int_mode (size_stored, Pmode);
    2947              :           get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
    2948              :           stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
    2949              :                                            MAX_SUPPORTED_STACK_ALIGNMENT);
    2950              :           rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
    2951              :                                             DECL_ALIGN (parm));
    2952              :           mark_reg_pointer (addr, DECL_ALIGN (parm));
    2953              :           stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
    2954              :           MEM_NOTRAP_P (stack_parm) = 1;
    2955              :         }
    2956              :       else
    2957         5453 :         stack_parm = assign_stack_local (BLKmode, size_stored,
    2958         5453 :                                          DECL_ALIGN (parm));
    2959        10906 :       if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
    2960           77 :         PUT_MODE (stack_parm, GET_MODE (entry_parm));
    2961         5453 :       set_mem_attributes (stack_parm, parm, 1);
    2962              :     }
    2963              : 
    2964              :   /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
    2965              :      calls that pass values in multiple non-contiguous locations.  */
    2966        73725 :   if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    2967              :     {
    2968         5155 :       rtx mem;
    2969              : 
    2970              :       /* Note that we will be storing an integral number of words.
    2971              :          So we have to be careful to ensure that we allocate an
    2972              :          integral number of words.  We do this above when we call
    2973              :          assign_stack_local if space was not allocated in the argument
    2974              :          list.  If it was, this will not work if PARM_BOUNDARY is not
    2975              :          a multiple of BITS_PER_WORD.  It isn't clear how to fix this
    2976              :          if it becomes a problem.  Exception is when BLKmode arrives
    2977              :          with arguments not conforming to word_mode.  */
    2978              : 
    2979         5155 :       if (data->stack_parm == 0)
    2980              :         ;
    2981         5155 :       else if (GET_CODE (entry_parm) == PARALLEL)
    2982              :         ;
    2983              :       else
    2984         5155 :         gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
    2985              : 
    2986         5155 :       mem = validize_mem (copy_rtx (stack_parm));
    2987              : 
    2988              :       /* Handle values in multiple non-contiguous locations.  */
    2989         5155 :       if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
    2990            0 :         emit_group_store (mem, entry_parm, data->arg.type, size);
    2991         5155 :       else if (GET_CODE (entry_parm) == PARALLEL)
    2992              :         {
    2993         3819 :           push_to_sequence2 (all->first_conversion_insn,
    2994              :                              all->last_conversion_insn);
    2995         3819 :           emit_group_store (mem, entry_parm, data->arg.type, size);
    2996         3819 :           all->first_conversion_insn = get_insns ();
    2997         3819 :           all->last_conversion_insn = get_last_insn ();
    2998         3819 :           end_sequence ();
    2999         3819 :           in_conversion_seq = true;
    3000              :         }
    3001              : 
    3002         1336 :       else if (size == 0)
    3003              :         ;
    3004              : 
    3005              :       /* If SIZE is that of a mode no bigger than a word, just use
    3006              :          that mode's store operation.  */
    3007         1320 :       else if (size <= UNITS_PER_WORD)
    3008              :         {
    3009         1316 :           unsigned int bits = size * BITS_PER_UNIT;
    3010         1316 :           machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
    3011              : 
    3012         1316 :           if (mode != BLKmode
    3013              : #ifdef BLOCK_REG_PADDING
    3014              :               && (size == UNITS_PER_WORD
    3015              :                   || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
    3016              :                       != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    3017              : #endif
    3018              :               )
    3019              :             {
    3020          102 :               rtx reg;
    3021              : 
    3022              :               /* We are really truncating a word_mode value containing
    3023              :                  SIZE bytes into a value of mode MODE.  If such an
    3024              :                  operation requires no actual instructions, we can refer
    3025              :                  to the value directly in mode MODE, otherwise we must
    3026              :                  start with the register in word_mode and explicitly
    3027              :                  convert it.  */
    3028          102 :               if (mode == word_mode
    3029          102 :                   || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
    3030          102 :                 reg = gen_rtx_REG (mode, REGNO (entry_parm));
    3031              :               else
    3032              :                 {
    3033            0 :                   reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
    3034            0 :                   reg = convert_to_mode (mode, copy_to_reg (reg), 1);
    3035              :                 }
    3036              : 
    3037              :               /* We use adjust_address to get a new MEM with the mode
    3038              :                  changed.  adjust_address is better than change_address
    3039              :                  for this purpose because adjust_address does not lose
    3040              :                  the MEM_EXPR associated with the MEM.
    3041              : 
    3042              :                  If the MEM_EXPR is lost, then optimizations like DSE
    3043              :                  assume the MEM escapes and thus is not subject to DSE.  */
    3044          102 :               emit_move_insn (adjust_address (mem, mode, 0), reg);
    3045              :             }
    3046              : 
    3047              : #ifdef BLOCK_REG_PADDING
    3048              :           /* Storing the register in memory as a full word, as
    3049              :              move_block_from_reg below would do, and then using the
    3050              :              MEM in a smaller mode, has the effect of shifting right
    3051              :              if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
    3052              :              shifting must be explicit.  */
    3053              :           else if (!MEM_P (mem))
    3054              :             {
    3055              :               rtx x;
    3056              : 
    3057              :               /* If the assert below fails, we should have taken the
    3058              :                  mode != BLKmode path above, unless we have downward
    3059              :                  padding of smaller-than-word arguments on a machine
    3060              :                  with little-endian bytes, which would likely require
    3061              :                  additional changes to work correctly.  */
    3062              :               gcc_checking_assert (BYTES_BIG_ENDIAN
    3063              :                                    && (BLOCK_REG_PADDING (mode,
    3064              :                                                           data->arg.type, 1)
    3065              :                                        == PAD_UPWARD));
    3066              : 
    3067              :               int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
    3068              : 
    3069              :               x = gen_rtx_REG (word_mode, REGNO (entry_parm));
    3070              :               x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
    3071              :                                 NULL_RTX, 1);
    3072              :               x = force_reg (word_mode, x);
    3073              :               x = gen_lowpart_SUBREG (GET_MODE (mem), x);
    3074              : 
    3075              :               emit_move_insn (mem, x);
    3076              :             }
    3077              : #endif
    3078              : 
    3079              :           /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
    3080              :              machine must be aligned to the left before storing
    3081              :              to memory.  Note that the previous test doesn't
    3082              :              handle all cases (e.g. SIZE == 3).  */
    3083         1214 :           else if (size != UNITS_PER_WORD
    3084              : #ifdef BLOCK_REG_PADDING
    3085              :                    && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
    3086              :                        == PAD_DOWNWARD)
    3087              : #else
    3088              :                    && BYTES_BIG_ENDIAN
    3089              : #endif
    3090              :                    )
    3091              :             {
    3092              :               rtx tem, x;
    3093              :               int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
    3094              :               rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
    3095              : 
    3096              :               x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
    3097              :               tem = change_address (mem, word_mode, 0);
    3098              :               emit_move_insn (tem, x);
    3099              :             }
    3100              :           else
    3101         2428 :             move_block_from_reg (REGNO (entry_parm), mem,
    3102         1214 :                                  size_stored / UNITS_PER_WORD);
    3103              :         }
    3104            2 :       else if (!MEM_P (mem))
    3105              :         {
    3106            0 :           gcc_checking_assert (size > UNITS_PER_WORD);
    3107              : #ifdef BLOCK_REG_PADDING
    3108              :           gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
    3109              :                                                   data->arg.type, 0)
    3110              :                                == PAD_UPWARD);
    3111              : #endif
    3112            0 :           emit_move_insn (mem, entry_parm);
    3113              :         }
    3114              :       else
    3115            2 :         move_block_from_reg (REGNO (entry_parm), mem,
    3116            2 :                              size_stored / UNITS_PER_WORD);
    3117              :     }
    3118        68570 :   else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
    3119              :     {
    3120          202 :       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
    3121          202 :       emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
    3122              :                        BLOCK_OP_NORMAL);
    3123          202 :       all->first_conversion_insn = get_insns ();
    3124          202 :       all->last_conversion_insn = get_last_insn ();
    3125          202 :       end_sequence ();
    3126          202 :       in_conversion_seq = true;
    3127              :     }
    3128              : 
    3129        73725 :   if (target_reg)
    3130              :     {
    3131            0 :       if (!in_conversion_seq)
    3132            0 :         emit_move_insn (target_reg, stack_parm);
    3133              :       else
    3134              :         {
    3135            0 :           push_to_sequence2 (all->first_conversion_insn,
    3136              :                              all->last_conversion_insn);
    3137            0 :           emit_move_insn (target_reg, stack_parm);
    3138            0 :           all->first_conversion_insn = get_insns ();
    3139            0 :           all->last_conversion_insn = get_last_insn ();
    3140            0 :           end_sequence ();
    3141              :         }
    3142              :       stack_parm = target_reg;
    3143              :     }
    3144              : 
    3145        73725 :   data->stack_parm = stack_parm;
    3146        73725 :   set_parm_rtl (parm, stack_parm);
    3147        73725 : }
    3148              : 
    3149              : /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
    3150              :    parameter.  Get it there.  Perform all ABI specified conversions.  */
    3151              : 
    3152              : static void
    3153      2284289 : assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
    3154              :                        struct assign_parm_data_one *data)
    3155              : {
    3156      2284289 :   rtx parmreg, validated_mem;
    3157      2284289 :   rtx equiv_stack_parm;
    3158      2284289 :   machine_mode promoted_nominal_mode;
    3159      2284289 :   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
    3160      2284289 :   bool did_conversion = false;
    3161      2284289 :   bool need_conversion, moved;
    3162      2284289 :   enum insn_code icode;
    3163      2284289 :   rtx rtl;
    3164              : 
    3165              :   /* Store the parm in a pseudoregister during the function, but we may
    3166              :      need to do it in a wider mode.  Using 2 here makes the result
    3167              :      consistent with promote_decl_mode and thus expand_expr_real_1.  */
    3168      2284289 :   promoted_nominal_mode
    3169      4568578 :     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
    3170      2284289 :                              TREE_TYPE (current_function_decl), 2);
    3171              : 
    3172      2284289 :   parmreg = gen_reg_rtx (promoted_nominal_mode);
    3173      2284289 :   if (!DECL_ARTIFICIAL (parm))
    3174      2062223 :     mark_user_reg (parmreg);
    3175              : 
    3176              :   /* If this was an item that we received a pointer to,
    3177              :      set rtl appropriately.  */
    3178      2284289 :   if (data->arg.pass_by_reference)
    3179              :     {
    3180         4930 :       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
    3181         4930 :       set_mem_attributes (rtl, parm, 1);
    3182              :     }
    3183              :   else
    3184              :     rtl = parmreg;
    3185              : 
    3186      2284289 :   assign_parm_remove_parallels (data);
    3187              : 
    3188              :   /* Copy the value into the register, thus bridging between
    3189              :      assign_parm_find_data_types and expand_expr_real_1.  */
    3190              : 
    3191      2284289 :   equiv_stack_parm = data->stack_parm;
    3192      2284289 :   validated_mem = validize_mem (copy_rtx (data->entry_parm));
    3193              : 
    3194      2284289 :   need_conversion = (data->nominal_mode != data->passed_mode
    3195      2284289 :                      || promoted_nominal_mode != data->arg.mode);
    3196         4171 :   moved = false;
    3197              : 
    3198              :   if (need_conversion
    3199         4171 :       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
    3200         3660 :       && data->nominal_mode == data->passed_mode
    3201            0 :       && data->nominal_mode == GET_MODE (data->entry_parm))
    3202              :     {
    3203              :       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
    3204              :          mode, by the caller.  We now have to convert it to
    3205              :          NOMINAL_MODE, if different.  However, PARMREG may be in
    3206              :          a different mode than NOMINAL_MODE if it is being stored
    3207              :          promoted.
    3208              : 
    3209              :          If ENTRY_PARM is a hard register, it might be in a register
    3210              :          not valid for operating in its mode (e.g., an odd-numbered
    3211              :          register for a DFmode).  In that case, moves are the only
    3212              :          thing valid, so we can't do a convert from there.  This
    3213              :          occurs when the calling sequence allow such misaligned
    3214              :          usages.
    3215              : 
    3216              :          In addition, the conversion may involve a call, which could
    3217              :          clobber parameters which haven't been copied to pseudo
    3218              :          registers yet.
    3219              : 
    3220              :          First, we try to emit an insn which performs the necessary
    3221              :          conversion.  We verify that this insn does not clobber any
    3222              :          hard registers.  */
    3223              : 
    3224            0 :       rtx op0, op1;
    3225              : 
    3226            0 :       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
    3227              :                             unsignedp);
    3228              : 
    3229            0 :       op0 = parmreg;
    3230            0 :       op1 = validated_mem;
    3231            0 :       if (icode != CODE_FOR_nothing
    3232            0 :           && insn_operand_matches (icode, 0, op0)
    3233            0 :           && insn_operand_matches (icode, 1, op1))
    3234              :         {
    3235            0 :           enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
    3236            0 :           rtx_insn *insn, *insns;
    3237            0 :           rtx t = op1;
    3238            0 :           HARD_REG_SET hardregs;
    3239              : 
    3240            0 :           start_sequence ();
    3241              :           /* If op1 is a hard register that is likely spilled, first
    3242              :              force it into a pseudo, otherwise combiner might extend
    3243              :              its lifetime too much.  */
    3244            0 :           if (GET_CODE (t) == SUBREG)
    3245            0 :             t = SUBREG_REG (t);
    3246            0 :           if (REG_P (t)
    3247            0 :               && HARD_REGISTER_P (t)
    3248            0 :               && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
    3249            0 :               && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
    3250              :             {
    3251            0 :               t = gen_reg_rtx (GET_MODE (op1));
    3252            0 :               emit_move_insn (t, op1);
    3253              :             }
    3254              :           else
    3255              :             t = op1;
    3256            0 :           rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
    3257              :                                            data->passed_mode, unsignedp);
    3258            0 :           emit_insn (pat);
    3259            0 :           insns = get_insns ();
    3260              : 
    3261            0 :           moved = true;
    3262            0 :           CLEAR_HARD_REG_SET (hardregs);
    3263            0 :           for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
    3264              :             {
    3265            0 :               if (INSN_P (insn))
    3266            0 :                 note_stores (insn, record_hard_reg_sets, &hardregs);
    3267            0 :               if (!hard_reg_set_empty_p (hardregs))
    3268            0 :                 moved = false;
    3269              :             }
    3270              : 
    3271            0 :           end_sequence ();
    3272              : 
    3273            0 :           if (moved)
    3274              :             {
    3275            0 :               emit_insn (insns);
    3276            0 :               if (equiv_stack_parm != NULL_RTX)
    3277            0 :                 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
    3278              :                                                   equiv_stack_parm);
    3279              :             }
    3280              :         }
    3281              :     }
    3282              : 
    3283            0 :   if (moved)
    3284              :     /* Nothing to do.  */
    3285              :     ;
    3286      2284289 :   else if (need_conversion)
    3287              :     {
    3288              :       /* We did not have an insn to convert directly, or the sequence
    3289              :          generated appeared unsafe.  We must first copy the parm to a
    3290              :          pseudo reg, and save the conversion until after all
    3291              :          parameters have been moved.  */
    3292              : 
    3293         4171 :       int save_tree_used;
    3294         4171 :       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
    3295              : 
    3296         4171 :       emit_move_insn (tempreg, validated_mem);
    3297              : 
    3298         4171 :       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
    3299         4171 :       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
    3300              : 
    3301         4171 :       if (partial_subreg_p (tempreg)
    3302         3660 :           && GET_MODE (tempreg) == data->nominal_mode
    3303         3660 :           && REG_P (SUBREG_REG (tempreg))
    3304         3660 :           && data->nominal_mode == data->passed_mode
    3305         3660 :           && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
    3306              :         {
    3307              :           /* The argument is already sign/zero extended, so note it
    3308              :              into the subreg.  */
    3309            0 :           SUBREG_PROMOTED_VAR_P (tempreg) = 1;
    3310            0 :           SUBREG_PROMOTED_SET (tempreg, unsignedp);
    3311              :         }
    3312              : 
    3313              :       /* TREE_USED gets set erroneously during expand_assignment.  */
    3314         4171 :       save_tree_used = TREE_USED (parm);
    3315         4171 :       SET_DECL_RTL (parm, rtl);
    3316         4171 :       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
    3317         4171 :       SET_DECL_RTL (parm, NULL_RTX);
    3318         4171 :       TREE_USED (parm) = save_tree_used;
    3319         4171 :       all->first_conversion_insn = get_insns ();
    3320         4171 :       all->last_conversion_insn = get_last_insn ();
    3321         4171 :       end_sequence ();
    3322              : 
    3323         4171 :       did_conversion = true;
    3324              :     }
    3325      2280118 :   else if (MEM_P (data->entry_parm)
    3326       822340 :            && GET_MODE_ALIGNMENT (promoted_nominal_mode)
    3327       822398 :               > MEM_ALIGN (data->entry_parm)
    3328      2303974 :            && (((icode = optab_handler (movmisalign_optab,
    3329              :                                         promoted_nominal_mode))
    3330              :                 != CODE_FOR_nothing)
    3331        23852 :                || targetm.slow_unaligned_access (promoted_nominal_mode,
    3332        23910 :                                                  MEM_ALIGN (data->entry_parm))))
    3333              :     {
    3334            4 :       if (icode != CODE_FOR_nothing)
    3335            4 :         emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
    3336              :       else
    3337            0 :         rtl = parmreg = extract_bit_field (validated_mem,
    3338            0 :                         GET_MODE_BITSIZE (promoted_nominal_mode), 0,
    3339              :                         unsignedp, parmreg,
    3340              :                         promoted_nominal_mode, VOIDmode, false, NULL);
    3341              :     }
    3342              :   else
    3343      2280114 :     emit_move_insn (parmreg, validated_mem);
    3344              : 
    3345              :   /* If we were passed a pointer but the actual value can live in a register,
    3346              :      retrieve it and use it directly.  Note that we cannot use nominal_mode,
    3347              :      because it will have been set to Pmode above, we must use the actual mode
    3348              :      of the parameter instead.  */
    3349      2284289 :   if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    3350              :     {
    3351              :       /* Use a stack slot for debugging purposes if possible.  */
    3352          695 :       if (use_register_for_decl (parm))
    3353              :         {
    3354          365 :           parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
    3355          365 :           mark_user_reg (parmreg);
    3356              :         }
    3357              :       else
    3358              :         {
    3359          330 :           int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
    3360              :                                             TYPE_MODE (TREE_TYPE (parm)),
    3361              :                                             TYPE_ALIGN (TREE_TYPE (parm)));
    3362          330 :           parmreg
    3363          330 :             = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
    3364          660 :                                   GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
    3365              :                                   align);
    3366          330 :           set_mem_attributes (parmreg, parm, 1);
    3367              :         }
    3368              : 
    3369              :       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
    3370              :          the debug info in case it is not legitimate.  */
    3371          695 :       if (GET_MODE (parmreg) != GET_MODE (rtl))
    3372              :         {
    3373            0 :           rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
    3374            0 :           int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
    3375              : 
    3376            0 :           push_to_sequence2 (all->first_conversion_insn,
    3377              :                              all->last_conversion_insn);
    3378            0 :           emit_move_insn (tempreg, rtl);
    3379            0 :           tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
    3380            0 :           emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
    3381              :                           tempreg);
    3382            0 :           all->first_conversion_insn = get_insns ();
    3383            0 :           all->last_conversion_insn = get_last_insn ();
    3384            0 :           end_sequence ();
    3385              : 
    3386            0 :           did_conversion = true;
    3387              :         }
    3388              :       else
    3389          695 :         emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
    3390              : 
    3391          695 :       rtl = parmreg;
    3392              : 
    3393              :       /* STACK_PARM is the pointer, not the parm, and PARMREG is
    3394              :          now the parm.  */
    3395          695 :       data->stack_parm = NULL;
    3396              :     }
    3397              : 
    3398      2284289 :   set_parm_rtl (parm, rtl);
    3399              : 
    3400              :   /* Mark the register as eliminable if we did no conversion and it was
    3401              :      copied from memory at a fixed offset, and the arg pointer was not
    3402              :      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
    3403              :      offset formed an invalid address, such memory-equivalences as we
    3404              :      make here would screw up life analysis for it.  */
    3405      2284289 :   if (data->nominal_mode == data->passed_mode
    3406      2280118 :       && !did_conversion
    3407      2280118 :       && data->stack_parm != 0
    3408       877864 :       && MEM_P (data->stack_parm)
    3409       877864 :       && data->locate.offset.var == 0
    3410      3162153 :       && reg_mentioned_p (virtual_incoming_args_rtx,
    3411       877864 :                           XEXP (data->stack_parm, 0)))
    3412              :     {
    3413       877864 :       rtx_insn *linsn = get_last_insn ();
    3414       877864 :       rtx_insn *sinsn;
    3415       877864 :       rtx set;
    3416              : 
    3417              :       /* Mark complex types separately.  */
    3418       877864 :       if (GET_CODE (parmreg) == CONCAT)
    3419              :         {
    3420         1166 :           scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
    3421         1166 :           int regnor = REGNO (XEXP (parmreg, 0));
    3422         1166 :           int regnoi = REGNO (XEXP (parmreg, 1));
    3423         1166 :           rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
    3424         2332 :           rtx stacki = adjust_address_nv (data->stack_parm, submode,
    3425              :                                           GET_MODE_SIZE (submode));
    3426              : 
    3427              :           /* Scan backwards for the set of the real and
    3428              :              imaginary parts.  */
    3429         6072 :           for (sinsn = linsn; sinsn != 0;
    3430         4906 :                sinsn = prev_nonnote_insn (sinsn))
    3431              :             {
    3432         4906 :               set = single_set (sinsn);
    3433         4906 :               if (set == 0)
    3434            0 :                 continue;
    3435              : 
    3436         4906 :               if (SET_DEST (set) == regno_reg_rtx [regnoi])
    3437         1166 :                 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
    3438         3740 :               else if (SET_DEST (set) == regno_reg_rtx [regnor])
    3439         1166 :                 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
    3440              :             }
    3441              :         }
    3442              :       else
    3443       876698 :         set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    3444              :     }
    3445              : 
    3446              :   /* For pointer data type, suggest pointer register.  */
    3447      2284289 :   if (POINTER_TYPE_P (TREE_TYPE (parm)))
    3448       933354 :     mark_reg_pointer (parmreg,
    3449       933354 :                       TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
    3450      2284289 : }
    3451              : 
    3452              : /* A subroutine of assign_parms.  Allocate stack space to hold the current
    3453              :    parameter.  Get it there.  Perform all ABI specified conversions.  */
    3454              : 
    3455              : static void
    3456       828068 : assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
    3457              :                          struct assign_parm_data_one *data)
    3458              : {
    3459              :   /* Value must be stored in the stack slot STACK_PARM during function
    3460              :      execution.  */
    3461       828068 :   bool to_conversion = false;
    3462              : 
    3463       828068 :   assign_parm_remove_parallels (data);
    3464              : 
    3465       828068 :   if (data->arg.mode != data->nominal_mode)
    3466              :     {
    3467              :       /* Conversion is required.  */
    3468          835 :       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
    3469              : 
    3470          835 :       emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
    3471              : 
    3472              :       /* Some ABIs require scalar floating point modes to be passed
    3473              :          in a wider scalar integer mode.  We need to explicitly
    3474              :          truncate to an integer mode of the correct precision before
    3475              :          using a SUBREG to reinterpret as a floating point value.  */
    3476          835 :       if (SCALAR_FLOAT_MODE_P (data->nominal_mode)
    3477          101 :           && SCALAR_INT_MODE_P (data->arg.mode)
    3478          835 :           && known_lt (GET_MODE_SIZE (data->nominal_mode),
    3479              :                        GET_MODE_SIZE (data->arg.mode)))
    3480            0 :         tempreg = convert_wider_int_to_float (data->nominal_mode,
    3481              :                                               data->arg.mode, tempreg);
    3482              : 
    3483          835 :       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
    3484          835 :       to_conversion = true;
    3485              : 
    3486         2505 :       data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
    3487          835 :                                           TYPE_UNSIGNED (TREE_TYPE (parm)));
    3488              : 
    3489          835 :       if (data->stack_parm)
    3490              :         {
    3491            0 :           poly_int64 offset
    3492            0 :             = subreg_lowpart_offset (data->nominal_mode,
    3493            0 :                                      GET_MODE (data->stack_parm));
    3494              :           /* ??? This may need a big-endian conversion on sparc64.  */
    3495            0 :           data->stack_parm
    3496            0 :             = adjust_address (data->stack_parm, data->nominal_mode, 0);
    3497            0 :           if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
    3498            0 :             set_mem_offset (data->stack_parm,
    3499            0 :                             MEM_OFFSET (data->stack_parm) + offset);
    3500              :         }
    3501              :     }
    3502              : 
    3503       828068 :   if (data->entry_parm != data->stack_parm)
    3504              :     {
    3505       650966 :       rtx src, dest;
    3506              : 
    3507       650966 :       if (data->stack_parm == 0)
    3508              :         {
    3509       601208 :           int align = STACK_SLOT_ALIGNMENT (data->arg.type,
    3510              :                                             GET_MODE (data->entry_parm),
    3511              :                                             TYPE_ALIGN (data->arg.type));
    3512       601208 :           if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
    3513       603763 :               && ((optab_handler (movmisalign_optab,
    3514         2555 :                                   GET_MODE (data->entry_parm))
    3515              :                    != CODE_FOR_nothing)
    3516         2555 :                   || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
    3517              :                                                     align)))
    3518            0 :             align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
    3519       601208 :           data->stack_parm
    3520       601208 :             = assign_stack_local (GET_MODE (data->entry_parm),
    3521      1202416 :                                   GET_MODE_SIZE (GET_MODE (data->entry_parm)),
    3522              :                                   align);
    3523       601208 :           align = MEM_ALIGN (data->stack_parm);
    3524       601208 :           set_mem_attributes (data->stack_parm, parm, 1);
    3525       601208 :           set_mem_align (data->stack_parm, align);
    3526              :         }
    3527              : 
    3528       650966 :       dest = validize_mem (copy_rtx (data->stack_parm));
    3529       650966 :       src = validize_mem (copy_rtx (data->entry_parm));
    3530              : 
    3531       650966 :       if (TYPE_EMPTY_P (data->arg.type))
    3532              :         /* Empty types don't really need to be copied.  */;
    3533       649552 :       else if (MEM_P (src))
    3534              :         {
    3535              :           /* Use a block move to handle potentially misaligned entry_parm.  */
    3536           61 :           if (!to_conversion)
    3537           61 :             push_to_sequence2 (all->first_conversion_insn,
    3538              :                                all->last_conversion_insn);
    3539           61 :           to_conversion = true;
    3540              : 
    3541           61 :           emit_block_move (dest, src,
    3542           61 :                            GEN_INT (int_size_in_bytes (data->arg.type)),
    3543              :                            BLOCK_OP_NORMAL);
    3544              :         }
    3545              :       else
    3546              :         {
    3547       649491 :           if (!REG_P (src))
    3548         1531 :             src = force_reg (GET_MODE (src), src);
    3549       649491 :           emit_move_insn (dest, src);
    3550              :         }
    3551              :     }
    3552              : 
    3553       828068 :   if (to_conversion)
    3554              :     {
    3555          896 :       all->first_conversion_insn = get_insns ();
    3556          896 :       all->last_conversion_insn = get_last_insn ();
    3557          896 :       end_sequence ();
    3558              :     }
    3559              : 
    3560       828068 :   set_parm_rtl (parm, data->stack_parm);
    3561       828068 : }
    3562              : 
    3563              : /* A subroutine of assign_parms.  If the ABI splits complex arguments, then
    3564              :    undo the frobbing that we did in assign_parms_augmented_arg_list.  */
    3565              : 
    3566              : static void
    3567            0 : assign_parms_unsplit_complex (struct assign_parm_data_all *all,
    3568              :                               vec<tree> fnargs)
    3569              : {
    3570            0 :   tree parm;
    3571            0 :   tree orig_fnargs = all->orig_fnargs;
    3572            0 :   unsigned i = 0;
    3573              : 
    3574            0 :   for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    3575              :     {
    3576            0 :       if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
    3577            0 :           && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
    3578              :         {
    3579            0 :           rtx tmp, real, imag;
    3580            0 :           scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
    3581              : 
    3582            0 :           real = DECL_RTL (fnargs[i]);
    3583            0 :           imag = DECL_RTL (fnargs[i + 1]);
    3584            0 :           if (inner != GET_MODE (real))
    3585              :             {
    3586            0 :               real = gen_lowpart_SUBREG (inner, real);
    3587            0 :               imag = gen_lowpart_SUBREG (inner, imag);
    3588              :             }
    3589              : 
    3590            0 :           if (TREE_ADDRESSABLE (parm))
    3591              :             {
    3592            0 :               rtx rmem, imem;
    3593            0 :               HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
    3594            0 :               int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
    3595              :                                                 DECL_MODE (parm),
    3596              :                                                 TYPE_ALIGN (TREE_TYPE (parm)));
    3597              : 
    3598              :               /* split_complex_arg put the real and imag parts in
    3599              :                  pseudos.  Move them to memory.  */
    3600            0 :               tmp = assign_stack_local (DECL_MODE (parm), size, align);
    3601            0 :               set_mem_attributes (tmp, parm, 1);
    3602            0 :               rmem = adjust_address_nv (tmp, inner, 0);
    3603            0 :               imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
    3604            0 :               push_to_sequence2 (all->first_conversion_insn,
    3605              :                                  all->last_conversion_insn);
    3606            0 :               emit_move_insn (rmem, real);
    3607            0 :               emit_move_insn (imem, imag);
    3608            0 :               all->first_conversion_insn = get_insns ();
    3609            0 :               all->last_conversion_insn = get_last_insn ();
    3610            0 :               end_sequence ();
    3611              :             }
    3612              :           else
    3613            0 :             tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
    3614            0 :           set_parm_rtl (parm, tmp);
    3615              : 
    3616            0 :           real = DECL_INCOMING_RTL (fnargs[i]);
    3617            0 :           imag = DECL_INCOMING_RTL (fnargs[i + 1]);
    3618            0 :           if (inner != GET_MODE (real))
    3619              :             {
    3620            0 :               real = gen_lowpart_SUBREG (inner, real);
    3621            0 :               imag = gen_lowpart_SUBREG (inner, imag);
    3622              :             }
    3623            0 :           tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
    3624            0 :           set_decl_incoming_rtl (parm, tmp, false);
    3625            0 :           i++;
    3626              :         }
    3627              :     }
    3628            0 : }
    3629              : 
    3630              : /* Assign RTL expressions to the function's parameters.  This may involve
    3631              :    copying them into registers and using those registers as the DECL_RTL.  */
    3632              : 
    3633              : static void
    3634      1481726 : assign_parms (tree fndecl)
    3635              : {
    3636      1481726 :   struct assign_parm_data_all all;
    3637      1481726 :   tree parm;
    3638      1481726 :   vec<tree> fnargs;
    3639      1481726 :   unsigned i;
    3640              : 
    3641      1481726 :   crtl->args.internal_arg_pointer
    3642      1481726 :     = targetm.calls.internal_arg_pointer ();
    3643              : 
    3644      1481726 :   assign_parms_initialize_all (&all);
    3645      1481726 :   fnargs = assign_parms_augmented_arg_list (&all);
    3646              : 
    3647      1481726 :   if (TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (fndecl))
    3648      1481726 :       && fnargs.is_empty ())
    3649              :     {
    3650           96 :       struct assign_parm_data_one data = {};
    3651           96 :       assign_parms_setup_varargs (&all, &data, false);
    3652              :     }
    3653              : 
    3654      4667808 :   FOR_EACH_VEC_ELT (fnargs, i, parm)
    3655              :     {
    3656      3186082 :       struct assign_parm_data_one data;
    3657              : 
    3658              :       /* Extract the type of PARM; adjust it according to ABI.  */
    3659      3186082 :       assign_parm_find_data_types (&all, parm, &data);
    3660              : 
    3661              :       /* Early out for errors and void parameters.  */
    3662      3186082 :       if (data.passed_mode == VOIDmode)
    3663              :         {
    3664            0 :           SET_DECL_RTL (parm, const0_rtx);
    3665            0 :           DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
    3666            0 :           continue;
    3667              :         }
    3668              : 
    3669              :       /* Estimate stack alignment from parameter alignment.  */
    3670      3186082 :       if (SUPPORTS_STACK_ALIGNMENT)
    3671              :         {
    3672      3186082 :           unsigned int align
    3673      6372164 :             = targetm.calls.function_arg_boundary (data.arg.mode,
    3674      3186082 :                                                    data.arg.type);
    3675      3186082 :           align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
    3676      3186082 :           if (TYPE_ALIGN (data.nominal_type) > align)
    3677         4714 :             align = MINIMUM_ALIGNMENT (data.nominal_type,
    3678              :                                        TYPE_MODE (data.nominal_type),
    3679              :                                        TYPE_ALIGN (data.nominal_type));
    3680      3186082 :           if (crtl->stack_alignment_estimated < align)
    3681              :             {
    3682       359282 :               gcc_assert (!crtl->stack_realign_processed);
    3683       359282 :               crtl->stack_alignment_estimated = align;
    3684              :             }
    3685              :         }
    3686              : 
    3687              :       /* Find out where the parameter arrives in this function.  */
    3688      3186082 :       assign_parm_find_entry_rtl (&all, &data);
    3689              : 
    3690              :       /* Find out where stack space for this parameter might be.  */
    3691      3186082 :       if (assign_parm_is_stack_parm (&all, &data))
    3692              :         {
    3693      1179600 :           assign_parm_find_stack_rtl (parm, &data);
    3694      1179600 :           assign_parm_adjust_entry_rtl (&data);
    3695              :           /* For arguments that occupy no space in the parameter
    3696              :              passing area, have non-zero size and have address taken,
    3697              :              force creation of a stack slot so that they have distinct
    3698              :              address from other parameters.  */
    3699      1179600 :           if (TYPE_EMPTY_P (data.arg.type)
    3700         6635 :               && TREE_ADDRESSABLE (parm)
    3701         1573 :               && data.entry_parm == data.stack_parm
    3702         1573 :               && MEM_P (data.entry_parm)
    3703      1181173 :               && int_size_in_bytes (data.arg.type))
    3704         1407 :             data.stack_parm = NULL_RTX;
    3705              :         }
    3706              :       /* Record permanently how this parm was passed.  */
    3707      3186082 :       if (data.arg.pass_by_reference)
    3708              :         {
    3709         4930 :           rtx incoming_rtl
    3710         4930 :             = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
    3711              :                            data.entry_parm);
    3712         4930 :           set_decl_incoming_rtl (parm, incoming_rtl, true);
    3713              :         }
    3714              :       else
    3715      3181152 :         set_decl_incoming_rtl (parm, data.entry_parm, false);
    3716              : 
    3717      3186082 :       assign_parm_adjust_stack_rtl (&data);
    3718              : 
    3719      3186082 :       if (assign_parm_setup_block_p (&data))
    3720        73725 :         assign_parm_setup_block (&all, parm, &data);
    3721      3112357 :       else if (data.arg.pass_by_reference || use_register_for_decl (parm))
    3722      2284289 :         assign_parm_setup_reg (&all, parm, &data);
    3723              :       else
    3724       828068 :         assign_parm_setup_stack (&all, parm, &data);
    3725              : 
    3726      3186082 :       if (cfun->stdarg && !DECL_CHAIN (parm))
    3727        21438 :         assign_parms_setup_varargs (&all, &data, false);
    3728              : 
    3729              :       /* Update info on where next arg arrives in registers.  */
    3730      3186082 :       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
    3731              :     }
    3732              : 
    3733      1481726 :   if (targetm.calls.split_complex_arg)
    3734            0 :     assign_parms_unsplit_complex (&all, fnargs);
    3735              : 
    3736      1481726 :   fnargs.release ();
    3737              : 
    3738              :   /* Output all parameter conversion instructions (possibly including calls)
    3739              :      now that all parameters have been copied out of hard registers.  */
    3740      1481726 :   emit_insn (all.first_conversion_insn);
    3741              : 
    3742      1481726 :   do_pending_stack_adjust ();
    3743              : 
    3744              :   /* Estimate reload stack alignment from scalar return mode.  */
    3745      1481726 :   if (SUPPORTS_STACK_ALIGNMENT)
    3746              :     {
    3747      1481726 :       if (DECL_RESULT (fndecl))
    3748              :         {
    3749      1481726 :           tree type = TREE_TYPE (DECL_RESULT (fndecl));
    3750      1481726 :           machine_mode mode = TYPE_MODE (type);
    3751              : 
    3752      1481726 :           if (mode != BLKmode
    3753      1430836 :               && mode != VOIDmode
    3754       734326 :               && !AGGREGATE_TYPE_P (type))
    3755              :             {
    3756       673725 :               unsigned int align = GET_MODE_ALIGNMENT (mode);
    3757       673725 :               if (crtl->stack_alignment_estimated < align)
    3758              :                 {
    3759           10 :                   gcc_assert (!crtl->stack_realign_processed);
    3760           10 :                   crtl->stack_alignment_estimated = align;
    3761              :                 }
    3762              :             }
    3763              :         }
    3764              :     }
    3765              : 
    3766              :   /* If we are receiving a struct value address as the first argument, set up
    3767              :      the RTL for the function result. As this might require code to convert
    3768              :      the transmitted address to Pmode, we do this here to ensure that possible
    3769              :      preliminary conversions of the address have been emitted already.  */
    3770      1481726 :   if (all.function_result_decl)
    3771              :     {
    3772        69187 :       tree result = DECL_RESULT (current_function_decl);
    3773        69187 :       rtx addr = DECL_RTL (all.function_result_decl);
    3774        69187 :       rtx x;
    3775              : 
    3776        69187 :       if (DECL_BY_REFERENCE (result))
    3777              :         {
    3778         8663 :           SET_DECL_VALUE_EXPR (result, all.function_result_decl);
    3779         8663 :           x = addr;
    3780              :         }
    3781              :       else
    3782              :         {
    3783        60524 :           SET_DECL_VALUE_EXPR (result,
    3784              :                                build1 (INDIRECT_REF, TREE_TYPE (result),
    3785              :                                        all.function_result_decl));
    3786        60524 :           addr = convert_memory_address (Pmode, addr);
    3787        60524 :           x = gen_rtx_MEM (DECL_MODE (result), addr);
    3788        60524 :           set_mem_attributes (x, result, 1);
    3789              :         }
    3790              : 
    3791        69187 :       DECL_HAS_VALUE_EXPR_P (result) = 1;
    3792              : 
    3793        69187 :       set_parm_rtl (result, x);
    3794              :     }
    3795              : 
    3796              :   /* We have aligned all the args, so add space for the pretend args.  */
    3797      1481726 :   crtl->args.pretend_args_size = all.pretend_args_size;
    3798      1481726 :   all.stack_args_size.constant += all.extra_pretend_bytes;
    3799      1481726 :   crtl->args.size = all.stack_args_size.constant;
    3800              : 
    3801              :   /* Adjust function incoming argument size for alignment and
    3802              :      minimum length.  */
    3803              : 
    3804      1481726 :   crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
    3805      2963452 :   crtl->args.size = aligned_upper_bound (crtl->args.size,
    3806      1481726 :                                          PARM_BOUNDARY / BITS_PER_UNIT);
    3807              : 
    3808      1481726 :   if (ARGS_GROW_DOWNWARD)
    3809              :     {
    3810              :       crtl->args.arg_offset_rtx
    3811              :         = (all.stack_args_size.var == 0
    3812              :            ? gen_int_mode (-all.stack_args_size.constant, Pmode)
    3813              :            : expand_expr (size_diffop (all.stack_args_size.var,
    3814              :                                        size_int (-all.stack_args_size.constant)),
    3815              :                           NULL_RTX, VOIDmode, EXPAND_NORMAL));
    3816              :     }
    3817              :   else
    3818      1608114 :     crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
    3819              : 
    3820              :   /* See how many bytes, if any, of its args a function should try to pop
    3821              :      on return.  */
    3822              : 
    3823      1481726 :   crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
    3824      1481726 :                                                          TREE_TYPE (fndecl),
    3825              :                                                          crtl->args.size);
    3826              : 
    3827              :   /* For stdarg.h function, save info about
    3828              :      regs and stack space used by the named args.  */
    3829              : 
    3830      1481726 :   crtl->args.info = all.args_so_far_v;
    3831              : 
    3832              :   /* Set the rtx used for the function return value.  Put this in its
    3833              :      own variable so any optimizers that need this information don't have
    3834              :      to include tree.h.  Do this here so it gets done when an inlined
    3835              :      function gets output.  */
    3836              : 
    3837      1481726 :   crtl->return_rtx
    3838      1481726 :     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
    3839      2266942 :        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
    3840              : 
    3841              :   /* If scalar return value was computed in a pseudo-reg, or was a named
    3842              :      return value that got dumped to the stack, copy that to the hard
    3843              :      return register.  */
    3844      1481726 :   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    3845              :     {
    3846       785216 :       tree decl_result = DECL_RESULT (fndecl);
    3847       785216 :       rtx decl_rtl = DECL_RTL (decl_result);
    3848              : 
    3849       785216 :       if (REG_P (decl_rtl)
    3850       785216 :           ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
    3851        67194 :           : DECL_REGISTER (decl_result))
    3852              :         {
    3853       722812 :           rtx real_decl_rtl;
    3854              : 
    3855              :           /* Unless the psABI says not to.  */
    3856       722812 :           if (TYPE_EMPTY_P (TREE_TYPE (decl_result)))
    3857              :             real_decl_rtl = NULL_RTX;
    3858              :           else
    3859              :             {
    3860       718190 :               real_decl_rtl
    3861       718190 :                 = targetm.calls.function_value (TREE_TYPE (decl_result),
    3862              :                                                 fndecl, true);
    3863       718190 :               REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
    3864              :             }
    3865              :           /* The delay slot scheduler assumes that crtl->return_rtx
    3866              :              holds the hard register containing the return value, not a
    3867              :              temporary pseudo.  */
    3868       722812 :           crtl->return_rtx = real_decl_rtl;
    3869              :         }
    3870              :     }
    3871      1481726 : }
    3872              : 
    3873              : /* Gimplify the parameter list for current_function_decl.  This involves
    3874              :    evaluating SAVE_EXPRs of variable sized parameters and generating code
    3875              :    to implement callee-copies reference parameters.  Returns a sequence of
    3876              :    statements to add to the beginning of the function.  */
    3877              : 
    3878              : gimple_seq
    3879      2874351 : gimplify_parameters (gimple_seq *cleanup)
    3880              : {
    3881      2874351 :   struct assign_parm_data_all all;
    3882      2874351 :   tree parm;
    3883      2874351 :   gimple_seq stmts = NULL;
    3884      2874351 :   vec<tree> fnargs;
    3885      2874351 :   unsigned i;
    3886              : 
    3887      2874351 :   assign_parms_initialize_all (&all);
    3888      2874351 :   fnargs = assign_parms_augmented_arg_list (&all);
    3889              : 
    3890      8824095 :   FOR_EACH_VEC_ELT (fnargs, i, parm)
    3891              :     {
    3892      5949744 :       struct assign_parm_data_one data;
    3893              : 
    3894              :       /* Extract the type of PARM; adjust it according to ABI.  */
    3895      5949744 :       assign_parm_find_data_types (&all, parm, &data);
    3896              : 
    3897              :       /* Early out for errors and void parameters.  */
    3898      5949744 :       if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
    3899           87 :         continue;
    3900              : 
    3901              :       /* Update info on where next arg arrives in registers.  */
    3902      5949657 :       targetm.calls.function_arg_advance (all.args_so_far, data.arg);
    3903              : 
    3904              :       /* ??? Once upon a time variable_size stuffed parameter list
    3905              :          SAVE_EXPRs (amongst others) onto a pending sizes list.  This
    3906              :          turned out to be less than manageable in the gimple world.
    3907              :          Now we have to hunt them down ourselves.  */
    3908      5949657 :       gimplify_type_sizes (TREE_TYPE (parm), &stmts);
    3909              : 
    3910      5949657 :       if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
    3911              :         {
    3912           41 :           gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
    3913           41 :           gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
    3914              :         }
    3915              : 
    3916      5949657 :       if (data.arg.pass_by_reference)
    3917              :         {
    3918         4938 :           tree type = TREE_TYPE (data.arg.type);
    3919         4938 :           function_arg_info orig_arg (type, data.arg.named);
    3920         4938 :           if (reference_callee_copied (&all.args_so_far_v, orig_arg))
    3921              :             {
    3922            0 :               tree local, t;
    3923              : 
    3924              :               /* For constant-sized objects, this is trivial; for
    3925              :                  variable-sized objects, we have to play games.  */
    3926            0 :               if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
    3927            0 :                   && !(flag_stack_check == GENERIC_STACK_CHECK
    3928            0 :                        && compare_tree_int (DECL_SIZE_UNIT (parm),
    3929              :                                             STACK_CHECK_MAX_VAR_SIZE) > 0))
    3930              :                 {
    3931            0 :                   local = create_tmp_var (type, get_name (parm));
    3932            0 :                   DECL_IGNORED_P (local) = 0;
    3933              :                   /* If PARM was addressable, move that flag over
    3934              :                      to the local copy, as its address will be taken,
    3935              :                      not the PARMs.  Keep the parms address taken
    3936              :                      as we'll query that flag during gimplification.  */
    3937            0 :                   if (TREE_ADDRESSABLE (parm))
    3938            0 :                     TREE_ADDRESSABLE (local) = 1;
    3939            0 :                   if (DECL_NOT_GIMPLE_REG_P (parm))
    3940            0 :                     DECL_NOT_GIMPLE_REG_P (local) = 1;
    3941              : 
    3942            0 :                   if (!is_gimple_reg (local)
    3943            0 :                       && flag_stack_reuse != SR_NONE)
    3944              :                     {
    3945            0 :                       tree clobber = build_clobber (type);
    3946            0 :                       gimple *clobber_stmt;
    3947            0 :                       clobber_stmt = gimple_build_assign (local, clobber);
    3948            0 :                       gimple_seq_add_stmt (cleanup, clobber_stmt);
    3949              :                     }
    3950              :                 }
    3951              :               else
    3952              :                 {
    3953            0 :                   tree ptr_type, addr;
    3954              : 
    3955            0 :                   ptr_type = build_pointer_type (type);
    3956            0 :                   addr = create_tmp_reg (ptr_type, get_name (parm));
    3957            0 :                   DECL_IGNORED_P (addr) = 0;
    3958            0 :                   local = build_fold_indirect_ref (addr);
    3959              : 
    3960            0 :                   t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
    3961            0 :                                               DECL_ALIGN (parm),
    3962              :                                               max_int_size_in_bytes (type));
    3963              :                   /* The call has been built for a variable-sized object.  */
    3964            0 :                   CALL_ALLOCA_FOR_VAR_P (t) = 1;
    3965            0 :                   t = fold_convert (ptr_type, t);
    3966            0 :                   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
    3967            0 :                   gimplify_and_add (t, &stmts);
    3968              :                 }
    3969              : 
    3970            0 :               gimplify_assign (local, parm, &stmts);
    3971              : 
    3972            0 :               SET_DECL_VALUE_EXPR (parm, local);
    3973            0 :               DECL_HAS_VALUE_EXPR_P (parm) = 1;
    3974              :             }
    3975              :         }
    3976              :     }
    3977              : 
    3978      2874351 :   fnargs.release ();
    3979              : 
    3980      2874351 :   return stmts;
    3981              : }
    3982              : 
    3983              : /* Compute the size and offset from the start of the stacked arguments for a
    3984              :    parm passed in mode PASSED_MODE and with type TYPE.
    3985              : 
    3986              :    INITIAL_OFFSET_PTR points to the current offset into the stacked
    3987              :    arguments.
    3988              : 
    3989              :    The starting offset and size for this parm are returned in
    3990              :    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
    3991              :    nonzero, the offset is that of stack slot, which is returned in
    3992              :    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
    3993              :    padding required from the initial offset ptr to the stack slot.
    3994              : 
    3995              :    IN_REGS is nonzero if the argument will be passed in registers.  It will
    3996              :    never be set if REG_PARM_STACK_SPACE is not defined.
    3997              : 
    3998              :    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
    3999              :    for arguments which are passed in registers.
    4000              : 
    4001              :    FNDECL is the function in which the argument was defined.
    4002              : 
    4003              :    There are two types of rounding that are done.  The first, controlled by
    4004              :    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
    4005              :    argument list to be aligned to the specific boundary (in bits).  This
    4006              :    rounding affects the initial and starting offsets, but not the argument
    4007              :    size.
    4008              : 
    4009              :    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
    4010              :    optionally rounds the size of the parm to PARM_BOUNDARY.  The
    4011              :    initial offset is not affected by this rounding, while the size always
    4012              :    is and the starting offset may be.  */
    4013              : 
    4014              : /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    4015              :     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    4016              :     callers pass in the total size of args so far as
    4017              :     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
    4018              : 
    4019              : void
    4020      5432743 : locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
    4021              :                      int reg_parm_stack_space, int partial,
    4022              :                      tree fndecl ATTRIBUTE_UNUSED,
    4023              :                      struct args_size *initial_offset_ptr,
    4024              :                      struct locate_and_pad_arg_data *locate)
    4025              : {
    4026      5432743 :   tree sizetree;
    4027      5432743 :   pad_direction where_pad;
    4028      5432743 :   unsigned int boundary, round_boundary;
    4029      5432743 :   int part_size_in_regs;
    4030              : 
    4031              :   /* If we have found a stack parm before we reach the end of the
    4032              :      area reserved for registers, skip that area.  */
    4033      5432743 :   if (! in_regs)
    4034              :     {
    4035      3249971 :       if (reg_parm_stack_space > 0)
    4036              :         {
    4037        83755 :           if (initial_offset_ptr->var
    4038        83755 :               || !ordered_p (initial_offset_ptr->constant,
    4039              :                              reg_parm_stack_space))
    4040              :             {
    4041            0 :               initial_offset_ptr->var
    4042            0 :                 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
    4043              :                               ssize_int (reg_parm_stack_space));
    4044            0 :               initial_offset_ptr->constant = 0;
    4045              :             }
    4046              :           else
    4047        83755 :             initial_offset_ptr->constant
    4048        83755 :               = ordered_max (initial_offset_ptr->constant,
    4049              :                              reg_parm_stack_space);
    4050              :         }
    4051              :     }
    4052              : 
    4053      5432743 :   part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
    4054              : 
    4055      5432743 :   sizetree = (type
    4056      5432743 :               ? arg_size_in_bytes (type)
    4057        37849 :               : size_int (GET_MODE_SIZE (passed_mode)));
    4058      5432743 :   where_pad = targetm.calls.function_arg_padding (passed_mode, type);
    4059      5432743 :   boundary = targetm.calls.function_arg_boundary (passed_mode, type);
    4060      5432743 :   round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
    4061              :                                                               type);
    4062      5432743 :   locate->where_pad = where_pad;
    4063              : 
    4064              :   /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
    4065      5432743 :   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    4066              :     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
    4067              : 
    4068      5432743 :   locate->boundary = boundary;
    4069              : 
    4070      5432743 :   if (SUPPORTS_STACK_ALIGNMENT)
    4071              :     {
    4072              :       /* stack_alignment_estimated can't change after stack has been
    4073              :          realigned.  */
    4074      5432743 :       if (crtl->stack_alignment_estimated < boundary)
    4075              :         {
    4076         5069 :           if (!crtl->stack_realign_processed)
    4077         5069 :             crtl->stack_alignment_estimated = boundary;
    4078              :           else
    4079              :             {
    4080              :               /* If stack is realigned and stack alignment value
    4081              :                  hasn't been finalized, it is OK not to increase
    4082              :                  stack_alignment_estimated.  The bigger alignment
    4083              :                  requirement is recorded in stack_alignment_needed
    4084              :                  below.  */
    4085            0 :               gcc_assert (!crtl->stack_realign_finalized
    4086              :                           && crtl->stack_realign_needed);
    4087              :             }
    4088              :         }
    4089              :     }
    4090              : 
    4091      5432743 :   if (ARGS_GROW_DOWNWARD)
    4092              :     {
    4093              :       locate->slot_offset.constant = -initial_offset_ptr->constant;
    4094              :       if (initial_offset_ptr->var)
    4095              :         locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
    4096              :                                               initial_offset_ptr->var);
    4097              : 
    4098              :       {
    4099              :         tree s2 = sizetree;
    4100              :         if (where_pad != PAD_NONE
    4101              :             && (!tree_fits_uhwi_p (sizetree)
    4102              :                 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    4103              :           s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    4104              :         SUB_PARM_SIZE (locate->slot_offset, s2);
    4105              :       }
    4106              : 
    4107              :       locate->slot_offset.constant += part_size_in_regs;
    4108              : 
    4109              :       if (!in_regs || reg_parm_stack_space > 0)
    4110              :         pad_to_arg_alignment (&locate->slot_offset, boundary,
    4111              :                               &locate->alignment_pad);
    4112              : 
    4113              :       locate->size.constant = (-initial_offset_ptr->constant
    4114              :                                - locate->slot_offset.constant);
    4115              :       if (initial_offset_ptr->var)
    4116              :         locate->size.var = size_binop (MINUS_EXPR,
    4117              :                                        size_binop (MINUS_EXPR,
    4118              :                                                    ssize_int (0),
    4119              :                                                    initial_offset_ptr->var),
    4120              :                                        locate->slot_offset.var);
    4121              : 
    4122              :       /* Pad_below needs the pre-rounded size to know how much to pad
    4123              :          below.  */
    4124              :       locate->offset = locate->slot_offset;
    4125              :       if (where_pad == PAD_DOWNWARD)
    4126              :         pad_below (&locate->offset, passed_mode, sizetree);
    4127              : 
    4128              :     }
    4129              :   else
    4130              :     {
    4131      5432743 :       if (!in_regs || reg_parm_stack_space > 0)
    4132      3426261 :         pad_to_arg_alignment (initial_offset_ptr, boundary,
    4133              :                               &locate->alignment_pad);
    4134      5432743 :       locate->slot_offset = *initial_offset_ptr;
    4135              : 
    4136              : #ifdef PUSH_ROUNDING
    4137      5432743 :       if (passed_mode != BLKmode)
    4138      5088311 :         sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
    4139              : #endif
    4140              : 
    4141              :       /* Pad_below needs the pre-rounded size to know how much to pad below
    4142              :          so this must be done before rounding up.  */
    4143      5432743 :       locate->offset = locate->slot_offset;
    4144      5432743 :       if (where_pad == PAD_DOWNWARD)
    4145            0 :         pad_below (&locate->offset, passed_mode, sizetree);
    4146              : 
    4147      5432743 :       if (where_pad != PAD_NONE
    4148      5432743 :           && (!tree_fits_uhwi_p (sizetree)
    4149      5432743 :               || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    4150        18569 :         sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
    4151              : 
    4152      5432743 :       ADD_PARM_SIZE (locate->size, sizetree);
    4153              : 
    4154      5432743 :       locate->size.constant -= part_size_in_regs;
    4155              :     }
    4156              : 
    4157      5432743 :   locate->offset.constant
    4158      5432743 :     += targetm.calls.function_arg_offset (passed_mode, type);
    4159      5432743 : }
    4160              : 
    4161              : /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
    4162              :    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
    4163              : 
    4164              : static void
    4165      3426261 : pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
    4166              :                       struct args_size *alignment_pad)
    4167              : {
    4168      3426261 :   tree save_var = NULL_TREE;
    4169      3426261 :   poly_int64 save_constant = 0;
    4170      3426261 :   int boundary_in_bytes = boundary / BITS_PER_UNIT;
    4171      3426261 :   poly_int64 sp_offset = STACK_POINTER_OFFSET;
    4172              : 
    4173              : #ifdef SPARC_STACK_BOUNDARY_HACK
    4174              :   /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
    4175              :      the real alignment of %sp.  However, when it does this, the
    4176              :      alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
    4177              :   if (SPARC_STACK_BOUNDARY_HACK)
    4178              :     sp_offset = 0;
    4179              : #endif
    4180              : 
    4181      5183811 :   if (boundary > PARM_BOUNDARY)
    4182              :     {
    4183       140800 :       save_var = offset_ptr->var;
    4184       140800 :       save_constant = offset_ptr->constant;
    4185              :     }
    4186              : 
    4187      3426261 :   alignment_pad->var = NULL_TREE;
    4188      3426261 :   alignment_pad->constant = 0;
    4189              : 
    4190      3426261 :   if (boundary > BITS_PER_UNIT)
    4191              :     {
    4192      3426261 :       int misalign;
    4193      3426261 :       if (offset_ptr->var
    4194      3426261 :           || !known_misalignment (offset_ptr->constant + sp_offset,
    4195              :                                   boundary_in_bytes, &misalign))
    4196              :         {
    4197            0 :           tree sp_offset_tree = ssize_int (sp_offset);
    4198            0 :           tree offset = size_binop (PLUS_EXPR,
    4199              :                                     ARGS_SIZE_TREE (*offset_ptr),
    4200              :                                     sp_offset_tree);
    4201            0 :           tree rounded;
    4202            0 :           if (ARGS_GROW_DOWNWARD)
    4203              :             rounded = round_down (offset, boundary / BITS_PER_UNIT);
    4204              :           else
    4205            0 :             rounded = round_up   (offset, boundary / BITS_PER_UNIT);
    4206              : 
    4207            0 :           offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
    4208              :           /* ARGS_SIZE_TREE includes constant term.  */
    4209            0 :           offset_ptr->constant = 0;
    4210            0 :           if (boundary > PARM_BOUNDARY)
    4211            0 :             alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
    4212              :                                              save_var);
    4213              :         }
    4214              :       else
    4215              :         {
    4216      3426261 :           if (ARGS_GROW_DOWNWARD)
    4217              :             offset_ptr->constant -= misalign;
    4218              :           else
    4219      3426261 :             offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
    4220              : 
    4221      3426261 :           if (boundary > PARM_BOUNDARY)
    4222       140800 :             alignment_pad->constant = offset_ptr->constant - save_constant;
    4223              :         }
    4224              :     }
    4225      3426261 : }
    4226              : 
    4227              : static void
    4228            0 : pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
    4229              : {
    4230            0 :   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
    4231            0 :   int misalign;
    4232            0 :   if (passed_mode != BLKmode
    4233            0 :       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
    4234            0 :     offset_ptr->constant += -misalign & (align - 1);
    4235              :   else
    4236              :     {
    4237            0 :       if (TREE_CODE (sizetree) != INTEGER_CST
    4238            0 :           || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
    4239              :         {
    4240              :           /* Round the size up to multiple of PARM_BOUNDARY bits.  */
    4241            0 :           tree s2 = round_up (sizetree, align);
    4242              :           /* Add it in.  */
    4243            0 :           ADD_PARM_SIZE (*offset_ptr, s2);
    4244            0 :           SUB_PARM_SIZE (*offset_ptr, sizetree);
    4245              :         }
    4246              :     }
    4247            0 : }
    4248              : 
    4249              : 
    4250              : /* True if register REGNO was alive at a place where `setjmp' was
    4251              :    called and was set more than once or is an argument.  Such regs may
    4252              :    be clobbered by `longjmp'.  */
    4253              : 
    4254              : static bool
    4255           44 : regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
    4256              : {
    4257              :   /* There appear to be cases where some local vars never reach the
    4258              :      backend but have bogus regnos.  */
    4259           44 :   if (regno >= max_reg_num ())
    4260              :     return false;
    4261              : 
    4262           44 :   return ((REG_N_SETS (regno) > 1
    4263           42 :            || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
    4264              :                                regno))
    4265           44 :           && REGNO_REG_SET_P (setjmp_crosses, regno));
    4266              : }
    4267              : 
    4268              : /* Walk the tree of blocks describing the binding levels within a
    4269              :    function and warn about variables the might be killed by setjmp or
    4270              :    vfork.  This is done after calling flow_analysis before register
    4271              :    allocation since that will clobber the pseudo-regs to hard
    4272              :    regs.  */
    4273              : 
    4274              : static void
    4275           78 : setjmp_vars_warning (bitmap setjmp_crosses, tree block)
    4276              : {
    4277           78 :   tree decl, sub;
    4278              : 
    4279          197 :   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    4280              :     {
    4281          119 :       if (VAR_P (decl)
    4282          119 :           && DECL_RTL_SET_P (decl)
    4283           25 :           && REG_P (DECL_RTL (decl))
    4284          137 :           && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
    4285            1 :         warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
    4286              :                  " %<longjmp%> or %<vfork%>", decl);
    4287              :     }
    4288              : 
    4289          134 :   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    4290           56 :     setjmp_vars_warning (setjmp_crosses, sub);
    4291           78 : }
    4292              : 
    4293              : /* Do the appropriate part of setjmp_vars_warning
    4294              :    but for arguments instead of local variables.  */
    4295              : 
    4296              : static void
    4297           22 : setjmp_args_warning (bitmap setjmp_crosses)
    4298              : {
    4299           22 :   tree decl;
    4300           22 :   for (decl = DECL_ARGUMENTS (current_function_decl);
    4301           48 :        decl; decl = DECL_CHAIN (decl))
    4302           26 :     if (DECL_RTL (decl) != 0
    4303           26 :         && REG_P (DECL_RTL (decl))
    4304           52 :         && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
    4305            0 :       warning (OPT_Wclobbered,
    4306              :                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
    4307              :                decl);
    4308           22 : }
    4309              : 
    4310              : /* Generate warning messages for variables live across setjmp.  */
    4311              : 
    4312              : void
    4313       134885 : generate_setjmp_warnings (void)
    4314              : {
    4315       134885 :   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
    4316              : 
    4317       134885 :   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
    4318       134885 :       || bitmap_empty_p (setjmp_crosses))
    4319              :     return;
    4320              : 
    4321           22 :   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
    4322           22 :   setjmp_args_warning (setjmp_crosses);
    4323              : }
    4324              : 
    4325              : 
    4326              : /* Reverse the order of elements in the fragment chain T of blocks,
    4327              :    and return the new head of the chain (old last element).
    4328              :    In addition to that clear BLOCK_SAME_RANGE flags when needed
    4329              :    and adjust BLOCK_SUPERCONTEXT from the super fragment to
    4330              :    its super fragment origin.  */
    4331              : 
    4332              : static tree
    4333      5511230 : block_fragments_nreverse (tree t)
    4334              : {
    4335      5511230 :   tree prev = 0, block, next, prev_super = 0;
    4336      5511230 :   tree super = BLOCK_SUPERCONTEXT (t);
    4337      5511230 :   if (BLOCK_FRAGMENT_ORIGIN (super))
    4338      4552379 :     super = BLOCK_FRAGMENT_ORIGIN (super);
    4339     16413964 :   for (block = t; block; block = next)
    4340              :     {
    4341     10902734 :       next = BLOCK_FRAGMENT_CHAIN (block);
    4342     10902734 :       BLOCK_FRAGMENT_CHAIN (block) = prev;
    4343      5391504 :       if ((prev && !BLOCK_SAME_RANGE (prev))
    4344     13981128 :           || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
    4345              :               != prev_super))
    4346      3196446 :         BLOCK_SAME_RANGE (block) = 0;
    4347     10902734 :       prev_super = BLOCK_SUPERCONTEXT (block);
    4348     10902734 :       BLOCK_SUPERCONTEXT (block) = super;
    4349     10902734 :       prev = block;
    4350              :     }
    4351      5511230 :   t = BLOCK_FRAGMENT_ORIGIN (t);
    4352      5511230 :   if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
    4353              :       != prev_super)
    4354      1792380 :     BLOCK_SAME_RANGE (t) = 0;
    4355      5511230 :   BLOCK_SUPERCONTEXT (t) = super;
    4356      5511230 :   return prev;
    4357              : }
    4358              : 
    4359              : /* Reverse the order of elements in the chain T of blocks,
    4360              :    and return the new head of the chain (old last element).
    4361              :    Also do the same on subblocks and reverse the order of elements
    4362              :    in BLOCK_FRAGMENT_CHAIN as well.  */
    4363              : 
    4364              : static tree
    4365     23380919 : blocks_nreverse_all (tree t)
    4366              : {
    4367     23380919 :   tree prev = 0, block, next;
    4368     46189018 :   for (block = t; block; block = next)
    4369              :     {
    4370     22808099 :       next = BLOCK_CHAIN (block);
    4371     22808099 :       BLOCK_CHAIN (block) = prev;
    4372     22808099 :       if (BLOCK_FRAGMENT_CHAIN (block)
    4373     22808099 :           && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
    4374              :         {
    4375     11022460 :           BLOCK_FRAGMENT_CHAIN (block)
    4376      5511230 :             = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
    4377      5511230 :           if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
    4378      2647103 :             BLOCK_SAME_RANGE (block) = 0;
    4379              :         }
    4380     22808099 :       BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
    4381     22808099 :       prev = block;
    4382              :     }
    4383     23380919 :   return prev;
    4384              : }
    4385              : 
    4386              : 
    4387              : /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
    4388              :    and create duplicate blocks.  */
    4389              : /* ??? Need an option to either create block fragments or to create
    4390              :    abstract origin duplicates of a source block.  It really depends
    4391              :    on what optimization has been performed.  */
    4392              : 
    4393              : void
    4394       572820 : reorder_blocks (void)
    4395              : {
    4396       572820 :   tree block = DECL_INITIAL (current_function_decl);
    4397              : 
    4398       572820 :   if (block == NULL_TREE)
    4399            0 :     return;
    4400              : 
    4401       572820 :   auto_vec<tree, 10> block_stack;
    4402              : 
    4403              :   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
    4404       572820 :   clear_block_marks (block);
    4405              : 
    4406              :   /* Prune the old trees away, so that they don't get in the way.  */
    4407       572820 :   BLOCK_SUBBLOCKS (block) = NULL_TREE;
    4408       572820 :   BLOCK_CHAIN (block) = NULL_TREE;
    4409              : 
    4410              :   /* Recreate the block tree from the note nesting.  */
    4411       572820 :   reorder_blocks_1 (get_insns (), block, &block_stack);
    4412       572820 :   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
    4413       572820 : }
    4414              : 
    4415              : /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
    4416              : 
    4417              : void
    4418     24153286 : clear_block_marks (tree block)
    4419              : {
    4420     44859940 :   while (block)
    4421              :     {
    4422     20706654 :       TREE_ASM_WRITTEN (block) = 0;
    4423     20706654 :       clear_block_marks (BLOCK_SUBBLOCKS (block));
    4424     20706654 :       block = BLOCK_CHAIN (block);
    4425              :     }
    4426     24153286 : }
    4427              : 
    4428              : static void
    4429       572820 : reorder_blocks_1 (rtx_insn *insns, tree current_block,
    4430              :                   vec<tree> *p_block_stack)
    4431              : {
    4432       572820 :   rtx_insn *insn;
    4433       572820 :   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
    4434              : 
    4435    191027686 :   for (insn = insns; insn; insn = NEXT_INSN (insn))
    4436              :     {
    4437    190454866 :       if (NOTE_P (insn))
    4438              :         {
    4439    137405420 :           if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
    4440              :             {
    4441     22808099 :               tree block = NOTE_BLOCK (insn);
    4442     22808099 :               tree origin;
    4443              : 
    4444     22808099 :               gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
    4445     22808099 :               origin = block;
    4446              : 
    4447     22808099 :               if (prev_end)
    4448      1386705 :                 BLOCK_SAME_RANGE (prev_end) = 0;
    4449     22808099 :               prev_end = NULL_TREE;
    4450              : 
    4451              :               /* If we have seen this block before, that means it now
    4452              :                  spans multiple address regions.  Create a new fragment.  */
    4453     22808099 :               if (TREE_ASM_WRITTEN (block))
    4454              :                 {
    4455     10902734 :                   tree new_block = copy_node (block);
    4456              : 
    4457     10902734 :                   BLOCK_SAME_RANGE (new_block) = 0;
    4458     10902734 :                   BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
    4459     10902734 :                   BLOCK_FRAGMENT_CHAIN (new_block)
    4460     10902734 :                     = BLOCK_FRAGMENT_CHAIN (origin);
    4461     10902734 :                   BLOCK_FRAGMENT_CHAIN (origin) = new_block;
    4462              : 
    4463     10902734 :                   NOTE_BLOCK (insn) = new_block;
    4464     10902734 :                   block = new_block;
    4465              :                 }
    4466              : 
    4467     22808099 :               if (prev_beg == current_block && prev_beg)
    4468     14803561 :                 BLOCK_SAME_RANGE (block) = 1;
    4469              : 
    4470     22808099 :               prev_beg = origin;
    4471              : 
    4472     22808099 :               BLOCK_SUBBLOCKS (block) = 0;
    4473     22808099 :               TREE_ASM_WRITTEN (block) = 1;
    4474              :               /* When there's only one block for the entire function,
    4475              :                  current_block == block and we mustn't do this, it
    4476              :                  will cause infinite recursion.  */
    4477     22808099 :               if (block != current_block)
    4478              :                 {
    4479     22808099 :                   tree super;
    4480     22808099 :                   if (block != origin)
    4481     10902734 :                     gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
    4482              :                                 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
    4483              :                                                                       (origin))
    4484              :                                    == current_block);
    4485     22808099 :                   if (p_block_stack->is_empty ())
    4486              :                     super = current_block;
    4487              :                   else
    4488              :                     {
    4489     20484661 :                       super = p_block_stack->last ();
    4490     31584047 :                       gcc_assert (super == current_block
    4491              :                                   || BLOCK_FRAGMENT_ORIGIN (super)
    4492              :                                      == current_block);
    4493              :                     }
    4494     22808099 :                   BLOCK_SUPERCONTEXT (block) = super;
    4495     22808099 :                   BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
    4496     22808099 :                   BLOCK_SUBBLOCKS (current_block) = block;
    4497     22808099 :                   current_block = origin;
    4498              :                 }
    4499     22808099 :               p_block_stack->safe_push (block);
    4500              :             }
    4501    114597321 :           else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
    4502              :             {
    4503     22808099 :               NOTE_BLOCK (insn) = p_block_stack->pop ();
    4504     22808099 :               current_block = BLOCK_SUPERCONTEXT (current_block);
    4505     22808099 :               if (BLOCK_FRAGMENT_ORIGIN (current_block))
    4506      3768399 :                 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
    4507     22808099 :               prev_beg = NULL_TREE;
    4508     37611660 :               prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
    4509     22808099 :                          ? NOTE_BLOCK (insn) : NULL_TREE;
    4510              :             }
    4511              :         }
    4512              :       else
    4513              :         {
    4514     53049446 :           prev_beg = NULL_TREE;
    4515     53049446 :           if (prev_end)
    4516       749509 :             BLOCK_SAME_RANGE (prev_end) = 0;
    4517              :           prev_end = NULL_TREE;
    4518              :         }
    4519              :     }
    4520       572820 : }
    4521              : 
    4522              : /* Reverse the order of elements in the chain T of blocks,
    4523              :    and return the new head of the chain (old last element).  */
    4524              : 
    4525              : tree
    4526     30139745 : blocks_nreverse (tree t)
    4527              : {
    4528     30139745 :   tree prev = 0, block, next;
    4529     52901868 :   for (block = t; block; block = next)
    4530              :     {
    4531     22762123 :       next = BLOCK_CHAIN (block);
    4532     22762123 :       BLOCK_CHAIN (block) = prev;
    4533     22762123 :       prev = block;
    4534              :     }
    4535     30139745 :   return prev;
    4536              : }
    4537              : 
    4538              : /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
    4539              :    by modifying the last node in chain 1 to point to chain 2.  */
    4540              : 
    4541              : tree
    4542     90695339 : block_chainon (tree op1, tree op2)
    4543              : {
    4544     90695339 :   tree t1;
    4545              : 
    4546     90695339 :   if (!op1)
    4547              :     return op2;
    4548      4417149 :   if (!op2)
    4549              :     return op1;
    4550              : 
    4551     26101525 :   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    4552     21684376 :     continue;
    4553      4417149 :   BLOCK_CHAIN (t1) = op2;
    4554              : 
    4555              : #ifdef ENABLE_TREE_CHECKING
    4556      4417149 :   {
    4557      4417149 :     tree t2;
    4558      8850344 :     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
    4559      4433195 :       gcc_assert (t2 != t1);
    4560              :   }
    4561              : #endif
    4562              : 
    4563              :   return op1;
    4564     21684376 : }
    4565              : 
    4566              : /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
    4567              :    non-NULL, list them all into VECTOR, in a depth-first preorder
    4568              :    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
    4569              :    blocks.  */
    4570              : 
    4571              : static int
    4572    119745636 : all_blocks (tree block, tree *vector)
    4573              : {
    4574    119745636 :   int n_blocks = 0;
    4575              : 
    4576    235229980 :   while (block)
    4577              :     {
    4578    115484344 :       TREE_ASM_WRITTEN (block) = 0;
    4579              : 
    4580              :       /* Record this block.  */
    4581    115484344 :       if (vector)
    4582     57742172 :         vector[n_blocks] = block;
    4583              : 
    4584    115484344 :       ++n_blocks;
    4585              : 
    4586              :       /* Record the subblocks, and their subblocks...  */
    4587    173226516 :       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
    4588     57742172 :                               vector ? vector + n_blocks : 0);
    4589    115484344 :       block = BLOCK_CHAIN (block);
    4590              :     }
    4591              : 
    4592    119745636 :   return n_blocks;
    4593              : }
    4594              : 
    4595              : /* Return a vector containing all the blocks rooted at BLOCK.  The
    4596              :    number of elements in the vector is stored in N_BLOCKS_P.  The
    4597              :    vector is dynamically allocated; it is the caller's responsibility
    4598              :    to call `free' on the pointer returned.  */
    4599              : 
    4600              : static tree *
    4601      2130646 : get_block_vector (tree block, int *n_blocks_p)
    4602              : {
    4603      2130646 :   tree *block_vector;
    4604              : 
    4605      2130646 :   *n_blocks_p = all_blocks (block, NULL);
    4606      2130646 :   block_vector = XNEWVEC (tree, *n_blocks_p);
    4607      2130646 :   all_blocks (block, block_vector);
    4608              : 
    4609      2130646 :   return block_vector;
    4610              : }
    4611              : 
    4612              : static GTY(()) int next_block_index = 2;
    4613              : 
    4614              : /* Set BLOCK_NUMBER for all the blocks in FN.  */
    4615              : 
    4616              : void
    4617      2130646 : number_blocks (tree fn)
    4618              : {
    4619      2130646 :   int i;
    4620      2130646 :   int n_blocks;
    4621      2130646 :   tree *block_vector;
    4622              : 
    4623      2130646 :   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
    4624              : 
    4625              :   /* The top-level BLOCK isn't numbered at all.  */
    4626     59872818 :   for (i = 1; i < n_blocks; ++i)
    4627              :     /* We number the blocks from two.  */
    4628     55611526 :     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
    4629              : 
    4630      2130646 :   free (block_vector);
    4631              : 
    4632      2130646 :   return;
    4633              : }
    4634              : 
    4635              : /* If VAR is present in a subblock of BLOCK, return the subblock.  */
    4636              : 
    4637              : DEBUG_FUNCTION tree
    4638            0 : debug_find_var_in_block_tree (tree var, tree block)
    4639              : {
    4640            0 :   tree t;
    4641              : 
    4642            0 :   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    4643            0 :     if (t == var)
    4644              :       return block;
    4645              : 
    4646            0 :   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    4647              :     {
    4648            0 :       tree ret = debug_find_var_in_block_tree (var, t);
    4649            0 :       if (ret)
    4650              :         return ret;
    4651              :     }
    4652              : 
    4653              :   return NULL_TREE;
    4654              : }
    4655              : 
    4656              : /* Keep track of whether we're in a dummy function context.  If we are,
    4657              :    we don't want to invoke the set_current_function hook, because we'll
    4658              :    get into trouble if the hook calls target_reinit () recursively or
    4659              :    when the initial initialization is not yet complete.  */
    4660              : 
    4661              : static bool in_dummy_function;
    4662              : 
    4663              : /* Invoke the target hook when setting cfun.  Update the optimization options
    4664              :    if the function uses different options than the default.  */
    4665              : 
    4666              : static void
    4667    847871743 : invoke_set_current_function_hook (tree fndecl)
    4668              : {
    4669    847871743 :   if (!in_dummy_function)
    4670              :     {
    4671    847440303 :       tree opts = ((fndecl)
    4672    847440303 :                    ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    4673    847440303 :                    : optimization_default_node);
    4674              : 
    4675    847440303 :       if (!opts)
    4676    394781596 :         opts = optimization_default_node;
    4677              : 
    4678              :       /* Change optimization options if needed.  */
    4679    847440303 :       if (optimization_current_node != opts)
    4680              :         {
    4681      3233650 :           optimization_current_node = opts;
    4682      3233650 :           cl_optimization_restore (&global_options, &global_options_set,
    4683      3233650 :                                    TREE_OPTIMIZATION (opts));
    4684              :         }
    4685              : 
    4686    847440303 :       targetm.set_current_function (fndecl);
    4687    847440303 :       this_fn_optabs = this_target_optabs;
    4688              : 
    4689              :       /* Initialize global alignment variables after op.  */
    4690    847440303 :       parse_alignment_opts ();
    4691              : 
    4692    847440303 :       if (opts != optimization_default_node)
    4693              :         {
    4694      1651806 :           init_tree_optimization_optabs (opts);
    4695      1651806 :           if (TREE_OPTIMIZATION_OPTABS (opts))
    4696       111796 :             this_fn_optabs = (struct target_optabs *)
    4697       111796 :               TREE_OPTIMIZATION_OPTABS (opts);
    4698              :         }
    4699              :     }
    4700    847871743 : }
    4701              : 
    4702              : /* Set cfun to NEW_CFUN and switch to the optimization and target options
    4703              :    associated with NEW_FNDECL.
    4704              : 
    4705              :    FORCE says whether we should do the switch even if NEW_CFUN is the current
    4706              :    function, e.g. because there has been a change in optimization or target
    4707              :    options.  */
    4708              : 
    4709              : static void
    4710   1888285301 : set_function_decl (function *new_cfun, tree new_fndecl, bool force)
    4711              : {
    4712   1888285301 :   if (cfun != new_cfun || force)
    4713              :     {
    4714    644785571 :       cfun = new_cfun;
    4715    644785571 :       invoke_set_current_function_hook (new_fndecl);
    4716    644785571 :       redirect_edge_var_map_empty ();
    4717              :     }
    4718   1888285301 : }
    4719              : 
    4720              : /* cfun should never be set directly; use this function.  */
    4721              : 
    4722              : void
    4723   1167106668 : set_cfun (struct function *new_cfun, bool force)
    4724              : {
    4725   1167106668 :   set_function_decl (new_cfun, new_cfun ? new_cfun->decl : NULL_TREE, force);
    4726   1167106668 : }
    4727              : 
    4728              : /* Initialized with NOGC, making this poisonous to the garbage collector.  */
    4729              : 
    4730              : static vec<function *> cfun_stack;
    4731              : 
    4732              : /* Push the current cfun onto the stack, then switch to function NEW_CFUN
    4733              :    and FUNCTION_DECL NEW_FNDECL.  FORCE is as for set_function_decl.  */
    4734              : 
    4735              : static void
    4736    721178633 : push_function_decl (function *new_cfun, tree new_fndecl, bool force)
    4737              : {
    4738    721178633 :   gcc_assert ((!cfun && !current_function_decl)
    4739              :               || (cfun && current_function_decl == cfun->decl));
    4740    721178633 :   cfun_stack.safe_push (cfun);
    4741    721178633 :   current_function_decl = new_fndecl;
    4742    721178633 :   set_function_decl (new_cfun, new_fndecl, force);
    4743    721178633 : }
    4744              : 
    4745              : /* Push the current cfun onto the stack and switch to function declaration
    4746              :    NEW_FNDECL, which might or might not have a function body.  FORCE is as for
    4747              :    set_function_decl.  */
    4748              : 
    4749              : void
    4750            0 : push_function_decl (tree new_fndecl, bool force)
    4751              : {
    4752            0 :   force |= current_function_decl != new_fndecl;
    4753            0 :   push_function_decl (DECL_STRUCT_FUNCTION (new_fndecl), new_fndecl, force);
    4754            0 : }
    4755              : 
    4756              : /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
    4757              :    current_function_decl accordingly.  */
    4758              : 
    4759              : void
    4760    721178633 : push_cfun (struct function *new_cfun)
    4761              : {
    4762    721178633 :   push_function_decl (new_cfun, new_cfun ? new_cfun->decl : NULL_TREE, false);
    4763    721178633 : }
    4764              : 
    4765              : /* A common subroutine for pop_cfun and pop_function_decl.  FORCE is as
    4766              :    for set_function_decl.  */
    4767              : 
    4768              : static void
    4769    722031704 : pop_cfun_1 (bool force)
    4770              : {
    4771    722031704 :   struct function *new_cfun = cfun_stack.pop ();
    4772              :   /* When in_dummy_function, we do have a cfun but current_function_decl is
    4773              :      NULL.  We also allow pushing NULL cfun and subsequently changing
    4774              :      current_function_decl to something else and have both restored by
    4775              :      pop_cfun.  */
    4776    722031704 :   gcc_checking_assert (in_dummy_function
    4777              :                        || !cfun
    4778              :                        || current_function_decl == cfun->decl);
    4779    722031704 :   set_cfun (new_cfun, force);
    4780    722031704 :   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
    4781    722031704 : }
    4782              : 
    4783              : /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
    4784              : 
    4785              : void
    4786    722031704 : pop_cfun (void)
    4787              : {
    4788    722031704 :   pop_cfun_1 (false);
    4789    722031704 : }
    4790              : 
    4791              : /* Undo push_function_decl.  */
    4792              : 
    4793              : void
    4794            0 : pop_function_decl (void)
    4795              : {
    4796              :   /* If the previous cfun was null, the options should be reset to the
    4797              :      global set.  Checking the current cfun against the new (popped) cfun
    4798              :      wouldn't catch this if the current function decl has no function
    4799              :      struct.  */
    4800            0 :   pop_cfun_1 (!cfun_stack.last ());
    4801            0 : }
    4802              : 
    4803              : /* Return value of funcdef and increase it.  */
    4804              : int
    4805    202870437 : get_next_funcdef_no (void)
    4806              : {
    4807    202870437 :   return funcdef_no++;
    4808              : }
    4809              : 
    4810              : /* Return value of funcdef.  */
    4811              : int
    4812            0 : get_last_funcdef_no (void)
    4813              : {
    4814            0 :   return funcdef_no;
    4815              : }
    4816              : 
    4817              : /* Allocate and initialize the stack usage info data structure for the
    4818              :    current function.  */
    4819              : static void
    4820          690 : allocate_stack_usage_info (void)
    4821              : {
    4822          690 :   gcc_assert (!cfun->su);
    4823          690 :   cfun->su = ggc_cleared_alloc<stack_usage> ();
    4824          690 :   cfun->su->static_stack_size = -1;
    4825          690 : }
    4826              : 
    4827              : /* Allocate a function structure for FNDECL and set its contents
    4828              :    to the defaults.  Set cfun to the newly-allocated object.
    4829              :    Some of the helper functions invoked during initialization assume
    4830              :    that cfun has already been set.  Therefore, assign the new object
    4831              :    directly into cfun and invoke the back end hook explicitly at the
    4832              :    very end, rather than initializing a temporary and calling set_cfun
    4833              :    on it.
    4834              : 
    4835              :    ABSTRACT_P is true if this is a function that will never be seen by
    4836              :    the middle-end.  Such functions are front-end concepts (like C++
    4837              :    function templates) that do not correspond directly to functions
    4838              :    placed in object files.  */
    4839              : 
    4840              : void
    4841    203086172 : allocate_struct_function (tree fndecl, bool abstract_p)
    4842              : {
    4843    203086172 :   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
    4844              : 
    4845    203086172 :   cfun = ggc_cleared_alloc<function> ();
    4846              : 
    4847    203086172 :   init_eh_for_function ();
    4848              : 
    4849    203086172 :   if (init_machine_status)
    4850    203086172 :     cfun->machine = (*init_machine_status) ();
    4851              : 
    4852              : #ifdef OVERRIDE_ABI_FORMAT
    4853    203086172 :   OVERRIDE_ABI_FORMAT (fndecl);
    4854              : #endif
    4855              : 
    4856    203086172 :   if (fndecl != NULL_TREE)
    4857              :     {
    4858    202870437 :       DECL_STRUCT_FUNCTION (fndecl) = cfun;
    4859    202870437 :       cfun->decl = fndecl;
    4860    202870437 :       current_function_funcdef_no = get_next_funcdef_no ();
    4861              :     }
    4862              : 
    4863    203086172 :   invoke_set_current_function_hook (fndecl);
    4864              : 
    4865    203086172 :   if (fndecl != NULL_TREE)
    4866              :     {
    4867    202870437 :       tree result = DECL_RESULT (fndecl);
    4868              : 
    4869    202870437 :       if (!abstract_p)
    4870              :         {
    4871              :           /* Now that we have activated any function-specific attributes
    4872              :              that might affect layout, particularly vector modes, relayout
    4873              :              each of the parameters and the result.  */
    4874    110603706 :           relayout_decl (result);
    4875    332953810 :           for (tree parm = DECL_ARGUMENTS (fndecl); parm;
    4876    222350104 :                parm = DECL_CHAIN (parm))
    4877    222350104 :             relayout_decl (parm);
    4878              : 
    4879              :           /* Similarly relayout the function decl.  */
    4880    110603706 :           targetm.target_option.relayout_function (fndecl);
    4881              :         }
    4882              : 
    4883    110603706 :       if (!abstract_p && aggregate_value_p (result, fndecl))
    4884              :         {
    4885              : #ifdef PCC_STATIC_STRUCT_RETURN
    4886              :           cfun->returns_pcc_struct = 1;
    4887              : #endif
    4888      4274418 :           cfun->returns_struct = 1;
    4889              :         }
    4890              : 
    4891    202870437 :       cfun->stdarg = stdarg_p (fntype);
    4892              : 
    4893              :       /* Assume all registers in stdarg functions need to be saved.  */
    4894    202870437 :       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
    4895    202870437 :       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    4896              : 
    4897              :       /* ??? This could be set on a per-function basis by the front-end
    4898              :          but is this worth the hassle?  */
    4899    202870437 :       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    4900    202870437 :       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
    4901              : 
    4902    202870437 :       if (!profile_flag && !flag_instrument_function_entry_exit)
    4903    202869965 :         DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    4904              : 
    4905    202870437 :       if (flag_callgraph_info)
    4906            1 :         allocate_stack_usage_info ();
    4907              :     }
    4908              : 
    4909              :   /* Don't enable begin stmt markers if var-tracking at assignments is
    4910              :      disabled.  The markers make little sense without the variable
    4911              :      binding annotations among them.  */
    4912    406172344 :   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
    4913    203086172 :     && MAY_HAVE_DEBUG_MARKER_STMTS;
    4914    203086172 : }
    4915              : 
    4916              : /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
    4917              :    instead of just setting it.  */
    4918              : 
    4919              : void
    4920       853096 : push_struct_function (tree fndecl, bool abstract_p)
    4921              : {
    4922              :   /* When in_dummy_function we might be in the middle of a pop_cfun and
    4923              :      current_function_decl and cfun may not match.  */
    4924       853096 :   gcc_assert (in_dummy_function
    4925              :               || (!cfun && !current_function_decl)
    4926              :               || (cfun && current_function_decl == cfun->decl));
    4927       853096 :   cfun_stack.safe_push (cfun);
    4928       853096 :   current_function_decl = fndecl;
    4929       853096 :   allocate_struct_function (fndecl, abstract_p);
    4930       853096 : }
    4931              : 
    4932              : /* Reset crtl and other non-struct-function variables to defaults as
    4933              :    appropriate for emitting rtl at the start of a function.  */
    4934              : 
    4935              : static void
    4936      1702963 : prepare_function_start (void)
    4937              : {
    4938      1702963 :   gcc_assert (!get_last_insn ());
    4939              : 
    4940      1702963 :   if (in_dummy_function)
    4941       215715 :     crtl->abi = &default_function_abi;
    4942              :   else
    4943      1487248 :     crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
    4944              : 
    4945      1702963 :   init_temp_slots ();
    4946      1702963 :   init_emit ();
    4947      1702963 :   init_varasm_status ();
    4948      1702963 :   init_expr ();
    4949      1702963 :   default_rtl_profile ();
    4950              : 
    4951      1702963 :   if (flag_stack_usage_info && !flag_callgraph_info)
    4952          689 :     allocate_stack_usage_info ();
    4953              : 
    4954      1702963 :   cse_not_expected = ! optimize;
    4955              : 
    4956              :   /* Caller save not needed yet.  */
    4957      1702963 :   caller_save_needed = 0;
    4958              : 
    4959              :   /* We haven't done register allocation yet.  */
    4960      1702963 :   reg_renumber = 0;
    4961              : 
    4962              :   /* Indicate that we have not instantiated virtual registers yet.  */
    4963      1702963 :   virtuals_instantiated = 0;
    4964              : 
    4965              :   /* Indicate that we want CONCATs now.  */
    4966      1702963 :   generating_concat_p = 1;
    4967              : 
    4968              :   /* Indicate we have no need of a frame pointer yet.  */
    4969      1702963 :   frame_pointer_needed = 0;
    4970              : 
    4971              :   /* Reset the cache of the "extended" flag in the target's
    4972              :      _BitInt info struct.  */
    4973      1702963 :   bitint_extended = -1;
    4974      1702963 : }
    4975              : 
    4976              : void
    4977       215720 : push_dummy_function (bool with_decl)
    4978              : {
    4979       215720 :   tree fn_decl, fn_type, fn_result_decl;
    4980              : 
    4981       215720 :   gcc_assert (!in_dummy_function);
    4982       215720 :   in_dummy_function = true;
    4983              : 
    4984       215720 :   if (with_decl)
    4985              :     {
    4986            5 :       fn_type = build_function_type_list (void_type_node, NULL_TREE);
    4987            5 :       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
    4988              :                             fn_type);
    4989            5 :       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
    4990              :                                          NULL_TREE, void_type_node);
    4991            5 :       DECL_RESULT (fn_decl) = fn_result_decl;
    4992            5 :       DECL_ARTIFICIAL (fn_decl) = 1;
    4993            5 :       tree fn_name = get_identifier (" ");
    4994            5 :       SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
    4995              :     }
    4996              :   else
    4997              :     fn_decl = NULL_TREE;
    4998              : 
    4999       215720 :   push_struct_function (fn_decl);
    5000       215720 : }
    5001              : 
    5002              : /* Initialize the rtl expansion mechanism so that we can do simple things
    5003              :    like generate sequences.  This is used to provide a context during global
    5004              :    initialization of some passes.  You must call expand_dummy_function_end
    5005              :    to exit this context.  */
    5006              : 
    5007              : void
    5008       215715 : init_dummy_function_start (void)
    5009              : {
    5010       215715 :   push_dummy_function (false);
    5011       215715 :   prepare_function_start ();
    5012       215715 : }
    5013              : 
    5014              : /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
    5015              :    and initialize static variables for generating RTL for the statements
    5016              :    of the function.  */
    5017              : 
    5018              : void
    5019      1487248 : init_function_start (tree subr)
    5020              : {
    5021              :   /* Initialize backend, if needed.  */
    5022      1487248 :   initialize_rtl ();
    5023              : 
    5024      1487248 :   prepare_function_start ();
    5025      1487248 :   decide_function_section (subr);
    5026              : 
    5027              :   /* Warn if this value is an aggregate type,
    5028              :      regardless of which calling convention we are using for it.  */
    5029      1487248 :   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    5030       107373 :     warning_at (DECL_SOURCE_LOCATION (DECL_RESULT (subr)),
    5031       107373 :                 OPT_Waggregate_return, "function returns an aggregate");
    5032      1487248 : }
    5033              : 
    5034              : /* Expand code to verify the stack_protect_guard.  This is invoked at
    5035              :    the end of a function to be protected.  */
    5036              : 
    5037              : void
    5038          283 : stack_protect_epilogue (void)
    5039              : {
    5040          283 :   tree guard_decl = crtl->stack_protect_guard_decl;
    5041          283 :   rtx_code_label *label = gen_label_rtx ();
    5042          283 :   rtx x, y;
    5043          283 :   rtx_insn *seq = NULL;
    5044              : 
    5045          283 :   x = expand_normal (crtl->stack_protect_guard);
    5046              : 
    5047          283 :   if (targetm.have_stack_protect_combined_test () && guard_decl)
    5048              :     {
    5049            0 :       gcc_assert (DECL_P (guard_decl));
    5050            0 :       y = DECL_RTL (guard_decl);
    5051              :       /* Allow the target to compute address of Y and compare it with X without
    5052              :          leaking Y into a register.  This combined address + compare pattern
    5053              :          allows the target to prevent spilling of any intermediate results by
    5054              :          splitting it after register allocator.  */
    5055            0 :       seq = targetm.gen_stack_protect_combined_test (x, y, label);
    5056              :     }
    5057              :   else
    5058              :     {
    5059          283 :       if (guard_decl)
    5060          283 :         y = expand_normal (guard_decl);
    5061              :       else
    5062            0 :         y = const0_rtx;
    5063              : 
    5064              :       /* Allow the target to compare Y with X without leaking either into
    5065              :          a register.  */
    5066          283 :       if (targetm.have_stack_protect_test ())
    5067          283 :         seq = targetm.gen_stack_protect_test (x, y, label);
    5068              :     }
    5069              : 
    5070          283 :   if (seq)
    5071          283 :     emit_insn (seq);
    5072              :   else
    5073            0 :     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
    5074              : 
    5075              :   /* The noreturn predictor has been moved to the tree level.  The rtl-level
    5076              :      predictors estimate this branch about 20%, which isn't enough to get
    5077              :      things moved out of line.  Since this is the only extant case of adding
    5078              :      a noreturn function at the rtl level, it doesn't seem worth doing ought
    5079              :      except adding the prediction by hand.  */
    5080          283 :   rtx_insn *tmp = get_last_insn ();
    5081          283 :   if (JUMP_P (tmp))
    5082          283 :     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
    5083              : 
    5084          283 :   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
    5085          283 :   free_temp_slots ();
    5086          283 :   emit_label (label);
    5087          283 : }
    5088              : 
    5089              : /* Start the RTL for a new function, and set variables used for
    5090              :    emitting RTL.
    5091              :    SUBR is the FUNCTION_DECL node.
    5092              :    PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
    5093              :    the function's parameters, which must be run at any return statement.  */
    5094              : 
    5095              : bool currently_expanding_function_start;
    5096              : void
    5097      1481726 : expand_function_start (tree subr)
    5098              : {
    5099      1481726 :   currently_expanding_function_start = true;
    5100              : 
    5101              :   /* Make sure volatile mem refs aren't considered
    5102              :      valid operands of arithmetic insns.  */
    5103      1481726 :   init_recog_no_volatile ();
    5104              : 
    5105      1481726 :   crtl->profile
    5106      2963452 :     = (profile_flag
    5107      1482073 :        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
    5108              : 
    5109      1481726 :   crtl->limit_stack
    5110      1481726 :     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
    5111              : 
    5112              :   /* Make the label for return statements to jump to.  Do not special
    5113              :      case machines with special return instructions -- they will be
    5114              :      handled later during jump, ifcvt, or epilogue creation.  */
    5115      1481726 :   return_label = gen_label_rtx ();
    5116              : 
    5117              :   /* Initialize rtx used to return the value.  */
    5118              :   /* Do this before assign_parms so that we copy the struct value address
    5119              :      before any library calls that assign parms might generate.  */
    5120              : 
    5121              :   /* Decide whether to return the value in memory or in a register.  */
    5122      1481726 :   tree res = DECL_RESULT (subr);
    5123      1481726 :   if (aggregate_value_p (res, subr))
    5124              :     {
    5125              :       /* Returning something that won't go in a register.  */
    5126        69187 :       rtx value_address = 0;
    5127              : 
    5128              : #ifdef PCC_STATIC_STRUCT_RETURN
    5129              :       if (cfun->returns_pcc_struct)
    5130              :         {
    5131              :           int size = int_size_in_bytes (TREE_TYPE (res));
    5132              :           value_address = assemble_static_space (size);
    5133              :         }
    5134              :       else
    5135              : #endif
    5136        69187 :         {
    5137        69187 :           rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
    5138              :           /* Expect to be passed the address of a place to store the value.
    5139              :              If it is passed as an argument, assign_parms will take care of
    5140              :              it.  */
    5141        69187 :           if (sv)
    5142              :             {
    5143            0 :               value_address = gen_reg_rtx (Pmode);
    5144            0 :               emit_move_insn (value_address, sv);
    5145              :             }
    5146              :         }
    5147            0 :       if (value_address)
    5148              :         {
    5149            0 :           rtx x = value_address;
    5150            0 :           if (!DECL_BY_REFERENCE (res))
    5151              :             {
    5152            0 :               x = gen_rtx_MEM (DECL_MODE (res), x);
    5153            0 :               set_mem_attributes (x, res, 1);
    5154              :             }
    5155            0 :           set_parm_rtl (res, x);
    5156              :         }
    5157              :     }
    5158      1412539 :   else if (DECL_MODE (res) == VOIDmode)
    5159              :     /* If return mode is void, this decl rtl should not be used.  */
    5160       696510 :     set_parm_rtl (res, NULL_RTX);
    5161              :   else
    5162              :     {
    5163              :       /* Compute the return values into a pseudo reg, which we will copy
    5164              :          into the true return register after the cleanups are done.  */
    5165       716029 :       tree return_type = TREE_TYPE (res);
    5166              : 
    5167              :       /* If we may coalesce this result, make sure it has the expected mode
    5168              :          in case it was promoted.  But we need not bother about BLKmode.  */
    5169       716029 :       machine_mode promoted_mode
    5170       531428 :         = flag_tree_coalesce_vars && is_gimple_reg (res)
    5171      1210088 :           ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
    5172              :           : BLKmode;
    5173              : 
    5174       494059 :       if (promoted_mode != BLKmode)
    5175       494058 :         set_parm_rtl (res, gen_reg_rtx (promoted_mode));
    5176       221971 :       else if (TYPE_MODE (return_type) != BLKmode
    5177       221971 :                && targetm.calls.return_in_msb (return_type))
    5178              :         /* expand_function_end will insert the appropriate padding in
    5179              :            this case.  Use the return value's natural (unpadded) mode
    5180              :            within the function proper.  */
    5181            0 :         set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
    5182              :       else
    5183              :         {
    5184              :           /* In order to figure out what mode to use for the pseudo, we
    5185              :              figure out what the mode of the eventual return register will
    5186              :              actually be, and use that.  */
    5187       221971 :           rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
    5188              : 
    5189              :           /* Structures that are returned in registers are not
    5190              :              aggregate_value_p, so we may see a PARALLEL or a REG.  */
    5191       221971 :           if (REG_P (hard_reg))
    5192       218957 :             set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
    5193              :           else
    5194              :             {
    5195         3014 :               gcc_assert (GET_CODE (hard_reg) == PARALLEL);
    5196         3014 :               set_parm_rtl (res, gen_group_rtx (hard_reg));
    5197              :             }
    5198              :         }
    5199              : 
    5200              :       /* Set DECL_REGISTER flag so that expand_function_end will copy the
    5201              :          result to the real return register(s).  */
    5202       716029 :       DECL_REGISTER (res) = 1;
    5203              :     }
    5204              : 
    5205              :   /* Initialize rtx for parameters and local variables.
    5206              :      In some cases this requires emitting insns.  */
    5207      1481726 :   assign_parms (subr);
    5208              : 
    5209              :   /* If function gets a static chain arg, store it.  */
    5210      1481726 :   if (cfun->static_chain_decl)
    5211              :     {
    5212        19322 :       tree parm = cfun->static_chain_decl;
    5213        19322 :       rtx local, chain;
    5214        19322 :       rtx_insn *insn;
    5215        19322 :       int unsignedp;
    5216              : 
    5217        19322 :       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
    5218        19322 :       chain = targetm.calls.static_chain (current_function_decl, true);
    5219              : 
    5220        19322 :       set_decl_incoming_rtl (parm, chain, false);
    5221        19322 :       set_parm_rtl (parm, local);
    5222        19322 :       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
    5223              : 
    5224        19322 :       if (GET_MODE (local) != GET_MODE (chain))
    5225              :         {
    5226            1 :           convert_move (local, chain, unsignedp);
    5227            1 :           insn = get_last_insn ();
    5228              :         }
    5229              :       else
    5230        19321 :         insn = emit_move_insn (local, chain);
    5231              : 
    5232              :       /* Mark the register as eliminable, similar to parameters.  */
    5233        19322 :       if (MEM_P (chain)
    5234        19322 :           && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
    5235            0 :         set_dst_reg_note (insn, REG_EQUIV, chain, local);
    5236              : 
    5237              :       /* If we aren't optimizing, save the static chain onto the stack.  */
    5238        19322 :       if (!optimize)
    5239              :         {
    5240         3913 :           tree saved_static_chain_decl
    5241         3913 :             = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
    5242         3913 :                           DECL_NAME (parm), TREE_TYPE (parm));
    5243         3913 :           rtx saved_static_chain_rtx
    5244         7826 :             = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
    5245         3913 :           SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
    5246         3913 :           emit_move_insn (saved_static_chain_rtx, chain);
    5247         3913 :           SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
    5248         3913 :           DECL_HAS_VALUE_EXPR_P (parm) = 1;
    5249              :         }
    5250              :     }
    5251              : 
    5252              :   /* The following was moved from init_function_start.
    5253              :      The move was supposed to make sdb output more accurate.  */
    5254              :   /* Indicate the beginning of the function body,
    5255              :      as opposed to parm setup.  */
    5256      1481726 :   emit_note (NOTE_INSN_FUNCTION_BEG);
    5257              : 
    5258      1481726 :   gcc_assert (NOTE_P (get_last_insn ()));
    5259              : 
    5260      1481726 :   function_beg_insn = parm_birth_insn = get_last_insn ();
    5261              : 
    5262              :   /* If the function receives a non-local goto, then store the
    5263              :      bits we need to restore the frame pointer.  */
    5264      1481726 :   if (cfun->nonlocal_goto_save_area)
    5265              :     {
    5266          393 :       tree t_save;
    5267          393 :       rtx r_save;
    5268              : 
    5269          393 :       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
    5270          393 :       gcc_assert (DECL_RTL_SET_P (var));
    5271              : 
    5272          393 :       t_save = build4 (ARRAY_REF,
    5273          393 :                        TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
    5274              :                        cfun->nonlocal_goto_save_area,
    5275              :                        integer_zero_node, NULL_TREE, NULL_TREE);
    5276          393 :       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
    5277          393 :       gcc_assert (GET_MODE (r_save) == Pmode);
    5278              : 
    5279          393 :       emit_move_insn (r_save, hard_frame_pointer_rtx);
    5280          393 :       update_nonlocal_goto_save_area ();
    5281              :     }
    5282              : 
    5283      1481726 :   if (crtl->profile)
    5284              :     {
    5285              : #ifdef PROFILE_HOOK
    5286              :       PROFILE_HOOK (current_function_funcdef_no);
    5287              : #endif
    5288              :     }
    5289              : 
    5290              :   /* If we are doing generic stack checking, the probe should go here.  */
    5291      1481726 :   if (flag_stack_check == GENERIC_STACK_CHECK)
    5292           47 :     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
    5293              : 
    5294      1481726 :   currently_expanding_function_start = false;
    5295      1481726 : }
    5296              : 
    5297              : void
    5298       215720 : pop_dummy_function (void)
    5299              : {
    5300       215720 :   pop_cfun ();
    5301       215720 :   in_dummy_function = false;
    5302       215720 : }
    5303              : 
    5304              : /* Undo the effects of init_dummy_function_start.  */
    5305              : void
    5306       215715 : expand_dummy_function_end (void)
    5307              : {
    5308       215715 :   gcc_assert (in_dummy_function);
    5309              : 
    5310              :   /* End any sequences that failed to be closed due to syntax errors.  */
    5311       215715 :   while (in_sequence_p ())
    5312            0 :     end_sequence ();
    5313              : 
    5314              :   /* Outside function body, can't compute type's actual size
    5315              :      until next function's body starts.  */
    5316              : 
    5317       215715 :   free_after_parsing (cfun);
    5318       215715 :   free_after_compilation (cfun);
    5319       215715 :   pop_dummy_function ();
    5320       215715 : }
    5321              : 
    5322              : /* Helper for diddle_return_value.  */
    5323              : 
    5324              : void
    5325     20182243 : diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
    5326              : {
    5327     20182243 :   if (! outgoing)
    5328              :     return;
    5329              : 
    5330     10496651 :   if (REG_P (outgoing))
    5331     10441632 :     (*doit) (outgoing, arg);
    5332        55019 :   else if (GET_CODE (outgoing) == PARALLEL)
    5333              :     {
    5334              :       int i;
    5335              : 
    5336       129403 :       for (i = 0; i < XVECLEN (outgoing, 0); i++)
    5337              :         {
    5338        76763 :           rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
    5339              : 
    5340        76763 :           if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    5341        76763 :             (*doit) (x, arg);
    5342              :         }
    5343              :     }
    5344              : }
    5345              : 
    5346              : /* Call DOIT for each hard register used as a return value from
    5347              :    the current function.  */
    5348              : 
    5349              : void
    5350     20182243 : diddle_return_value (void (*doit) (rtx, void *), void *arg)
    5351              : {
    5352     20182243 :   diddle_return_value_1 (doit, arg, crtl->return_rtx);
    5353     20182243 : }
    5354              : 
    5355              : static void
    5356        12702 : do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
    5357              : {
    5358         5200 :   emit_clobber (reg);
    5359         7502 : }
    5360              : 
    5361              : void
    5362       631045 : clobber_return_register (void)
    5363              : {
    5364       631045 :   diddle_return_value (do_clobber_return_reg, NULL);
    5365              : 
    5366              :   /* In case we do use pseudo to return value, clobber it too.  */
    5367       631045 :   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    5368              :     {
    5369         7534 :       tree decl_result = DECL_RESULT (current_function_decl);
    5370         7534 :       rtx decl_rtl = DECL_RTL (decl_result);
    5371         7534 :       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
    5372              :         {
    5373         7502 :           do_clobber_return_reg (decl_rtl, NULL);
    5374              :         }
    5375              :     }
    5376       631045 : }
    5377              : 
    5378              : static void
    5379       782239 : do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
    5380              : {
    5381       782239 :   emit_use (reg);
    5382       782239 : }
    5383              : 
    5384              : static void
    5385      1481725 : use_return_register (void)
    5386              : {
    5387            0 :   diddle_return_value (do_use_return_reg, NULL);
    5388            0 : }
    5389              : 
    5390              : /* Generate RTL for the end of the current function.  */
    5391              : 
    5392              : void
    5393      1481725 : expand_function_end (void)
    5394              : {
    5395              :   /* If arg_pointer_save_area was referenced only from a nested
    5396              :      function, we will not have initialized it yet.  Do that now.  */
    5397      1481725 :   if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    5398            0 :     get_arg_pointer_save_area ();
    5399              : 
    5400              :   /* If we are doing generic stack checking and this function makes calls,
    5401              :      do a stack probe at the start of the function to ensure we have enough
    5402              :      space for another stack frame.  */
    5403      1481725 :   if (flag_stack_check == GENERIC_STACK_CHECK)
    5404              :     {
    5405           47 :       rtx_insn *insn, *seq;
    5406              : 
    5407          626 :       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    5408          611 :         if (CALL_P (insn))
    5409              :           {
    5410           32 :             rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
    5411           32 :             start_sequence ();
    5412           32 :             if (STACK_CHECK_MOVING_SP)
    5413           32 :               anti_adjust_stack_and_probe (max_frame_size, true);
    5414              :             else
    5415              :               probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
    5416           32 :             seq = end_sequence ();
    5417           32 :             set_insn_locations (seq, prologue_location);
    5418           32 :             emit_insn_before (seq, stack_check_probe_note);
    5419           32 :             break;
    5420              :           }
    5421              :     }
    5422              : 
    5423              :   /* End any sequences that failed to be closed due to syntax errors.  */
    5424      1481725 :   while (in_sequence_p ())
    5425            0 :     end_sequence ();
    5426              : 
    5427      1481725 :   clear_pending_stack_adjust ();
    5428      1481725 :   do_pending_stack_adjust ();
    5429              : 
    5430              :   /* Output a linenumber for the end of the function.
    5431              :      SDB depended on this.  */
    5432      1481725 :   set_curr_insn_location (input_location);
    5433              : 
    5434              :   /* Before the return label (if any), clobber the return
    5435              :      registers so that they are not propagated live to the rest of
    5436              :      the function.  This can only happen with functions that drop
    5437              :      through; if there had been a return statement, there would
    5438              :      have either been a return rtx, or a jump to the return label.
    5439              : 
    5440              :      We delay actual code generation after the current_function_value_rtx
    5441              :      is computed.  */
    5442      1481725 :   rtx_insn *clobber_after = get_last_insn ();
    5443              : 
    5444              :   /* Output the label for the actual return from the function.  */
    5445      1481725 :   emit_label (return_label);
    5446              : 
    5447      1481725 :   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    5448              :     {
    5449              :       /* Let except.cc know where it should emit the call to unregister
    5450              :          the function context for sjlj exceptions.  */
    5451            0 :       if (flag_exceptions)
    5452            0 :         sjlj_emit_function_exit_after (get_last_insn ());
    5453              :     }
    5454              : 
    5455              :   /* If this is an implementation of throw, do what's necessary to
    5456              :      communicate between __builtin_eh_return and the epilogue.  */
    5457      1481725 :   expand_eh_return ();
    5458              : 
    5459              :   /* If stack protection is enabled for this function, check the guard.  */
    5460      1481725 :   if (crtl->stack_protect_guard
    5461          251 :       && targetm.stack_protect_runtime_enabled_p ()
    5462      1481975 :       && naked_return_label == NULL_RTX)
    5463          250 :     stack_protect_epilogue ();
    5464              : 
    5465              :   /* If scalar return value was computed in a pseudo-reg, or was a named
    5466              :      return value that got dumped to the stack, copy that to the hard
    5467              :      return register.  */
    5468      1481725 :   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    5469              :     {
    5470       785215 :       tree decl_result = DECL_RESULT (current_function_decl);
    5471       785215 :       rtx decl_rtl = DECL_RTL (decl_result);
    5472              : 
    5473       785215 :       if ((REG_P (decl_rtl)
    5474       785215 :            ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
    5475        67194 :            : DECL_REGISTER (decl_result))
    5476              :           /* Unless the psABI says not to.  */
    5477       785215 :           && !TYPE_EMPTY_P (TREE_TYPE (decl_result)))
    5478              :         {
    5479       718190 :           rtx real_decl_rtl = crtl->return_rtx;
    5480       718190 :           complex_mode cmode;
    5481              : 
    5482              :           /* This should be set in assign_parms.  */
    5483       718190 :           gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
    5484              : 
    5485              :           /* If this is a BLKmode structure being returned in registers,
    5486              :              then use the mode computed in expand_return.  Note that if
    5487              :              decl_rtl is memory, then its mode may have been changed,
    5488              :              but that crtl->return_rtx has not.  */
    5489       718190 :           if (GET_MODE (real_decl_rtl) == BLKmode)
    5490         2683 :             PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
    5491              : 
    5492              :           /* If a non-BLKmode return value should be padded at the least
    5493              :              significant end of the register, shift it left by the appropriate
    5494              :              amount.  BLKmode results are handled using the group load/store
    5495              :              machinery.  */
    5496       718190 :           if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
    5497       715504 :               && REG_P (real_decl_rtl)
    5498      1431050 :               && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
    5499              :             {
    5500            0 :               emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
    5501              :                                            REGNO (real_decl_rtl)),
    5502              :                               decl_rtl);
    5503            0 :               shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
    5504              :             }
    5505       718190 :           else if (GET_CODE (real_decl_rtl) == PARALLEL)
    5506              :             {
    5507              :               /* If expand_function_start has created a PARALLEL for decl_rtl,
    5508              :                  move the result to the real return registers.  Otherwise, do
    5509              :                  a group load from decl_rtl for a named return.  */
    5510         4103 :               if (GET_CODE (decl_rtl) == PARALLEL)
    5511         3014 :                 emit_group_move (real_decl_rtl, decl_rtl);
    5512              :               else
    5513         1089 :                 emit_group_load (real_decl_rtl, decl_rtl,
    5514         1089 :                                  TREE_TYPE (decl_result),
    5515         1089 :                                  int_size_in_bytes (TREE_TYPE (decl_result)));
    5516              :             }
    5517              :           /* In the case of complex integer modes smaller than a word, we'll
    5518              :              need to generate some non-trivial bitfield insertions.  Do that
    5519              :              on a pseudo and not the hard register.  */
    5520       714087 :           else if (GET_CODE (decl_rtl) == CONCAT
    5521          687 :                    && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
    5522       714211 :                    && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
    5523              :             {
    5524           86 :               int old_generating_concat_p;
    5525           86 :               rtx tmp;
    5526              : 
    5527           86 :               old_generating_concat_p = generating_concat_p;
    5528           86 :               generating_concat_p = 0;
    5529           86 :               tmp = gen_reg_rtx (GET_MODE (decl_rtl));
    5530           86 :               generating_concat_p = old_generating_concat_p;
    5531              : 
    5532           86 :               emit_move_insn (tmp, decl_rtl);
    5533           86 :               emit_move_insn (real_decl_rtl, tmp);
    5534              :             }
    5535              :           /* If a named return value dumped decl_return to memory, then
    5536              :              we may need to re-do the PROMOTE_MODE signed/unsigned
    5537              :              extension.  */
    5538       714001 :           else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
    5539              :             {
    5540            0 :               int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
    5541            0 :               promote_function_mode (TREE_TYPE (decl_result),
    5542              :                                      GET_MODE (decl_rtl), &unsignedp,
    5543            0 :                                      TREE_TYPE (current_function_decl), 1);
    5544              : 
    5545            0 :               convert_move (real_decl_rtl, decl_rtl, unsignedp);
    5546              :             }
    5547              :           else
    5548       714001 :             emit_move_insn (real_decl_rtl, decl_rtl);
    5549              :         }
    5550              :     }
    5551              : 
    5552              :   /* If returning a structure, arrange to return the address of the value
    5553              :      in a place where debuggers expect to find it.
    5554              : 
    5555              :      If returning a structure PCC style,
    5556              :      the caller also depends on this value.
    5557              :      And cfun->returns_pcc_struct is not necessarily set.  */
    5558      1481725 :   if ((cfun->returns_struct || cfun->returns_pcc_struct)
    5559        68982 :       && !targetm.calls.omit_struct_return_reg)
    5560              :     {
    5561        68982 :       rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
    5562        68982 :       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
    5563        68982 :       rtx outgoing;
    5564              : 
    5565        68982 :       if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
    5566         8663 :         type = TREE_TYPE (type);
    5567              :       else
    5568        60319 :         value_address = XEXP (value_address, 0);
    5569              : 
    5570        68982 :       outgoing = targetm.calls.function_value (build_pointer_type (type),
    5571              :                                                current_function_decl, true);
    5572              : 
    5573              :       /* Mark this as a function return value so integrate will delete the
    5574              :          assignment and USE below when inlining this function.  */
    5575        68982 :       REG_FUNCTION_VALUE_P (outgoing) = 1;
    5576              : 
    5577              :       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
    5578        68982 :       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
    5579        68982 :       value_address = convert_memory_address (mode, value_address);
    5580              : 
    5581        68982 :       emit_move_insn (outgoing, value_address);
    5582              : 
    5583              :       /* Show return register used to hold result (in this case the address
    5584              :          of the result.  */
    5585        68982 :       crtl->return_rtx = outgoing;
    5586              :     }
    5587              : 
    5588              :   /* Emit the actual code to clobber return register.  Don't emit
    5589              :      it if clobber_after is a barrier, then the previous basic block
    5590              :      certainly doesn't fall thru into the exit block.  */
    5591      1481725 :   if (!BARRIER_P (clobber_after))
    5592              :     {
    5593       562390 :       start_sequence ();
    5594       562390 :       clobber_return_register ();
    5595       562390 :       rtx_insn *seq = end_sequence ();
    5596              : 
    5597       562390 :       emit_insn_after (seq, clobber_after);
    5598              :     }
    5599              : 
    5600              :   /* Output the label for the naked return from the function.  */
    5601      1481725 :   if (naked_return_label)
    5602          379 :     emit_label (naked_return_label);
    5603              : 
    5604              :   /* @@@ This is a kludge.  We want to ensure that instructions that
    5605              :      may trap are not moved into the epilogue by scheduling, because
    5606              :      we don't always emit unwind information for the epilogue.  */
    5607      1481725 :   if (cfun->can_throw_non_call_exceptions
    5608      1481725 :       && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    5609       262872 :     emit_insn (gen_blockage ());
    5610              : 
    5611              :   /* If stack protection is enabled for this function, check the guard.  */
    5612      1481725 :   if (crtl->stack_protect_guard
    5613          251 :       && targetm.stack_protect_runtime_enabled_p ()
    5614      1481975 :       && naked_return_label)
    5615            0 :     stack_protect_epilogue ();
    5616              : 
    5617              :   /* If we had calls to alloca, and this machine needs
    5618              :      an accurate stack pointer to exit the function,
    5619              :      insert some code to save and restore the stack pointer.  */
    5620      1481725 :   if (! EXIT_IGNORE_STACK
    5621              :       && cfun->calls_alloca)
    5622              :     {
    5623              :       rtx tem = 0;
    5624              : 
    5625              :       start_sequence ();
    5626              :       emit_stack_save (SAVE_FUNCTION, &tem);
    5627              :       rtx_insn *seq = end_sequence ();
    5628              :       emit_insn_before (seq, parm_birth_insn);
    5629              : 
    5630              :       emit_stack_restore (SAVE_FUNCTION, tem);
    5631              :     }
    5632              : 
    5633              :   /* ??? This should no longer be necessary since stupid is no longer with
    5634              :      us, but there are some parts of the compiler (eg reload_combine, and
    5635              :      sh mach_dep_reorg) that still try and compute their own lifetime info
    5636              :      instead of using the general framework.  */
    5637      1481725 :   use_return_register ();
    5638      1481725 : }
    5639              : 
    5640              : rtx
    5641            0 : get_arg_pointer_save_area (void)
    5642              : {
    5643            0 :   rtx ret = arg_pointer_save_area;
    5644              : 
    5645            0 :   if (! ret)
    5646              :     {
    5647            0 :       ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
    5648            0 :       arg_pointer_save_area = ret;
    5649              :     }
    5650              : 
    5651            0 :   if (! crtl->arg_pointer_save_area_init)
    5652              :     {
    5653              :       /* Save the arg pointer at the beginning of the function.  The
    5654              :          generated stack slot may not be a valid memory address, so we
    5655              :          have to check it and fix it if necessary.  */
    5656            0 :       start_sequence ();
    5657            0 :       emit_move_insn (validize_mem (copy_rtx (ret)),
    5658              :                       crtl->args.internal_arg_pointer);
    5659            0 :       rtx_insn *seq = end_sequence ();
    5660              : 
    5661            0 :       push_topmost_sequence ();
    5662            0 :       emit_insn_after (seq, entry_of_function ());
    5663            0 :       pop_topmost_sequence ();
    5664              : 
    5665            0 :       crtl->arg_pointer_save_area_init = true;
    5666              :     }
    5667              : 
    5668            0 :   return ret;
    5669              : }
    5670              : 
    5671              : 
    5672              : /* If debugging dumps are requested, dump information about how the
    5673              :    target handled -fstack-check=clash for the prologue.
    5674              : 
    5675              :    PROBES describes what if any probes were emitted.
    5676              : 
    5677              :    RESIDUALS indicates if the prologue had any residual allocation
    5678              :    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
    5679              : 
    5680              : void
    5681          126 : dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
    5682              : {
    5683          126 :   if (!dump_file)
    5684              :     return;
    5685              : 
    5686           17 :   switch (probes)
    5687              :     {
    5688            1 :     case NO_PROBE_NO_FRAME:
    5689            1 :       fprintf (dump_file,
    5690              :                "Stack clash no probe no stack adjustment in prologue.\n");
    5691            1 :       break;
    5692           11 :     case NO_PROBE_SMALL_FRAME:
    5693           11 :       fprintf (dump_file,
    5694              :                "Stack clash no probe small stack adjustment in prologue.\n");
    5695           11 :       break;
    5696            3 :     case PROBE_INLINE:
    5697            3 :       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
    5698            3 :       break;
    5699            2 :     case PROBE_LOOP:
    5700            2 :       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
    5701            2 :       break;
    5702              :     }
    5703              : 
    5704           17 :   if (residuals)
    5705           16 :     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
    5706              :   else
    5707            1 :     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
    5708              : 
    5709           17 :   if (frame_pointer_needed)
    5710            0 :     fprintf (dump_file, "Stack clash frame pointer needed.\n");
    5711              :   else
    5712           17 :     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
    5713              : 
    5714           17 :   if (TREE_THIS_VOLATILE (cfun->decl))
    5715            1 :     fprintf (dump_file,
    5716              :              "Stack clash noreturn prologue, assuming no implicit"
    5717              :              " probes in caller.\n");
    5718              :   else
    5719           16 :     fprintf (dump_file,
    5720              :              "Stack clash not noreturn prologue.\n");
    5721              : }
    5722              : 
    5723              : /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
    5724              :    for the first time.  */
    5725              : 
    5726              : static void
    5727      3690858 : record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
    5728              : {
    5729      3690858 :   rtx_insn *tmp;
    5730      3690858 :   hash_table<insn_cache_hasher> *hash = *hashp;
    5731              : 
    5732      3690858 :   if (hash == NULL)
    5733      2961896 :     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
    5734              : 
    5735     14300047 :   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    5736              :     {
    5737     10609189 :       rtx *slot = hash->find_slot (tmp, INSERT);
    5738     10609189 :       gcc_assert (*slot == NULL);
    5739     10609189 :       *slot = tmp;
    5740              :     }
    5741      3690858 : }
    5742              : 
    5743              : /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
    5744              :    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
    5745              :    insn, then record COPY as well.  */
    5746              : 
    5747              : void
    5748      3711396 : maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
    5749              : {
    5750      3711396 :   hash_table<insn_cache_hasher> *hash;
    5751      3711396 :   rtx *slot;
    5752              : 
    5753      3711396 :   hash = epilogue_insn_hash;
    5754      3711396 :   if (!hash || !hash->find (insn))
    5755              :     {
    5756      3199568 :       hash = prologue_insn_hash;
    5757      3199568 :       if (!hash || !hash->find (insn))
    5758      3124215 :         return;
    5759              :     }
    5760              : 
    5761       587181 :   slot = hash->find_slot (copy, INSERT);
    5762       587181 :   gcc_assert (*slot == NULL);
    5763       587181 :   *slot = copy;
    5764              : }
    5765              : 
    5766              : /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
    5767              :    we can be running after reorg, SEQUENCE rtl is possible.  */
    5768              : 
    5769              : static bool
    5770    284140845 : contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
    5771              : {
    5772    284140845 :   if (hash == NULL)
    5773              :     return false;
    5774              : 
    5775    284087255 :   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    5776              :     {
    5777            0 :       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
    5778            0 :       int i;
    5779            0 :       for (i = seq->len () - 1; i >= 0; i--)
    5780            0 :         if (hash->find (seq->element (i)))
    5781              :           return true;
    5782              :       return false;
    5783              :     }
    5784              : 
    5785    284087255 :   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
    5786              : }
    5787              : 
    5788              : bool
    5789    108186233 : prologue_contains (const rtx_insn *insn)
    5790              : {
    5791    108186233 :   return contains (insn, prologue_insn_hash);
    5792              : }
    5793              : 
    5794              : bool
    5795    108186233 : epilogue_contains (const rtx_insn *insn)
    5796              : {
    5797    108186233 :   return contains (insn, epilogue_insn_hash);
    5798              : }
    5799              : 
    5800              : bool
    5801         4479 : prologue_epilogue_contains (const rtx_insn *insn)
    5802              : {
    5803         4479 :   if (contains (insn, prologue_insn_hash))
    5804              :     return true;
    5805         4289 :   if (contains (insn, epilogue_insn_hash))
    5806              :     return true;
    5807              :   return false;
    5808              : }
    5809              : 
    5810              : void
    5811       170402 : record_prologue_seq (rtx_insn *seq)
    5812              : {
    5813       170402 :   record_insns (seq, NULL, &prologue_insn_hash);
    5814       170402 : }
    5815              : 
    5816              : void
    5817       151231 : record_epilogue_seq (rtx_insn *seq)
    5818              : {
    5819       151231 :   record_insns (seq, NULL, &epilogue_insn_hash);
    5820       151231 : }
    5821              : 
    5822              : /* Set JUMP_LABEL for a return insn.  */
    5823              : 
    5824              : void
    5825      1526549 : set_return_jump_label (rtx_insn *returnjump)
    5826              : {
    5827      1526549 :   rtx pat = PATTERN (returnjump);
    5828      1526549 :   if (GET_CODE (pat) == PARALLEL)
    5829        27140 :     pat = XVECEXP (pat, 0, 0);
    5830      1526549 :   if (ANY_RETURN_P (pat))
    5831      1526549 :     JUMP_LABEL (returnjump) = pat;
    5832              :   else
    5833            0 :     JUMP_LABEL (returnjump) = ret_rtx;
    5834      1526549 : }
    5835              : 
    5836              : /* Return a sequence to be used as the split prologue for the current
    5837              :    function, or NULL.  */
    5838              : 
    5839              : static rtx_insn *
    5840      1525891 : make_split_prologue_seq (void)
    5841              : {
    5842      1525891 :   if (!flag_split_stack
    5843      1525891 :       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    5844      1265960 :     return NULL;
    5845              : 
    5846       259931 :   start_sequence ();
    5847       259931 :   emit_insn (targetm.gen_split_stack_prologue ());
    5848       259931 :   rtx_insn *seq = end_sequence ();
    5849              : 
    5850       259931 :   record_insns (seq, NULL, &prologue_insn_hash);
    5851       259931 :   set_insn_locations (seq, prologue_location);
    5852              : 
    5853       259931 :   return seq;
    5854              : }
    5855              : 
    5856              : /* Return a sequence to be used as the prologue for the current function,
    5857              :    or NULL.  */
    5858              : 
    5859              : static rtx_insn *
    5860      1525891 : make_prologue_seq (void)
    5861              : {
    5862      1525891 :   if (!targetm.have_prologue ())
    5863              :     return NULL;
    5864              : 
    5865      1525891 :   start_sequence ();
    5866      1525891 :   rtx_insn *seq = targetm.gen_prologue ();
    5867      1525891 :   emit_insn (seq);
    5868              : 
    5869              :   /* Insert an explicit USE for the frame pointer
    5870              :      if the profiling is on and the frame pointer is required.  */
    5871      1525891 :   if (crtl->profile && frame_pointer_needed)
    5872          306 :     emit_use (hard_frame_pointer_rtx);
    5873              : 
    5874              :   /* Retain a map of the prologue insns.  */
    5875      1525891 :   record_insns (seq, NULL, &prologue_insn_hash);
    5876      1525891 :   emit_note (NOTE_INSN_PROLOGUE_END);
    5877              : 
    5878              :   /* Ensure that instructions are not moved into the prologue when
    5879              :      profiling is on.  The call to the profiling routine can be
    5880              :      emitted within the live range of a call-clobbered register.  */
    5881      1525891 :   if (!targetm.profile_before_prologue () && crtl->profile)
    5882           12 :     emit_insn (gen_blockage ());
    5883              : 
    5884      1525891 :   seq = end_sequence ();
    5885      1525891 :   set_insn_locations (seq, prologue_location);
    5886              : 
    5887      1525891 :   return seq;
    5888              : }
    5889              : 
    5890              : /* Emit a sequence of insns to zero the call-used registers before RET
    5891              :    according to ZERO_REGS_TYPE.  */
    5892              : 
    5893              : static void
    5894          176 : gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
    5895              : {
    5896          176 :   bool only_gpr = true;
    5897          176 :   bool only_used = true;
    5898          176 :   bool only_arg = true;
    5899              : 
    5900              :   /* No need to zero call-used-regs in main ().  */
    5901          176 :   if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    5902           46 :     return;
    5903              : 
    5904              :   /* No need to zero call-used-regs if __builtin_eh_return is called
    5905              :      since it isn't a normal function return.  */
    5906          134 :   if (crtl->calls_eh_return)
    5907              :     return;
    5908              : 
    5909              :   /* If only_gpr is true, only zero call-used registers that are
    5910              :      general-purpose registers; if only_used is true, only zero
    5911              :      call-used registers that are used in the current function;
    5912              :      if only_arg is true, only zero call-used registers that pass
    5913              :      parameters defined by the flatform's calling conversion.  */
    5914              : 
    5915          134 :   using namespace zero_regs_flags;
    5916              : 
    5917          134 :   only_gpr = zero_regs_type & ONLY_GPR;
    5918          134 :   only_used = zero_regs_type & ONLY_USED;
    5919          134 :   only_arg = zero_regs_type & ONLY_ARG;
    5920              : 
    5921          134 :   if ((zero_regs_type & LEAFY_MODE) && leaf_function_p ())
    5922              :     only_used = true;
    5923              : 
    5924              :   /* For each of the hard registers, we should zero it if:
    5925              :             1. it is a call-used register;
    5926              :         and 2. it is not a fixed register;
    5927              :         and 3. it is not live at the return of the routine;
    5928              :         and 4. it is general registor if only_gpr is true;
    5929              :         and 5. it is used in the routine if only_used is true;
    5930              :         and 6. it is a register that passes parameter if only_arg is true.  */
    5931              : 
    5932              :   /* First, prepare the data flow information.  */
    5933          134 :   basic_block bb = BLOCK_FOR_INSN (ret);
    5934          134 :   auto_bitmap live_out;
    5935          134 :   bitmap_copy (live_out, df_get_live_out (bb));
    5936          134 :   df_simulate_initialize_backwards (bb, live_out);
    5937          134 :   df_simulate_one_insn_backwards (bb, ret, live_out);
    5938              : 
    5939          134 :   HARD_REG_SET selected_hardregs;
    5940          134 :   HARD_REG_SET all_call_used_regs;
    5941          536 :   CLEAR_HARD_REG_SET (selected_hardregs);
    5942        12462 :   CLEAR_HARD_REG_SET (all_call_used_regs);
    5943        12462 :   for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    5944              :     {
    5945        12328 :       if (!crtl->abi->clobbers_full_reg_p (regno))
    5946         1206 :         continue;
    5947        11122 :       if (fixed_regs[regno])
    5948         5612 :         continue;
    5949         5510 :       if (REGNO_REG_SET_P (live_out, regno))
    5950          124 :         continue;
    5951              : #ifdef LEAF_REG_REMAP
    5952              :       if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
    5953              :         continue;
    5954              : #endif
    5955              :       /* This is a call used register that is dead at return.  */
    5956         5386 :       SET_HARD_REG_BIT (all_call_used_regs, regno);
    5957              : 
    5958         7306 :       if (only_gpr
    5959         5386 :           && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
    5960         1920 :         continue;
    5961         3466 :       if (only_used && !df_regs_ever_live_p (regno))
    5962         1806 :         continue;
    5963         1660 :       if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
    5964          364 :         continue;
    5965              : 
    5966              :       /* Now this is a register that we might want to zero.  */
    5967         1296 :       SET_HARD_REG_BIT (selected_hardregs, regno);
    5968              :     }
    5969              : 
    5970          134 :   if (hard_reg_set_empty_p (selected_hardregs))
    5971            4 :     return;
    5972              : 
    5973              :   /* Now that we have a hard register set that needs to be zeroed, pass it to
    5974              :      target to generate zeroing sequence.  */
    5975          130 :   HARD_REG_SET zeroed_hardregs;
    5976          130 :   start_sequence ();
    5977          130 :   zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
    5978              : 
    5979              :   /* For most targets, the returned set of registers is a subset of
    5980              :      selected_hardregs, however, for some of the targets (for example MIPS),
    5981              :      clearing some registers that are in selected_hardregs requires clearing
    5982              :      other call used registers that are not in the selected_hardregs, under
    5983              :      such situation, the returned set of registers must be a subset of
    5984              :      all call used registers.  */
    5985          260 :   gcc_assert (hard_reg_set_subset_p (zeroed_hardregs, all_call_used_regs));
    5986              : 
    5987          130 :   rtx_insn *seq = end_sequence ();
    5988          130 :   if (seq)
    5989              :     {
    5990              :       /* Emit the memory blockage and register clobber asm volatile before
    5991              :          the whole sequence.  */
    5992          130 :       start_sequence ();
    5993          130 :       expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
    5994          130 :       rtx_insn *seq_barrier = end_sequence ();
    5995              : 
    5996          130 :       emit_insn_before (seq_barrier, ret);
    5997          130 :       emit_insn_before (seq, ret);
    5998              : 
    5999              :       /* Update the data flow information.  */
    6000          130 :       crtl->must_be_zero_on_return |= zeroed_hardregs;
    6001          130 :       df_update_exit_block_uses ();
    6002              :     }
    6003          134 : }
    6004              : 
    6005              : 
    6006              : /* Return a sequence to be used as the epilogue for the current function,
    6007              :    or NULL.  */
    6008              : 
    6009              : static rtx_insn *
    6010      1525891 : make_epilogue_seq (void)
    6011              : {
    6012      1525891 :   if (!targetm.have_epilogue ())
    6013              :     return NULL;
    6014              : 
    6015      1525891 :   start_sequence ();
    6016      1525891 :   emit_note (NOTE_INSN_EPILOGUE_BEG);
    6017      1525891 :   rtx_insn *seq = targetm.gen_epilogue ();
    6018      1525891 :   if (seq)
    6019      1525891 :     emit_jump_insn (seq);
    6020              : 
    6021              :   /* Retain a map of the epilogue insns.  */
    6022      1525891 :   record_insns (seq, NULL, &epilogue_insn_hash);
    6023      1525891 :   set_insn_locations (seq, epilogue_location);
    6024              : 
    6025      1525891 :   seq = get_insns ();
    6026      1525891 :   rtx_insn *returnjump = get_last_insn ();
    6027      1525891 :   end_sequence ();
    6028              : 
    6029      1525891 :   if (JUMP_P (returnjump))
    6030      1525817 :     set_return_jump_label (returnjump);
    6031              : 
    6032              :   return seq;
    6033              : }
    6034              : 
    6035              : 
    6036              : /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
    6037              :    this into place with notes indicating where the prologue ends and where
    6038              :    the epilogue begins.  Update the basic block information when possible.
    6039              : 
    6040              :    Notes on epilogue placement:
    6041              :    There are several kinds of edges to the exit block:
    6042              :    * a single fallthru edge from LAST_BB
    6043              :    * possibly, edges from blocks containing sibcalls
    6044              :    * possibly, fake edges from infinite loops
    6045              : 
    6046              :    The epilogue is always emitted on the fallthru edge from the last basic
    6047              :    block in the function, LAST_BB, into the exit block.
    6048              : 
    6049              :    If LAST_BB is empty except for a label, it is the target of every
    6050              :    other basic block in the function that ends in a return.  If a
    6051              :    target has a return or simple_return pattern (possibly with
    6052              :    conditional variants), these basic blocks can be changed so that a
    6053              :    return insn is emitted into them, and their target is adjusted to
    6054              :    the real exit block.
    6055              : 
    6056              :    Notes on shrink wrapping: We implement a fairly conservative
    6057              :    version of shrink-wrapping rather than the textbook one.  We only
    6058              :    generate a single prologue and a single epilogue.  This is
    6059              :    sufficient to catch a number of interesting cases involving early
    6060              :    exits.
    6061              : 
    6062              :    First, we identify the blocks that require the prologue to occur before
    6063              :    them.  These are the ones that modify a call-saved register, or reference
    6064              :    any of the stack or frame pointer registers.  To simplify things, we then
    6065              :    mark everything reachable from these blocks as also requiring a prologue.
    6066              :    This takes care of loops automatically, and avoids the need to examine
    6067              :    whether MEMs reference the frame, since it is sufficient to check for
    6068              :    occurrences of the stack or frame pointer.
    6069              : 
    6070              :    We then compute the set of blocks for which the need for a prologue
    6071              :    is anticipatable (borrowing terminology from the shrink-wrapping
    6072              :    description in Muchnick's book).  These are the blocks which either
    6073              :    require a prologue themselves, or those that have only successors
    6074              :    where the prologue is anticipatable.  The prologue needs to be
    6075              :    inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
    6076              :    is not.  For the moment, we ensure that only one such edge exists.
    6077              : 
    6078              :    The epilogue is placed as described above, but we make a
    6079              :    distinction between inserting return and simple_return patterns
    6080              :    when modifying other blocks that end in a return.  Blocks that end
    6081              :    in a sibcall omit the sibcall_epilogue if the block is not in
    6082              :    ANTIC.  */
    6083              : 
    6084              : void
    6085      1480948 : thread_prologue_and_epilogue_insns (void)
    6086              : {
    6087      1480948 :   df_analyze ();
    6088              : 
    6089              :   /* Can't deal with multiple successors of the entry block at the
    6090              :      moment.  Function should always have at least one entry
    6091              :      point.  */
    6092      1480948 :   gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
    6093              : 
    6094      1480948 :   edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
    6095      1480948 :   edge orig_entry_edge = entry_edge;
    6096              : 
    6097      1480948 :   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
    6098      1480948 :   rtx_insn *prologue_seq = make_prologue_seq ();
    6099      1480948 :   rtx_insn *epilogue_seq = make_epilogue_seq ();
    6100              : 
    6101              :   /* Try to perform a kind of shrink-wrapping, making sure the
    6102              :      prologue/epilogue is emitted only around those parts of the
    6103              :      function that require it.  */
    6104      1480948 :   try_shrink_wrapping (&entry_edge, prologue_seq);
    6105              : 
    6106              :   /* If the target can handle splitting the prologue/epilogue into separate
    6107              :      components, try to shrink-wrap these components separately.  */
    6108      1480948 :   try_shrink_wrapping_separate (entry_edge->dest);
    6109              : 
    6110              :   /* If that did anything for any component we now need the generate the
    6111              :      "main" prologue again.  Because some targets require some of these
    6112              :      to be called in a specific order (i386 requires the split prologue
    6113              :      to be first, for example), we create all three sequences again here.
    6114              :      If this does not work for some target, that target should not enable
    6115              :      separate shrink-wrapping.  */
    6116      1480948 :   if (crtl->shrink_wrapped_separate)
    6117              :     {
    6118        44943 :       split_prologue_seq = make_split_prologue_seq ();
    6119        44943 :       prologue_seq = make_prologue_seq ();
    6120        44943 :       epilogue_seq = make_epilogue_seq ();
    6121              :     }
    6122              : 
    6123      1480948 :   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
    6124              : 
    6125              :   /* A small fib -- epilogue is not yet completed, but we wish to re-use
    6126              :      this marker for the splits of EH_RETURN patterns, and nothing else
    6127              :      uses the flag in the meantime.  */
    6128      1480948 :   epilogue_completed = 1;
    6129              : 
    6130              :   /* Find non-fallthru edges that end with EH_RETURN instructions.  On
    6131              :      some targets, these get split to a special version of the epilogue
    6132              :      code.  In order to be able to properly annotate these with unwind
    6133              :      info, try to split them now.  If we get a valid split, drop an
    6134              :      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
    6135      1480948 :   edge e;
    6136      1480948 :   edge_iterator ei;
    6137      3016565 :   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6138              :     {
    6139      1535617 :       rtx_insn *prev, *last, *trial;
    6140              : 
    6141      1535617 :       if (e->flags & EDGE_FALLTHRU)
    6142      1353004 :         continue;
    6143       182613 :       last = BB_END (e->src);
    6144       182613 :       if (!eh_returnjump_p (last))
    6145       182584 :         continue;
    6146              : 
    6147           29 :       prev = PREV_INSN (last);
    6148           29 :       trial = try_split (PATTERN (last), last, 1);
    6149           29 :       if (trial == last)
    6150            0 :         continue;
    6151              : 
    6152           29 :       record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
    6153           29 :       emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    6154              :     }
    6155              : 
    6156      1480948 :   edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
    6157              : 
    6158      1480948 :   if (exit_fallthru_edge)
    6159              :     {
    6160      1353004 :       if (epilogue_seq)
    6161              :         {
    6162      1353004 :           insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
    6163      1353004 :           commit_edge_insertions ();
    6164              : 
    6165              :           /* The epilogue insns we inserted may cause the exit edge to no longer
    6166              :              be fallthru.  */
    6167      2787180 :           FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6168              :             {
    6169      1434176 :               if (((e->flags & EDGE_FALLTHRU) != 0)
    6170      1434176 :                   && returnjump_p (BB_END (e->src)))
    6171            0 :                 e->flags &= ~EDGE_FALLTHRU;
    6172              :             }
    6173              : 
    6174      1353004 :           find_sub_basic_blocks (BLOCK_FOR_INSN (epilogue_seq));
    6175              :         }
    6176            0 :       else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
    6177              :         {
    6178              :           /* We have a fall-through edge to the exit block, the source is not
    6179              :              at the end of the function, and there will be an assembler epilogue
    6180              :              at the end of the function.
    6181              :              We can't use force_nonfallthru here, because that would try to
    6182              :              use return.  Inserting a jump 'by hand' is extremely messy, so
    6183              :              we take advantage of cfg_layout_finalize using
    6184              :              fixup_fallthru_exit_predecessor.  */
    6185            0 :           cfg_layout_initialize (0);
    6186            0 :           basic_block cur_bb;
    6187            0 :           FOR_EACH_BB_FN (cur_bb, cfun)
    6188            0 :             if (cur_bb->index >= NUM_FIXED_BLOCKS
    6189            0 :                 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
    6190            0 :               cur_bb->aux = cur_bb->next_bb;
    6191            0 :           cfg_layout_finalize ();
    6192              :         }
    6193              :     }
    6194              : 
    6195              :   /* Insert the prologue.  */
    6196              : 
    6197      1480948 :   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
    6198              : 
    6199      1480948 :   if (split_prologue_seq || prologue_seq)
    6200              :     {
    6201      1480948 :       rtx_insn *split_prologue_insn = split_prologue_seq;
    6202      1480948 :       if (split_prologue_seq)
    6203              :         {
    6204       259927 :           while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
    6205            0 :             split_prologue_insn = NEXT_INSN (split_prologue_insn);
    6206       259927 :           insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
    6207              :         }
    6208              : 
    6209      1480948 :       rtx_insn *prologue_insn = prologue_seq;
    6210      1480948 :       if (prologue_seq)
    6211              :         {
    6212      1869171 :           while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
    6213       388223 :             prologue_insn = NEXT_INSN (prologue_insn);
    6214      1480948 :           insert_insn_on_edge (prologue_seq, entry_edge);
    6215              :         }
    6216              : 
    6217      1480948 :       commit_edge_insertions ();
    6218              : 
    6219              :       /* Look for basic blocks within the prologue insns.  */
    6220      1480948 :       if (split_prologue_insn
    6221      1480948 :           && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
    6222              :         split_prologue_insn = NULL;
    6223      1480948 :       if (prologue_insn
    6224      1480948 :           && BLOCK_FOR_INSN (prologue_insn) == NULL)
    6225              :         prologue_insn = NULL;
    6226      1480948 :       if (split_prologue_insn || prologue_insn)
    6227              :         {
    6228      1116368 :           auto_sbitmap blocks (last_basic_block_for_fn (cfun));
    6229      1116368 :           bitmap_clear (blocks);
    6230      1116368 :           if (split_prologue_insn)
    6231       259927 :             bitmap_set_bit (blocks,
    6232       259927 :                             BLOCK_FOR_INSN (split_prologue_insn)->index);
    6233      1116368 :           if (prologue_insn)
    6234      1092725 :             bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
    6235      1116368 :           find_many_sub_basic_blocks (blocks);
    6236      1116368 :         }
    6237              :     }
    6238              : 
    6239      1480948 :   default_rtl_profile ();
    6240              : 
    6241              :   /* Emit sibling epilogues before any sibling call sites.  */
    6242      1480948 :   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
    6243      3016565 :        (e = ei_safe_edge (ei));
    6244      1535617 :        ei_next (&ei))
    6245              :     {
    6246              :       /* Skip those already handled, the ones that run without prologue.  */
    6247      1535617 :       if (e->flags & EDGE_IGNORE)
    6248              :         {
    6249         5072 :           e->flags &= ~EDGE_IGNORE;
    6250         5072 :           continue;
    6251              :         }
    6252              : 
    6253      1530545 :       rtx_insn *insn = BB_END (e->src);
    6254              : 
    6255      1530545 :       if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
    6256      1406790 :         continue;
    6257              : 
    6258       123755 :       rtx_insn *ep_seq;
    6259       123755 :       if (targetm.emit_epilogue_for_sibcall)
    6260              :         {
    6261            0 :           start_sequence ();
    6262            0 :           targetm.emit_epilogue_for_sibcall (as_a<rtx_call_insn *> (insn));
    6263            0 :           ep_seq = end_sequence ();
    6264              :         }
    6265              :       else
    6266       123755 :         ep_seq = targetm.gen_sibcall_epilogue ();
    6267       123755 :       if (ep_seq)
    6268              :         {
    6269        57483 :           start_sequence ();
    6270        57483 :           emit_note (NOTE_INSN_EPILOGUE_BEG);
    6271        57483 :           emit_insn (ep_seq);
    6272        57483 :           rtx_insn *seq = end_sequence ();
    6273              : 
    6274              :           /* Retain a map of the epilogue insns.  Used in life analysis to
    6275              :              avoid getting rid of sibcall epilogue insns.  Do this before we
    6276              :              actually emit the sequence.  */
    6277        57483 :           record_insns (seq, NULL, &epilogue_insn_hash);
    6278        57483 :           set_insn_locations (seq, epilogue_location);
    6279              : 
    6280        57483 :           emit_insn_before (seq, insn);
    6281              : 
    6282        57483 :           find_sub_basic_blocks (BLOCK_FOR_INSN (insn));
    6283              :         }
    6284              :     }
    6285              : 
    6286      1480948 :   if (epilogue_seq)
    6287              :     {
    6288              :       rtx_insn *insn, *next;
    6289              : 
    6290              :       /* Similarly, move any line notes that appear after the epilogue.
    6291              :          There is no need, however, to be quite so anal about the existence
    6292              :          of such a note.  Also possibly move
    6293              :          NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
    6294              :          info generation.  */
    6295     11558104 :       for (insn = epilogue_seq; insn; insn = next)
    6296              :         {
    6297     10077156 :           next = NEXT_INSN (insn);
    6298     10077156 :           if (NOTE_P (insn)
    6299      3098273 :               && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
    6300            0 :             reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
    6301              :         }
    6302              :     }
    6303              : 
    6304              :   /* Threading the prologue and epilogue changes the artificial refs in the
    6305              :      entry and exit blocks, and may invalidate DF info for tail calls.
    6306              :      This is also needed for [[musttail]] conversion even when not
    6307              :      optimizing.  */
    6308      1480948 :   if (optimize
    6309       440720 :       || cfun->tail_call_marked
    6310       440559 :       || flag_optimize_sibling_calls
    6311       440518 :       || flag_ipa_icf_functions
    6312       440461 :       || in_lto_p)
    6313      1048927 :     df_update_entry_exit_and_calls ();
    6314              :   else
    6315              :     {
    6316       432021 :       df_update_entry_block_defs ();
    6317       432021 :       df_update_exit_block_uses ();
    6318              :     }
    6319      1480948 : }
    6320              : 
    6321              : /* Reposition the prologue-end and epilogue-begin notes after
    6322              :    instruction scheduling.  */
    6323              : 
    6324              : void
    6325       960355 : reposition_prologue_and_epilogue_notes (void)
    6326              : {
    6327       960355 :   if (!targetm.have_prologue ()
    6328            0 :       && !targetm.have_epilogue ()
    6329            0 :       && !targetm.have_sibcall_epilogue ()
    6330       960355 :       && !targetm.emit_epilogue_for_sibcall)
    6331              :     return;
    6332              : 
    6333              :   /* Since the hash table is created on demand, the fact that it is
    6334              :      non-null is a signal that it is non-empty.  */
    6335       960355 :   if (prologue_insn_hash != NULL)
    6336              :     {
    6337       960355 :       size_t len = prologue_insn_hash->elements ();
    6338       960355 :       rtx_insn *insn, *last = NULL, *note = NULL;
    6339              : 
    6340              :       /* Scan from the beginning until we reach the last prologue insn.  */
    6341              :       /* ??? While we do have the CFG intact, there are two problems:
    6342              :          (1) The prologue can contain loops (typically probing the stack),
    6343              :              which means that the end of the prologue isn't in the first bb.
    6344              :          (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
    6345     68534737 :       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    6346              :         {
    6347     68001424 :           if (NOTE_P (insn))
    6348              :             {
    6349     12827358 :               if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
    6350     67574382 :                 note = insn;
    6351              :             }
    6352     55174066 :           else if (contains (insn, prologue_insn_hash))
    6353              :             {
    6354      3448383 :               last = insn;
    6355      3448383 :               if (--len == 0)
    6356              :                 break;
    6357              :             }
    6358              :         }
    6359              : 
    6360       960355 :       if (last)
    6361              :         {
    6362       624478 :           if (note == NULL)
    6363              :             {
    6364              :               /* Scan forward looking for the PROLOGUE_END note.  It should
    6365              :                  be right at the beginning of the block, possibly with other
    6366              :                  insn notes that got moved there.  */
    6367         1394 :               for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
    6368              :                 {
    6369         1394 :                   if (NOTE_P (note)
    6370         1280 :                       && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
    6371              :                     break;
    6372              :                 }
    6373              :             }
    6374              : 
    6375              :           /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
    6376       624478 :           if (LABEL_P (last))
    6377          229 :             last = NEXT_INSN (last);
    6378       624478 :           reorder_insns (note, note, last);
    6379              :         }
    6380              :     }
    6381              : 
    6382       960355 :   if (epilogue_insn_hash != NULL)
    6383              :     {
    6384       960355 :       edge_iterator ei;
    6385       960355 :       edge e;
    6386              : 
    6387      2185569 :       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6388              :         {
    6389      1225214 :           rtx_insn *insn, *first = NULL, *note = NULL;
    6390      1225214 :           basic_block bb = e->src;
    6391              : 
    6392              :           /* Scan from the beginning until we reach the first epilogue insn. */
    6393     16698945 :           FOR_BB_INSNS (bb, insn)
    6394              :             {
    6395     16548564 :               if (NOTE_P (insn))
    6396              :                 {
    6397      3923119 :                   if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
    6398              :                     {
    6399      1074833 :                       note = insn;
    6400      1074833 :                       if (first != NULL)
    6401              :                         break;
    6402              :                     }
    6403              :                 }
    6404     12625445 :               else if (first == NULL && contains (insn, epilogue_insn_hash))
    6405              :                 {
    6406      1074837 :                   first = insn;
    6407      1074837 :                   if (note != NULL)
    6408              :                     break;
    6409              :                 }
    6410              :             }
    6411              : 
    6412      1225214 :           if (note)
    6413              :             {
    6414              :               /* If the function has a single basic block, and no real
    6415              :                  epilogue insns (e.g. sibcall with no cleanup), the
    6416              :                  epilogue note can get scheduled before the prologue
    6417              :                  note.  If we have frame related prologue insns, having
    6418              :                  them scanned during the epilogue will result in a crash.
    6419              :                  In this case re-order the epilogue note to just before
    6420              :                  the last insn in the block.  */
    6421      1074833 :               if (first == NULL)
    6422            0 :                 first = BB_END (bb);
    6423              : 
    6424      1074833 :               if (PREV_INSN (first) != note)
    6425        30712 :                 reorder_insns (note, note, PREV_INSN (first));
    6426              :             }
    6427              :         }
    6428              :     }
    6429              : }
    6430              : 
    6431              : /* Returns the name of function declared by FNDECL.  */
    6432              : const char *
    6433       119337 : fndecl_name (tree fndecl)
    6434              : {
    6435       119337 :   if (fndecl == NULL)
    6436              :     return "(nofn)";
    6437       119322 :   return lang_hooks.decl_printable_name (fndecl, 1);
    6438              : }
    6439              : 
    6440              : /* Returns the name of function FN.  */
    6441              : const char *
    6442       119284 : function_name (const function *fn)
    6443              : {
    6444       119284 :   tree fndecl = (fn == NULL) ? NULL : fn->decl;
    6445       119284 :   return fndecl_name (fndecl);
    6446              : }
    6447              : 
    6448              : /* Returns the name of the current function.  */
    6449              : const char *
    6450         8725 : current_function_name (void)
    6451              : {
    6452         8725 :   return function_name (cfun);
    6453              : }
    6454              : 
    6455              : 
    6456              : static void
    6457            0 : rest_of_handle_check_leaf_regs (void)
    6458              : {
    6459              : #ifdef LEAF_REGISTERS
    6460              :   crtl->uses_only_leaf_regs
    6461              :     = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
    6462              : #endif
    6463            0 : }
    6464              : 
    6465              : /* Insert a TYPE into the used types hash table of CFUN.  */
    6466              : 
    6467              : static void
    6468     66166799 : used_types_insert_helper (tree type, struct function *func)
    6469              : {
    6470     66166799 :   if (type != NULL && func != NULL)
    6471              :     {
    6472     66166799 :       if (func->used_types_hash == NULL)
    6473     18931220 :         func->used_types_hash = hash_set<tree>::create_ggc (37);
    6474              : 
    6475     66166799 :       func->used_types_hash->add (type);
    6476              :     }
    6477     66166799 : }
    6478              : 
    6479              : /* Given a type, insert it into the used hash table in cfun.  */
    6480              : void
    6481    218500549 : used_types_insert (tree t)
    6482              : {
    6483    229161402 :   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    6484     11053739 :     if (TYPE_NAME (t))
    6485              :       break;
    6486              :     else
    6487     10660853 :       t = TREE_TYPE (t);
    6488    218500549 :   if (TREE_CODE (t) == ERROR_MARK)
    6489              :     return;
    6490    218500544 :   if (TYPE_NAME (t) == NULL_TREE
    6491    218500544 :       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    6492     86995494 :     t = TYPE_MAIN_VARIANT (t);
    6493    218500544 :   if (debug_info_level > DINFO_LEVEL_NONE)
    6494              :     {
    6495     94427306 :       if (cfun)
    6496     66166799 :         used_types_insert_helper (t, cfun);
    6497              :       else
    6498              :         {
    6499              :           /* So this might be a type referenced by a global variable.
    6500              :              Record that type so that we can later decide to emit its
    6501              :              debug information.  */
    6502     28260507 :           vec_safe_push (types_used_by_cur_var_decl, t);
    6503              :         }
    6504              :     }
    6505              : }
    6506              : 
    6507              : /* Helper to Hash a struct types_used_by_vars_entry.  */
    6508              : 
    6509              : static hashval_t
    6510    188880043 : hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
    6511              : {
    6512    188880043 :   gcc_assert (entry && entry->var_decl && entry->type);
    6513              : 
    6514    188880043 :   return iterative_hash_object (entry->type,
    6515              :                                 iterative_hash_object (entry->var_decl, 0));
    6516              : }
    6517              : 
    6518              : /* Hash function of the types_used_by_vars_entry hash table.  */
    6519              : 
    6520              : hashval_t
    6521    188880043 : used_type_hasher::hash (types_used_by_vars_entry *entry)
    6522              : {
    6523    188880043 :   return hash_types_used_by_vars_entry (entry);
    6524              : }
    6525              : 
    6526              : /*Equality function of the types_used_by_vars_entry hash table.  */
    6527              : 
    6528              : bool
    6529    200036673 : used_type_hasher::equal (types_used_by_vars_entry *e1,
    6530              :                          types_used_by_vars_entry *e2)
    6531              : {
    6532    200036673 :   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
    6533              : }
    6534              : 
    6535              : /* Inserts an entry into the types_used_by_vars_hash hash table. */
    6536              : 
    6537              : void
    6538     28189765 : types_used_by_var_decl_insert (tree type, tree var_decl)
    6539              : {
    6540     28189765 :   if (type != NULL && var_decl != NULL)
    6541              :     {
    6542     28189765 :       types_used_by_vars_entry **slot;
    6543     28189765 :       struct types_used_by_vars_entry e;
    6544     28189765 :       e.var_decl = var_decl;
    6545     28189765 :       e.type = type;
    6546     28189765 :       if (types_used_by_vars_hash == NULL)
    6547        14202 :         types_used_by_vars_hash
    6548        14202 :           = hash_table<used_type_hasher>::create_ggc (37);
    6549              : 
    6550     28189765 :       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
    6551     28189765 :       if (*slot == NULL)
    6552              :         {
    6553      8819257 :           struct types_used_by_vars_entry *entry;
    6554      8819257 :           entry = ggc_alloc<types_used_by_vars_entry> ();
    6555      8819257 :           entry->type = type;
    6556      8819257 :           entry->var_decl = var_decl;
    6557      8819257 :           *slot = entry;
    6558              :         }
    6559              :     }
    6560     28189765 : }
    6561              : 
    6562              : namespace {
    6563              : 
    6564              : const pass_data pass_data_leaf_regs =
    6565              : {
    6566              :   RTL_PASS, /* type */
    6567              :   "*leaf_regs", /* name */
    6568              :   OPTGROUP_NONE, /* optinfo_flags */
    6569              :   TV_NONE, /* tv_id */
    6570              :   0, /* properties_required */
    6571              :   0, /* properties_provided */
    6572              :   0, /* properties_destroyed */
    6573              :   0, /* todo_flags_start */
    6574              :   0, /* todo_flags_finish */
    6575              : };
    6576              : 
    6577              : class pass_leaf_regs : public rtl_opt_pass
    6578              : {
    6579              : public:
    6580       287872 :   pass_leaf_regs (gcc::context *ctxt)
    6581       575744 :     : rtl_opt_pass (pass_data_leaf_regs, ctxt)
    6582              :   {}
    6583              : 
    6584              :   /* opt_pass methods: */
    6585      1480948 :   unsigned int execute (function *) final override
    6586              :     {
    6587      1480948 :       rest_of_handle_check_leaf_regs ();
    6588      1480948 :       return 0;
    6589              :     }
    6590              : 
    6591              : }; // class pass_leaf_regs
    6592              : 
    6593              : } // anon namespace
    6594              : 
    6595              : rtl_opt_pass *
    6596       287872 : make_pass_leaf_regs (gcc::context *ctxt)
    6597              : {
    6598       287872 :   return new pass_leaf_regs (ctxt);
    6599              : }
    6600              : 
    6601              : static void
    6602      1480948 : rest_of_handle_thread_prologue_and_epilogue (function *fun)
    6603              : {
    6604              :   /* prepare_shrink_wrap is sensitive to the block structure of the control
    6605              :      flow graph, so clean it up first.  */
    6606      1480948 :   if (cfun->tail_call_marked || optimize)
    6607      1040389 :     cleanup_cfg (0);
    6608              : 
    6609              :   /* On some machines, the prologue and epilogue code, or parts thereof,
    6610              :      can be represented as RTL.  Doing so lets us schedule insns between
    6611              :      it and the rest of the code and also allows delayed branch
    6612              :      scheduling to operate in the epilogue.  */
    6613      1480948 :   thread_prologue_and_epilogue_insns ();
    6614              : 
    6615              :   /* Some non-cold blocks may now be only reachable from cold blocks.
    6616              :      Fix that up.  */
    6617      1480948 :   fixup_partitions ();
    6618              : 
    6619              :   /* After prologue and epilogue generation, the judgement on whether
    6620              :      one memory access onto stack frame may trap or not could change,
    6621              :      since we get more exact stack information by now.  So try to
    6622              :      remove any EH edges here, see PR90259.  */
    6623      1480948 :   if (fun->can_throw_non_call_exceptions)
    6624       262869 :     purge_all_dead_edges ();
    6625              : 
    6626              :   /* Shrink-wrapping can result in unreachable edges in the epilogue,
    6627              :      see PR57320.  */
    6628      1921668 :   cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
    6629              : 
    6630              :   /* The stack usage info is finalized during prologue expansion.  */
    6631      1480948 :   if (flag_stack_usage_info || flag_callgraph_info)
    6632          356 :     output_stack_usage ();
    6633      1480948 : }
    6634              : 
    6635              : /* Record a final call to CALLEE at LOCATION.  */
    6636              : 
    6637              : void
    6638            0 : record_final_call (tree callee, location_t location)
    6639              : {
    6640            0 :   struct callinfo_callee datum = { location, callee };
    6641            0 :   vec_safe_push (cfun->su->callees, datum);
    6642            0 : }
    6643              : 
    6644              : /* Record a dynamic allocation made for DECL_OR_EXP.  */
    6645              : 
    6646              : void
    6647            0 : record_dynamic_alloc (tree decl_or_exp)
    6648              : {
    6649            0 :   struct callinfo_dalloc datum;
    6650              : 
    6651            0 :   if (DECL_P (decl_or_exp))
    6652              :     {
    6653            0 :       datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
    6654            0 :       const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
    6655            0 :       const char *dot = strrchr (name, '.');
    6656            0 :       if (dot)
    6657            0 :         name = dot + 1;
    6658            0 :       datum.name = ggc_strdup (name);
    6659              :     }
    6660              :   else
    6661              :     {
    6662            0 :       datum.location = EXPR_LOCATION (decl_or_exp);
    6663            0 :       datum.name = NULL;
    6664              :     }
    6665              : 
    6666            0 :   vec_safe_push (cfun->su->dallocs, datum);
    6667            0 : }
    6668              : 
    6669              : namespace {
    6670              : 
    6671              : const pass_data pass_data_thread_prologue_and_epilogue =
    6672              : {
    6673              :   RTL_PASS, /* type */
    6674              :   "pro_and_epilogue", /* name */
    6675              :   OPTGROUP_NONE, /* optinfo_flags */
    6676              :   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
    6677              :   0, /* properties_required */
    6678              :   0, /* properties_provided */
    6679              :   0, /* properties_destroyed */
    6680              :   0, /* todo_flags_start */
    6681              :   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
    6682              : };
    6683              : 
    6684              : class pass_thread_prologue_and_epilogue : public rtl_opt_pass
    6685              : {
    6686              : public:
    6687       287872 :   pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    6688       575744 :     : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
    6689              :   {}
    6690              : 
    6691              :   /* opt_pass methods: */
    6692      1480955 :   bool gate (function *) final override
    6693              :     {
    6694      1480955 :       return !targetm.use_late_prologue_epilogue ();
    6695              :     }
    6696              : 
    6697      1480948 :   unsigned int execute (function * fun) final override
    6698              :     {
    6699      1480948 :       rest_of_handle_thread_prologue_and_epilogue (fun);
    6700      1480948 :       return 0;
    6701              :     }
    6702              : 
    6703              : }; // class pass_thread_prologue_and_epilogue
    6704              : 
    6705              : const pass_data pass_data_late_thread_prologue_and_epilogue =
    6706              : {
    6707              :   RTL_PASS, /* type */
    6708              :   "late_pro_and_epilogue", /* name */
    6709              :   OPTGROUP_NONE, /* optinfo_flags */
    6710              :   TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
    6711              :   0, /* properties_required */
    6712              :   0, /* properties_provided */
    6713              :   0, /* properties_destroyed */
    6714              :   0, /* todo_flags_start */
    6715              :   ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
    6716              : };
    6717              : 
    6718              : class pass_late_thread_prologue_and_epilogue : public rtl_opt_pass
    6719              : {
    6720              : public:
    6721       287872 :   pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
    6722       575744 :     : rtl_opt_pass (pass_data_late_thread_prologue_and_epilogue, ctxt)
    6723              :   {}
    6724              : 
    6725              :   /* opt_pass methods: */
    6726      1480955 :   bool gate (function *) final override
    6727              :     {
    6728      1480955 :       return targetm.use_late_prologue_epilogue ();
    6729              :     }
    6730              : 
    6731            0 :   unsigned int execute (function *fn) final override
    6732              :     {
    6733              :       /* It's not currently possible to have both delay slots and
    6734              :          late prologue/epilogue, since the latter has to run before
    6735              :          the former, and the former won't honor whatever restrictions
    6736              :          the latter is trying to enforce.  */
    6737            0 :       gcc_assert (!DELAY_SLOTS);
    6738            0 :       rest_of_handle_thread_prologue_and_epilogue (fn);
    6739            0 :       return 0;
    6740              :     }
    6741              : }; // class pass_late_thread_prologue_and_epilogue
    6742              : 
    6743              : } // anon namespace
    6744              : 
    6745              : rtl_opt_pass *
    6746       287872 : make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    6747              : {
    6748       287872 :   return new pass_thread_prologue_and_epilogue (ctxt);
    6749              : }
    6750              : 
    6751              : rtl_opt_pass *
    6752       287872 : make_pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
    6753              : {
    6754       287872 :   return new pass_late_thread_prologue_and_epilogue (ctxt);
    6755              : }
    6756              : 
    6757              : namespace {
    6758              : 
    6759              : const pass_data pass_data_zero_call_used_regs =
    6760              : {
    6761              :   RTL_PASS, /* type */
    6762              :   "zero_call_used_regs", /* name */
    6763              :   OPTGROUP_NONE, /* optinfo_flags */
    6764              :   TV_NONE, /* tv_id */
    6765              :   0, /* properties_required */
    6766              :   0, /* properties_provided */
    6767              :   0, /* properties_destroyed */
    6768              :   0, /* todo_flags_start */
    6769              :   0, /* todo_flags_finish */
    6770              : };
    6771              : 
    6772              : class pass_zero_call_used_regs: public rtl_opt_pass
    6773              : {
    6774              : public:
    6775       287872 :   pass_zero_call_used_regs (gcc::context *ctxt)
    6776       575744 :     : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
    6777              :   {}
    6778              : 
    6779              :   /* opt_pass methods: */
    6780              :   unsigned int execute (function *) final override;
    6781              : 
    6782              : }; // class pass_zero_call_used_regs
    6783              : 
    6784              : unsigned int
    6785      1480948 : pass_zero_call_used_regs::execute (function *fun)
    6786              : {
    6787      1480948 :   using namespace zero_regs_flags;
    6788      1480948 :   unsigned int zero_regs_type = UNSET;
    6789              : 
    6790      1480948 :   tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
    6791      1480948 :                                           DECL_ATTRIBUTES (fun->decl));
    6792              : 
    6793              :   /* Get the type of zero_call_used_regs from function attribute.
    6794              :      We have filtered out invalid attribute values already at this point.  */
    6795      1480948 :   if (attr_zero_regs)
    6796              :     {
    6797              :       /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
    6798              :          is the attribute argument's value.  */
    6799           88 :       attr_zero_regs = TREE_VALUE (attr_zero_regs);
    6800           88 :       gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
    6801           88 :       attr_zero_regs = TREE_VALUE (attr_zero_regs);
    6802           88 :       gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
    6803              : 
    6804          496 :       for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
    6805          496 :         if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
    6806          496 :                      zero_call_used_regs_opts[i].name) == 0)
    6807              :           {
    6808           88 :             zero_regs_type = zero_call_used_regs_opts[i].flag;
    6809           88 :             break;
    6810              :           }
    6811              :     }
    6812              : 
    6813           88 :   if (!zero_regs_type)
    6814      1480860 :     zero_regs_type = flag_zero_call_used_regs;
    6815              : 
    6816              :   /* No need to zero call-used-regs when no user request is present.  */
    6817      1480948 :   if (!(zero_regs_type & ENABLED))
    6818              :     return 0;
    6819              : 
    6820          182 :   edge_iterator ei;
    6821          182 :   edge e;
    6822              : 
    6823              :   /* This pass needs data flow information.  */
    6824          182 :   df_analyze ();
    6825              : 
    6826              :   /* Iterate over the function's return instructions and insert any
    6827              :      register zeroing required by the -fzero-call-used-regs command-line
    6828              :      option or the "zero_call_used_regs" function attribute.  */
    6829          365 :   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6830              :     {
    6831          183 :       rtx_insn *insn = BB_END (e->src);
    6832          183 :       if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
    6833          176 :         gen_call_used_regs_seq (insn, zero_regs_type);
    6834              :     }
    6835              : 
    6836              :   return 0;
    6837              : }
    6838              : 
    6839              : } // anon namespace
    6840              : 
    6841              : rtl_opt_pass *
    6842       287872 : make_pass_zero_call_used_regs (gcc::context *ctxt)
    6843              : {
    6844       287872 :   return new pass_zero_call_used_regs (ctxt);
    6845              : }
    6846              : 
    6847              : /* If CONSTRAINT is a matching constraint, then return its number.
    6848              :    Otherwise, return -1.  */
    6849              : 
    6850              : static int
    6851        43504 : matching_constraint_num (const char *constraint)
    6852              : {
    6853        43504 :   if (*constraint == '%')
    6854         1140 :     constraint++;
    6855              : 
    6856        43504 :   if (IN_RANGE (*constraint, '0', '9'))
    6857        31773 :     return strtoul (constraint, NULL, 10);
    6858              : 
    6859              :   return -1;
    6860              : }
    6861              : 
    6862              : /* This mini-pass fixes fall-out from SSA in asm statements that have
    6863              :    in-out constraints.  Say you start with
    6864              : 
    6865              :      orig = inout;
    6866              :      asm ("": "+mr" (inout));
    6867              :      use (orig);
    6868              : 
    6869              :    which is transformed very early to use explicit output and match operands:
    6870              : 
    6871              :      orig = inout;
    6872              :      asm ("": "=mr" (inout) : "0" (inout));
    6873              :      use (orig);
    6874              : 
    6875              :    Or, after SSA and copyprop,
    6876              : 
    6877              :      asm ("": "=mr" (inout_2) : "0" (inout_1));
    6878              :      use (inout_1);
    6879              : 
    6880              :    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
    6881              :    they represent two separate values, so they will get different pseudo
    6882              :    registers during expansion.  Then, since the two operands need to match
    6883              :    per the constraints, but use different pseudo registers, reload can
    6884              :    only register a reload for these operands.  But reloads can only be
    6885              :    satisfied by hardregs, not by memory, so we need a register for this
    6886              :    reload, just because we are presented with non-matching operands.
    6887              :    So, even though we allow memory for this operand, no memory can be
    6888              :    used for it, just because the two operands don't match.  This can
    6889              :    cause reload failures on register-starved targets.
    6890              : 
    6891              :    So it's a symptom of reload not being able to use memory for reloads
    6892              :    or, alternatively it's also a symptom of both operands not coming into
    6893              :    reload as matching (in which case the pseudo could go to memory just
    6894              :    fine, as the alternative allows it, and no reload would be necessary).
    6895              :    We fix the latter problem here, by transforming
    6896              : 
    6897              :      asm ("": "=mr" (inout_2) : "0" (inout_1));
    6898              : 
    6899              :    back to
    6900              : 
    6901              :      inout_2 = inout_1;
    6902              :      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
    6903              : 
    6904              : static void
    6905        34566 : match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
    6906              : {
    6907        34566 :   int i;
    6908        34566 :   bool changed = false;
    6909        34566 :   rtx op = SET_SRC (p_sets[0]);
    6910        34566 :   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
    6911        34566 :   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
    6912        34566 :   bool *output_matched = XALLOCAVEC (bool, noutputs);
    6913              : 
    6914        34566 :   memset (output_matched, 0, noutputs * sizeof (bool));
    6915        77141 :   for (i = 0; i < ninputs; i++)
    6916              :     {
    6917        42575 :       rtx input, output;
    6918        42575 :       rtx_insn *insns;
    6919        42575 :       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
    6920        42575 :       int match, j;
    6921              : 
    6922        42575 :       match = matching_constraint_num (constraint);
    6923        42575 :       if (match < 0)
    6924        11727 :         continue;
    6925              : 
    6926        30848 :       gcc_assert (match < noutputs);
    6927        30848 :       output = SET_DEST (p_sets[match]);
    6928        30848 :       input = RTVEC_ELT (inputs, i);
    6929              :       /* Only do the transformation for pseudos.  */
    6930        31821 :       if (! REG_P (output)
    6931        30671 :           || rtx_equal_p (output, input)
    6932        30014 :           || !(REG_P (input) || SUBREG_P (input)
    6933         3160 :                || MEM_P (input) || CONSTANT_P (input))
    6934        60861 :           || !general_operand (input, GET_MODE (output)))
    6935          973 :         continue;
    6936              : 
    6937              :       /* We can't do anything if the output is also used as input,
    6938              :          as we're going to overwrite it.  */
    6939        81611 :       for (j = 0; j < ninputs; j++)
    6940        51736 :         if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
    6941              :           break;
    6942        29875 :       if (j != ninputs)
    6943            0 :         continue;
    6944              : 
    6945              :       /* Avoid changing the same input several times.  For
    6946              :          asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
    6947              :          only change it once (to out1), rather than changing it
    6948              :          first to out1 and afterwards to out2.  */
    6949        29875 :       if (i > 0)
    6950              :         {
    6951        39902 :           for (j = 0; j < noutputs; j++)
    6952        32585 :             if (output_matched[j] && input == SET_DEST (p_sets[j]))
    6953              :               break;
    6954         7388 :           if (j != noutputs)
    6955           71 :             continue;
    6956              :         }
    6957        29804 :       output_matched[match] = true;
    6958              : 
    6959        29804 :       start_sequence ();
    6960        29804 :       emit_move_insn (output, copy_rtx (input));
    6961        29804 :       insns = end_sequence ();
    6962        29804 :       emit_insn_before (insns, insn);
    6963              : 
    6964        29804 :       constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT(SET_SRC(p_sets[match]));
    6965        29804 :       bool early_clobber_p = strchr (constraint, '&') != NULL;
    6966              : 
    6967              :       /* Now replace all mentions of the input with output.  We can't
    6968              :          just replace the occurrence in inputs[i], as the register might
    6969              :          also be used in some other input (or even in an address of an
    6970              :          output), which would mean possibly increasing the number of
    6971              :          inputs by one (namely 'output' in addition), which might pose
    6972              :          a too complicated problem for reload to solve.  E.g. this situation:
    6973              : 
    6974              :            asm ("" : "=r" (output), "=m" (input) : "0" (input))
    6975              : 
    6976              :          Here 'input' is used in two occurrences as input (once for the
    6977              :          input operand, once for the address in the second output operand).
    6978              :          If we would replace only the occurrence of the input operand (to
    6979              :          make the matching) we would be left with this:
    6980              : 
    6981              :            output = input
    6982              :            asm ("" : "=r" (output), "=m" (input) : "0" (output))
    6983              : 
    6984              :          Now we suddenly have two different input values (containing the same
    6985              :          value, but different pseudos) where we formerly had only one.
    6986              :          With more complicated asms this might lead to reload failures
    6987              :          which wouldn't have happen without this pass.  So, iterate over
    6988              :          all operands and replace all occurrences of the register used.
    6989              : 
    6990              :          However, if one or more of the 'input' uses have a non-matching
    6991              :          constraint and the matched output operand is an early clobber
    6992              :          operand, then do not replace the input operand, since by definition
    6993              :          it conflicts with the output operand and cannot share the same
    6994              :          register.  See PR89313 for details.  */
    6995              : 
    6996       146159 :       for (j = 0; j < noutputs; j++)
    6997       116355 :         if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
    6998       116355 :             && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
    6999          635 :           SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
    7000              :                                               input, output);
    7001        81252 :       for (j = 0; j < ninputs; j++)
    7002        51448 :         if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
    7003              :           {
    7004        28011 :             if (!early_clobber_p
    7005        28940 :                 || match == matching_constraint_num
    7006          929 :                               (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
    7007        28007 :               RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
    7008              :                                                    input, output);
    7009              :           }
    7010              : 
    7011              :       changed = true;
    7012              :     }
    7013              : 
    7014        34566 :   if (changed)
    7015        22832 :     df_insn_rescan (insn);
    7016        34566 : }
    7017              : 
    7018              : /* It is expected and desired that optimizations coalesce multiple pseudos into
    7019              :    one whenever possible.  However, in case of hard register constraints we may
    7020              :    have to undo this and introduce copies since otherwise we could constraint a
    7021              :    single pseudo to different hard registers.  For example, during register
    7022              :    allocation the following insn would be unsatisfiable since pseudo 60 is
    7023              :    constrained to hard register r5 and r6 at the same time.
    7024              : 
    7025              :    (insn 7 5 0 2 (asm_operands/v ("foo") ("") 0 [
    7026              :                (reg:DI 60) repeated x2
    7027              :            ]
    7028              :             [
    7029              :                (asm_input:DI ("{r5}") t.c:4)
    7030              :                (asm_input:DI ("{r6}") t.c:4)
    7031              :            ]
    7032              :             [] t.c:4) "t.c":4:3 -1
    7033              :         (expr_list:REG_DEAD (reg:DI 60)
    7034              :            (nil)))
    7035              : 
    7036              :    Therefore, introduce a copy of pseudo 60 and transform it into
    7037              : 
    7038              :    (insn 10 5 7 2 (set (reg:DI 62)
    7039              :            (reg:DI 60)) "t.c":4:3 1503 {*movdi_64}
    7040              :         (nil))
    7041              :    (insn 7 10 11 2 (asm_operands/v ("foo") ("") 0 [
    7042              :                (reg:DI 60)
    7043              :                (reg:DI 62)
    7044              :            ]
    7045              :             [
    7046              :                (asm_input:DI ("{r5}") t.c:4)
    7047              :                (asm_input:DI ("{r6}") t.c:4)
    7048              :            ]
    7049              :             [] t.c:4) "t.c":4:3 -1
    7050              :         (expr_list:REG_DEAD (reg:DI 62)
    7051              :            (expr_list:REG_DEAD (reg:DI 60)
    7052              :                (nil))))
    7053              : 
    7054              :    Now, LRA can assign pseudo 60 to r5, and pseudo 62 to r6.
    7055              : 
    7056              :    TODO: The current implementation is conservative and we could do a bit
    7057              :    better in case of alternatives.  For example
    7058              : 
    7059              :    (insn 7 5 0 2 (asm_operands/v ("foo") ("") 0 [
    7060              :                (reg:DI 60) repeated x2
    7061              :            ]
    7062              :             [
    7063              :                (asm_input:DI ("r,{r5}") t.c:4)
    7064              :                (asm_input:DI ("{r6},r") t.c:4)
    7065              :            ]
    7066              :             [] t.c:4) "t.c":4:3 -1
    7067              :         (expr_list:REG_DEAD (reg:DI 60)
    7068              :            (nil)))
    7069              : 
    7070              :    For this insn we wouldn't need to come up with a copy of pseudo 60 since in
    7071              :    each alternative pseudo 60 is constrained exactly one time.  */
    7072              : 
    7073              : static void
    7074      3579037 : match_asm_constraints_2 (rtx_insn *insn, rtx pat)
    7075              : {
    7076      3579037 :   rtx op;
    7077      3579037 :   if (GET_CODE (pat) == SET && GET_CODE (SET_SRC (pat)) == ASM_OPERANDS)
    7078              :     op = SET_SRC (pat);
    7079      3503137 :   else if (GET_CODE (pat) == ASM_OPERANDS)
    7080              :     op = pat;
    7081              :   else
    7082      3430657 :     return;
    7083       148380 :   int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
    7084       148380 :   rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
    7085       148380 :   bool changed = false;
    7086       148380 :   auto_bitmap constrained_regs;
    7087              : 
    7088       268328 :   for (int i = 0; i < ninputs; ++i)
    7089              :     {
    7090       119948 :       rtx input = RTVEC_ELT (inputs, i);
    7091       119948 :       const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
    7092        23559 :       if ((!REG_P (input) && !SUBREG_P (input))
    7093        96506 :           || (REG_P (input) && HARD_REGISTER_P (input))
    7094       215374 :           || strchr (constraint, '{') == nullptr)
    7095       119900 :         continue;
    7096           48 :       int regno;
    7097           48 :       if (SUBREG_P (input))
    7098              :         {
    7099            0 :           if (REG_P (SUBREG_REG (input)))
    7100            0 :             regno = REGNO (SUBREG_REG (input));
    7101              :           else
    7102            0 :             continue;
    7103              :         }
    7104              :       else
    7105           48 :         regno = REGNO (input);
    7106              :       /* Keep the first usage of a constrained pseudo as is and only
    7107              :          introduce copies for subsequent usages.  */
    7108           48 :       if (! bitmap_bit_p (constrained_regs, regno))
    7109              :         {
    7110           48 :           bitmap_set_bit (constrained_regs, regno);
    7111           48 :           continue;
    7112              :         }
    7113            0 :       rtx tmp = gen_reg_rtx (GET_MODE (input));
    7114            0 :       start_sequence ();
    7115            0 :       emit_move_insn (tmp, input);
    7116            0 :       rtx_insn *insns = get_insns ();
    7117            0 :       end_sequence ();
    7118            0 :       emit_insn_before (insns, insn);
    7119            0 :       RTVEC_ELT (inputs, i) = tmp;
    7120            0 :       changed = true;
    7121              :     }
    7122              : 
    7123       148380 :   if (changed)
    7124            0 :     df_insn_rescan (insn);
    7125       148380 : }
    7126              : 
    7127              : /* Add the decl D to the local_decls list of FUN.  */
    7128              : 
    7129              : void
    7130     37525476 : add_local_decl (struct function *fun, tree d)
    7131              : {
    7132     37525476 :   gcc_assert (VAR_P (d));
    7133     37525476 :   vec_safe_push (fun->local_decls, d);
    7134     37525476 : }
    7135              : 
    7136              : namespace {
    7137              : 
    7138              : const pass_data pass_data_match_asm_constraints =
    7139              : {
    7140              :   RTL_PASS, /* type */
    7141              :   "asmcons", /* name */
    7142              :   OPTGROUP_NONE, /* optinfo_flags */
    7143              :   TV_NONE, /* tv_id */
    7144              :   0, /* properties_required */
    7145              :   0, /* properties_provided */
    7146              :   0, /* properties_destroyed */
    7147              :   0, /* todo_flags_start */
    7148              :   0, /* todo_flags_finish */
    7149              : };
    7150              : 
    7151              : class pass_match_asm_constraints : public rtl_opt_pass
    7152              : {
    7153              : public:
    7154       287872 :   pass_match_asm_constraints (gcc::context *ctxt)
    7155       575744 :     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
    7156              :   {}
    7157              : 
    7158              :   /* opt_pass methods: */
    7159              :   unsigned int execute (function *) final override;
    7160              : 
    7161              : }; // class pass_match_asm_constraints
    7162              : 
    7163              : unsigned
    7164      1480946 : pass_match_asm_constraints::execute (function *fun)
    7165              : {
    7166      1480946 :   basic_block bb;
    7167      1480946 :   rtx_insn *insn;
    7168      1480946 :   rtx pat, *p_sets;
    7169      1480946 :   int noutputs;
    7170              : 
    7171      1480946 :   if (!crtl->has_asm_statement)
    7172              :     return 0;
    7173              : 
    7174        34369 :   df_set_flags (DF_DEFER_INSN_RESCAN);
    7175       332866 :   FOR_EACH_BB_FN (bb, fun)
    7176              :     {
    7177      3951791 :       FOR_BB_INSNS (bb, insn)
    7178              :         {
    7179      3653294 :           if (!INSN_P (insn))
    7180       654305 :             continue;
    7181              : 
    7182      2998989 :           pat = PATTERN (insn);
    7183              : 
    7184      2998989 :           if (GET_CODE (pat) == PARALLEL)
    7185      1482422 :             for (int i = XVECLEN (pat, 0) - 1; i >= 0; --i)
    7186      1031235 :               match_asm_constraints_2 (insn, XVECEXP (pat, 0, i));
    7187              :           else
    7188      2547802 :             match_asm_constraints_2 (insn, pat);
    7189              : 
    7190      2998989 :           if (GET_CODE (pat) == PARALLEL)
    7191       451187 :             p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
    7192      2547802 :           else if (GET_CODE (pat) == SET)
    7193      1730035 :             p_sets = &PATTERN (insn), noutputs = 1;
    7194              :           else
    7195       817767 :             continue;
    7196              : 
    7197      2181222 :           if (GET_CODE (*p_sets) == SET
    7198      2104948 :               && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
    7199        34566 :             match_asm_constraints_1 (insn, p_sets, noutputs);
    7200              :          }
    7201              :     }
    7202              : 
    7203              :   return TODO_df_finish;
    7204              : }
    7205              : 
    7206              : } // anon namespace
    7207              : 
    7208              : rtl_opt_pass *
    7209       287872 : make_pass_match_asm_constraints (gcc::context *ctxt)
    7210              : {
    7211       287872 :   return new pass_match_asm_constraints (ctxt);
    7212              : }
    7213              : 
    7214              : 
    7215              : #include "gt-function.h"
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.