LCOV - code coverage report
Current view: top level - gcc - cfgexpand.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 88.4 % 3520 3111
Test Date: 2026-03-28 14:25:54 Functions: 94.3 % 105 99
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* A pass for lowering trees to RTL.
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "target.h"
      25              : #include "rtl.h"
      26              : #include "tree.h"
      27              : #include "gimple.h"
      28              : #include "cfghooks.h"
      29              : #include "tree-pass.h"
      30              : #include "memmodel.h"
      31              : #include "tm_p.h"
      32              : #include "ssa.h"
      33              : #include "optabs.h"
      34              : #include "regs.h" /* For reg_renumber.  */
      35              : #include "emit-rtl.h"
      36              : #include "recog.h"
      37              : #include "cgraph.h"
      38              : #include "diagnostic.h"
      39              : #include "fold-const.h"
      40              : #include "varasm.h"
      41              : #include "stor-layout.h"
      42              : #include "stmt.h"
      43              : #include "print-tree.h"
      44              : #include "cfgrtl.h"
      45              : #include "cfganal.h"
      46              : #include "cfgbuild.h"
      47              : #include "cfgcleanup.h"
      48              : #include "dojump.h"
      49              : #include "explow.h"
      50              : #include "calls.h"
      51              : #include "expr.h"
      52              : #include "internal-fn.h"
      53              : #include "tree-eh.h"
      54              : #include "gimple-iterator.h"
      55              : #include "gimple-expr.h"
      56              : #include "gimple-walk.h"
      57              : #include "tree-cfg.h"
      58              : #include "tree-dfa.h"
      59              : #include "tree-ssa.h"
      60              : #include "except.h"
      61              : #include "gimple-pretty-print.h"
      62              : #include "toplev.h"
      63              : #include "debug.h"
      64              : #include "tree-inline.h"
      65              : #include "value-prof.h"
      66              : #include "tree-ssa-live.h"
      67              : #include "tree-outof-ssa.h"
      68              : #include "cfgloop.h"
      69              : #include "insn-attr.h" /* For INSN_SCHEDULING.  */
      70              : #include "stringpool.h"
      71              : #include "attribs.h"
      72              : #include "asan.h"
      73              : #include "tree-ssa-address.h"
      74              : #include "output.h"
      75              : #include "builtins.h"
      76              : #include "opts.h"
      77              : #include "gimple-range.h"
      78              : #include "rtl-iter.h"
      79              : 
      80              : /* Some systems use __main in a way incompatible with its use in gcc, in these
      81              :    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
      82              :    give the same symbol without quotes for an alternative entry point.  You
      83              :    must define both, or neither.  */
      84              : #ifndef NAME__MAIN
      85              : #define NAME__MAIN "__main"
      86              : #endif
      87              : 
      88              : /* This variable holds information helping the rewriting of SSA trees
      89              :    into RTL.  */
      90              : struct ssaexpand SA;
      91              : 
      92              : /* This variable holds the currently expanded gimple statement for purposes
      93              :    of communicating the profile info to the builtin expanders.  */
      94              : gimple *currently_expanding_gimple_stmt;
      95              : 
      96              : static rtx expand_debug_expr (tree);
      97              : 
      98              : static bool defer_stack_allocation (tree, bool);
      99              : 
     100              : static void record_alignment_for_reg_var (unsigned int);
     101              : 
     102              : /* Return an expression tree corresponding to the RHS of GIMPLE
     103              :    statement STMT.  */
     104              : 
     105              : tree
     106      3306772 : gimple_assign_rhs_to_tree (gimple *stmt)
     107              : {
     108      3306772 :   tree t;
     109      3306772 :   switch (gimple_assign_rhs_class (stmt))
     110              :     {
     111         1719 :     case GIMPLE_TERNARY_RHS:
     112         1719 :       t = build3 (gimple_assign_rhs_code (stmt),
     113         1719 :                   TREE_TYPE (gimple_assign_lhs (stmt)),
     114              :                   gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
     115              :                   gimple_assign_rhs3 (stmt));
     116         1719 :       break;
     117      1245262 :     case GIMPLE_BINARY_RHS:
     118      1245262 :       t = build2 (gimple_assign_rhs_code (stmt),
     119      1245262 :                   TREE_TYPE (gimple_assign_lhs (stmt)),
     120              :                   gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
     121      1245262 :       break;
     122       336273 :     case GIMPLE_UNARY_RHS:
     123       336273 :       t = build1 (gimple_assign_rhs_code (stmt),
     124       336273 :                   TREE_TYPE (gimple_assign_lhs (stmt)),
     125              :                   gimple_assign_rhs1 (stmt));
     126       336273 :       break;
     127      1723518 :     case GIMPLE_SINGLE_RHS:
     128      1723518 :       {
     129      1723518 :         t = gimple_assign_rhs1 (stmt);
     130              :         /* Avoid modifying this tree in place below.  */
     131      3369728 :         if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
     132      1603014 :              && gimple_location (stmt) != EXPR_LOCATION (t))
     133      1938352 :             || (gimple_block (stmt) && currently_expanding_to_rtl
     134        37394 :                 && EXPR_P (t)))
     135      1462253 :           t = copy_node (t);
     136              :         break;
     137              :       }
     138            0 :     default:
     139            0 :       gcc_unreachable ();
     140              :     }
     141              : 
     142      3306772 :   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
     143      3122482 :     SET_EXPR_LOCATION (t, gimple_location (stmt));
     144              : 
     145      3306772 :   return t;
     146              : }
     147              : 
     148              : 
     149              : #ifndef STACK_ALIGNMENT_NEEDED
     150              : #define STACK_ALIGNMENT_NEEDED 1
     151              : #endif
     152              : 
     153              : #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
     154              : 
     155              : /* Choose either CUR or NEXT as the leader DECL for a partition.
     156              :    Prefer ignored decls, to simplify debug dumps and reduce ambiguity
     157              :    out of the same user variable being in multiple partitions (this is
     158              :    less likely for compiler-introduced temps).  */
     159              : 
     160              : static tree
     161     59589371 : leader_merge (tree cur, tree next)
     162              : {
     163     59589371 :   if (cur == NULL || cur == next)
     164              :     return next;
     165              : 
     166      3192627 :   if (DECL_P (cur) && DECL_IGNORED_P (cur))
     167              :     return cur;
     168              : 
     169      2385760 :   if (DECL_P (next) && DECL_IGNORED_P (next))
     170       155135 :     return next;
     171              : 
     172              :   return cur;
     173              : }
     174              : 
     175              : /* Associate declaration T with storage space X.  If T is no
     176              :    SSA name this is exactly SET_DECL_RTL, otherwise make the
     177              :    partition of T associated with X.  */
     178              : static inline void
     179     70377719 : set_rtl (tree t, rtx x)
     180              : {
     181     70377719 :   gcc_checking_assert (!x
     182              :                        || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
     183              :                        || (use_register_for_decl (t)
     184              :                            ? (REG_P (x)
     185              :                               || (GET_CODE (x) == CONCAT
     186              :                                   && (REG_P (XEXP (x, 0))
     187              :                                       || SUBREG_P (XEXP (x, 0)))
     188              :                                   && (REG_P (XEXP (x, 1))
     189              :                                       || SUBREG_P (XEXP (x, 1))))
     190              :                               /* We need to accept PARALLELs for RESUT_DECLs
     191              :                                  because of vector types with BLKmode returned
     192              :                                  in multiple registers, but they are supposed
     193              :                                  to be uncoalesced.  */
     194              :                               || (GET_CODE (x) == PARALLEL
     195              :                                   && SSAVAR (t)
     196              :                                   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
     197              :                                   && (GET_MODE (x) == BLKmode
     198              :                                       || !flag_tree_coalesce_vars)))
     199              :                            : (MEM_P (x) || x == pc_rtx
     200              :                               || (GET_CODE (x) == CONCAT
     201              :                                   && MEM_P (XEXP (x, 0))
     202              :                                   && MEM_P (XEXP (x, 1))))));
     203              :   /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     204              :      RESULT_DECLs has the expected mode.  For memory, we accept
     205              :      unpromoted modes, since that's what we're likely to get.  For
     206              :      PARM_DECLs and RESULT_DECLs, we'll have been called by
     207              :      set_parm_rtl, which will give us the default def, so we don't
     208              :      have to compute it ourselves.  For RESULT_DECLs, we accept mode
     209              :      mismatches too, as long as we have BLKmode or are not coalescing
     210              :      across variables, so that we don't reject BLKmode PARALLELs or
     211              :      unpromoted REGs.  */
     212     65219152 :   gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
     213              :                        || (SSAVAR (t)
     214              :                            && TREE_CODE (SSAVAR (t)) == RESULT_DECL
     215              :                            && (promote_ssa_mode (t, NULL) == BLKmode
     216              :                                || !flag_tree_coalesce_vars))
     217              :                        || !use_register_for_decl (t)
     218              :                        || GET_MODE (x) == promote_ssa_mode (t, NULL));
     219              : 
     220              :   if (x)
     221              :     {
     222     65614360 :       bool skip = false;
     223     65614360 :       tree cur = NULL_TREE;
     224              :       rtx xm = x;
     225              : 
     226     65614360 :     retry:
     227     65614360 :       if (MEM_P (xm))
     228      4740662 :         cur = MEM_EXPR (xm);
     229              :       else if (REG_P (xm))
     230     54848709 :         cur = REG_EXPR (xm);
     231              :       else if (SUBREG_P (xm))
     232              :         {
     233            0 :           gcc_assert (subreg_lowpart_p (xm));
     234            0 :           xm = SUBREG_REG (xm);
     235            0 :           goto retry;
     236              :         }
     237              :       else if (GET_CODE (xm) == CONCAT)
     238              :         {
     239       391922 :           xm = XEXP (xm, 0);
     240       391922 :           goto retry;
     241              :         }
     242              :       else if (GET_CODE (xm) == PARALLEL)
     243              :         {
     244         3286 :           xm = XVECEXP (xm, 0, 0);
     245         3286 :           gcc_assert (GET_CODE (xm) == EXPR_LIST);
     246         3286 :           xm = XEXP (xm, 0);
     247         3286 :           goto retry;
     248              :         }
     249      5629781 :       else if (xm == pc_rtx)
     250              :         skip = true;
     251              :       else
     252            0 :         gcc_unreachable ();
     253              : 
     254     59589371 :       tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
     255              : 
     256     65219152 :       if (cur != next)
     257              :         {
     258     27513653 :           if (MEM_P (x))
     259      3944588 :             set_mem_attributes (x,
     260      1972294 :                                 next && TREE_CODE (next) == SSA_NAME
     261        12368 :                                 ? TREE_TYPE (next)
     262              :                                 : next, true);
     263              :           else
     264     25541359 :             set_reg_attrs_for_decl_rtl (next, x);
     265              :         }
     266              :     }
     267              : 
     268     70377719 :   if (TREE_CODE (t) == SSA_NAME)
     269              :     {
     270     57137522 :       int part = var_to_partition (SA.map, t);
     271     57137522 :       if (part != NO_PARTITION)
     272              :         {
     273     57137522 :           if (SA.partition_to_pseudo[part])
     274     31257389 :             gcc_assert (SA.partition_to_pseudo[part] == x);
     275     25880133 :           else if (x != pc_rtx)
     276     25877192 :             SA.partition_to_pseudo[part] = x;
     277              :         }
     278              :       /* For the benefit of debug information at -O0 (where
     279              :          vartracking doesn't run) record the place also in the base
     280              :          DECL.  For PARMs and RESULTs, do so only when setting the
     281              :          default def.  */
     282    114272103 :       if (x && x != pc_rtx && SSA_NAME_VAR (t)
     283     75059041 :           && (VAR_P (SSA_NAME_VAR (t))
     284      7418879 :               || SSA_NAME_IS_DEFAULT_DEF (t)))
     285              :         {
     286     17742656 :           tree var = SSA_NAME_VAR (t);
     287              :           /* If we don't yet have something recorded, just record it now.  */
     288     17742656 :           if (!DECL_RTL_SET_P (var))
     289      7483747 :             SET_DECL_RTL (var, x);
     290              :           /* If we have it set already to "multiple places" don't
     291              :              change this.  */
     292     10258909 :           else if (DECL_RTL (var) == pc_rtx)
     293              :             ;
     294              :           /* If we have something recorded and it's not the same place
     295              :              as we want to record now, we have multiple partitions for the
     296              :              same base variable, with different places.  We can't just
     297              :              randomly chose one, hence we have to say that we don't know.
     298              :              This only happens with optimization, and there var-tracking
     299              :              will figure out the right thing.  */
     300      9115920 :           else if (DECL_RTL (var) != x)
     301       240720 :             SET_DECL_RTL (var, pc_rtx);
     302              :         }
     303              :     }
     304              :   else
     305     13240197 :     SET_DECL_RTL (t, x);
     306     70377719 : }
     307              : 
     308              : /* This structure holds data relevant to one variable that will be
     309              :    placed in a stack slot.  */
     310              : class stack_var
     311              : {
     312              : public:
     313              :   /* The Variable.  */
     314              :   tree decl;
     315              : 
     316              :   /* Initially, the size of the variable.  Later, the size of the partition,
     317              :      if this variable becomes it's partition's representative.  */
     318              :   poly_uint64 size;
     319              : 
     320              :   /* The *byte* alignment required for this variable.  Or as, with the
     321              :      size, the alignment for this partition.  */
     322              :   unsigned int alignb;
     323              : 
     324              :   /* The partition representative.  */
     325              :   unsigned representative;
     326              : 
     327              :   /* The next stack variable in the partition, or EOC.  */
     328              :   unsigned next;
     329              : 
     330              :   /* The numbers of conflicting stack variables.  */
     331              :   bitmap conflicts;
     332              : };
     333              : 
     334              : #define EOC  ((unsigned)-1)
     335              : 
     336              : /* We have an array of such objects while deciding allocation.  */
     337              : static class stack_var *stack_vars;
     338              : static unsigned stack_vars_alloc;
     339              : static unsigned stack_vars_num;
     340              : static hash_map<tree, unsigned> *decl_to_stack_part;
     341              : 
     342              : #define INVALID_STACK_INDEX ((unsigned)-1)
     343              : 
     344              : /* Conflict bitmaps go on this obstack.  This allows us to destroy
     345              :    all of them in one big sweep.  */
     346              : static bitmap_obstack stack_var_bitmap_obstack;
     347              : 
     348              : /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
     349              :    is non-decreasing.  */
     350              : static unsigned *stack_vars_sorted;
     351              : 
     352              : /* The phase of the stack frame.  This is the known misalignment of
     353              :    virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
     354              :    (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
     355              : static int frame_phase;
     356              : 
     357              : /* Used during expand_used_vars to remember if we saw any decls for
     358              :    which we'd like to enable stack smashing protection.  */
     359              : static bool has_protected_decls;
     360              : 
     361              : /* Used during expand_used_vars.  Remember if we say a character buffer
     362              :    smaller than our cutoff threshold.  Used for -Wstack-protector.  */
     363              : static bool has_short_buffer;
     364              : 
     365              : /* Compute the byte alignment to use for DECL.  Ignore alignment
     366              :    we can't do with expected alignment of the stack boundary.  */
     367              : 
     368              : static unsigned int
     369      6426197 : align_local_variable (tree decl, bool really_expand)
     370              : {
     371      6426197 :   unsigned int align;
     372              : 
     373      6426197 :   if (TREE_CODE (decl) == SSA_NAME)
     374              :     {
     375       449796 :       tree type = TREE_TYPE (decl);
     376       449796 :       machine_mode mode = TYPE_MODE (type);
     377              : 
     378       449796 :       align = TYPE_ALIGN (type);
     379       449796 :       if (mode != BLKmode
     380       449796 :           && align < GET_MODE_ALIGNMENT (mode))
     381          458 :         align = GET_MODE_ALIGNMENT (mode);
     382              :     }
     383              :   else
     384      5976401 :     align = LOCAL_DECL_ALIGNMENT (decl);
     385              : 
     386      6426197 :   if (hwassist_sanitize_stack_p ())
     387          271 :     align = MAX (align, (unsigned) HWASAN_TAG_GRANULE_SIZE * BITS_PER_UNIT);
     388              : 
     389      6426197 :   if (TREE_CODE (decl) != SSA_NAME && really_expand)
     390              :     /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
     391              :        That is done before IPA and could bump alignment based on host
     392              :        backend even for offloaded code which wants different
     393              :        LOCAL_DECL_ALIGNMENT.  */
     394      1514344 :     SET_DECL_ALIGN (decl, align);
     395              : 
     396      6426197 :   return align / BITS_PER_UNIT;
     397              : }
     398              : 
     399              : /* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
     400              :    down otherwise.  Return truncated BASE value.  */
     401              : 
     402              : static inline unsigned HOST_WIDE_INT
     403              : align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
     404              : {
     405              :   return align_up ? (base + align - 1) & -align : base & -align;
     406              : }
     407              : 
     408              : /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
     409              :    Return the frame offset.  */
     410              : 
     411              : static poly_int64
     412      1718177 : alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
     413              : {
     414      1718177 :   poly_int64 offset, new_frame_offset;
     415              : 
     416      1718177 :   if (FRAME_GROWS_DOWNWARD)
     417              :     {
     418      1718177 :       new_frame_offset
     419      1718177 :         = aligned_lower_bound (frame_offset - frame_phase - size,
     420      1718177 :                                align) + frame_phase;
     421      1718177 :       offset = new_frame_offset;
     422              :     }
     423              :   else
     424              :     {
     425              :       new_frame_offset
     426              :         = aligned_upper_bound (frame_offset - frame_phase,
     427              :                                align) + frame_phase;
     428              :       offset = new_frame_offset;
     429              :       new_frame_offset += size;
     430              :     }
     431      1718177 :   frame_offset = new_frame_offset;
     432              : 
     433      1718177 :   if (frame_offset_overflow (frame_offset, cfun->decl))
     434            0 :     frame_offset = offset = 0;
     435              : 
     436      1718177 :   return offset;
     437              : }
     438              : 
     439              : /* Ensure that the stack is aligned to ALIGN bytes.
     440              :    Return the new frame offset.  */
     441              : static poly_int64
     442         1810 : align_frame_offset (unsigned HOST_WIDE_INT align)
     443              : {
     444            0 :   return alloc_stack_frame_space (0, align);
     445              : }
     446              : 
     447              : /* Accumulate DECL into STACK_VARS.  */
     448              : 
     449              : static void
     450      5629781 : add_stack_var (tree decl, bool really_expand)
     451              : {
     452      5629781 :   class stack_var *v;
     453              : 
     454      5629781 :   if (stack_vars_num >= stack_vars_alloc)
     455              :     {
     456      1325563 :       if (stack_vars_alloc)
     457        33957 :         stack_vars_alloc = stack_vars_alloc * 3 / 2;
     458              :       else
     459      1291606 :         stack_vars_alloc = 32;
     460      1325563 :       stack_vars
     461      1325563 :         = XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
     462              :     }
     463      5629781 :   if (!decl_to_stack_part)
     464            0 :     decl_to_stack_part = new hash_map<tree, unsigned>;
     465              : 
     466      5629781 :   v = &stack_vars[stack_vars_num];
     467      5629781 :   decl_to_stack_part->put (decl, stack_vars_num);
     468              : 
     469      5629781 :   v->decl = decl;
     470      5629781 :   tree size = TREE_CODE (decl) == SSA_NAME
     471      5629781 :     ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
     472      5629781 :     : DECL_SIZE_UNIT (decl);
     473      5629781 :   v->size = tree_to_poly_uint64 (size);
     474              :   /* Ensure that all variables have size, so that &a != &b for any two
     475              :      variables that are simultaneously live.  */
     476      5629781 :   if (known_eq (v->size, 0U))
     477        24135 :     v->size = 1;
     478      5629781 :   v->alignb = align_local_variable (decl, really_expand);
     479              :   /* An alignment of zero can mightily confuse us later.  */
     480      5629781 :   gcc_assert (v->alignb != 0);
     481              : 
     482              :   /* All variables are initially in their own partition.  */
     483      5629781 :   v->representative = stack_vars_num;
     484      5629781 :   v->next = EOC;
     485              : 
     486              :   /* All variables initially conflict with no other.  */
     487      5629781 :   v->conflicts = NULL;
     488              : 
     489              :   /* Ensure that this decl doesn't get put onto the list twice.  */
     490      5629781 :   set_rtl (decl, pc_rtx);
     491              : 
     492      5629781 :   stack_vars_num++;
     493      5629781 : }
     494              : 
     495              : /* Make the decls associated with luid's X and Y conflict.  */
     496              : 
     497              : static void
     498     11126634 : add_stack_var_conflict (unsigned x, unsigned y)
     499              : {
     500     11126634 :   class stack_var *a = &stack_vars[x];
     501     11126634 :   class stack_var *b = &stack_vars[y];
     502     11126634 :   if (x == y)
     503              :     return;
     504     10094193 :   if (!a->conflicts)
     505       631802 :     a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     506     10094193 :   if (!b->conflicts)
     507       110444 :     b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     508     10094193 :   bitmap_set_bit (a->conflicts, y);
     509     10094193 :   bitmap_set_bit (b->conflicts, x);
     510              : }
     511              : 
     512              : /* Check whether the decls associated with luid's X and Y conflict.  */
     513              : 
     514              : static bool
     515     11241159 : stack_var_conflict_p (unsigned x, unsigned y)
     516              : {
     517     11241159 :   class stack_var *a = &stack_vars[x];
     518     11241159 :   class stack_var *b = &stack_vars[y];
     519     11241159 :   if (x == y)
     520              :     return false;
     521              :   /* Partitions containing an SSA name result from gimple registers
     522              :      with things like unsupported modes.  They are top-level and
     523              :      hence conflict with everything else.  */
     524     11241159 :   if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
     525              :     return true;
     526              : 
     527     11206770 :   if (!a->conflicts || !b->conflicts)
     528              :     return false;
     529     11152509 :   return bitmap_bit_p (a->conflicts, y);
     530              : }
     531              : 
     532              : /* Returns the DECL's index into the stack_vars array.
     533              :    If the DECL does not exist return INVALID_STACK_INDEX.  */
     534              : static unsigned
     535     45518281 : decl_stack_index (tree decl)
     536              : {
     537     45518281 :   if (!decl)
     538              :     return INVALID_STACK_INDEX;
     539     45518281 :   if (!DECL_P (decl))
     540              :     return INVALID_STACK_INDEX;
     541     34995346 :   if (DECL_RTL_IF_SET (decl) != pc_rtx)
     542              :     return INVALID_STACK_INDEX;
     543     23492248 :   unsigned *v = decl_to_stack_part->get (decl);
     544     23492248 :   if (!v)
     545              :     return INVALID_STACK_INDEX;
     546              : 
     547     23492248 :   unsigned indx = *v;
     548     23492248 :   gcc_checking_assert (indx != INVALID_STACK_INDEX);
     549     23492248 :   gcc_checking_assert (indx < stack_vars_num);
     550              :   return indx;
     551              : }
     552              : 
     553              : /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
     554              :    enter its partition number into bitmap DATA.  */
     555              : 
     556              : static bool
     557     29030731 : visit_op (gimple *, tree op, tree, void *data)
     558              : {
     559     29030731 :   bitmap active = (bitmap)data;
     560     29030731 :   op = get_base_address (op);
     561     29030731 :   unsigned idx = decl_stack_index (op);
     562     29030731 :   if (idx != INVALID_STACK_INDEX)
     563     14420100 :     bitmap_set_bit (active, idx);
     564     29030731 :   return false;
     565              : }
     566              : 
     567              : /* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
     568              :    record conflicts between it and all currently active other partitions
     569              :    from bitmap DATA.  */
     570              : 
     571              : static bool
     572     13247104 : visit_conflict (gimple *, tree op, tree, void *data)
     573              : {
     574     13247104 :   bitmap active = (bitmap)data;
     575     13247104 :   op = get_base_address (op);
     576     13247104 :   unsigned num = decl_stack_index (op);
     577     13247104 :   if (num != INVALID_STACK_INDEX
     578     13247104 :       && bitmap_set_bit (active, num))
     579              :     {
     580       959620 :       bitmap_iterator bi;
     581       959620 :       unsigned i;
     582       959620 :       gcc_assert (num < stack_vars_num);
     583     10655764 :       EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
     584      9696144 :         add_stack_var_conflict (num, i);
     585              :     }
     586     13247104 :   return false;
     587              : }
     588              : 
     589              : /* A cache for ssa name to address of stack variables.
     590              :    When taking into account if a ssa name refers to an
     591              :    address of a stack variable, we need to walk the
     592              :    expressions backwards to find the addresses. This
     593              :    cache is there so we don't need to walk the expressions
     594              :    all the time.  */
     595              : struct vars_ssa_cache
     596              : {
     597              : private:
     598              :   /* Currently an entry is a bitmap of all of the known stack variables
     599              :      addresses that are referenced by the ssa name.
     600              :      When the bitmap is the nullptr, then there is no cache.
     601              :      Currently only empty bitmaps are shared.
     602              :      The reason for why empty cache is not just a null is so we know the
     603              :      cache for an entry is filled in.  */
     604              :   struct entry
     605              :   {
     606              :     bitmap bmap = nullptr;
     607              :   };
     608              :   entry *vars_ssa_caches;
     609              : public:
     610              : 
     611              :   vars_ssa_cache();
     612              :   ~vars_ssa_cache();
     613              :   const_bitmap operator() (tree name);
     614              :   void dump (FILE *file);
     615              : 
     616              : private:
     617              :   /* Can't copy. */
     618              :   vars_ssa_cache(const vars_ssa_cache&) = delete;
     619              :   vars_ssa_cache(vars_ssa_cache&&) = delete;
     620              : 
     621              :   /* The shared empty bitmap.  */
     622              :   bitmap empty;
     623              : 
     624              :   /* Unshare the index, currently only need
     625              :      to unshare if the entry was empty. */
     626      1039650 :   void unshare(int indx)
     627              :   {
     628      1039650 :     if (vars_ssa_caches[indx].bmap == empty)
     629       443975 :         vars_ssa_caches[indx].bmap = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     630      1039650 :   }
     631              :   void create (tree);
     632              :   bool exists (tree use);
     633              :   void add_one (tree old_name, unsigned);
     634              :   bool update (tree old_name, tree use);
     635              : };
     636              : 
     637              : /* Constructor of the cache, create the cache array. */
     638       145086 : vars_ssa_cache::vars_ssa_cache ()
     639              : {
     640     31908590 :   vars_ssa_caches = new entry[num_ssa_names]{};
     641              : 
     642              :   /* Create the shared empty bitmap too. */
     643       145086 :   empty = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     644       145086 : }
     645              : 
     646              : /* Delete the array. The bitmaps will be freed
     647              :    when stack_var_bitmap_obstack is freed.  */
     648       145086 : vars_ssa_cache::~vars_ssa_cache ()
     649              : {
     650       145086 :   delete []vars_ssa_caches;
     651       145086 : }
     652              : 
     653              : /* Create an empty entry for the USE ssa name.  */
     654              : void
     655     10623560 : vars_ssa_cache::create (tree use)
     656              : {
     657     10623560 :   int num = SSA_NAME_VERSION (use);
     658     10623560 :   if (vars_ssa_caches[num].bmap)
     659              :     return;
     660     10623560 :   vars_ssa_caches[num].bmap = empty;
     661              : }
     662              : 
     663              : /* Returns true if the cache for USE exists.  */
     664              : bool
     665     69691418 : vars_ssa_cache::exists (tree use)
     666              : {
     667     69691418 :   int num = SSA_NAME_VERSION (use);
     668     69691418 :   return vars_ssa_caches[num].bmap != nullptr;
     669              : }
     670              : 
     671              : /* Add to USE's bitmap for stack variable IDX.  */
     672              : void
     673       150875 : vars_ssa_cache::add_one (tree use, unsigned idx)
     674              : {
     675       150875 :   gcc_assert (idx != INVALID_STACK_INDEX);
     676       150875 :   int num = SSA_NAME_VERSION (use);
     677       150875 :   gcc_assert (vars_ssa_caches[num].bmap);
     678       150875 :   unshare (num);
     679       150875 :   bitmap_set_bit (vars_ssa_caches[num].bmap, idx);
     680       150875 : }
     681              : 
     682              : /* Update cache of OLD_NAME from the USE's cache. */
     683              : bool
     684     24503222 : vars_ssa_cache::update (tree old_name, tree use)
     685              : {
     686     24503222 :   if (old_name == use)
     687              :     return false;
     688     15664320 :   int num = SSA_NAME_VERSION (use);
     689     15664320 :   int old_num = SSA_NAME_VERSION (old_name);
     690              : 
     691              :   /* If the old name was empty, then there is nothing to be updated. */
     692     15664320 :   if (vars_ssa_caches[num].bmap == empty)
     693              :     return false;
     694       888775 :   unshare (old_num);
     695       888775 :   return bitmap_ior_into (vars_ssa_caches[old_num].bmap, vars_ssa_caches[num].bmap);
     696              : }
     697              : 
     698              : /* Dump out the cache. Note empty and non-filled
     699              :    in ssa names are not printed out. */
     700              : void
     701            0 : vars_ssa_cache::dump (FILE *file)
     702              : {
     703            0 :   fprintf (file, "var ssa address cache\n");
     704            0 :   for (unsigned num = 0; num < num_ssa_names; num++)
     705              :     {
     706            0 :       if (!vars_ssa_caches[num].bmap
     707            0 :           || vars_ssa_caches[num].bmap == empty)
     708            0 :         continue;
     709            0 :       fprintf (file, "_%d refers to:\n", num);
     710            0 :       bitmap_iterator bi;
     711            0 :       unsigned i;
     712            0 :       EXECUTE_IF_SET_IN_BITMAP (vars_ssa_caches[num].bmap, 0, i, bi)
     713              :         {
     714            0 :           fputc ('\t', file);
     715            0 :           print_generic_expr (file, stack_vars[i].decl, dump_flags);
     716              :         }
     717            0 :       fputc ('\n', file);
     718              :   }
     719            0 :   fputc ('\n', file);
     720            0 : }
     721              : 
     722              : /* Returns the filled in cache for NAME.
     723              :    This will fill in the cache if it does not exist already.
     724              :    Returns an empty for ssa names that can't contain pointers
     725              :    (only intergal types and pointer types will contain pointers).  */
     726              : 
     727              : const_bitmap
     728     54649167 : vars_ssa_cache::operator() (tree name)
     729              : {
     730     54649167 :   gcc_assert (TREE_CODE (name) == SSA_NAME);
     731              : 
     732     90081331 :   if (!POINTER_TYPE_P (TREE_TYPE (name))
     733     89026016 :       && !ANY_INTEGRAL_TYPE_P (TREE_TYPE (name)))
     734      2408258 :     return empty;
     735              : 
     736     52240909 :   if (exists (name))
     737     43447663 :     return vars_ssa_caches[SSA_NAME_VERSION (name)].bmap;
     738              : 
     739      8793246 :   auto_vec<std::pair<tree,tree>, 4> work_list;
     740      8793246 :   auto_vec<std::pair<tree,tree>, 4> update_cache_list;
     741              : 
     742      8793246 :   work_list.safe_push (std::make_pair (name, name));
     743              : 
     744     32451379 :   while (!work_list.is_empty ())
     745              :     {
     746     23658133 :       auto item = work_list.pop();
     747     23658133 :       tree use = item.first;
     748     23658133 :       tree old_name = item.second;
     749     23658133 :       if (TREE_CODE (use) == ADDR_EXPR)
     750              :         {
     751       397484 :           tree op = TREE_OPERAND (use, 0);
     752       397484 :           op = get_base_address (op);
     753       397484 :           unsigned idx = decl_stack_index (op);
     754       397484 :           if (idx != INVALID_STACK_INDEX)
     755       150875 :             add_one (old_name, idx);
     756     13034573 :           continue;
     757       397484 :         }
     758              : 
     759     23260649 :       if (TREE_CODE (use) != SSA_NAME)
     760      5734648 :         continue;
     761              : 
     762     30577647 :       if (!POINTER_TYPE_P (TREE_TYPE (use))
     763     30463685 :           && !ANY_INTEGRAL_TYPE_P (TREE_TYPE (use)))
     764        75492 :         continue;
     765              : 
     766              :       /* Mark the old ssa name needs to be update from the use. */
     767     17450509 :       update_cache_list.safe_push (item);
     768              : 
     769              :       /* If the cache exists for the use, don't try to recreate it. */
     770     17450509 :       if (exists (use))
     771              :         {
     772              :           /* Update the cache here, this can reduce the number of
     773              :              times through the update loop below.  */
     774      6826949 :           update (old_name, use);
     775      6826949 :           continue;
     776              :         }
     777              : 
     778              :       /* Create the cache bitmap for the use and also
     779              :          so we don't go into an infinite loop for some phi nodes with loops.  */
     780     10623560 :       create (use);
     781              : 
     782     10623560 :       gimple *g = SSA_NAME_DEF_STMT (use);
     783              :  
     784              :       /* CONSTRUCTOR here is always a vector initialization,
     785              :          walk each element too. */
     786     10623560 :       if (gimple_assign_single_p (g)
     787     10623560 :           && TREE_CODE (gimple_assign_rhs1 (g)) == CONSTRUCTOR)
     788              :         {
     789              :           tree ctr = gimple_assign_rhs1 (g);
     790              :           unsigned i;
     791              :           tree elm;
     792       302935 :           FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctr), i, elm)
     793       212337 :             work_list.safe_push (std::make_pair (elm, use));
     794              :         }
     795              :       /* For assignments, walk each operand for possible addresses.
     796              :          For PHI nodes, walk each argument. */
     797     10532962 :       else if (gassign *a = dyn_cast <gassign *> (g))
     798              :         {
     799              :           /* operand 0 is the lhs. */
     800     18810371 :           for (unsigned i = 1; i < gimple_num_ops (g); i++)
     801     10871041 :             work_list.safe_push (std::make_pair (gimple_op (a, i), use));
     802              :         }
     803     12157830 :       else if (gphi *p = dyn_cast <gphi *> (g))
     804      5315779 :         for (unsigned i = 0; i < gimple_phi_num_args (p); ++i)
     805      3781509 :           work_list.safe_push (std::make_pair (gimple_phi_arg_def (p, i), use));
     806              :     }
     807              : 
     808              :   /* Update the cache. Note a loop is needed as phi nodes could
     809              :      cause a loop to form. The number of times through this loop
     810              :      will be small though.  */
     811      8838868 :   bool changed;
     812     17677736 :   do {
     813      8838868 :     changed = false;
     814      8838868 :     unsigned int i;
     815      8838868 :     std::pair<tree,tree> *e;
     816     44147255 :     FOR_EACH_VEC_ELT_REVERSE (update_cache_list, i, e)
     817              :       {
     818     17676273 :         if (update (e->second, e->first))
     819        52584 :           changed = true;
     820              :       }
     821              :   } while (changed);
     822              : 
     823      8793246 :   return vars_ssa_caches[SSA_NAME_VERSION (name)].bmap;
     824      8793246 : }
     825              : 
     826              : /* Helper function for add_scope_conflicts_1.  For USE on
     827              :    a stmt, if it is a SSA_NAME and in its defining statement
     828              :    is known to be based on some ADDR_EXPR, invoke VISIT
     829              :    on that ADDR_EXPR.  */
     830              : 
     831              : static inline void
     832     54649167 : add_scope_conflicts_2 (vars_ssa_cache &cache, tree name,
     833              :                        bitmap work, walk_stmt_load_store_addr_fn visit)
     834              : {
     835     54649167 :   gcc_assert (TREE_CODE (name) == SSA_NAME);
     836              : 
     837              :   /* Query the cache for the mapping of addresses that are referenced by
     838              :      ssa name NAME.  Querying it will fill in it.  */
     839     54649167 :   bitmap_iterator bi;
     840     54649167 :   unsigned i;
     841     54649167 :   const_bitmap bmap = cache (name);
     842              :   /* Visit each stack variable that is referenced.  */
     843     56962168 :   EXECUTE_IF_SET_IN_BITMAP (bmap, 0, i, bi)
     844      2313001 :     visit (nullptr, stack_vars[i].decl, nullptr, work);
     845     54649167 : }
     846              : 
     847              : /* Helper routine for add_scope_conflicts, calculating the active partitions
     848              :    at the end of BB, leaving the result in WORK.  We're called to generate
     849              :    conflicts when FOR_CONFLICT is true, otherwise we're just tracking
     850              :    liveness.  */
     851              : 
     852              : static void
     853     15931959 : add_scope_conflicts_1 (vars_ssa_cache &cache, basic_block bb, bitmap work, bool for_conflict)
     854              : {
     855     15931959 :   edge e;
     856     15931959 :   edge_iterator ei;
     857     15931959 :   gimple_stmt_iterator gsi;
     858     15931959 :   walk_stmt_load_store_addr_fn visit;
     859     15931959 :   use_operand_p use_p;
     860     15931959 :   ssa_op_iter iter;
     861     15931959 :   bool had_non_clobbers = false;
     862              : 
     863     15931959 :   bitmap_clear (work);
     864              :   /* The copy what was alive out going from the edges. */
     865     40291330 :   FOR_EACH_EDGE (e, ei, bb->preds)
     866     24359371 :     bitmap_ior_into (work, (bitmap)e->src->aux);
     867              : 
     868     15931959 :   visit = for_conflict ? visit_conflict : visit_op;
     869              : 
     870              :   /* Addresses coming into the bb via phis are alive at the entry point. */
     871     20921739 :   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     872      4989780 :     add_scope_conflicts_2 (cache, gimple_phi_result (gsi_stmt (gsi)), work, visit_op);
     873              : 
     874    174297636 :   for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     875              :     {
     876    158365677 :       gimple *stmt = gsi_stmt (gsi);
     877              : 
     878              :       /* Debug statements are not considered for liveness. */
     879    158365677 :       if (is_gimple_debug (stmt))
     880     96914475 :         continue;
     881              : 
     882              :       /* If we had `var = {CLOBBER}`, then var is no longer
     883              :          considered alive after this point but might become
     884              :          alive later on. */
     885     61451202 :       if (gimple_clobber_p (stmt))
     886              :         {
     887      2910888 :           tree lhs = gimple_assign_lhs (stmt);
     888              :           /* Handle only plain var clobbers, not partial ones.
     889              :              Nested functions lowering and C++ front-end inserts clobbers
     890              :              which are partial clobbers.  */
     891      2910888 :           if (!VAR_P (lhs))
     892        67926 :             continue;
     893      2842962 :           unsigned indx = decl_stack_index (lhs);
     894      2842962 :           if (indx != INVALID_STACK_INDEX)
     895      2331259 :             bitmap_clear_bit (work, indx);
     896              :         }
     897              :       else
     898              :         {
     899     58540314 :           if (for_conflict && !had_non_clobbers)
     900              :             {
     901              :               /* When we are inheriting live variables from our predecessors
     902              :                  through a CFG merge we might not see an actual mention of
     903              :                  the variables to record the approprate conflict as defs/uses
     904              :                  might be through indirect stores/loads.  For this reason
     905              :                  we have to make sure each live variable conflicts with
     906              :                  each other.  When there's just a single predecessor the
     907              :                  set of conflicts is already up-to-date.
     908              :                  We perform this delayed at the first real instruction to
     909              :                  allow clobbers starting this block to remove variables from
     910              :                  the set of live variables.  */
     911      4818580 :               bitmap_iterator bi;
     912      4818580 :               unsigned i;
     913      4818580 :               if (EDGE_COUNT (bb->preds) > 1)
     914     12412223 :                 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
     915              :                   {
     916     10993941 :                     class stack_var *a = &stack_vars[i];
     917     10993941 :                     if (!a->conflicts)
     918       255496 :                       a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     919     10993941 :                     bitmap_ior_into (a->conflicts, work);
     920              :                   }
     921      4818580 :               had_non_clobbers = true;
     922              :             }
     923     58540314 :           walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
     924    108199701 :           FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
     925     49659387 :             add_scope_conflicts_2 (cache, USE_FROM_PTR (use_p), work, visit);
     926              :         }
     927              :     }
     928              : 
     929              :   /* When there was no real instruction but there's a CFG merge we need
     930              :      to add the conflicts now.  */
     931     15931959 :   if (for_conflict && !had_non_clobbers && EDGE_COUNT (bb->preds) > 1)
     932              :     {
     933       127824 :       bitmap_iterator bi;
     934       127824 :       unsigned i;
     935       914848 :       EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
     936              :         {
     937       787024 :           class stack_var *a = &stack_vars[i];
     938       787024 :           if (!a->conflicts)
     939        22166 :             a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     940       787024 :           bitmap_ior_into (a->conflicts, work);
     941              :         }
     942              :     }
     943     15931959 : }
     944              : 
     945              : /* Generate stack partition conflicts between all partitions that are
     946              :    simultaneously live.  */
     947              : 
     948              : static void
     949       230501 : add_scope_conflicts (void)
     950              : {
     951              :   /* If there is only one variable, there is nothing to be done as
     952              :      there is only possible partition.  */
     953       230501 :   if (stack_vars_num == 1)
     954        85415 :     return;
     955              : 
     956       145086 :   basic_block bb;
     957       145086 :   bool changed;
     958       145086 :   bitmap work = BITMAP_ALLOC (NULL);
     959       145086 :   int *rpo;
     960       145086 :   int n_bbs;
     961              : 
     962       145086 :   vars_ssa_cache cache;
     963              : 
     964              :   /* We approximate the live range of a stack variable by taking the first
     965              :      mention of its name as starting point(s), and by the end-of-scope
     966              :      death clobber added by gimplify as ending point(s) of the range.
     967              :      This overapproximates in the case we for instance moved an address-taken
     968              :      operation upward, without also moving a dereference to it upwards.
     969              :      But it's conservatively correct as a variable never can hold values
     970              :      before its name is mentioned at least once.
     971              : 
     972              :      We then do a mostly classical bitmap liveness algorithm.  */
     973              : 
     974      5448239 :   FOR_ALL_BB_FN (bb, cfun)
     975      5303153 :     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
     976              : 
     977       145086 :   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
     978       145086 :   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
     979              : 
     980       145086 :   changed = true;
     981       587475 :   while (changed)
     982              :     {
     983              :       int i;
     984              :       changed = false;
     985     11216281 :       for (i = 0; i < n_bbs; i++)
     986              :         {
     987     10918978 :           bitmap active;
     988     10918978 :           bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
     989     10918978 :           active = (bitmap)bb->aux;
     990     10918978 :           add_scope_conflicts_1 (cache, bb, work, false);
     991     10918978 :           if (bitmap_ior_into (active, work))
     992      4481811 :             changed = true;
     993              :         }
     994              :     }
     995              : 
     996      5158067 :   FOR_EACH_BB_FN (bb, cfun)
     997      5012981 :     add_scope_conflicts_1 (cache, bb, work, true);
     998              : 
     999       145086 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1000            0 :     cache.dump (dump_file);
    1001              : 
    1002       145086 :   free (rpo);
    1003       145086 :   BITMAP_FREE (work);
    1004      5448239 :   FOR_ALL_BB_FN (bb, cfun)
    1005      5303153 :     BITMAP_FREE (bb->aux);
    1006       145086 : }
    1007              : 
    1008              : /* A subroutine of partition_stack_vars.  A comparison function for qsort,
    1009              :    sorting an array of indices by the properties of the object.  */
    1010              : 
    1011              : static int
    1012     19298118 : stack_var_cmp (const void *a, const void *b)
    1013              : {
    1014     19298118 :   unsigned ia = *(const unsigned *)a;
    1015     19298118 :   unsigned ib = *(const unsigned *)b;
    1016     19298118 :   unsigned int aligna = stack_vars[ia].alignb;
    1017     19298118 :   unsigned int alignb = stack_vars[ib].alignb;
    1018     19298118 :   poly_int64 sizea = stack_vars[ia].size;
    1019     19298118 :   poly_int64 sizeb = stack_vars[ib].size;
    1020     19298118 :   tree decla = stack_vars[ia].decl;
    1021     19298118 :   tree declb = stack_vars[ib].decl;
    1022     19298118 :   bool largea, largeb;
    1023     19298118 :   unsigned int uida, uidb;
    1024              : 
    1025              :   /* Primary compare on "large" alignment.  Large comes first.  */
    1026     19298118 :   largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
    1027     19298118 :   largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
    1028     19298118 :   if (largea != largeb)
    1029            0 :     return (int)largeb - (int)largea;
    1030              : 
    1031              :   /* Secondary compare on size, decreasing  */
    1032     19298118 :   int diff = compare_sizes_for_sort (sizeb, sizea);
    1033     19298118 :   if (diff != 0)
    1034      8320939 :     return diff;
    1035              : 
    1036              :   /* Tertiary compare on true alignment, decreasing.  */
    1037     10977179 :   if (aligna < alignb)
    1038              :     return -1;
    1039     10640151 :   if (aligna > alignb)
    1040              :     return 1;
    1041              : 
    1042              :   /* Final compare on ID for sort stability, increasing.
    1043              :      Two SSA names are compared by their version, SSA names come before
    1044              :      non-SSA names, and two normal decls are compared by their DECL_UID.  */
    1045     10361467 :   if (TREE_CODE (decla) == SSA_NAME)
    1046              :     {
    1047        37131 :       if (TREE_CODE (declb) == SSA_NAME)
    1048        32628 :         uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
    1049              :       else
    1050              :         return -1;
    1051              :     }
    1052     10324336 :   else if (TREE_CODE (declb) == SSA_NAME)
    1053              :     return 1;
    1054              :   else
    1055     10318893 :     uida = DECL_UID (decla), uidb = DECL_UID (declb);
    1056     10351521 :   if (uida < uidb)
    1057              :     return 1;
    1058      5156648 :   if (uida > uidb)
    1059      5156648 :     return -1;
    1060              :   return 0;
    1061              : }
    1062              : 
    1063              : struct part_traits : unbounded_int_hashmap_traits <unsigned , bitmap> {};
    1064              : typedef hash_map<unsigned, bitmap, part_traits> part_hashmap;
    1065              : 
    1066              : /* If the points-to solution *PI points to variables that are in a partition
    1067              :    together with other variables add all partition members to the pointed-to
    1068              :    variables bitmap.  */
    1069              : 
    1070              : static void
    1071      1535095 : add_partitioned_vars_to_ptset (struct pt_solution *pt,
    1072              :                                part_hashmap *decls_to_partitions,
    1073              :                                hash_set<bitmap> *visited, bitmap temp)
    1074              : {
    1075      1535095 :   bitmap_iterator bi;
    1076      1535095 :   unsigned i;
    1077      1535095 :   bitmap *part;
    1078              : 
    1079      1535095 :   if (pt->anything
    1080      1467089 :       || pt->vars == NULL
    1081              :       /* The pointed-to vars bitmap is shared, it is enough to
    1082              :          visit it once.  */
    1083      3002184 :       || visited->add (pt->vars))
    1084      1253718 :     return;
    1085              : 
    1086       281377 :   bitmap_clear (temp);
    1087              : 
    1088              :   /* By using a temporary bitmap to store all members of the partitions
    1089              :      we have to add we make sure to visit each of the partitions only
    1090              :      once.  */
    1091      1235679 :   EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    1092       954302 :     if ((!temp
    1093       954302 :          || !bitmap_bit_p (temp, i))
    1094      1551174 :         && (part = decls_to_partitions->get (i)))
    1095       156929 :       bitmap_ior_into (temp, *part);
    1096       281377 :   if (!bitmap_empty_p (temp))
    1097       135676 :     bitmap_ior_into (pt->vars, temp);
    1098              : }
    1099              : 
    1100              : /* Update points-to sets based on partition info, so we can use them on RTL.
    1101              :    The bitmaps representing stack partitions will be saved until expand,
    1102              :    where partitioned decls used as bases in memory expressions will be
    1103              :    rewritten.
    1104              : 
    1105              :    It is not necessary to update TBAA info on accesses to the coalesced
    1106              :    storage since our memory model doesn't allow TBAA to be used for
    1107              :    WAW or WAR dependences.  For RAW when the write is to an old object
    1108              :    the new object would not have been initialized at the point of the
    1109              :    read, invoking undefined behavior.  */
    1110              : 
    1111              : static void
    1112       145086 : update_alias_info_with_stack_vars (void)
    1113              : {
    1114       145086 :   part_hashmap *decls_to_partitions = NULL;
    1115       145086 :   unsigned i, j;
    1116       145086 :   tree var = NULL_TREE;
    1117              : 
    1118      1227395 :   for (i = 0; i < stack_vars_num; i++)
    1119              :     {
    1120      1082309 :       bitmap part = NULL;
    1121      1082309 :       tree name;
    1122      1082309 :       struct ptr_info_def *pi;
    1123              : 
    1124              :       /* Not interested in partitions with single variable.  */
    1125      1082309 :       if (stack_vars[i].representative != i
    1126       832908 :           || stack_vars[i].next == EOC)
    1127      1017330 :         continue;
    1128              : 
    1129        64979 :       if (!decls_to_partitions)
    1130              :         {
    1131        40482 :           decls_to_partitions = new part_hashmap;
    1132        40482 :           cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
    1133              :         }
    1134              : 
    1135              :       /* Create an SSA_NAME that points to the partition for use
    1136              :          as base during alias-oracle queries on RTL for bases that
    1137              :          have been partitioned.  */
    1138        64979 :       if (var == NULL_TREE)
    1139        40482 :         var = create_tmp_var (ptr_type_node);
    1140        64979 :       name = make_ssa_name (var);
    1141              : 
    1142              :       /* Create bitmaps representing partitions.  They will be used for
    1143              :          points-to sets later, so use GGC alloc.  */
    1144        64979 :       part = BITMAP_GGC_ALLOC ();
    1145       379359 :       for (j = i; j != EOC; j = stack_vars[j].next)
    1146              :         {
    1147       314380 :           tree decl = stack_vars[j].decl;
    1148       314380 :           unsigned int uid = DECL_PT_UID (decl);
    1149       314380 :           bitmap_set_bit (part, uid);
    1150       314380 :           decls_to_partitions->put (uid, part);
    1151       314380 :           cfun->gimple_df->decls_to_pointers->put (decl, name);
    1152       314380 :           if (TREE_ADDRESSABLE (decl))
    1153       285413 :             TREE_ADDRESSABLE (name) = 1;
    1154              :         }
    1155              : 
    1156              :       /* Make the SSA name point to all partition members.  */
    1157        64979 :       pi = get_ptr_info (name);
    1158        64979 :       pt_solution_set (&pi->pt, part, false);
    1159              :     }
    1160              : 
    1161              :   /* Make all points-to sets that contain one member of a partition
    1162              :      contain all members of the partition.  */
    1163       145086 :   if (decls_to_partitions)
    1164              :     {
    1165        40482 :       unsigned i;
    1166        40482 :       tree name;
    1167        40482 :       hash_set<bitmap> visited;
    1168        40482 :       bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
    1169              : 
    1170     16019007 :       FOR_EACH_SSA_NAME (i, name, cfun)
    1171              :         {
    1172      9933881 :           struct ptr_info_def *pi;
    1173              : 
    1174     18223723 :           if (POINTER_TYPE_P (TREE_TYPE (name))
    1175     10009225 :               && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
    1176      1454131 :             add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
    1177              :                                            &visited, temp);
    1178              :         }
    1179              : 
    1180        40482 :       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
    1181              :                                      decls_to_partitions, &visited, temp);
    1182        40482 :       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped_return,
    1183              :                                      decls_to_partitions, &visited, temp);
    1184        40482 :       delete decls_to_partitions;
    1185        40482 :       BITMAP_FREE (temp);
    1186        40482 :     }
    1187       145086 : }
    1188              : 
    1189              : /* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
    1190              :    partitioning algorithm.  Partitions A and B are known to be non-conflicting.
    1191              :    Merge them into a single partition A.  */
    1192              : 
    1193              : static void
    1194       249401 : union_stack_vars (unsigned a, unsigned b)
    1195              : {
    1196       249401 :   class stack_var *vb = &stack_vars[b];
    1197       249401 :   bitmap_iterator bi;
    1198       249401 :   unsigned u;
    1199              : 
    1200       249401 :   gcc_assert (stack_vars[b].next == EOC);
    1201              :    /* Add B to A's partition.  */
    1202       249401 :   stack_vars[b].next = stack_vars[a].next;
    1203       249401 :   stack_vars[b].representative = a;
    1204       249401 :   stack_vars[a].next = b;
    1205              : 
    1206              :   /* Make sure A is big enough to hold B.  */
    1207       249401 :   stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);
    1208              : 
    1209              :   /* Update the required alignment of partition A to account for B.  */
    1210       249401 :   if (stack_vars[a].alignb < stack_vars[b].alignb)
    1211         1978 :     stack_vars[a].alignb = stack_vars[b].alignb;
    1212              : 
    1213              :   /* Update the interference graph and merge the conflicts.  */
    1214       249401 :   if (vb->conflicts)
    1215              :     {
    1216      1627402 :       EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
    1217      1430161 :         add_stack_var_conflict (a, stack_vars[u].representative);
    1218       197241 :       BITMAP_FREE (vb->conflicts);
    1219              :     }
    1220       249401 : }
    1221              : 
    1222              : /* A subroutine of expand_used_vars.  Binpack the variables into
    1223              :    partitions constrained by the interference graph.  The overall
    1224              :    algorithm used is as follows:
    1225              : 
    1226              :         Sort the objects by size in descending order.
    1227              :         For each object A {
    1228              :           S = size(A)
    1229              :           O = 0
    1230              :           loop {
    1231              :             Look for the largest non-conflicting object B with size <= S.
    1232              :             UNION (A, B)
    1233              :           }
    1234              :         }
    1235              : */
    1236              : 
    1237              : static void
    1238       230501 : partition_stack_vars (void)
    1239              : {
    1240       230501 :   unsigned si, sj, n = stack_vars_num;
    1241              : 
    1242       230501 :   stack_vars_sorted = XNEWVEC (unsigned, stack_vars_num);
    1243      1398225 :   for (si = 0; si < n; ++si)
    1244      1167724 :     stack_vars_sorted[si] = si;
    1245              : 
    1246       230501 :   if (n == 1)
    1247              :     return;
    1248              : 
    1249       145086 :   qsort (stack_vars_sorted, n, sizeof (unsigned), stack_var_cmp);
    1250              : 
    1251      1227395 :   for (si = 0; si < n; ++si)
    1252              :     {
    1253      1082309 :       unsigned i = stack_vars_sorted[si];
    1254      1082309 :       unsigned int ialign = stack_vars[i].alignb;
    1255      1082309 :       poly_int64 isize = stack_vars[i].size;
    1256              : 
    1257              :       /* Ignore objects that aren't partition representatives. If we
    1258              :          see a var that is not a partition representative, it must
    1259              :          have been merged earlier.  */
    1260      1082309 :       if (stack_vars[i].representative != i)
    1261       249401 :         continue;
    1262              : 
    1263     12636690 :       for (sj = si + 1; sj < n; ++sj)
    1264              :         {
    1265     11804702 :           unsigned j = stack_vars_sorted[sj];
    1266     11804702 :           unsigned int jalign = stack_vars[j].alignb;
    1267     11804702 :           poly_int64 jsize = stack_vars[j].size;
    1268              : 
    1269              :           /* Ignore objects that aren't partition representatives.  */
    1270     11804702 :           if (stack_vars[j].representative != j)
    1271     11803782 :             continue;
    1272              : 
    1273              :           /* Do not mix objects of "small" (supported) alignment
    1274              :              and "large" (unsupported) alignment.  */
    1275     11242079 :           if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
    1276     11242079 :               != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
    1277              :             break;
    1278              : 
    1279              :           /* For Address Sanitizer do not mix objects with different
    1280              :              sizes, as the shorter vars wouldn't be adequately protected.
    1281              :              Don't do that for "large" (unsupported) alignment objects,
    1282              :              those aren't protected anyway.  */
    1283     11242079 :           if (asan_sanitize_stack_p ()
    1284         5389 :               && maybe_ne (isize, jsize)
    1285     11242999 :               && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
    1286              :             break;
    1287              : 
    1288              :           /* Ignore conflicting objects.  */
    1289     11241159 :           if (stack_var_conflict_p (i, j))
    1290     10991758 :             continue;
    1291              : 
    1292              :           /* UNION the objects, placing J at OFFSET.  */
    1293       249401 :           union_stack_vars (i, j);
    1294              :         }
    1295              :     }
    1296              : 
    1297       145086 :   update_alias_info_with_stack_vars ();
    1298              : }
    1299              : 
    1300              : /* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
    1301              : 
    1302              : static void
    1303           29 : dump_stack_var_partition (void)
    1304              : {
    1305           29 :   unsigned si, i, j, n = stack_vars_num;
    1306              : 
    1307           65 :   for (si = 0; si < n; ++si)
    1308              :     {
    1309           36 :       i = stack_vars_sorted[si];
    1310              : 
    1311              :       /* Skip variables that aren't partition representatives, for now.  */
    1312           36 :       if (stack_vars[i].representative != i)
    1313            3 :         continue;
    1314              : 
    1315           33 :       fprintf (dump_file, "Partition %u: size ", i);
    1316           33 :       print_dec (stack_vars[i].size, dump_file);
    1317           33 :       fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
    1318              : 
    1319          102 :       for (j = i; j != EOC; j = stack_vars[j].next)
    1320              :         {
    1321           36 :           fputc ('\t', dump_file);
    1322           36 :           print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
    1323              :         }
    1324           33 :       fputc ('\n', dump_file);
    1325              :     }
    1326           29 : }
    1327              : 
    1328              : /* Assign rtl to DECL at BASE + OFFSET.  */
    1329              : 
    1330              : static void
    1331      1964140 : expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
    1332              :                          poly_int64 offset)
    1333              : {
    1334      1964140 :   unsigned align;
    1335      1964140 :   rtx x;
    1336              : 
    1337              :   /* If this fails, we've overflowed the stack frame.  Error nicely?  */
    1338      2239940 :   gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
    1339              : 
    1340      1964140 :   if (hwassist_sanitize_stack_p ())
    1341           91 :     x = targetm.memtag.add_tag (base, offset,
    1342           91 :                                 hwasan_current_frame_tag ());
    1343              :   else
    1344      2239849 :     x = plus_constant (Pmode, base, offset);
    1345              : 
    1346      3928280 :   x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
    1347       449796 :                    ? TYPE_MODE (TREE_TYPE (decl))
    1348      1514344 :                    : DECL_MODE (decl), x);
    1349              : 
    1350              :   /* Set alignment we actually gave this decl if it isn't an SSA name.
    1351              :      If it is we generate stack slots only accidentally so it isn't as
    1352              :      important, we'll simply set the alignment directly on the MEM.  */
    1353              : 
    1354      1964140 :   if (stack_vars_base_reg_p (base))
    1355      1961126 :     offset -= frame_phase;
    1356      1964140 :   align = known_alignment (offset);
    1357      1964140 :   align *= BITS_PER_UNIT;
    1358      1964140 :   if (align == 0 || align > base_align)
    1359       798050 :     align = base_align;
    1360              : 
    1361      1964140 :   if (TREE_CODE (decl) != SSA_NAME)
    1362              :     {
    1363              :       /* One would think that we could assert that we're not decreasing
    1364              :          alignment here, but (at least) the i386 port does exactly this
    1365              :          via the MINIMUM_ALIGNMENT hook.  */
    1366              : 
    1367      1514344 :       SET_DECL_ALIGN (decl, align);
    1368      1514344 :       DECL_USER_ALIGN (decl) = 0;
    1369              :     }
    1370              : 
    1371      1964140 :   set_rtl (decl, x);
    1372              : 
    1373      1964140 :   set_mem_align (x, align);
    1374      1964140 : }
    1375              : 
    1376       230501 : class stack_vars_data
    1377              : {
    1378              : public:
    1379              :   /* Vector of offset pairs, always end of some padding followed
    1380              :      by start of the padding that needs Address Sanitizer protection.
    1381              :      The vector is in reversed, highest offset pairs come first.  */
    1382              :   auto_vec<HOST_WIDE_INT> asan_vec;
    1383              : 
    1384              :   /* Vector of partition representative decls in between the paddings.  */
    1385              :   auto_vec<tree> asan_decl_vec;
    1386              : 
    1387              :   /* Base pseudo register for Address Sanitizer protected automatic vars.  */
    1388              :   rtx asan_base;
    1389              : 
    1390              :   /* Alignment needed for the Address Sanitizer protected automatic vars.  */
    1391              :   unsigned int asan_alignb;
    1392              : };
    1393              : 
    1394              : /* A subroutine of expand_used_vars.  Give each partition representative
    1395              :    a unique location within the stack frame.  Update each partition member
    1396              :    with that location.  */
    1397              : static void
    1398       232549 : expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
    1399              : {
    1400       232549 :   unsigned si, i, j, n = stack_vars_num;
    1401       232549 :   poly_uint64 large_size = 0, large_alloc = 0;
    1402       232549 :   rtx large_base = NULL;
    1403       232549 :   rtx large_untagged_base = NULL;
    1404       232549 :   unsigned large_align = 0;
    1405       232549 :   bool large_allocation_done = false;
    1406       232549 :   tree decl;
    1407              : 
    1408              :   /* Determine if there are any variables requiring "large" alignment.
    1409              :      Since these are dynamically allocated, we only process these if
    1410              :      no predicate involved.  */
    1411       232549 :   large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
    1412       232549 :   if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    1413              :     {
    1414              :       /* Find the total size of these variables.  */
    1415            0 :       for (si = 0; si < n; ++si)
    1416              :         {
    1417            0 :           unsigned alignb;
    1418              : 
    1419            0 :           i = stack_vars_sorted[si];
    1420            0 :           alignb = stack_vars[i].alignb;
    1421              : 
    1422              :           /* All "large" alignment decls come before all "small" alignment
    1423              :              decls, but "large" alignment decls are not sorted based on
    1424              :              their alignment.  Increase large_align to track the largest
    1425              :              required alignment.  */
    1426            0 :           if ((alignb * BITS_PER_UNIT) > large_align)
    1427              :             large_align = alignb * BITS_PER_UNIT;
    1428              : 
    1429              :           /* Stop when we get to the first decl with "small" alignment.  */
    1430            0 :           if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
    1431              :             break;
    1432              : 
    1433              :           /* Skip variables that aren't partition representatives.  */
    1434            0 :           if (stack_vars[i].representative != i)
    1435            0 :             continue;
    1436              : 
    1437              :           /* Skip variables that have already had rtl assigned.  See also
    1438              :              add_stack_var where we perpetrate this pc_rtx hack.  */
    1439            0 :           decl = stack_vars[i].decl;
    1440            0 :           if (TREE_CODE (decl) == SSA_NAME
    1441            0 :               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
    1442            0 :               : DECL_RTL (decl) != pc_rtx)
    1443            0 :             continue;
    1444              : 
    1445            0 :           large_size = aligned_upper_bound (large_size, alignb);
    1446            0 :           large_size += stack_vars[i].size;
    1447              :         }
    1448              :     }
    1449              : 
    1450      1404564 :   for (si = 0; si < n; ++si)
    1451              :     {
    1452      1172015 :       rtx base;
    1453      1172015 :       unsigned base_align, alignb;
    1454      1172015 :       poly_int64 offset = 0;
    1455              : 
    1456      1172015 :       i = stack_vars_sorted[si];
    1457              : 
    1458              :       /* Skip variables that aren't partition representatives, for now.  */
    1459      1172015 :       if (stack_vars[i].representative != i)
    1460      1421427 :         continue;
    1461              : 
    1462              :       /* Skip variables that have already had rtl assigned.  See also
    1463              :          add_stack_var where we perpetrate this pc_rtx hack.  */
    1464       922603 :       decl = stack_vars[i].decl;
    1465       922603 :       if (TREE_CODE (decl) == SSA_NAME
    1466       922603 :           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
    1467       919042 :           : DECL_RTL (decl) != pc_rtx)
    1468         3281 :         continue;
    1469              : 
    1470              :       /* Check the predicate to see whether this variable should be
    1471              :          allocated in this pass.  */
    1472       919322 :       if (pred && !pred (i))
    1473          999 :         continue;
    1474              : 
    1475       918323 :       base = (hwassist_sanitize_stack_p ()
    1476       918323 :               ? hwasan_frame_base ()
    1477       918267 :               : virtual_stack_vars_rtx);
    1478       918323 :       alignb = stack_vars[i].alignb;
    1479       918323 :       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
    1480              :         {
    1481       918323 :           poly_int64 hwasan_orig_offset;
    1482       918323 :           if (hwassist_sanitize_stack_p ())
    1483              :             {
    1484              :               /* There must be no tag granule "shared" between different
    1485              :                  objects.  This means that no HWASAN_TAG_GRANULE_SIZE byte
    1486              :                  chunk can have more than one object in it.
    1487              : 
    1488              :                  We ensure this by forcing the end of the last bit of data to
    1489              :                  be aligned to HWASAN_TAG_GRANULE_SIZE bytes here, and setting
    1490              :                  the start of each variable to be aligned to
    1491              :                  HWASAN_TAG_GRANULE_SIZE bytes in `align_local_variable`.
    1492              : 
    1493              :                  We can't align just one of the start or end, since there are
    1494              :                  untagged things stored on the stack which we do not align to
    1495              :                  HWASAN_TAG_GRANULE_SIZE bytes.  If we only aligned the start
    1496              :                  or the end of tagged objects then untagged objects could end
    1497              :                  up sharing the first granule of a tagged object or sharing the
    1498              :                  last granule of a tagged object respectively.  */
    1499           56 :               hwasan_orig_offset = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
    1500           56 :               gcc_assert (stack_vars[i].alignb >= HWASAN_TAG_GRANULE_SIZE);
    1501              :             }
    1502              :           /* ASAN description strings don't yet have a syntax for expressing
    1503              :              polynomial offsets.  */
    1504       918323 :           HOST_WIDE_INT prev_offset;
    1505       918323 :           if (asan_sanitize_stack_p ()
    1506         3708 :               && pred
    1507         3009 :               && frame_offset.is_constant (&prev_offset)
    1508       918323 :               && stack_vars[i].size.is_constant ())
    1509              :             {
    1510         3009 :               if (data->asan_vec.is_empty ())
    1511              :                 {
    1512         1628 :                   align_frame_offset (ASAN_RED_ZONE_SIZE);
    1513         1628 :                   prev_offset = frame_offset.to_constant ();
    1514              :                 }
    1515         3009 :               prev_offset = align_base (prev_offset,
    1516              :                                         ASAN_MIN_RED_ZONE_SIZE,
    1517              :                                         !FRAME_GROWS_DOWNWARD);
    1518         3009 :               tree repr_decl = NULL_TREE;
    1519         3009 :               unsigned HOST_WIDE_INT size
    1520         3009 :                 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
    1521         3009 :               if (data->asan_vec.is_empty ())
    1522         1628 :                 size = MAX (size, ASAN_RED_ZONE_SIZE);
    1523              : 
    1524         3009 :               unsigned HOST_WIDE_INT alignment = MAX (alignb,
    1525              :                                                       ASAN_MIN_RED_ZONE_SIZE);
    1526         3009 :               offset = alloc_stack_frame_space (size, alignment);
    1527              : 
    1528         3009 :               data->asan_vec.safe_push (prev_offset);
    1529              :               /* Allocating a constant amount of space from a constant
    1530              :                  starting offset must give a constant result.  */
    1531         3009 :               data->asan_vec.safe_push ((offset + stack_vars[i].size)
    1532         3009 :                                         .to_constant ());
    1533              :               /* Find best representative of the partition.
    1534              :                  Prefer those with DECL_NAME, even better
    1535              :                  satisfying asan_protect_stack_decl predicate.  */
    1536         3528 :               for (j = i; j != EOC; j = stack_vars[j].next)
    1537         3013 :                 if (asan_protect_stack_decl (stack_vars[j].decl)
    1538         3013 :                     && DECL_NAME (stack_vars[j].decl))
    1539              :                   {
    1540         2494 :                     repr_decl = stack_vars[j].decl;
    1541         2494 :                     break;
    1542              :                   }
    1543          519 :                 else if (repr_decl == NULL_TREE
    1544          519 :                          && DECL_P (stack_vars[j].decl)
    1545         1038 :                          && DECL_NAME (stack_vars[j].decl))
    1546            0 :                   repr_decl = stack_vars[j].decl;
    1547         3009 :               if (repr_decl == NULL_TREE)
    1548          515 :                 repr_decl = stack_vars[i].decl;
    1549         3009 :               data->asan_decl_vec.safe_push (repr_decl);
    1550              : 
    1551              :               /* Make sure a representative is unpoison if another
    1552              :                  variable in the partition is handled by
    1553              :                  use-after-scope sanitization.  */
    1554         3009 :               if (asan_handled_variables != NULL
    1555         3009 :                   && !asan_handled_variables->contains (repr_decl))
    1556              :                 {
    1557          394 :                   for (j = i; j != EOC; j = stack_vars[j].next)
    1558          199 :                     if (asan_handled_variables->contains (stack_vars[j].decl))
    1559              :                       break;
    1560          197 :                   if (j != EOC)
    1561            2 :                     asan_handled_variables->add (repr_decl);
    1562              :                 }
    1563              : 
    1564         3009 :               data->asan_alignb = MAX (data->asan_alignb, alignb);
    1565         3009 :               if (data->asan_base == NULL)
    1566         1628 :                 data->asan_base = gen_reg_rtx (Pmode);
    1567         3009 :               base = data->asan_base;
    1568              : 
    1569         3009 :               if (!STRICT_ALIGNMENT)
    1570         3009 :                 base_align = crtl->max_used_stack_slot_alignment;
    1571              :               else
    1572              :                 base_align = MAX (crtl->max_used_stack_slot_alignment,
    1573              :                                   GET_MODE_ALIGNMENT (SImode)
    1574              :                                   << ASAN_SHADOW_SHIFT);
    1575              :             }
    1576              :           else
    1577              :             {
    1578       915314 :               offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
    1579       915314 :               base_align = crtl->max_used_stack_slot_alignment;
    1580              : 
    1581       915314 :               if (hwassist_sanitize_stack_p ())
    1582              :                 {
    1583              :                   /* Align again since the point of this alignment is to handle
    1584              :                      the "end" of the object (i.e. smallest address after the
    1585              :                      stack object).  For FRAME_GROWS_DOWNWARD that requires
    1586              :                      aligning the stack before allocating, but for a frame that
    1587              :                      grows upwards that requires aligning the stack after
    1588              :                      allocation.
    1589              : 
    1590              :                      Use `frame_offset` to record the offset value rather than
    1591              :                      `offset` since the `frame_offset` describes the extent
    1592              :                      allocated for this particular variable while `offset`
    1593              :                      describes the address that this variable starts at.  */
    1594           56 :                   align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
    1595           56 :                   hwasan_record_stack_var (virtual_stack_vars_rtx, base,
    1596              :                                            hwasan_orig_offset, frame_offset);
    1597              :                 }
    1598              :             }
    1599              :         }
    1600              :       else
    1601              :         {
    1602              :           /* Large alignment is only processed in the last pass.  */
    1603            0 :           if (pred)
    1604            0 :             continue;
    1605              : 
    1606              :           /* If there were any variables requiring "large" alignment, allocate
    1607              :              space.  */
    1608            0 :           if (maybe_ne (large_size, 0U) && ! large_allocation_done)
    1609              :             {
    1610            0 :               poly_int64 loffset;
    1611            0 :               rtx large_allocsize;
    1612              : 
    1613            0 :               large_allocsize = gen_int_mode (large_size, Pmode);
    1614            0 :               get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
    1615            0 :               loffset = alloc_stack_frame_space
    1616            0 :                 (rtx_to_poly_int64 (large_allocsize),
    1617            0 :                  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
    1618            0 :               large_base = get_dynamic_stack_base (loffset, large_align, base);
    1619            0 :               large_allocation_done = true;
    1620              :             }
    1621              : 
    1622            0 :           gcc_assert (large_base != NULL);
    1623            0 :           large_alloc = aligned_upper_bound (large_alloc, alignb);
    1624            0 :           offset = large_alloc;
    1625            0 :           large_alloc += stack_vars[i].size;
    1626            0 :           if (hwassist_sanitize_stack_p ())
    1627              :             {
    1628              :               /* An object with a large alignment requirement means that the
    1629              :                  alignment requirement is greater than the required alignment
    1630              :                  for tags.  */
    1631            0 :               if (!large_untagged_base)
    1632            0 :                 large_untagged_base
    1633            0 :                   = targetm.memtag.untagged_pointer (large_base, NULL_RTX);
    1634              :               /* Ensure the end of the variable is also aligned correctly.  */
    1635            0 :               poly_int64 align_again
    1636            0 :                 = aligned_upper_bound (large_alloc, HWASAN_TAG_GRANULE_SIZE);
    1637              :               /* For large allocations we always allocate a chunk of space
    1638              :                  (which is addressed by large_untagged_base/large_base) and
    1639              :                  then use positive offsets from that.  Hence the farthest
    1640              :                  offset is `align_again` and the nearest offset from the base
    1641              :                  is `offset`.  */
    1642            0 :               hwasan_record_stack_var (large_untagged_base, large_base,
    1643              :                                        offset, align_again);
    1644              :             }
    1645              : 
    1646              :           base = large_base;
    1647              :           base_align = large_align;
    1648              :         }
    1649              : 
    1650              :       /* Create rtl for each variable based on their location within the
    1651              :          partition.  */
    1652      2086047 :       for (j = i; j != EOC; j = stack_vars[j].next)
    1653              :         {
    1654      1167724 :           expand_one_stack_var_at (stack_vars[j].decl,
    1655              :                                    base, base_align, offset);
    1656              :         }
    1657       918323 :       if (hwassist_sanitize_stack_p ())
    1658           56 :         hwasan_increment_frame_tag ();
    1659              :     }
    1660              : 
    1661       232549 :   gcc_assert (known_eq (large_alloc, large_size));
    1662       232549 : }
    1663              : 
    1664              : /* Take into account all sizes of partitions and reset DECL_RTLs.  */
    1665              : static poly_uint64
    1666      1061105 : account_stack_vars (void)
    1667              : {
    1668      1061105 :   unsigned si, j, i, n = stack_vars_num;
    1669      1061105 :   poly_uint64 size = 0;
    1670              : 
    1671      5523162 :   for (si = 0; si < n; ++si)
    1672              :     {
    1673      4462057 :       i = stack_vars_sorted[si];
    1674              : 
    1675              :       /* Skip variables that aren't partition representatives, for now.  */
    1676      4462057 :       if (stack_vars[i].representative != i)
    1677            0 :         continue;
    1678              : 
    1679      8924114 :       size += stack_vars[i].size;
    1680      8924114 :       for (j = i; j != EOC; j = stack_vars[j].next)
    1681      4462057 :         set_rtl (stack_vars[j].decl, NULL);
    1682              :     }
    1683      1061105 :   return size;
    1684              : }
    1685              : 
    1686              : /* Record the RTL assignment X for the default def of PARM.  */
    1687              : 
    1688              : extern void
    1689      4687130 : set_parm_rtl (tree parm, rtx x)
    1690              : {
    1691      4687130 :   gcc_assert (TREE_CODE (parm) == PARM_DECL
    1692              :               || TREE_CODE (parm) == RESULT_DECL);
    1693              : 
    1694      4687130 :   if (x && !MEM_P (x))
    1695              :     {
    1696      3021858 :       unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
    1697              :                                               TYPE_MODE (TREE_TYPE (parm)),
    1698              :                                               TYPE_ALIGN (TREE_TYPE (parm)));
    1699              : 
    1700              :       /* If the variable alignment is very large we'll dynamicaly
    1701              :          allocate it, which means that in-frame portion is just a
    1702              :          pointer.  ??? We've got a pseudo for sure here, do we
    1703              :          actually dynamically allocate its spilling area if needed?
    1704              :          ??? Isn't it a problem when Pmode alignment also exceeds
    1705              :          MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
    1706      3021858 :       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    1707            0 :         align = GET_MODE_ALIGNMENT (Pmode);
    1708              : 
    1709      3021858 :       record_alignment_for_reg_var (align);
    1710              :     }
    1711              : 
    1712      4687130 :   tree ssa = ssa_default_def (cfun, parm);
    1713      4687130 :   if (!ssa)
    1714      1067122 :     return set_rtl (parm, x);
    1715              : 
    1716      3620008 :   int part = var_to_partition (SA.map, ssa);
    1717      3620008 :   gcc_assert (part != NO_PARTITION);
    1718              : 
    1719      3620008 :   bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
    1720      3620008 :   gcc_assert (changed);
    1721              : 
    1722      3620008 :   set_rtl (ssa, x);
    1723      3620008 :   gcc_assert (DECL_RTL (parm) == x);
    1724              : }
    1725              : 
    1726              : /* A subroutine of expand_one_var.  Called to immediately assign rtl
    1727              :    to a variable to be allocated in the stack frame.  */
    1728              : 
    1729              : static void
    1730       796416 : expand_one_stack_var_1 (tree var)
    1731              : {
    1732       796416 :   poly_uint64 size;
    1733       796416 :   poly_int64 offset;
    1734       796416 :   unsigned byte_align;
    1735              : 
    1736       796416 :   if (TREE_CODE (var) == SSA_NAME)
    1737              :     {
    1738       446855 :       tree type = TREE_TYPE (var);
    1739       446855 :       size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
    1740              :     }
    1741              :   else
    1742       349561 :     size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
    1743              : 
    1744       796416 :   byte_align = align_local_variable (var, true);
    1745              : 
    1746              :   /* We handle highly aligned variables in expand_stack_vars.  */
    1747       796416 :   gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
    1748              : 
    1749       796416 :   rtx base;
    1750       796416 :   if (hwassist_sanitize_stack_p ())
    1751              :     {
    1752              :       /* Allocate zero bytes to align the stack.  */
    1753           35 :       poly_int64 hwasan_orig_offset
    1754           35 :         = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
    1755           35 :       offset = alloc_stack_frame_space (size, byte_align);
    1756           35 :       align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
    1757           35 :       base = hwasan_frame_base ();
    1758              :       /* Use `frame_offset` to automatically account for machines where the
    1759              :          frame grows upwards.
    1760              : 
    1761              :          `offset` will always point to the "start" of the stack object, which
    1762              :          will be the smallest address, for ! FRAME_GROWS_DOWNWARD this is *not*
    1763              :          the "furthest" offset from the base delimiting the current stack
    1764              :          object.  `frame_offset` will always delimit the extent that the frame.
    1765              :          */
    1766           35 :       hwasan_record_stack_var (virtual_stack_vars_rtx, base,
    1767              :                                hwasan_orig_offset, frame_offset);
    1768              :     }
    1769              :   else
    1770              :     {
    1771       796381 :       offset = alloc_stack_frame_space (size, byte_align);
    1772       796381 :       base = virtual_stack_vars_rtx;
    1773              :     }
    1774              : 
    1775       796416 :   expand_one_stack_var_at (var, base,
    1776              :                            crtl->max_used_stack_slot_alignment, offset);
    1777              : 
    1778       796416 :   if (hwassist_sanitize_stack_p ())
    1779           35 :     hwasan_increment_frame_tag ();
    1780       796416 : }
    1781              : 
    1782              : /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
    1783              :    already assigned some MEM.  */
    1784              : 
    1785              : static void
    1786       349561 : expand_one_stack_var (tree var)
    1787              : {
    1788       349561 :   if (TREE_CODE (var) == SSA_NAME)
    1789              :     {
    1790            0 :       int part = var_to_partition (SA.map, var);
    1791            0 :       if (part != NO_PARTITION)
    1792              :         {
    1793            0 :           rtx x = SA.partition_to_pseudo[part];
    1794            0 :           gcc_assert (x);
    1795            0 :           gcc_assert (MEM_P (x));
    1796              :           return;
    1797              :         }
    1798              :     }
    1799              : 
    1800       349561 :   return expand_one_stack_var_1 (var);
    1801              : }
    1802              : 
    1803              : /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
    1804              :    that will reside in a hard register.  */
    1805              : 
    1806              : static void
    1807         1082 : expand_one_hard_reg_var (tree var)
    1808              : {
    1809            0 :   rest_of_decl_compilation (var, 0, 0);
    1810            0 : }
    1811              : 
    1812              : /* Record the alignment requirements of some variable assigned to a
    1813              :    pseudo.  */
    1814              : 
    1815              : static void
    1816     40299470 : record_alignment_for_reg_var (unsigned int align)
    1817              : {
    1818     40299470 :   if (SUPPORTS_STACK_ALIGNMENT
    1819     40299470 :       && crtl->stack_alignment_estimated < align)
    1820              :     {
    1821              :       /* stack_alignment_estimated shouldn't change after stack
    1822              :          realign decision made */
    1823      1710809 :       gcc_assert (!crtl->stack_realign_processed);
    1824      1710809 :       crtl->stack_alignment_estimated = align;
    1825              :     }
    1826              : 
    1827              :   /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
    1828              :      So here we only make sure stack_alignment_needed >= align.  */
    1829     40299470 :   if (crtl->stack_alignment_needed < align)
    1830       554707 :     crtl->stack_alignment_needed = align;
    1831     40299470 :   if (crtl->max_used_stack_slot_alignment < align)
    1832       554707 :     crtl->max_used_stack_slot_alignment = align;
    1833     40299470 : }
    1834              : 
    1835              : /* Create RTL for an SSA partition.  */
    1836              : 
    1837              : static void
    1838     22257184 : expand_one_ssa_partition (tree var)
    1839              : {
    1840     22257184 :   int part = var_to_partition (SA.map, var);
    1841     22257184 :   gcc_assert (part != NO_PARTITION);
    1842              : 
    1843     22257184 :   if (SA.partition_to_pseudo[part])
    1844              :     return;
    1845              : 
    1846     22257184 :   unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
    1847              :                                           TYPE_MODE (TREE_TYPE (var)),
    1848              :                                           TYPE_ALIGN (TREE_TYPE (var)));
    1849              : 
    1850              :   /* If the variable alignment is very large we'll dynamicaly allocate
    1851              :      it, which means that in-frame portion is just a pointer.  */
    1852     22257184 :   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    1853            0 :     align = GET_MODE_ALIGNMENT (Pmode);
    1854              : 
    1855     22257184 :   record_alignment_for_reg_var (align);
    1856              : 
    1857     22257184 :   if (!use_register_for_decl (var))
    1858              :     {
    1859       449796 :       if (defer_stack_allocation (var, true))
    1860         2941 :         add_stack_var (var, true);
    1861              :       else
    1862       446855 :         expand_one_stack_var_1 (var);
    1863       449796 :       return;
    1864              :     }
    1865              : 
    1866     21807388 :   machine_mode reg_mode = promote_ssa_mode (var, NULL);
    1867     21807388 :   rtx x = gen_reg_rtx (reg_mode);
    1868              : 
    1869     21807388 :   set_rtl (var, x);
    1870              : 
    1871              :   /* For a promoted variable, X will not be used directly but wrapped in a
    1872              :      SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
    1873              :      will assume that its upper bits can be inferred from its lower bits.
    1874              :      Therefore, if X isn't initialized on every path from the entry, then
    1875              :      we must do it manually in order to fulfill the above assumption.  */
    1876     21807388 :   if (reg_mode != TYPE_MODE (TREE_TYPE (var))
    1877     21807388 :       && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    1878            0 :     emit_move_insn (x, CONST0_RTX (reg_mode));
    1879              : }
    1880              : 
    1881              : /* Record the association between the RTL generated for partition PART
    1882              :    and the underlying variable of the SSA_NAME VAR.  */
    1883              : 
    1884              : static void
    1885     48489793 : adjust_one_expanded_partition_var (tree var)
    1886              : {
    1887     48489793 :   if (!var)
    1888              :     return;
    1889              : 
    1890     48489793 :   tree decl = SSA_NAME_VAR (var);
    1891              : 
    1892     48489793 :   int part = var_to_partition (SA.map, var);
    1893     48489793 :   if (part == NO_PARTITION)
    1894              :     return;
    1895              : 
    1896     31257389 :   rtx x = SA.partition_to_pseudo[part];
    1897              : 
    1898     31257389 :   gcc_assert (x);
    1899              : 
    1900     31257389 :   set_rtl (var, x);
    1901              : 
    1902     31257389 :   if (!REG_P (x))
    1903              :     return;
    1904              : 
    1905              :   /* Note if the object is a user variable.  */
    1906     29252145 :   if (decl && !DECL_ARTIFICIAL (decl))
    1907      4153154 :     mark_user_reg (x);
    1908              : 
    1909     29252145 :   if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    1910      7498205 :     mark_reg_pointer (x, get_pointer_alignment (var));
    1911              : }
    1912              : 
    1913              : /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
    1914              :    that will reside in a pseudo register.  */
    1915              : 
    1916              : static void
    1917       569834 : expand_one_register_var (tree var)
    1918              : {
    1919       569834 :   if (TREE_CODE (var) == SSA_NAME)
    1920              :     {
    1921            0 :       int part = var_to_partition (SA.map, var);
    1922            0 :       if (part != NO_PARTITION)
    1923              :         {
    1924            0 :           rtx x = SA.partition_to_pseudo[part];
    1925            0 :           gcc_assert (x);
    1926            0 :           gcc_assert (REG_P (x));
    1927              :           return;
    1928              :         }
    1929            0 :       gcc_unreachable ();
    1930              :     }
    1931              : 
    1932       569834 :   tree decl = var;
    1933       569834 :   tree type = TREE_TYPE (decl);
    1934       569834 :   machine_mode reg_mode = promote_decl_mode (decl, NULL);
    1935       569834 :   rtx x = gen_reg_rtx (reg_mode);
    1936              : 
    1937       569834 :   set_rtl (var, x);
    1938              : 
    1939              :   /* Note if the object is a user variable.  */
    1940       569834 :   if (!DECL_ARTIFICIAL (decl))
    1941        87455 :     mark_user_reg (x);
    1942              : 
    1943       569834 :   if (POINTER_TYPE_P (type))
    1944           99 :     mark_reg_pointer (x, get_pointer_alignment (var));
    1945              : }
    1946              : 
    1947              : /* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
    1948              :    has some associated error, e.g. its type is error-mark.  We just need
    1949              :    to pick something that won't crash the rest of the compiler.  */
    1950              : 
    1951              : static void
    1952           32 : expand_one_error_var (tree var)
    1953              : {
    1954           32 :   machine_mode mode = DECL_MODE (var);
    1955           32 :   rtx x;
    1956              : 
    1957           32 :   if (mode == BLKmode)
    1958            1 :     x = gen_rtx_MEM (BLKmode, const0_rtx);
    1959           31 :   else if (mode == VOIDmode)
    1960            0 :     x = const0_rtx;
    1961              :   else
    1962           31 :     x = gen_reg_rtx (mode);
    1963              : 
    1964           32 :   SET_DECL_RTL (var, x);
    1965           32 : }
    1966              : 
    1967              : /* A subroutine of expand_one_var.  VAR is a variable that will be
    1968              :    allocated to the local stack frame.  Return true if we wish to
    1969              :    add VAR to STACK_VARS so that it will be coalesced with other
    1970              :    variables.  Return false to allocate VAR immediately.
    1971              : 
    1972              :    This function is used to reduce the number of variables considered
    1973              :    for coalescing, which reduces the size of the quadratic problem.  */
    1974              : 
    1975              : static bool
    1976      6560924 : defer_stack_allocation (tree var, bool toplevel)
    1977              : {
    1978      6560924 :   tree size_unit = TREE_CODE (var) == SSA_NAME
    1979      6560924 :     ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    1980      6560924 :     : DECL_SIZE_UNIT (var);
    1981      6560924 :   poly_uint64 size;
    1982              : 
    1983              :   /* Whether the variable is small enough for immediate allocation not to be
    1984              :      a problem with regard to the frame size.  */
    1985      6560924 :   bool smallish
    1986      6560924 :     = (poly_int_tree_p (size_unit, &size)
    1987      6560924 :        && (estimated_poly_value (size)
    1988      6560924 :            < param_min_size_for_stack_sharing));
    1989              : 
    1990              :   /* If stack protection is enabled, *all* stack variables must be deferred,
    1991              :      so that we can re-order the strings to the top of the frame.
    1992              :      Similarly for Address Sanitizer.  */
    1993      6560924 :   if (flag_stack_protect || asan_sanitize_stack_p ())
    1994        12614 :     return true;
    1995              : 
    1996      6548310 :   unsigned int align = TREE_CODE (var) == SSA_NAME
    1997      6548310 :     ? TYPE_ALIGN (TREE_TYPE (var))
    1998      6099137 :     : DECL_ALIGN (var);
    1999              : 
    2000              :   /* We handle "large" alignment via dynamic allocation.  We want to handle
    2001              :      this extra complication in only one place, so defer them.  */
    2002              :   if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    2003              :     return true;
    2004              : 
    2005      6548310 :   bool ignored = TREE_CODE (var) == SSA_NAME
    2006      6554542 :     ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    2007      6099137 :     : DECL_IGNORED_P (var);
    2008              : 
    2009              :   /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
    2010              :      might be detached from their block and appear at toplevel when we reach
    2011              :      here.  We want to coalesce them with variables from other blocks when
    2012              :      the immediate contribution to the frame size would be noticeable.  */
    2013      6548310 :   if (toplevel && optimize > 0 && ignored && !smallish)
    2014              :     return true;
    2015              : 
    2016              :   /* Variables declared in the outermost scope automatically conflict
    2017              :      with every other variable.  The only reason to want to defer them
    2018              :      at all is that, after sorting, we can more efficiently pack
    2019              :      small variables in the stack frame.  Continue to defer at -O2.  */
    2020      5080243 :   if (toplevel && optimize < 2)
    2021              :     return false;
    2022              : 
    2023              :   /* Without optimization, *most* variables are allocated from the
    2024              :      stack, which makes the quadratic problem large exactly when we
    2025              :      want compilation to proceed as quickly as possible.  On the
    2026              :      other hand, we don't want the function's stack frame size to
    2027              :      get completely out of hand.  So we avoid adding scalars and
    2028              :      "small" aggregates to the list at all.  */
    2029      4432378 :   if (optimize == 0 && smallish)
    2030              :     return false;
    2031              : 
    2032              :   return true;
    2033              : }
    2034              : 
    2035              : /* A subroutine of expand_used_vars.  Expand one variable according to
    2036              :    its flavor.  Variables to be placed on the stack are not actually
    2037              :    expanded yet, merely recorded.
    2038              :    When REALLY_EXPAND is false, only add stack values to be allocated.
    2039              :    Return stack usage this variable is supposed to take.
    2040              : */
    2041              : 
    2042              : static poly_uint64
    2043     15322833 : expand_one_var (tree var, bool toplevel, bool really_expand,
    2044              :                 bitmap forced_stack_var = NULL)
    2045              : {
    2046     15322833 :   unsigned int align = BITS_PER_UNIT;
    2047     15322833 :   tree origvar = var;
    2048              : 
    2049     15322833 :   var = SSAVAR (var);
    2050              : 
    2051     15322833 :   if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    2052              :     {
    2053     15322833 :       if (is_global_var (var))
    2054       302405 :         return 0;
    2055              : 
    2056              :       /* Because we don't know if VAR will be in register or on stack,
    2057              :          we conservatively assume it will be on stack even if VAR is
    2058              :          eventually put into register after RA pass.  For non-automatic
    2059              :          variables, which won't be on stack, we collect alignment of
    2060              :          type and ignore user specified alignment.  Similarly for
    2061              :          SSA_NAMEs for which use_register_for_decl returns true.  */
    2062     15020428 :       if (TREE_STATIC (var)
    2063     15020428 :           || DECL_EXTERNAL (var)
    2064     30040856 :           || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
    2065            0 :         align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
    2066              :                                    TYPE_MODE (TREE_TYPE (var)),
    2067              :                                    TYPE_ALIGN (TREE_TYPE (var)));
    2068     15020428 :       else if (DECL_HAS_VALUE_EXPR_P (var)
    2069     15020428 :                || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
    2070              :         /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
    2071              :            or variables which were assigned a stack slot already by
    2072              :            expand_one_stack_var_at - in the latter case DECL_ALIGN has been
    2073              :            changed from the offset chosen to it.  */
    2074        16654 :         align = crtl->stack_alignment_estimated;
    2075              :       else
    2076     15003774 :         align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
    2077              : 
    2078              :       /* If the variable alignment is very large we'll dynamicaly allocate
    2079              :          it, which means that in-frame portion is just a pointer.  */
    2080     15020428 :       if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    2081            0 :         align = GET_MODE_ALIGNMENT (Pmode);
    2082              :     }
    2083              : 
    2084     15020428 :   record_alignment_for_reg_var (align);
    2085              : 
    2086     15020428 :   poly_uint64 size;
    2087     15020428 :   if (TREE_CODE (origvar) == SSA_NAME)
    2088              :     {
    2089            0 :       gcc_assert (!VAR_P (var)
    2090              :                   || (!DECL_EXTERNAL (var)
    2091              :                       && !DECL_HAS_VALUE_EXPR_P (var)
    2092              :                       && !TREE_STATIC (var)
    2093              :                       && TREE_TYPE (var) != error_mark_node
    2094              :                       && !DECL_HARD_REGISTER (var)
    2095              :                       && really_expand));
    2096              :     }
    2097     15020428 :   if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    2098              :     ;
    2099     15020428 :   else if (DECL_EXTERNAL (var))
    2100              :     ;
    2101     15020428 :   else if (DECL_HAS_VALUE_EXPR_P (var))
    2102              :     ;
    2103     15003867 :   else if (TREE_STATIC (var))
    2104              :     ;
    2105     15003867 :   else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    2106              :     ;
    2107     15003631 :   else if (TREE_TYPE (var) == error_mark_node)
    2108              :     {
    2109            0 :       if (really_expand)
    2110            0 :         expand_one_error_var (var);
    2111              :     }
    2112     15003631 :   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    2113              :     {
    2114         4109 :       if (really_expand)
    2115              :         {
    2116         1082 :           expand_one_hard_reg_var (var);
    2117         1082 :           if (!DECL_HARD_REGISTER (var))
    2118              :             /* Invalid register specification.  */
    2119           32 :             expand_one_error_var (var);
    2120              :         }
    2121              :     }
    2122     14999522 :   else if (use_register_for_decl (var)
    2123     14999522 :            && (!forced_stack_var
    2124       579085 :                || !bitmap_bit_p (forced_stack_var, DECL_UID (var))))
    2125              :     {
    2126      8888394 :       if (really_expand)
    2127       569834 :         expand_one_register_var (origvar);
    2128              :     }
    2129      6111128 :   else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
    2130      6111128 :            || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    2131              :     {
    2132              :       /* Reject variables which cover more than half of the address-space.  */
    2133            0 :       if (really_expand)
    2134              :         {
    2135            0 :           if (DECL_NONLOCAL_FRAME (var))
    2136            0 :             error_at (DECL_SOURCE_LOCATION (current_function_decl),
    2137              :                       "total size of local objects is too large");
    2138              :           else
    2139            0 :             error_at (DECL_SOURCE_LOCATION (var),
    2140              :                       "size of variable %q+D is too large", var);
    2141            0 :           expand_one_error_var (var);
    2142              :         }
    2143              :     }
    2144      6111128 :   else if (defer_stack_allocation (var, toplevel))
    2145      5626840 :     add_stack_var (origvar, really_expand);
    2146              :   else
    2147              :     {
    2148       484288 :       if (really_expand)
    2149              :         {
    2150       349310 :           if (lookup_attribute ("naked",
    2151       349310 :                                 DECL_ATTRIBUTES (current_function_decl)))
    2152            0 :             error ("cannot allocate stack for variable %q+D, naked function",
    2153              :                    var);
    2154              : 
    2155       349310 :           expand_one_stack_var (origvar);
    2156              :         }
    2157       484288 :       return size;
    2158              :     }
    2159     14536140 :   return 0;
    2160              : }
    2161              : 
    2162              : /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
    2163              :    expanding variables.  Those variables that can be put into registers
    2164              :    are allocated pseudos; those that can't are put on the stack.
    2165              : 
    2166              :    TOPLEVEL is true if this is the outermost BLOCK.  */
    2167              : 
    2168              : static void
    2169     16070919 : expand_used_vars_for_block (tree block, bool toplevel, bitmap forced_stack_vars)
    2170              : {
    2171     16070919 :   tree t;
    2172              : 
    2173              :   /* Expand all variables at this level.  */
    2174     34697361 :   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    2175     18626442 :     if (TREE_USED (t)
    2176     18626442 :         && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
    2177       750363 :             || !DECL_NONSHAREABLE (t)))
    2178       750363 :       expand_one_var (t, toplevel, true, forced_stack_vars);
    2179              : 
    2180              :   /* Expand all variables at containing levels.  */
    2181     30660112 :   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    2182     14589193 :     expand_used_vars_for_block (t, false, forced_stack_vars);
    2183     16070919 : }
    2184              : 
    2185              : /* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
    2186              :    and clear TREE_USED on all local variables.  */
    2187              : 
    2188              : static void
    2189     16070919 : clear_tree_used (tree block)
    2190              : {
    2191     16070919 :   tree t;
    2192              : 
    2193     34697361 :   for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    2194              :     /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    2195      1007915 :     if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
    2196     18626442 :         || !DECL_NONSHAREABLE (t))
    2197     18626442 :       TREE_USED (t) = 0;
    2198              : 
    2199     30660112 :   for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    2200     14589193 :     clear_tree_used (t);
    2201     16070919 : }
    2202              : 
    2203              : /* Examine TYPE and determine a bit mask of the following features.  */
    2204              : 
    2205              : #define SPCT_HAS_LARGE_CHAR_ARRAY       1
    2206              : #define SPCT_HAS_SMALL_CHAR_ARRAY       2
    2207              : #define SPCT_HAS_ARRAY                  4
    2208              : #define SPCT_HAS_AGGREGATE              8
    2209              : 
    2210              : static unsigned int
    2211         2675 : stack_protect_classify_type (tree type)
    2212              : {
    2213         2675 :   unsigned int ret = 0;
    2214         2675 :   tree t;
    2215              : 
    2216         2675 :   switch (TREE_CODE (type))
    2217              :     {
    2218          657 :     case ARRAY_TYPE:
    2219          657 :       t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
    2220          657 :       if (t == char_type_node
    2221          175 :           || t == signed_char_type_node
    2222          175 :           || t == unsigned_char_type_node)
    2223              :         {
    2224          482 :           unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
    2225          482 :           unsigned HOST_WIDE_INT len;
    2226              : 
    2227          482 :           if (!TYPE_SIZE_UNIT (type)
    2228          482 :               || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
    2229              :             len = max;
    2230              :           else
    2231          482 :             len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
    2232              : 
    2233          482 :           if (len < max)
    2234              :             ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
    2235              :           else
    2236          450 :             ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
    2237              :         }
    2238              :       else
    2239              :         ret = SPCT_HAS_ARRAY;
    2240              :       break;
    2241              : 
    2242         1014 :     case UNION_TYPE:
    2243         1014 :     case QUAL_UNION_TYPE:
    2244         1014 :     case RECORD_TYPE:
    2245         1014 :       ret = SPCT_HAS_AGGREGATE;
    2246        10891 :       for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
    2247         9877 :         if (TREE_CODE (t) == FIELD_DECL)
    2248         1685 :           ret |= stack_protect_classify_type (TREE_TYPE (t));
    2249              :       break;
    2250              : 
    2251              :     default:
    2252              :       break;
    2253              :     }
    2254              : 
    2255         2675 :   return ret;
    2256              : }
    2257              : 
    2258              : /* Return nonzero if DECL should be segregated into the "vulnerable" upper
    2259              :    part of the local stack frame.  Remember if we ever return nonzero for
    2260              :    any variable in this function.  The return value is the phase number in
    2261              :    which the variable should be allocated.  */
    2262              : 
    2263              : static int
    2264          990 : stack_protect_decl_phase (tree decl)
    2265              : {
    2266          990 :   unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
    2267          990 :   int ret = 0;
    2268              : 
    2269          990 :   if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    2270           28 :     has_short_buffer = true;
    2271              : 
    2272          990 :   tree attribs = DECL_ATTRIBUTES (current_function_decl);
    2273          990 :   if (!lookup_attribute ("no_stack_protector", attribs)
    2274          990 :       && (flag_stack_protect == SPCT_FLAG_ALL
    2275          990 :           || flag_stack_protect == SPCT_FLAG_STRONG
    2276          191 :           || (flag_stack_protect == SPCT_FLAG_EXPLICIT
    2277            5 :               && lookup_attribute ("stack_protect", attribs))))
    2278              :     {
    2279          804 :       if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
    2280          344 :           && !(bits & SPCT_HAS_AGGREGATE))
    2281              :         ret = 1;
    2282          748 :       else if (bits & SPCT_HAS_ARRAY)
    2283              :         ret = 2;
    2284              :     }
    2285              :   else
    2286          186 :     ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
    2287              : 
    2288          186 :   if (ret)
    2289          639 :     has_protected_decls = true;
    2290              : 
    2291          990 :   return ret;
    2292              : }
    2293              : 
    2294              : /* Two helper routines that check for phase 1 and phase 2.  These are used
    2295              :    as callbacks for expand_stack_vars.  */
    2296              : 
    2297              : static bool
    2298          328 : stack_protect_decl_phase_1 (unsigned i)
    2299              : {
    2300          328 :   return stack_protect_decl_phase (stack_vars[i].decl) == 1;
    2301              : }
    2302              : 
    2303              : static bool
    2304          216 : stack_protect_decl_phase_2 (unsigned i)
    2305              : {
    2306          216 :   return stack_protect_decl_phase (stack_vars[i].decl) == 2;
    2307              : }
    2308              : 
    2309              : /* And helper function that checks for asan phase (with stack protector
    2310              :    it is phase 3).  This is used as callback for expand_stack_vars.
    2311              :    Returns true if any of the vars in the partition need to be protected.  */
    2312              : 
    2313              : static bool
    2314         3623 : asan_decl_phase_3 (unsigned i)
    2315              : {
    2316         4322 :   while (i != EOC)
    2317              :     {
    2318         3623 :       if (asan_protect_stack_decl (stack_vars[i].decl))
    2319              :         return true;
    2320          699 :       i = stack_vars[i].next;
    2321              :     }
    2322              :   return false;
    2323              : }
    2324              : 
    2325              : /* Ensure that variables in different stack protection phases conflict
    2326              :    so that they are not merged and share the same stack slot.
    2327              :    Return true if there are any address taken variables.  */
    2328              : 
    2329              : static bool
    2330          235 : add_stack_protection_conflicts (void)
    2331              : {
    2332          235 :   unsigned i, j, n = stack_vars_num;
    2333          235 :   unsigned char *phase;
    2334          235 :   bool ret = false;
    2335              : 
    2336          235 :   phase = XNEWVEC (unsigned char, n);
    2337          916 :   for (i = 0; i < n; ++i)
    2338              :     {
    2339          446 :       phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
    2340          446 :       if (TREE_ADDRESSABLE (stack_vars[i].decl))
    2341          371 :         ret = true;
    2342              :     }
    2343              : 
    2344          681 :   for (i = 0; i < n; ++i)
    2345              :     {
    2346          446 :       unsigned char ph_i = phase[i];
    2347         1162 :       for (j = i + 1; j < n; ++j)
    2348          716 :         if (ph_i != phase[j])
    2349          329 :           add_stack_var_conflict (i, j);
    2350              :     }
    2351              : 
    2352          235 :   XDELETEVEC (phase);
    2353          235 :   return ret;
    2354              : }
    2355              : 
    2356              : /* Create a decl for the guard at the top of the stack frame.  */
    2357              : 
    2358              : static void
    2359          251 : create_stack_guard (void)
    2360              : {
    2361          251 :   tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
    2362              :                            VAR_DECL, NULL, ptr_type_node);
    2363          251 :   TREE_THIS_VOLATILE (guard) = 1;
    2364          251 :   TREE_USED (guard) = 1;
    2365          251 :   expand_one_stack_var (guard);
    2366          251 :   crtl->stack_protect_guard = guard;
    2367          251 : }
    2368              : 
    2369              : /* Prepare for expanding variables.  */
    2370              : static void
    2371      7702888 : init_vars_expansion (void)
    2372              : {
    2373              :   /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
    2374      7702888 :   bitmap_obstack_initialize (&stack_var_bitmap_obstack);
    2375              : 
    2376              :   /* A map from decl to stack partition.  */
    2377      7702888 :   decl_to_stack_part = new hash_map<tree, unsigned>;
    2378              : 
    2379              :   /* Initialize local stack smashing state.  */
    2380      7702888 :   has_protected_decls = false;
    2381      7702888 :   has_short_buffer = false;
    2382      7702888 :   if (hwassist_sanitize_stack_p ())
    2383         1142 :     hwasan_record_frame_init ();
    2384      7702888 : }
    2385              : 
    2386              : /* Free up stack variable graph data.  */
    2387              : static void
    2388      7702888 : fini_vars_expansion (void)
    2389              : {
    2390      7702888 :   bitmap_obstack_release (&stack_var_bitmap_obstack);
    2391      7702888 :   if (stack_vars)
    2392      1291606 :     XDELETEVEC (stack_vars);
    2393      7702888 :   if (stack_vars_sorted)
    2394      1291606 :     XDELETEVEC (stack_vars_sorted);
    2395      7702888 :   stack_vars = NULL;
    2396      7702888 :   stack_vars_sorted = NULL;
    2397      7702888 :   stack_vars_alloc = stack_vars_num = 0;
    2398     15405776 :   delete decl_to_stack_part;
    2399      7702888 :   decl_to_stack_part = NULL;
    2400      7702888 : }
    2401              : 
    2402              : /* Make a fair guess for the size of the stack frame of the function
    2403              :    in NODE.  This doesn't have to be exact, the result is only used in
    2404              :    the inline heuristics.  So we don't want to run the full stack var
    2405              :    packing algorithm (which is quadratic in the number of stack vars).
    2406              :    Instead, we calculate the total size of all stack vars.  This turns
    2407              :    out to be a pretty fair estimate -- packing of stack vars doesn't
    2408              :    happen very often.  */
    2409              : 
    2410              : HOST_WIDE_INT
    2411      6221162 : estimated_stack_frame_size (struct cgraph_node *node)
    2412              : {
    2413      6221162 :   poly_int64 size = 0;
    2414      6221162 :   unsigned i;
    2415      6221162 :   tree var;
    2416      6221162 :   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
    2417              : 
    2418      6221162 :   push_cfun (fn);
    2419              : 
    2420      6221162 :   init_vars_expansion ();
    2421              : 
    2422     24820657 :   FOR_EACH_LOCAL_DECL (fn, i, var)
    2423     13500114 :     if (auto_var_in_fn_p (var, fn->decl))
    2424     12918748 :       size += expand_one_var (var, true, false);
    2425              : 
    2426      6221162 :   if (stack_vars_num > 0)
    2427              :     {
    2428              :       /* Fake sorting the stack vars for account_stack_vars ().  */
    2429      1061105 :       stack_vars_sorted = XNEWVEC (unsigned , stack_vars_num);
    2430      5523162 :       for (i = 0; i < stack_vars_num; ++i)
    2431      4462057 :         stack_vars_sorted[i] = i;
    2432      1061105 :       size += account_stack_vars ();
    2433              :     }
    2434              : 
    2435      6221162 :   fini_vars_expansion ();
    2436      6221162 :   pop_cfun ();
    2437      6221162 :   return estimated_poly_value (size);
    2438              : }
    2439              : 
    2440              : /* Check if the current function has calls that use a return slot.  */
    2441              : 
    2442              : static bool
    2443          417 : stack_protect_return_slot_p ()
    2444              : {
    2445          417 :   basic_block bb;
    2446              : 
    2447         1860 :   FOR_ALL_BB_FN (bb, cfun)
    2448         2944 :     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
    2449         3970 :          !gsi_end_p (gsi); gsi_next (&gsi))
    2450              :       {
    2451         2527 :         gimple *stmt = gsi_stmt (gsi);
    2452              :         /* This assumes that calls to internal-only functions never
    2453              :            use a return slot.  */
    2454         2527 :         if (is_gimple_call (stmt)
    2455          542 :             && !gimple_call_internal_p (stmt)
    2456         3052 :             && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
    2457          525 :                                   gimple_call_fndecl (stmt)))
    2458          417 :           return true;
    2459              :       }
    2460              :   return false;
    2461              : }
    2462              : 
    2463              : /* Expand all variables used in the function.  */
    2464              : 
    2465              : static rtx_insn *
    2466      1481726 : expand_used_vars (bitmap forced_stack_vars)
    2467              : {
    2468      1481726 :   tree var, outer_block = DECL_INITIAL (current_function_decl);
    2469      1481726 :   auto_vec<tree> maybe_local_decls;
    2470      1481726 :   rtx_insn *var_end_seq = NULL;
    2471      1481726 :   unsigned i;
    2472      1481726 :   unsigned len;
    2473      1481726 :   bool gen_stack_protect_signal = false;
    2474              : 
    2475              :   /* Compute the phase of the stack frame for this function.  */
    2476      1481726 :   {
    2477      1481726 :     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    2478      1481726 :     int off = targetm.starting_frame_offset () % align;
    2479      1481726 :     frame_phase = off ? align - off : 0;
    2480              :   }
    2481              : 
    2482              :   /* Set TREE_USED on all variables in the local_decls.  */
    2483     10528229 :   FOR_EACH_LOCAL_DECL (cfun, i, var)
    2484      7806484 :     TREE_USED (var) = 1;
    2485              :   /* Clear TREE_USED on all variables associated with a block scope.  */
    2486      1481726 :   clear_tree_used (DECL_INITIAL (current_function_decl));
    2487              : 
    2488      1481726 :   init_vars_expansion ();
    2489              : 
    2490      1481726 :   if (targetm.use_pseudo_pic_reg ())
    2491        80395 :     pic_offset_table_rtx = gen_reg_rtx (Pmode);
    2492              : 
    2493     27358918 :   for (i = 0; i < SA.map->num_partitions; i++)
    2494              :     {
    2495     25877192 :       if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
    2496      3620008 :         continue;
    2497              : 
    2498     22257184 :       tree var = partition_to_var (SA.map, i);
    2499              : 
    2500     44514368 :       gcc_assert (!virtual_operand_p (var));
    2501              : 
    2502     22257184 :       expand_one_ssa_partition (var);
    2503              :     }
    2504              : 
    2505      1481726 :   if (flag_stack_protect == SPCT_FLAG_STRONG)
    2506          417 :     gen_stack_protect_signal = stack_protect_return_slot_p ();
    2507              : 
    2508              :   /* At this point all variables on the local_decls with TREE_USED
    2509              :      set are not associated with any block scope.  Lay them out.  */
    2510              : 
    2511      1481726 :   len = vec_safe_length (cfun->local_decls);
    2512      9288210 :   FOR_EACH_LOCAL_DECL (cfun, i, var)
    2513              :     {
    2514      7806484 :       bool expand_now = false;
    2515              : 
    2516              :       /* Expanded above already.  */
    2517      7806484 :       if (is_gimple_reg (var))
    2518              :         {
    2519      5488950 :           TREE_USED (var) = 0;
    2520      5488950 :           goto next;
    2521              :         }
    2522              :       /* We didn't set a block for static or extern because it's hard
    2523              :          to tell the difference between a global variable (re)declared
    2524              :          in a local scope, and one that's really declared there to
    2525              :          begin with.  And it doesn't really matter much, since we're
    2526              :          not giving them stack space.  Expand them now.  */
    2527      2317534 :       else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
    2528              :         expand_now = true;
    2529              : 
    2530              :       /* Expand variables not associated with any block now.  Those created by
    2531              :          the optimizers could be live anywhere in the function.  Those that
    2532              :          could possibly have been scoped originally and detached from their
    2533              :          block will have their allocation deferred so we coalesce them with
    2534              :          others when optimization is enabled.  */
    2535      2101680 :       else if (TREE_USED (var))
    2536      1653722 :         expand_now = true;
    2537              : 
    2538              :       /* Finally, mark all variables on the list as used.  We'll use
    2539              :          this in a moment when we expand those associated with scopes.  */
    2540      2317534 :       TREE_USED (var) = 1;
    2541              : 
    2542      2317534 :       if (expand_now)
    2543      1653722 :         expand_one_var (var, true, true, forced_stack_vars);
    2544              : 
    2545       663812 :     next:
    2546      7806484 :       if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
    2547              :         {
    2548       409774 :           rtx rtl = DECL_RTL_IF_SET (var);
    2549              : 
    2550              :           /* Keep artificial non-ignored vars in cfun->local_decls
    2551              :              chain until instantiate_decls.  */
    2552       306322 :           if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
    2553        20320 :             add_local_decl (cfun, var);
    2554       103452 :           else if (rtl == NULL_RTX)
    2555              :             /* If rtl isn't set yet, which can happen e.g. with
    2556              :                -fstack-protector, retry before returning from this
    2557              :                function.  */
    2558       103452 :             maybe_local_decls.safe_push (var);
    2559              :         }
    2560              :     }
    2561              : 
    2562              :   /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
    2563              : 
    2564              :      +-----------------+-----------------+
    2565              :      | ...processed... | ...duplicates...|
    2566              :      +-----------------+-----------------+
    2567              :                        ^
    2568              :                        +-- LEN points here.
    2569              : 
    2570              :      We just want the duplicates, as those are the artificial
    2571              :      non-ignored vars that we want to keep until instantiate_decls.
    2572              :      Move them down and truncate the array.  */
    2573      1481726 :   if (!vec_safe_is_empty (cfun->local_decls))
    2574       833238 :     cfun->local_decls->block_remove (0, len);
    2575              : 
    2576              :   /* At this point, all variables within the block tree with TREE_USED
    2577              :      set are actually used by the optimized function.  Lay them out.  */
    2578      1481726 :   expand_used_vars_for_block (outer_block, true, forced_stack_vars);
    2579              : 
    2580      1481726 :   tree attribs = DECL_ATTRIBUTES (current_function_decl);
    2581      1481726 :   if (stack_vars_num > 0)
    2582              :     {
    2583       230501 :       bool has_addressable_vars = false;
    2584              : 
    2585       230501 :       add_scope_conflicts ();
    2586              : 
    2587              :       /* If stack protection is enabled, we don't share space between
    2588              :          vulnerable data and non-vulnerable data.  */
    2589       230501 :       if (flag_stack_protect != 0
    2590          235 :           && !lookup_attribute ("no_stack_protector", attribs)
    2591       230736 :           && (flag_stack_protect != SPCT_FLAG_EXPLICIT
    2592              :               || (flag_stack_protect == SPCT_FLAG_EXPLICIT
    2593            1 :                   && lookup_attribute ("stack_protect", attribs))))
    2594          235 :         has_addressable_vars = add_stack_protection_conflicts ();
    2595              : 
    2596       230501 :       if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
    2597       230501 :         gen_stack_protect_signal = true;
    2598              : 
    2599              :       /* Now that we have collected all stack variables, and have computed a
    2600              :          minimal interference graph, attempt to save some stack space.  */
    2601       230501 :       partition_stack_vars ();
    2602       230501 :       if (dump_file)
    2603           29 :         dump_stack_var_partition ();
    2604              :     }
    2605              : 
    2606              : 
    2607      1481726 :   if (!lookup_attribute ("no_stack_protector", attribs))
    2608      1481717 :     switch (flag_stack_protect)
    2609              :       {
    2610           19 :       case SPCT_FLAG_ALL:
    2611           19 :         create_stack_guard ();
    2612           19 :         break;
    2613              : 
    2614          417 :       case SPCT_FLAG_STRONG:
    2615          417 :         if (gen_stack_protect_signal
    2616          273 :             || cfun->calls_alloca
    2617          271 :             || has_protected_decls
    2618          680 :             || lookup_attribute ("stack_protect", attribs))
    2619          154 :           create_stack_guard ();
    2620              :         break;
    2621              : 
    2622          163 :       case SPCT_FLAG_DEFAULT:
    2623          163 :         if (cfun->calls_alloca
    2624          160 :             || has_protected_decls
    2625          258 :             || lookup_attribute ("stack_protect", attribs))
    2626           68 :           create_stack_guard ();
    2627              :         break;
    2628              : 
    2629           14 :       case SPCT_FLAG_EXPLICIT:
    2630           14 :         if (lookup_attribute ("stack_protect", attribs))
    2631           10 :           create_stack_guard ();
    2632              :         break;
    2633              : 
    2634              :       default:
    2635              :         break;
    2636              :       }
    2637              : 
    2638              :   /* Assign rtl to each variable based on these partitions.  */
    2639      1481726 :   if (stack_vars_num > 0)
    2640              :     {
    2641       230501 :       class stack_vars_data data;
    2642              : 
    2643       230501 :       data.asan_base = NULL_RTX;
    2644       230501 :       data.asan_alignb = 0;
    2645              : 
    2646              :       /* Reorder decls to be protected by iterating over the variables
    2647              :          array multiple times, and allocating out of each phase in turn.  */
    2648              :       /* ??? We could probably integrate this into the qsort we did
    2649              :          earlier, such that we naturally see these variables first,
    2650              :          and thus naturally allocate things in the right order.  */
    2651       230501 :       if (has_protected_decls)
    2652              :         {
    2653              :           /* Phase 1 contains only character arrays.  */
    2654          141 :           expand_stack_vars (stack_protect_decl_phase_1, &data);
    2655              : 
    2656              :           /* Phase 2 contains other kinds of arrays.  */
    2657          141 :           if (!lookup_attribute ("no_stack_protector", attribs)
    2658          141 :               && (flag_stack_protect == SPCT_FLAG_ALL
    2659          141 :                   || flag_stack_protect == SPCT_FLAG_STRONG
    2660           66 :                   || (flag_stack_protect == SPCT_FLAG_EXPLICIT
    2661            1 :                       && lookup_attribute ("stack_protect", attribs))))
    2662           76 :             expand_stack_vars (stack_protect_decl_phase_2, &data);
    2663              :         }
    2664              : 
    2665       230501 :       if (asan_sanitize_stack_p ())
    2666              :         /* Phase 3, any partitions that need asan protection
    2667              :            in addition to phase 1 and 2.  */
    2668         1831 :         expand_stack_vars (asan_decl_phase_3, &data);
    2669              : 
    2670              :       /* ASAN description strings don't yet have a syntax for expressing
    2671              :          polynomial offsets.  */
    2672       230501 :       HOST_WIDE_INT prev_offset;
    2673       232129 :       if (!data.asan_vec.is_empty ()
    2674         1628 :           && frame_offset.is_constant (&prev_offset))
    2675              :         {
    2676         1628 :           HOST_WIDE_INT offset, sz, redzonesz;
    2677         1628 :           redzonesz = ASAN_RED_ZONE_SIZE;
    2678         1628 :           sz = data.asan_vec[0] - prev_offset;
    2679         1628 :           if (data.asan_alignb > ASAN_RED_ZONE_SIZE
    2680         1628 :               && data.asan_alignb <= 4096
    2681           45 :               && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
    2682           45 :             redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
    2683           45 :                          & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
    2684              :           /* Allocating a constant amount of space from a constant
    2685              :              starting offset must give a constant result.  */
    2686         1628 :           offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
    2687         1628 :                     .to_constant ());
    2688         1628 :           data.asan_vec.safe_push (prev_offset);
    2689         1628 :           data.asan_vec.safe_push (offset);
    2690              :           /* Leave space for alignment if STRICT_ALIGNMENT.  */
    2691         1628 :           if (STRICT_ALIGNMENT)
    2692              :             alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
    2693              :                                       << ASAN_SHADOW_SHIFT)
    2694              :                                      / BITS_PER_UNIT, 1);
    2695              : 
    2696         1628 :           var_end_seq
    2697         3256 :             = asan_emit_stack_protection (virtual_stack_vars_rtx,
    2698              :                                           data.asan_base,
    2699              :                                           data.asan_alignb,
    2700              :                                           data.asan_vec.address (),
    2701              :                                           data.asan_decl_vec.address (),
    2702         1628 :                                           data.asan_vec.length ());
    2703              :         }
    2704              : 
    2705       230501 :       expand_stack_vars (NULL, &data);
    2706       230501 :     }
    2707              : 
    2708      1481726 :   if (hwassist_sanitize_stack_p ())
    2709          398 :     hwasan_emit_prologue ();
    2710      1481726 :   if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    2711          182 :     var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
    2712              :                                               virtual_stack_vars_rtx,
    2713              :                                               var_end_seq);
    2714      2962690 :   else if ((hwasan_sanitize_allocas_p () || memtag_sanitize_p ())
    2715      1481544 :            && cfun->calls_alloca)
    2716              :     /* When using out-of-line instrumentation we only want to emit one function
    2717              :        call for clearing the tags in a region of shadow stack.  When there are
    2718              :        alloca calls in this frame we want to emit a call using the
    2719              :        virtual_stack_dynamic_rtx, but when not we use the hwasan_frame_extent
    2720              :        rtx we created in expand_stack_vars.  */
    2721            0 :     var_end_seq = hwasan_emit_untag_frame (virtual_stack_dynamic_rtx,
    2722              :                                            virtual_stack_vars_rtx);
    2723      1481544 :   else if (hwassist_sanitize_stack_p ())
    2724              :     /* If no variables were stored on the stack, `hwasan_get_frame_extent`
    2725              :        will return NULL_RTX and hence `hwasan_emit_untag_frame` will return
    2726              :        NULL (i.e. an empty sequence).  */
    2727          398 :     var_end_seq = hwasan_emit_untag_frame (hwasan_get_frame_extent (),
    2728              :                                            virtual_stack_vars_rtx);
    2729              : 
    2730      1481726 :   fini_vars_expansion ();
    2731              : 
    2732              :   /* If there were any artificial non-ignored vars without rtl
    2733              :      found earlier, see if deferred stack allocation hasn't assigned
    2734              :      rtl to them.  */
    2735      1626098 :   FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    2736              :     {
    2737       103452 :       rtx rtl = DECL_RTL_IF_SET (var);
    2738              : 
    2739              :       /* Keep artificial non-ignored vars in cfun->local_decls
    2740              :          chain until instantiate_decls.  */
    2741         4439 :       if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
    2742         4297 :         add_local_decl (cfun, var);
    2743              :     }
    2744              : 
    2745              :   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
    2746      1481726 :   if (STACK_ALIGNMENT_NEEDED)
    2747              :     {
    2748      1481726 :       HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    2749      1481726 :       if (FRAME_GROWS_DOWNWARD)
    2750      1481726 :         frame_offset = aligned_lower_bound (frame_offset, align);
    2751              :       else
    2752              :         frame_offset = aligned_upper_bound (frame_offset, align);
    2753              :     }
    2754              : 
    2755      1481726 :   return var_end_seq;
    2756      1481726 : }
    2757              : 
    2758              : 
    2759              : /* If we need to produce a detailed dump, print the tree representation
    2760              :    for STMT to the dump file.  SINCE is the last RTX after which the RTL
    2761              :    generated for STMT should have been appended.  */
    2762              : 
    2763              : static void
    2764     41754006 : maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
    2765              : {
    2766     41754006 :   if (dump_file && (dump_flags & TDF_DETAILS))
    2767              :     {
    2768          389 :       fprintf (dump_file, "\n;; ");
    2769          389 :       print_gimple_stmt (dump_file, stmt, 0,
    2770              :                          TDF_SLIM | (dump_flags & TDF_LINENO));
    2771          389 :       fprintf (dump_file, "\n");
    2772              : 
    2773          389 :       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    2774              :     }
    2775     41754006 : }
    2776              : 
    2777              : /* Temporary storage for BB_HEAD and BB_END of bbs until they are converted
    2778              :    to BB_RTL.  */
    2779              : static vec<std::pair <rtx_insn *, rtx_insn *>> head_end_for_bb;
    2780              : 
    2781              : /* Maps the blocks that do not contain tree labels to rtx labels.  */
    2782              : 
    2783              : static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
    2784              : 
    2785              : /* Returns the label_rtx expression for a label starting basic block BB.  */
    2786              : 
    2787              : static rtx_code_label *
    2788      7686863 : label_rtx_for_bb (basic_block bb)
    2789              : {
    2790      7686863 :   if (bb->flags & BB_RTL)
    2791            0 :     return block_label (bb);
    2792              : 
    2793      7686863 :   if ((unsigned) bb->index < head_end_for_bb.length ()
    2794      7686863 :       && head_end_for_bb[bb->index].first)
    2795              :     {
    2796      1585574 :       if (!LABEL_P (head_end_for_bb[bb->index].first))
    2797              :         {
    2798       960647 :           head_end_for_bb[bb->index].first
    2799      1921294 :             = emit_label_before (gen_label_rtx (),
    2800       960647 :                                  head_end_for_bb[bb->index].first);
    2801              :         }
    2802      1585574 :       return as_a <rtx_code_label *> (head_end_for_bb[bb->index].first);
    2803              :     }
    2804              : 
    2805      6101289 :   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
    2806      6101289 :   if (elt)
    2807      1277115 :     return *elt;
    2808              : 
    2809              :   /* Find the tree label if it is present.  */
    2810      4824174 :   gimple_stmt_iterator gsi = gsi_start_bb (bb);
    2811      4824174 :   glabel *lab_stmt;
    2812      4824174 :   if (!gsi_end_p (gsi)
    2813      4680838 :       && (lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
    2814      5047980 :       && !DECL_NONLOCAL (gimple_label_label (lab_stmt)))
    2815       223806 :     return jump_target_rtx (gimple_label_label (lab_stmt));
    2816              : 
    2817      4600368 :   rtx_code_label *l = gen_label_rtx ();
    2818      4600368 :   lab_rtx_for_bb->put (bb, l);
    2819      4600368 :   return l;
    2820              : }
    2821              : 
    2822              : 
    2823              : /* Wrapper around remove_edge during expansion.  */
    2824              : 
    2825              : void
    2826       131365 : expand_remove_edge (edge e)
    2827              : {
    2828       131365 :   if (current_ir_type () != IR_GIMPLE
    2829       131365 :       && (e->dest->flags & BB_RTL) == 0
    2830       196077 :       && !gimple_seq_empty_p (phi_nodes (e->dest)))
    2831         9046 :     remove_phi_args (e);
    2832       131365 :   remove_edge (e);
    2833       131365 : }
    2834              : 
    2835              : /* Split edge E during expansion and instead of creating a new
    2836              :    bb on that edge, add there BB.  FLAGS should be flags on the
    2837              :    new edge from BB to former E->dest.  */
    2838              : 
    2839              : static void
    2840      1884816 : expand_split_edge (edge e, basic_block bb, int flags)
    2841              : {
    2842      1884816 :   unsigned int dest_idx = e->dest_idx;
    2843      1884816 :   basic_block dest = e->dest;
    2844      1884816 :   redirect_edge_succ (e, bb);
    2845      1884816 :   e = make_single_succ_edge (bb, dest, flags);
    2846      1884816 :   if ((dest->flags & BB_RTL) == 0
    2847      1884816 :       && phi_nodes (dest)
    2848      2020632 :       && e->dest_idx != dest_idx)
    2849              :     {
    2850              :       /* If there are any PHI nodes on dest, swap the new succ edge
    2851              :          with the one moved into false_edge's former position, so that
    2852              :          PHI arguments don't need adjustment.  */
    2853        82319 :       edge e2 = EDGE_PRED (dest, dest_idx);
    2854        82319 :       std::swap (e->dest_idx, e2->dest_idx);
    2855        82319 :       std::swap (EDGE_PRED (dest, e->dest_idx),
    2856              :                  EDGE_PRED (dest, e2->dest_idx));
    2857              :     }
    2858      1884816 : }
    2859              : 
    2860              : 
    2861              : /* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
    2862              :    of a basic block where we just expanded the conditional at the end,
    2863              :    possibly clean up the CFG and instruction sequence.  LAST is the
    2864              :    last instruction before the just emitted jump sequence.  */
    2865              : 
    2866              : static void
    2867      5273182 : maybe_cleanup_end_of_block (edge e, rtx_insn *last)
    2868              : {
    2869              :   /* Special case: when jumpif decides that the condition is
    2870              :      trivial it emits an unconditional jump (and the necessary
    2871              :      barrier).  But we still have two edges, the fallthru one is
    2872              :      wrong.  purge_dead_edges would clean this up later.  Unfortunately
    2873              :      we have to insert insns (and split edges) before
    2874              :      find_many_sub_basic_blocks and hence before purge_dead_edges.
    2875              :      But splitting edges might create new blocks which depend on the
    2876              :      fact that if there are two edges there's no barrier.  So the
    2877              :      barrier would get lost and verify_flow_info would ICE.  Instead
    2878              :      of auditing all edge splitters to care for the barrier (which
    2879              :      normally isn't there in a cleaned CFG), fix it here.  */
    2880      5273182 :   if (BARRIER_P (get_last_insn ()))
    2881              :     {
    2882          131 :       rtx_insn *insn;
    2883          131 :       expand_remove_edge (e);
    2884              :       /* Now, we have a single successor block, if we have insns to
    2885              :          insert on the remaining edge we potentially will insert
    2886              :          it at the end of this block (if the dest block isn't feasible)
    2887              :          in order to avoid splitting the edge.  This insertion will take
    2888              :          place in front of the last jump.  But we might have emitted
    2889              :          multiple jumps (conditional and one unconditional) to the
    2890              :          same destination.  Inserting in front of the last one then
    2891              :          is a problem.  See PR 40021.  We fix this by deleting all
    2892              :          jumps except the last unconditional one.  */
    2893          131 :       insn = PREV_INSN (get_last_insn ());
    2894              :       /* Make sure we have an unconditional jump.  Otherwise we're
    2895              :          confused.  */
    2896          131 :       gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
    2897          262 :       for (insn = PREV_INSN (insn); insn != last;)
    2898              :         {
    2899            0 :           insn = PREV_INSN (insn);
    2900            0 :           if (JUMP_P (NEXT_INSN (insn)))
    2901              :             {
    2902            0 :               if (!any_condjump_p (NEXT_INSN (insn)))
    2903              :                 {
    2904            0 :                   gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
    2905            0 :                   delete_insn (NEXT_INSN (NEXT_INSN (insn)));
    2906              :                 }
    2907            0 :               delete_insn (NEXT_INSN (insn));
    2908              :             }
    2909              :         }
    2910              :     }
    2911      5273182 : }
    2912              : 
    2913              : /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
    2914              :    Returns a new basic block if we've terminated the current basic
    2915              :    block and created a new one.  */
    2916              : 
    2917              : static basic_block
    2918      5676272 : expand_gimple_cond (basic_block bb, gcond *stmt)
    2919              : {
    2920      5676272 :   basic_block new_bb, dest;
    2921      5676272 :   edge true_edge;
    2922      5676272 :   edge false_edge;
    2923      5676272 :   rtx_insn *last2, *last;
    2924      5676272 :   enum tree_code code;
    2925      5676272 :   tree op0, op1;
    2926              : 
    2927      5676272 :   code = gimple_cond_code (stmt);
    2928      5676272 :   op0 = gimple_cond_lhs (stmt);
    2929      5676272 :   op1 = gimple_cond_rhs (stmt);
    2930              :   /* We're sometimes presented with such code:
    2931              :        D.123_1 = x < y;
    2932              :        if (D.123_1 != 0)
    2933              :          ...
    2934              :      This would expand to two comparisons which then later might
    2935              :      be cleaned up by combine.  But some pattern matchers like if-conversion
    2936              :      work better when there's only one compare, so make up for this
    2937              :      here as special exception if TER would have made the same change.  */
    2938      5676272 :   if (SA.values
    2939      4149009 :       && TREE_CODE (op0) == SSA_NAME
    2940      4148298 :       && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
    2941       370772 :       && TREE_CODE (op1) == INTEGER_CST
    2942       361134 :       && ((gimple_cond_code (stmt) == NE_EXPR
    2943       361132 :            && integer_zerop (op1))
    2944            2 :           || (gimple_cond_code (stmt) == EQ_EXPR
    2945            2 :               && integer_onep (op1)))
    2946      6037406 :       && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    2947              :     {
    2948       189774 :       gimple *second = SSA_NAME_DEF_STMT (op0);
    2949       189774 :       if (gimple_code (second) == GIMPLE_ASSIGN)
    2950              :         {
    2951       189770 :           enum tree_code code2 = gimple_assign_rhs_code (second);
    2952       189770 :           if (TREE_CODE_CLASS (code2) == tcc_comparison)
    2953              :             {
    2954          785 :               code = code2;
    2955          785 :               op0 = gimple_assign_rhs1 (second);
    2956          785 :               op1 = gimple_assign_rhs2 (second);
    2957              :             }
    2958              :           /* If jumps are cheap and the target does not support conditional
    2959              :              compare, turn some more codes into jumpy sequences.  */
    2960       188985 :           else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
    2961       188985 :                    && !targetm.have_ccmp ())
    2962              :             {
    2963       188982 :               if ((code2 == BIT_AND_EXPR
    2964        55185 :                    && TYPE_PRECISION (TREE_TYPE (op0)) == 1
    2965        55185 :                    && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
    2966       188982 :                   || code2 == TRUTH_AND_EXPR)
    2967              :                 {
    2968        55185 :                   code = TRUTH_ANDIF_EXPR;
    2969        55185 :                   op0 = gimple_assign_rhs1 (second);
    2970        55185 :                   op1 = gimple_assign_rhs2 (second);
    2971              :                 }
    2972       133797 :               else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
    2973              :                 {
    2974        58299 :                   code = TRUTH_ORIF_EXPR;
    2975        58299 :                   op0 = gimple_assign_rhs1 (second);
    2976        58299 :                   op1 = gimple_assign_rhs2 (second);
    2977              :                 }
    2978              :             }
    2979              :         }
    2980              :     }
    2981              : 
    2982              :   /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
    2983              :      into (x - C2) * C3 < C4.  */
    2984      5676272 :   if ((code == EQ_EXPR || code == NE_EXPR)
    2985      4444248 :       && TREE_CODE (op0) == SSA_NAME
    2986      4443469 :       && TREE_CODE (op1) == INTEGER_CST)
    2987      3072703 :     code = maybe_optimize_mod_cmp (code, &op0, &op1);
    2988              : 
    2989              :   /* Optimize (x - y) < 0 into x < y if x - y has undefined overflow.  */
    2990      5676272 :   if (!TYPE_UNSIGNED (TREE_TYPE (op0))
    2991      2276129 :       && (code == LT_EXPR || code == LE_EXPR
    2992      2276129 :           || code == GT_EXPR || code == GE_EXPR)
    2993       690409 :       && integer_zerop (op1)
    2994      5876374 :       && TREE_CODE (op0) == SSA_NAME)
    2995       200102 :     maybe_optimize_sub_cmp_0 (code, &op0, &op1);
    2996              : 
    2997      5676272 :   last2 = last = get_last_insn ();
    2998              : 
    2999      5676272 :   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
    3000      5676272 :   set_curr_insn_location (gimple_location (stmt));
    3001              : 
    3002              :   /* We can either have a pure conditional jump with one fallthru edge or
    3003              :      two-way jump that needs to be decomposed into two basic blocks.  */
    3004      5676272 :   if (false_edge->dest == bb->next_bb)
    3005              :     {
    3006      2270507 :       jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
    3007              :                 true_edge->probability);
    3008      2270507 :       maybe_dump_rtl_for_gimple_stmt (stmt, last);
    3009      2270507 :       if (true_edge->goto_locus != UNKNOWN_LOCATION)
    3010       342735 :         set_curr_insn_location (true_edge->goto_locus);
    3011      2270507 :       false_edge->flags |= EDGE_FALLTHRU;
    3012      2270507 :       maybe_cleanup_end_of_block (false_edge, last);
    3013      2270507 :       return NULL;
    3014              :     }
    3015      3405765 :   if (true_edge->dest == bb->next_bb)
    3016              :     {
    3017      3002675 :       jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
    3018              :                    false_edge->probability);
    3019      3002675 :       maybe_dump_rtl_for_gimple_stmt (stmt, last);
    3020      3002675 :       if (false_edge->goto_locus != UNKNOWN_LOCATION)
    3021        24061 :         set_curr_insn_location (false_edge->goto_locus);
    3022      3002675 :       true_edge->flags |= EDGE_FALLTHRU;
    3023      3002675 :       maybe_cleanup_end_of_block (true_edge, last);
    3024      3002675 :       return NULL;
    3025              :     }
    3026              : 
    3027       403090 :   jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
    3028              :             true_edge->probability);
    3029       403090 :   last = get_last_insn ();
    3030       403090 :   if (false_edge->goto_locus != UNKNOWN_LOCATION)
    3031         1326 :     set_curr_insn_location (false_edge->goto_locus);
    3032       403090 :   emit_jump (label_rtx_for_bb (false_edge->dest));
    3033              : 
    3034       403090 :   head_end_for_bb[bb->index].second = last;
    3035       403090 :   if (BARRIER_P (head_end_for_bb[bb->index].second))
    3036          114 :     head_end_for_bb[bb->index].second
    3037           57 :       = PREV_INSN (head_end_for_bb[bb->index].second);
    3038       806180 :   update_bb_for_insn_chain (head_end_for_bb[bb->index].first,
    3039       403090 :                             head_end_for_bb[bb->index].second, bb);
    3040              : 
    3041       403090 :   new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
    3042       403090 :   dest = false_edge->dest;
    3043       403090 :   expand_split_edge (false_edge, new_bb, 0);
    3044       403090 :   false_edge->flags |= EDGE_FALLTHRU;
    3045       403090 :   new_bb->count = false_edge->count ();
    3046       403090 :   loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
    3047       403090 :   add_bb_to_loop (new_bb, loop);
    3048       403090 :   if (loop->latch == bb
    3049        26533 :       && loop->header == dest)
    3050        26531 :     loop->latch = new_bb;
    3051       403090 :   if (BARRIER_P (BB_END (new_bb)))
    3052       403090 :     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
    3053       403090 :   update_bb_for_insn (new_bb);
    3054              : 
    3055       403090 :   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
    3056              : 
    3057       403090 :   if (true_edge->goto_locus != UNKNOWN_LOCATION)
    3058              :     {
    3059        45557 :       set_curr_insn_location (true_edge->goto_locus);
    3060        45557 :       true_edge->goto_locus = curr_insn_location ();
    3061              :     }
    3062              : 
    3063              :   return new_bb;
    3064              : }
    3065              : 
    3066              : /* Mark all calls that can have a transaction restart.  */
    3067              : 
    3068              : static void
    3069      6602370 : mark_transaction_restart_calls (gimple *stmt)
    3070              : {
    3071      6602370 :   struct tm_restart_node dummy;
    3072      6602370 :   tm_restart_node **slot;
    3073              : 
    3074      6602370 :   if (!cfun->gimple_df->tm_restart)
    3075      6599382 :     return;
    3076              : 
    3077         2988 :   dummy.stmt = stmt;
    3078         2988 :   slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
    3079         2988 :   if (slot)
    3080              :     {
    3081            0 :       struct tm_restart_node *n = *slot;
    3082            0 :       tree list = n->label_or_list;
    3083            0 :       rtx_insn *insn;
    3084              : 
    3085            0 :       for (insn = next_real_insn (get_last_insn ());
    3086            0 :            !CALL_P (insn);
    3087            0 :            insn = next_real_insn (insn))
    3088            0 :         continue;
    3089              : 
    3090            0 :       if (TREE_CODE (list) == LABEL_DECL)
    3091            0 :         add_reg_note (insn, REG_TM, label_rtx (list));
    3092              :       else
    3093            0 :         for (; list ; list = TREE_CHAIN (list))
    3094            0 :           add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    3095            0 :     }
    3096              : }
    3097              : 
    3098              : /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
    3099              :    statement STMT.  */
    3100              : 
    3101              : static void
    3102      6823180 : expand_call_stmt (gcall *stmt)
    3103              : {
    3104      6823180 :   tree exp, decl, lhs;
    3105      6823180 :   bool builtin_p;
    3106      6823180 :   size_t i;
    3107              : 
    3108      6823180 :   if (gimple_call_internal_p (stmt))
    3109              :     {
    3110       186222 :       expand_internal_call (stmt);
    3111       186222 :       return;
    3112              :     }
    3113              : 
    3114              :   /* If this is a call to a built-in function and it has no effect other
    3115              :      than setting the lhs, try to implement it using an internal function
    3116              :      instead.  */
    3117      6636958 :   decl = gimple_call_fndecl (stmt);
    3118      6636958 :   if (gimple_call_lhs (stmt)
    3119      2361883 :       && !gimple_has_side_effects (stmt)
    3120      7340756 :       && (optimize || (decl && called_as_built_in (decl))))
    3121              :     {
    3122       683666 :       internal_fn ifn = replacement_internal_fn (stmt);
    3123       683666 :       if (ifn != IFN_LAST)
    3124              :         {
    3125        34587 :           expand_internal_call (ifn, stmt);
    3126        34587 :           return;
    3127              :         }
    3128              :     }
    3129              : 
    3130      6602371 :   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
    3131              : 
    3132      6602371 :   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
    3133      6602371 :   builtin_p = decl && fndecl_built_in_p (decl);
    3134              : 
    3135              :   /* If this is not a builtin function, the function type through which the
    3136              :      call is made may be different from the type of the function.  */
    3137      4643662 :   if (!builtin_p)
    3138      9287324 :     CALL_EXPR_FN (exp)
    3139     13930986 :       = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
    3140              :                       CALL_EXPR_FN (exp));
    3141              : 
    3142      6602371 :   TREE_TYPE (exp) = gimple_call_return_type (stmt);
    3143      6602371 :   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
    3144              : 
    3145     19896910 :   for (i = 0; i < gimple_call_num_args (stmt); i++)
    3146              :     {
    3147     13294539 :       tree arg = gimple_call_arg (stmt, i);
    3148     13294539 :       gimple *def;
    3149              :       /* TER addresses into arguments of builtin functions so we have a
    3150              :          chance to infer more correct alignment information.  See PR39954.  */
    3151     13294539 :       if (builtin_p
    3152      4023674 :           && TREE_CODE (arg) == SSA_NAME
    3153      1377273 :           && (def = get_gimple_for_ssa_name (arg))
    3154       335377 :           && is_gimple_assign (def)
    3155     13629755 :           && gimple_assign_rhs_code (def) == ADDR_EXPR)
    3156        14515 :         arg = gimple_assign_rhs1 (def);
    3157     13294539 :       CALL_EXPR_ARG (exp, i) = arg;
    3158              :     }
    3159              : 
    3160      6602371 :   if (gimple_has_side_effects (stmt)
    3161              :       /* ???  Downstream in expand_expr_real_1 we assume that expressions
    3162              :          w/o side-effects do not throw so work around this here.  */
    3163      6602371 :       || stmt_could_throw_p (cfun, stmt))
    3164      5935404 :     TREE_SIDE_EFFECTS (exp) = 1;
    3165              : 
    3166      6602371 :   if (gimple_call_nothrow_p (stmt))
    3167      2937904 :     TREE_NOTHROW (exp) = 1;
    3168              : 
    3169      6602371 :   CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
    3170      6602371 :   CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
    3171      6602371 :   CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
    3172      6602371 :   if (decl
    3173      6416702 :       && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
    3174      8387434 :       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
    3175        28057 :     CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
    3176              :   else
    3177      6574314 :     CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
    3178      6602371 :   CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
    3179      6602371 :   CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
    3180      6602371 :   SET_EXPR_LOCATION (exp, gimple_location (stmt));
    3181              : 
    3182              :   /* Must come after copying location.  */
    3183      6602371 :   copy_warning (exp, stmt);
    3184              : 
    3185              :   /* Ensure RTL is created for debug args.  */
    3186     13019073 :   if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    3187              :     {
    3188        66046 :       vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
    3189        66046 :       unsigned int ix;
    3190        66046 :       tree dtemp;
    3191              : 
    3192        66046 :       if (debug_args)
    3193       144318 :         for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
    3194              :           {
    3195        78272 :             gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
    3196        78272 :             expand_debug_expr (dtemp);
    3197              :           }
    3198              :     }
    3199              : 
    3200      6602371 :   rtx_insn *before_call = get_last_insn ();
    3201      6602371 :   lhs = gimple_call_lhs (stmt);
    3202      6602371 :   if (lhs)
    3203      2327296 :     expand_assignment (lhs, exp, false);
    3204              :   else
    3205      4275075 :     expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
    3206              : 
    3207              :   /* If the gimple call is an indirect call and has 'nocf_check'
    3208              :      attribute find a generated CALL insn to mark it as no
    3209              :      control-flow verification is needed.  */
    3210      6602370 :   if (gimple_call_nocf_check_p (stmt)
    3211      6602391 :       && !gimple_call_fndecl (stmt))
    3212              :     {
    3213           20 :       rtx_insn *last = get_last_insn ();
    3214           20 :       while (!CALL_P (last)
    3215           34 :              && last != before_call)
    3216           14 :         last = PREV_INSN (last);
    3217              : 
    3218           20 :       if (last != before_call)
    3219           20 :         add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
    3220              :     }
    3221              : 
    3222      6602370 :   mark_transaction_restart_calls (stmt);
    3223              : }
    3224              : 
    3225              : 
    3226              : /* Generate RTL for an asm statement (explicit assembler code).
    3227              :    STRING is a STRING_CST node containing the assembler code text,
    3228              :    or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
    3229              :    insn is volatile; don't optimize it.  */
    3230              : 
    3231              : static void
    3232         2666 : expand_asm_loc (tree string, int vol, location_t locus)
    3233              : {
    3234         2666 :   rtx body;
    3235              : 
    3236         2666 :   body = gen_rtx_ASM_INPUT_loc (VOIDmode,
    3237              :                                 ggc_strdup (TREE_STRING_POINTER (string)),
    3238              :                                 locus);
    3239              : 
    3240         2666 :   MEM_VOLATILE_P (body) = vol;
    3241              : 
    3242              :   /* Non-empty basic ASM implicitly clobbers memory.  */
    3243         2666 :   if (TREE_STRING_LENGTH (string) != 0)
    3244              :     {
    3245          782 :       rtx asm_op, clob;
    3246          782 :       unsigned i, nclobbers;
    3247          782 :       auto_vec<rtx> input_rvec, output_rvec;
    3248          782 :       auto_vec<machine_mode> input_mode;
    3249          782 :       auto_vec<const char *> constraints;
    3250          782 :       auto_vec<rtx> use_rvec;
    3251          782 :       auto_vec<rtx> clobber_rvec;
    3252          782 :       HARD_REG_SET clobbered_regs;
    3253          782 :       CLEAR_HARD_REG_SET (clobbered_regs);
    3254              : 
    3255          782 :       clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
    3256          782 :       clobber_rvec.safe_push (clob);
    3257              : 
    3258          782 :       if (targetm.md_asm_adjust)
    3259          782 :         targetm.md_asm_adjust (output_rvec, input_rvec, input_mode,
    3260              :                                constraints, use_rvec, clobber_rvec,
    3261              :                                clobbered_regs, locus);
    3262              : 
    3263          782 :       asm_op = body;
    3264          782 :       nclobbers = clobber_rvec.length ();
    3265          782 :       auto nuses = use_rvec.length ();
    3266          782 :       body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nuses + nclobbers));
    3267              : 
    3268          782 :       i = 0;
    3269          782 :       XVECEXP (body, 0, i++) = asm_op;
    3270          782 :       for (rtx use : use_rvec)
    3271            0 :         XVECEXP (body, 0, i++) = gen_rtx_USE (VOIDmode, use);
    3272         3910 :       for (rtx clobber : clobber_rvec)
    3273         1564 :         XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobber);
    3274          782 :     }
    3275              : 
    3276         2666 :   emit_insn (body);
    3277         2666 : }
    3278              : 
    3279              : /* Check for overlap between registers marked in CLOBBERED_REGS and
    3280              :    anything inappropriate in T.  Emit error and return the register
    3281              :    variable definition for error, NULL_TREE for ok.  */
    3282              : 
    3283              : static bool
    3284       131869 : tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs,
    3285              :                                 location_t loc)
    3286              : {
    3287              :   /* Conflicts between asm-declared register variables and the clobber
    3288              :      list are not allowed.  */
    3289       131869 :   tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
    3290              : 
    3291       131869 :   if (overlap)
    3292              :     {
    3293           21 :       error_at (loc, "%<asm%> specifier for variable %qE conflicts with "
    3294           21 :                 "%<asm%> clobber list", DECL_NAME (overlap));
    3295              : 
    3296              :       /* Reset registerness to stop multiple errors emitted for a single
    3297              :          variable.  */
    3298           21 :       DECL_REGISTER (overlap) = 0;
    3299           21 :       return true;
    3300              :     }
    3301              : 
    3302              :   return false;
    3303              : }
    3304              : 
    3305              : /* Check that the given REGNO spanning NREGS is a valid
    3306              :    asm clobber operand.  Some HW registers cannot be
    3307              :    saved/restored, hence they should not be clobbered by
    3308              :    asm statements.  */
    3309              : static bool
    3310        41543 : asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
    3311              : {
    3312        41543 :   bool is_valid = true;
    3313        41543 :   HARD_REG_SET regset;
    3314              : 
    3315        41543 :   CLEAR_HARD_REG_SET (regset);
    3316              : 
    3317        41543 :   add_range_to_hard_reg_set (&regset, regno, nregs);
    3318              : 
    3319              :   /* Clobbering the PIC register is an error.  */
    3320        41543 :   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
    3321            0 :       && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
    3322              :     {
    3323              :       /* ??? Diagnose during gimplification?  */
    3324            0 :       error ("PIC register clobbered by %qs in %<asm%>", regname);
    3325            0 :       is_valid = false;
    3326              :     }
    3327        41543 :   else if (!in_hard_reg_set_p
    3328        41543 :            (accessible_reg_set, reg_raw_mode[regno], regno))
    3329              :     {
    3330              :       /* ??? Diagnose during gimplification?  */
    3331            0 :       error ("the register %qs cannot be clobbered in %<asm%>"
    3332              :              " for the current target", regname);
    3333            0 :       is_valid = false;
    3334              :     }
    3335              : 
    3336              :   /* Clobbering the stack pointer register is deprecated.  GCC expects
    3337              :      the value of the stack pointer after an asm statement to be the same
    3338              :      as it was before, so no asm can validly clobber the stack pointer in
    3339              :      the usual sense.  Adding the stack pointer to the clobber list has
    3340              :      traditionally had some undocumented and somewhat obscure side-effects.  */
    3341        41563 :   if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM))
    3342              :     {
    3343            1 :       crtl->sp_is_clobbered_by_asm = true;
    3344            1 :       if (warning (OPT_Wdeprecated, "listing the stack pointer register"
    3345              :                    " %qs in a clobber list is deprecated", regname))
    3346            1 :         inform (input_location, "the value of the stack pointer after"
    3347              :                 " an %<asm%> statement must be the same as it was before"
    3348              :                 " the statement");
    3349              :     }
    3350              : 
    3351        41543 :   return is_valid;
    3352              : }
    3353              : 
    3354              : /* Generate RTL for an asm statement with arguments.
    3355              :    STRING is the instruction template.
    3356              :    OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
    3357              :    Each output or input has an expression in the TREE_VALUE and
    3358              :    a tree list in TREE_PURPOSE which in turn contains a constraint
    3359              :    name in TREE_VALUE (or NULL_TREE) and a constraint string
    3360              :    in TREE_PURPOSE.
    3361              :    CLOBBERS is a list of STRING_CST nodes each naming a hard register
    3362              :    that is clobbered by this insn.
    3363              : 
    3364              :    LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
    3365              :    should be the fallthru basic block of the asm goto.
    3366              : 
    3367              :    Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
    3368              :    Some elements of OUTPUTS may be replaced with trees representing temporary
    3369              :    values.  The caller should copy those temporary values to the originally
    3370              :    specified lvalues.
    3371              : 
    3372              :    VOL nonzero means the insn is volatile; don't optimize it.  */
    3373              : 
    3374              : static void
    3375       109936 : expand_asm_stmt (gasm *stmt)
    3376              : {
    3377       109936 :   class save_input_location
    3378              :   {
    3379              :     location_t old;
    3380              : 
    3381              :   public:
    3382       107270 :     explicit save_input_location(location_t where)
    3383       107270 :     {
    3384       107270 :       old = input_location;
    3385       107270 :       input_location = where;
    3386              :     }
    3387              : 
    3388       107270 :     ~save_input_location()
    3389              :     {
    3390       107270 :       input_location = old;
    3391            5 :     }
    3392              :   };
    3393              : 
    3394       109936 :   location_t locus = gimple_location (stmt);
    3395              : 
    3396       109936 :   if (gimple_asm_basic_p (stmt))
    3397              :     {
    3398         2666 :       const char *s = gimple_asm_string (stmt);
    3399         2666 :       tree string = build_string (strlen (s), s);
    3400         2666 :       expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
    3401         5337 :       return;
    3402              :     }
    3403              : 
    3404              :   /* There are some legacy diagnostics in here.  */
    3405       107270 :   save_input_location s_i_l(locus);
    3406              : 
    3407       107270 :   unsigned noutputs = gimple_asm_noutputs (stmt);
    3408       107270 :   unsigned ninputs = gimple_asm_ninputs (stmt);
    3409       107270 :   unsigned nlabels = gimple_asm_nlabels (stmt);
    3410       107270 :   unsigned i;
    3411       107270 :   bool error_seen = false;
    3412              : 
    3413              :   /* ??? Diagnose during gimplification?  */
    3414       107270 :   if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
    3415              :     {
    3416            5 :       error_at (locus, "more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
    3417            5 :       return;
    3418              :     }
    3419              : 
    3420       107265 :   auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
    3421       107265 :   auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
    3422       107265 :   auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
    3423              : 
    3424              :   /* Copy the gimple vectors into new vectors that we can manipulate.  */
    3425              : 
    3426       107265 :   output_tvec.safe_grow (noutputs, true);
    3427       107265 :   input_tvec.safe_grow (ninputs, true);
    3428       107265 :   constraints.safe_grow (noutputs + ninputs, true);
    3429              : 
    3430       183468 :   for (i = 0; i < noutputs; ++i)
    3431              :     {
    3432        76203 :       tree t = gimple_asm_output_op (stmt, i);
    3433        76203 :       output_tvec[i] = TREE_VALUE (t);
    3434        76203 :       constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    3435              :     }
    3436       162935 :   for (i = 0; i < ninputs; i++)
    3437              :     {
    3438        55670 :       tree t = gimple_asm_input_op (stmt, i);
    3439        55670 :       input_tvec[i] = TREE_VALUE (t);
    3440        55670 :       constraints[i + noutputs]
    3441        55670 :         = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    3442              :     }
    3443              : 
    3444              :   /* Count the number of meaningful clobbered registers, ignoring what
    3445              :      we would ignore later.  */
    3446       107265 :   auto_vec<rtx> clobber_rvec;
    3447       107265 :   HARD_REG_SET clobbered_regs;
    3448       107265 :   CLEAR_HARD_REG_SET (clobbered_regs);
    3449              : 
    3450       107265 :   if (unsigned n = gimple_asm_nclobbers (stmt))
    3451              :     {
    3452        69510 :       clobber_rvec.reserve (n);
    3453       166577 :       for (i = 0; i < n; i++)
    3454              :         {
    3455        97067 :           tree t = gimple_asm_clobber_op (stmt, i);
    3456        97067 :           const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
    3457        97067 :           int nregs, j;
    3458              : 
    3459        97067 :           j = decode_reg_name_and_count (regname, &nregs);
    3460        97067 :           if (j < 0)
    3461              :             {
    3462        55524 :               if (j == -2)
    3463              :                 {
    3464              :                   /* ??? Diagnose during gimplification?  */
    3465           15 :                   error_at (locus, "unknown register name %qs in %<asm%>",
    3466              :                             regname);
    3467           15 :                   error_seen = true;
    3468              :                 }
    3469        55509 :               else if (j == -4)
    3470              :                 {
    3471        39772 :                   rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
    3472        39772 :                   clobber_rvec.safe_push (x);
    3473              :                 }
    3474        15737 :               else if (j == -5)
    3475              :                 {
    3476            2 :                   if (targetm.redzone_clobber)
    3477            2 :                     if (rtx x = targetm.redzone_clobber ())
    3478            2 :                       clobber_rvec.safe_push (x);
    3479              :                 }
    3480              :               else
    3481              :                 {
    3482              :                   /* Otherwise we should have -1 == empty string
    3483              :                      or -3 == cc, which is not a register.  */
    3484        15735 :                   gcc_assert (j == -1 || j == -3);
    3485              :                 }
    3486              :             }
    3487              :           else
    3488        83086 :             for (int reg = j; reg < j + nregs; reg++)
    3489              :               {
    3490        41543 :                 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
    3491            0 :                   return;
    3492              : 
    3493        41543 :                 SET_HARD_REG_BIT (clobbered_regs, reg);
    3494        41543 :                 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
    3495        41543 :                 clobber_rvec.safe_push (x);
    3496              :               }
    3497              :         }
    3498              :     }
    3499              : 
    3500              :   /* First pass over inputs and outputs checks validity and sets
    3501              :      mark_addressable if needed.  */
    3502              :   /* ??? Diagnose during gimplification?  */
    3503              : 
    3504       183468 :   for (i = 0; i < noutputs; ++i)
    3505              :     {
    3506        76203 :       tree val = output_tvec[i];
    3507        76203 :       tree type = TREE_TYPE (val);
    3508        76203 :       const char *constraint;
    3509        76203 :       bool is_inout;
    3510        76203 :       bool allows_reg;
    3511        76203 :       bool allows_mem;
    3512              : 
    3513              :       /* Try to parse the output constraint.  If that fails, there's
    3514              :          no point in going further.  */
    3515        76203 :       constraint = constraints[i];
    3516        76203 :       if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
    3517              :                                     &allows_mem, &allows_reg, &is_inout,
    3518              :                                     nullptr))
    3519            0 :         return;
    3520              : 
    3521              :       /* If the output is a hard register, verify it doesn't conflict with
    3522              :          any other operand's possible hard register use.  */
    3523        76203 :       if (DECL_P (val)
    3524         7891 :           && REG_P (DECL_RTL (val))
    3525        77160 :           && HARD_REGISTER_P (DECL_RTL (val)))
    3526              :         {
    3527          938 :           unsigned j, output_hregno = REGNO (DECL_RTL (val));
    3528          938 :           bool early_clobber_p = strchr (constraints[i], '&') != NULL;
    3529          938 :           unsigned long match;
    3530              : 
    3531              :           /* Verify the other outputs do not use the same hard register.  */
    3532         1098 :           for (j = i + 1; j < noutputs; ++j)
    3533          160 :             if (DECL_P (output_tvec[j])
    3534          159 :                 && REG_P (DECL_RTL (output_tvec[j]))
    3535          159 :                 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
    3536          319 :                 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
    3537              :               {
    3538            1 :                 error_at (locus, "invalid hard register usage between output "
    3539              :                           "operands");
    3540            1 :                 error_seen = true;
    3541              :               }
    3542              : 
    3543              :           /* Verify matching constraint operands use the same hard register
    3544              :              and that the non-matching constraint operands do not use the same
    3545              :              hard register if the output is an early clobber operand.  */
    3546         1849 :           for (j = 0; j < ninputs; ++j)
    3547          911 :             if (DECL_P (input_tvec[j])
    3548          772 :                 && REG_P (DECL_RTL (input_tvec[j]))
    3549         1683 :                 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
    3550              :               {
    3551          772 :                 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
    3552          772 :                 switch (*constraints[j + noutputs])
    3553              :                   {
    3554          761 :                   case '0':  case '1':  case '2':  case '3':  case '4':
    3555          761 :                   case '5':  case '6':  case '7':  case '8':  case '9':
    3556          761 :                     match = strtoul (constraints[j + noutputs], NULL, 10);
    3557          761 :                     break;
    3558              :                   default:
    3559              :                     match = ULONG_MAX;
    3560              :                     break;
    3561              :                   }
    3562          761 :                 if (i == match
    3563          661 :                     && output_hregno != input_hregno)
    3564              :                   {
    3565            1 :                     error_at (locus, "invalid hard register usage between "
    3566              :                               "output operand and matching constraint operand");
    3567            1 :                     error_seen = true;
    3568              :                   }
    3569          771 :                 else if (early_clobber_p
    3570            4 :                          && i != match
    3571            2 :                          && output_hregno == input_hregno)
    3572              :                   {
    3573            0 :                     error_at (locus, "invalid hard register usage between "
    3574              :                               "earlyclobber operand and input operand");
    3575            0 :                     error_seen = true;
    3576              :                   }
    3577              :               }
    3578              :         }
    3579              : 
    3580        76203 :       if (! allows_reg
    3581        76203 :           && (allows_mem
    3582            0 :               || is_inout
    3583            0 :               || (DECL_P (val)
    3584            0 :                   && REG_P (DECL_RTL (val))
    3585            0 :                   && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
    3586         7476 :         mark_addressable (val);
    3587              :     }
    3588              : 
    3589       162935 :   for (i = 0; i < ninputs; ++i)
    3590              :     {
    3591        55670 :       bool allows_reg, allows_mem;
    3592        55670 :       const char *constraint;
    3593              : 
    3594        55670 :       constraint = constraints[i + noutputs];
    3595        55670 :       if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
    3596        55670 :                                     constraints.address (), &allows_mem,
    3597              :                                     &allows_reg, nullptr))
    3598            0 :         return;
    3599              : 
    3600        55670 :       if (! allows_reg && allows_mem)
    3601         8109 :         mark_addressable (input_tvec[i]);
    3602              :     }
    3603              : 
    3604              :   /* Second pass evaluates arguments.  */
    3605              : 
    3606              :   /* Make sure stack is consistent for asm goto.  */
    3607       107265 :   if (nlabels > 0)
    3608          542 :     do_pending_stack_adjust ();
    3609       107265 :   int old_generating_concat_p = generating_concat_p;
    3610              : 
    3611              :   /* Vector of RTX's of evaluated output operands.  */
    3612       214530 :   auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
    3613       107265 :   auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
    3614       107265 :   rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
    3615              : 
    3616       107265 :   output_rvec.safe_grow (noutputs, true);
    3617              : 
    3618       183468 :   for (i = 0; i < noutputs; ++i)
    3619              :     {
    3620        76203 :       tree val = output_tvec[i];
    3621        76203 :       tree type = TREE_TYPE (val);
    3622        76203 :       bool is_inout, allows_reg, allows_mem, ok;
    3623        76203 :       rtx op;
    3624              : 
    3625        76203 :       ok = parse_output_constraint (&constraints[i], i, ninputs,
    3626              :                                     noutputs, &allows_mem, &allows_reg,
    3627              :                                     &is_inout, nullptr);
    3628        76203 :       gcc_assert (ok);
    3629              : 
    3630              :       /* If an output operand is not a decl or indirect ref and our constraint
    3631              :          allows a register, make a temporary to act as an intermediate.
    3632              :          Make the asm insn write into that, then we will copy it to
    3633              :          the real output operand.  Likewise for promoted variables.  */
    3634              : 
    3635        76203 :       generating_concat_p = 0;
    3636              : 
    3637        76203 :       gcc_assert (TREE_CODE (val) != INDIRECT_REF);
    3638        76203 :       if (((TREE_CODE (val) == MEM_REF
    3639         3067 :             && TREE_CODE (TREE_OPERAND (val, 0)) != ADDR_EXPR)
    3640         2717 :            && allows_mem)
    3641        75194 :           || (DECL_P (val)
    3642         7891 :               && (allows_mem || REG_P (DECL_RTL (val)))
    3643         8136 :               && ! (REG_P (DECL_RTL (val))
    3644          957 :                     && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
    3645        68015 :           || ! allows_reg
    3646        67562 :           || is_inout
    3647        67562 :           || TREE_ADDRESSABLE (type)
    3648       143765 :           || (!tree_fits_poly_int64_p (TYPE_SIZE (type))
    3649            0 :               && !known_size_p (max_int_size_in_bytes (type))))
    3650              :         {
    3651         8641 :           op = expand_expr (val, NULL_RTX, VOIDmode,
    3652              :                             !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
    3653         8641 :           if (MEM_P (op))
    3654         7684 :             op = validize_mem (op);
    3655              : 
    3656         8641 :           if (! allows_reg && !MEM_P (op))
    3657              :             {
    3658            0 :               error_at (locus, "output number %d not directly addressable", i);
    3659            0 :               error_seen = true;
    3660              :             }
    3661         8641 :           if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
    3662         8641 :               || GET_CODE (op) == CONCAT)
    3663              :             {
    3664            0 :               rtx old_op = op;
    3665            0 :               op = gen_reg_rtx (GET_MODE (op));
    3666              : 
    3667            0 :               generating_concat_p = old_generating_concat_p;
    3668              : 
    3669            0 :               if (is_inout)
    3670            0 :                 emit_move_insn (op, old_op);
    3671              : 
    3672            0 :               push_to_sequence2 (after_rtl_seq, after_rtl_end);
    3673            0 :               emit_move_insn (old_op, op);
    3674            0 :               after_rtl_seq = get_insns ();
    3675            0 :               after_rtl_end = get_last_insn ();
    3676            0 :               end_sequence ();
    3677              :             }
    3678              :         }
    3679              :       else
    3680              :         {
    3681        67562 :           op = assign_temp (type, 0, 1);
    3682        67562 :           op = validize_mem (op);
    3683        67562 :           if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
    3684        64391 :             set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
    3685              : 
    3686        67562 :           generating_concat_p = old_generating_concat_p;
    3687              : 
    3688        67562 :           push_to_sequence2 (after_rtl_seq, after_rtl_end);
    3689        67562 :           expand_assignment (val, make_tree (type, op), false);
    3690        67562 :           after_rtl_seq = get_insns ();
    3691        67562 :           after_rtl_end = get_last_insn ();
    3692        67562 :           end_sequence ();
    3693              :         }
    3694        76203 :       output_rvec[i] = op;
    3695              : 
    3696        76203 :       if (is_inout)
    3697            0 :         inout_opnum.safe_push (i);
    3698              :     }
    3699              : 
    3700       107265 :   const char *str = gimple_asm_string (stmt);
    3701       107265 :   if (error_seen)
    3702              :     {
    3703           17 :       ninputs = 0;
    3704           17 :       noutputs = 0;
    3705           17 :       inout_opnum.truncate (0);
    3706           17 :       output_rvec.truncate (0);
    3707           17 :       clobber_rvec.truncate (0);
    3708           17 :       constraints.truncate (0);
    3709           17 :       CLEAR_HARD_REG_SET (clobbered_regs);
    3710              :       str = "";
    3711              :     }
    3712              : 
    3713       214530 :   auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
    3714       107265 :   auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
    3715              : 
    3716       107265 :   input_rvec.safe_grow (ninputs, true);
    3717       107265 :   input_mode.safe_grow (ninputs, true);
    3718              : 
    3719       107265 :   generating_concat_p = 0;
    3720              : 
    3721       162934 :   for (i = 0; i < ninputs; ++i)
    3722              :     {
    3723        55669 :       tree val = input_tvec[i];
    3724        55669 :       tree type = TREE_TYPE (val);
    3725        55669 :       bool allows_reg, allows_mem, ok;
    3726        55669 :       const char *constraint;
    3727        55669 :       rtx op;
    3728              : 
    3729        55669 :       constraint = constraints[i + noutputs];
    3730        55669 :       ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
    3731        55669 :                                    constraints.address (),
    3732              :                                    &allows_mem, &allows_reg, nullptr);
    3733        55669 :       gcc_assert (ok);
    3734              : 
    3735              :       /* EXPAND_INITIALIZER will not generate code for valid initializer
    3736              :          constants, but will still generate code for other types of operand.
    3737              :          This is the behavior we want for constant constraints.  */
    3738        55669 :       op = expand_expr (val, NULL_RTX, VOIDmode,
    3739              :                         allows_reg ? EXPAND_NORMAL
    3740         9843 :                         : allows_mem ? EXPAND_MEMORY
    3741              :                         : EXPAND_INITIALIZER);
    3742              : 
    3743              :       /* Never pass a CONCAT to an ASM.  */
    3744        55669 :       if (GET_CODE (op) == CONCAT)
    3745            2 :         op = force_reg (GET_MODE (op), op);
    3746        55667 :       else if (MEM_P (op))
    3747        13070 :         op = validize_mem (op);
    3748              : 
    3749        55669 :       if (asm_operand_ok (op, constraint, NULL) <= 0)
    3750              :         {
    3751        25238 :           if (allows_reg && TYPE_MODE (type) != BLKmode)
    3752        25218 :             op = force_reg (TYPE_MODE (type), op);
    3753           20 :           else if (!allows_mem)
    3754           20 :             warning_at (locus, 0, "%<asm%> operand %d probably does not match "
    3755              :                         "constraints", i + noutputs);
    3756            0 :           else if (MEM_P (op))
    3757              :             {
    3758              :               /* We won't recognize either volatile memory or memory
    3759              :                  with a queued address as available a memory_operand
    3760              :                  at this point.  Ignore it: clearly this *is* a memory.  */
    3761              :             }
    3762              :           else
    3763            0 :             gcc_unreachable ();
    3764              :         }
    3765        55669 :       input_rvec[i] = op;
    3766        55669 :       input_mode[i] = TYPE_MODE (type);
    3767              :     }
    3768              : 
    3769              :   /* For in-out operands, copy output rtx to input rtx.  */
    3770       107265 :   unsigned ninout = inout_opnum.length ();
    3771       107265 :   for (i = 0; i < ninout; i++)
    3772              :     {
    3773            0 :       int j = inout_opnum[i];
    3774            0 :       rtx o = output_rvec[j];
    3775              : 
    3776            0 :       input_rvec.safe_push (o);
    3777            0 :       input_mode.safe_push (GET_MODE (o));
    3778              : 
    3779            0 :       char buffer[16];
    3780            0 :       sprintf (buffer, "%d", j);
    3781            0 :       constraints.safe_push (ggc_strdup (buffer));
    3782              :     }
    3783       107265 :   ninputs += ninout;
    3784              : 
    3785              :   /* Sometimes we wish to automatically clobber registers across an asm.
    3786              :      Case in point is when the i386 backend moved from cc0 to a hard reg --
    3787              :      maintaining source-level compatibility means automatically clobbering
    3788              :      the flags register.  */
    3789       214530 :   auto_vec<rtx> use_rvec;
    3790       107265 :   if (targetm.md_asm_adjust)
    3791              :     {
    3792       107265 :       rtx_insn *after_md_seq
    3793       107265 :         = targetm.md_asm_adjust (output_rvec, input_rvec, input_mode,
    3794              :                                  constraints, use_rvec, clobber_rvec,
    3795              :                                  clobbered_regs, locus);
    3796       107265 :       if (after_md_seq)
    3797              :         {
    3798           72 :           push_to_sequence (after_md_seq);
    3799           72 :           emit_insn (after_rtl_seq);
    3800           72 :           after_rtl_seq = get_insns ();
    3801           72 :           after_rtl_end = get_last_insn ();
    3802           72 :           end_sequence ();
    3803              :         }
    3804              :     }
    3805              : 
    3806              :   /* Do not allow the hook to change the output and input count,
    3807              :      lest it mess up the operand numbering.  */
    3808       214530 :   gcc_assert (output_rvec.length() == noutputs);
    3809       214530 :   gcc_assert (input_rvec.length() == ninputs);
    3810       214530 :   gcc_assert (constraints.length() == noutputs + ninputs);
    3811              : 
    3812              :   /* But it certainly can adjust the uses and clobbers.  */
    3813       107265 :   unsigned nuses = use_rvec.length ();
    3814       107265 :   unsigned nclobbers = clobber_rvec.length ();
    3815              : 
    3816              :   /* Third pass checks for easy conflicts.  */
    3817              :   /* ??? Why are we doing this on trees instead of rtx.  */
    3818              : 
    3819       107265 :   bool clobber_conflict_found = 0;
    3820       183465 :   for (i = 0; i < noutputs; ++i)
    3821        76200 :     if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs, locus))
    3822           10 :         clobber_conflict_found = 1;
    3823       162934 :   for (i = 0; i < ninputs - ninout; ++i)
    3824        55669 :     if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs, locus))
    3825           11 :         clobber_conflict_found = 1;
    3826              : 
    3827              :   /* Make vectors for the expression-rtx, constraint strings,
    3828              :      and named operands.  */
    3829              : 
    3830       107265 :   rtvec argvec = rtvec_alloc (ninputs);
    3831       107265 :   rtvec constraintvec = rtvec_alloc (ninputs);
    3832       107265 :   rtvec labelvec = rtvec_alloc (nlabels);
    3833              : 
    3834       142060 :   rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
    3835              :                                     : GET_MODE (output_rvec[0])),
    3836              :                                    ggc_strdup (str),
    3837              :                                    "", 0, argvec, constraintvec,
    3838              :                                    labelvec, locus);
    3839       107265 :   MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
    3840              : 
    3841       162934 :   for (i = 0; i < ninputs; ++i)
    3842              :     {
    3843        55669 :       ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
    3844       111338 :       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
    3845        55669 :         = gen_rtx_ASM_INPUT_loc (input_mode[i],
    3846              :                                  constraints[i + noutputs],
    3847              :                                  locus);
    3848              :     }
    3849              : 
    3850              :   /* Copy labels to the vector.  */
    3851       107265 :   rtx_code_label *fallthru_label = NULL;
    3852       107265 :   if (nlabels > 0)
    3853              :     {
    3854          542 :       basic_block fallthru_bb = NULL;
    3855          542 :       edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
    3856          542 :       if (fallthru)
    3857          542 :         fallthru_bb = fallthru->dest;
    3858              : 
    3859         1343 :       for (i = 0; i < nlabels; ++i)
    3860              :         {
    3861          801 :           tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
    3862          801 :           rtx_insn *r;
    3863              :           /* If asm goto has any labels in the fallthru basic block, use
    3864              :              a label that we emit immediately after the asm goto.  Expansion
    3865              :              may insert further instructions into the same basic block after
    3866              :              asm goto and if we don't do this, insertion of instructions on
    3867              :              the fallthru edge might misbehave.  See PR58670.  */
    3868          801 :           if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
    3869              :             {
    3870          277 :               if (fallthru_label == NULL_RTX)
    3871          272 :                 fallthru_label = gen_label_rtx ();
    3872              :               r = fallthru_label;
    3873              :             }
    3874              :           else
    3875          524 :             r = label_rtx (label);
    3876          801 :           ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
    3877              :         }
    3878              :     }
    3879              : 
    3880              :   /* Now, for each output, construct an rtx
    3881              :      (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
    3882              :                                ARGVEC CONSTRAINTS OPNAMES))
    3883              :      If there is more than one, put them inside a PARALLEL.  */
    3884              : 
    3885       107265 :   if (noutputs == 0 && nuses == 0 && nclobbers == 0)
    3886              :     {
    3887              :       /* No output operands: put in a raw ASM_OPERANDS rtx.  */
    3888            0 :       if (nlabels > 0)
    3889            0 :         emit_jump_insn (body);
    3890              :       else
    3891            0 :         emit_insn (body);
    3892              :     }
    3893       107265 :   else if (noutputs == 1 && nuses == 0 && nclobbers == 0)
    3894              :     {
    3895           62 :       ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
    3896           62 :       if (nlabels > 0)
    3897            0 :         emit_jump_insn (gen_rtx_SET (output_rvec[0], body));
    3898              :       else
    3899           62 :         emit_insn (gen_rtx_SET (output_rvec[0], body));
    3900              :     }
    3901              :   else
    3902              :     {
    3903       107203 :       rtx obody = body;
    3904       107203 :       int num = noutputs;
    3905              : 
    3906       107203 :       if (num == 0)
    3907        72470 :         num = 1;
    3908              : 
    3909       107203 :       body = gen_rtx_PARALLEL (VOIDmode,
    3910              :                                rtvec_alloc (num + nuses + nclobbers));
    3911              : 
    3912              :       /* For each output operand, store a SET.  */
    3913       183341 :       for (i = 0; i < noutputs; ++i)
    3914              :         {
    3915        76138 :           rtx src, o = output_rvec[i];
    3916        76138 :           if (i == 0)
    3917              :             {
    3918        34733 :               ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
    3919        34733 :               src = obody;
    3920              :             }
    3921              :           else
    3922              :             {
    3923        41405 :               src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
    3924              :                                           ASM_OPERANDS_TEMPLATE (obody),
    3925              :                                           constraints[i], i, argvec,
    3926              :                                           constraintvec, labelvec, locus);
    3927        41405 :               MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
    3928              :             }
    3929        76138 :           XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
    3930              :         }
    3931              : 
    3932              :       /* If there are no outputs (but there are some clobbers)
    3933              :          store the bare ASM_OPERANDS into the PARALLEL.  */
    3934       107203 :       if (i == 0)
    3935        72470 :         XVECEXP (body, 0, i++) = obody;
    3936              : 
    3937              :       /* Add the uses specified by the target hook.  No checking should
    3938              :          be needed since this doesn't come directly from user code.  */
    3939       107203 :       for (rtx use : use_rvec)
    3940            0 :         XVECEXP (body, 0, i++) = gen_rtx_USE (VOIDmode, use);
    3941              : 
    3942              :       /* Store (clobber REG) for each clobbered register specified.  */
    3943       295710 :       for (unsigned j = 0; j < nclobbers; ++j)
    3944              :         {
    3945       188507 :           rtx clobbered_reg = clobber_rvec[j];
    3946              : 
    3947              :           /* Do sanity check for overlap between clobbers and respectively
    3948              :              input and outputs that hasn't been handled.  Such overlap
    3949              :              should have been detected and reported above.  */
    3950       188507 :           if (!clobber_conflict_found && REG_P (clobbered_reg))
    3951              :             {
    3952              :               /* We test the old body (obody) contents to avoid
    3953              :                  tripping over the under-construction body.  */
    3954       225788 :               for (unsigned k = 0; k < noutputs; ++k)
    3955        77085 :                 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
    3956            0 :                   internal_error ("%<asm%> clobber conflict with "
    3957              :                                   "output operand");
    3958              : 
    3959       207113 :               for (unsigned k = 0; k < ninputs - ninout; ++k)
    3960        58410 :                 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
    3961            0 :                   internal_error ("%<asm%> clobber conflict with "
    3962              :                                   "input operand");
    3963              :             }
    3964              : 
    3965       188507 :           XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
    3966              :         }
    3967              : 
    3968       107203 :       if (nlabels > 0)
    3969          542 :         emit_jump_insn (body);
    3970              :       else
    3971       106661 :         emit_insn (body);
    3972              :     }
    3973              : 
    3974       107265 :   generating_concat_p = old_generating_concat_p;
    3975              : 
    3976       107265 :   if (fallthru_label)
    3977          272 :     emit_label (fallthru_label);
    3978              : 
    3979       107265 :   if (after_rtl_seq)
    3980              :     {
    3981        27714 :       if (nlabels == 0)
    3982        27588 :         emit_insn (after_rtl_seq);
    3983              :       else
    3984              :         {
    3985          126 :           edge e;
    3986          126 :           edge_iterator ei;
    3987          126 :           unsigned int cnt = EDGE_COUNT (gimple_bb (stmt)->succs);
    3988              : 
    3989          461 :           FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
    3990              :             {
    3991          335 :               rtx_insn *copy;
    3992          335 :               if (--cnt == 0)
    3993              :                 copy = after_rtl_seq;
    3994              :               else
    3995              :                 {
    3996          209 :                   start_sequence ();
    3997          209 :                   duplicate_insn_chain (after_rtl_seq, after_rtl_end,
    3998              :                                         NULL, NULL);
    3999          209 :                   copy = end_sequence ();
    4000              :                 }
    4001          335 :               prepend_insn_to_edge (copy, e);
    4002              :             }
    4003              :         }
    4004              :     }
    4005              : 
    4006       107265 :   free_temp_slots ();
    4007       107265 :   crtl->has_asm_statement = 1;
    4008       107270 : }
    4009              : 
    4010              : /* Emit code to jump to the address
    4011              :    specified by the pointer expression EXP.  */
    4012              : 
    4013              : static void
    4014          405 : expand_computed_goto (tree exp)
    4015              : {
    4016          405 :   rtx x = expand_normal (exp);
    4017              : 
    4018          405 :   do_pending_stack_adjust ();
    4019          405 :   emit_indirect_jump (x);
    4020          405 : }
    4021              : 
    4022              : /* Generate RTL code for a `goto' statement with target label LABEL.
    4023              :    LABEL should be a LABEL_DECL tree node that was or will later be
    4024              :    defined with `expand_label'.  */
    4025              : 
    4026              : static void
    4027            0 : expand_goto (tree label)
    4028              : {
    4029            0 :   if (flag_checking)
    4030              :     {
    4031              :       /* Check for a nonlocal goto to a containing function.  Should have
    4032              :          gotten translated to __builtin_nonlocal_goto.  */
    4033            0 :       tree context = decl_function_context (label);
    4034            0 :       gcc_assert (!context || context == current_function_decl);
    4035              :     }
    4036              : 
    4037            0 :   emit_jump (jump_target_rtx (label));
    4038            0 : }
    4039              : 
    4040              : /* Output a return with no value.  */
    4041              : 
    4042              : static void
    4043       820601 : expand_null_return_1 (void)
    4044              : {
    4045       820601 :   clear_pending_stack_adjust ();
    4046       820601 :   do_pending_stack_adjust ();
    4047       820601 :   emit_jump (return_label);
    4048       820601 : }
    4049              : 
    4050              : /* Generate RTL to return from the current function, with no value.
    4051              :    (That is, we do not do anything about returning any value.)  */
    4052              : 
    4053              : void
    4054        68626 : expand_null_return (void)
    4055              : {
    4056              :   /* If this function was declared to return a value, but we
    4057              :      didn't, clobber the return registers so that they are not
    4058              :      propagated live to the rest of the function.  */
    4059        68626 :   clobber_return_register ();
    4060              : 
    4061        68626 :   expand_null_return_1 ();
    4062        68626 : }
    4063              : 
    4064              : /* Generate RTL to return from the current function, with value VAL.  */
    4065              : 
    4066              : static void
    4067       751975 : expand_value_return (rtx val)
    4068              : {
    4069              :   /* Copy the value to the return location unless it's already there.  */
    4070              : 
    4071       751975 :   tree decl = DECL_RESULT (current_function_decl);
    4072       751975 :   rtx return_reg = DECL_RTL (decl);
    4073       751975 :   if (return_reg != val)
    4074              :     {
    4075       432603 :       tree funtype = TREE_TYPE (current_function_decl);
    4076       432603 :       tree type = TREE_TYPE (decl);
    4077       432603 :       int unsignedp = TYPE_UNSIGNED (type);
    4078       432603 :       machine_mode old_mode = DECL_MODE (decl);
    4079       432603 :       machine_mode mode;
    4080       432603 :       if (DECL_BY_REFERENCE (decl))
    4081            0 :         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
    4082              :       else
    4083       432603 :         mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
    4084              : 
    4085       432603 :       if (mode != old_mode)
    4086              :         {
    4087              :           /* Some ABIs require scalar floating point modes to be returned
    4088              :              in a wider scalar integer mode.  We need to explicitly
    4089              :              reinterpret to an integer mode of the correct precision
    4090              :              before extending to the desired result.  */
    4091            8 :           if (SCALAR_INT_MODE_P (mode)
    4092            8 :               && SCALAR_FLOAT_MODE_P (old_mode)
    4093            8 :               && known_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (old_mode)))
    4094            0 :             val = convert_float_to_wider_int (mode, old_mode, val);
    4095              :           else
    4096            8 :             val = convert_modes (mode, old_mode, val, unsignedp);
    4097              :         }
    4098              : 
    4099       432603 :       if (GET_CODE (return_reg) == PARALLEL)
    4100         3001 :         emit_group_load (return_reg, val, type, int_size_in_bytes (type));
    4101              :       else
    4102       429602 :         emit_move_insn (return_reg, val);
    4103              :     }
    4104              : 
    4105       751975 :   expand_null_return_1 ();
    4106       751975 : }
    4107              : 
    4108              : /* Generate RTL to evaluate the expression RETVAL and return it
    4109              :    from the current function.  */
    4110              : 
    4111              : static void
    4112       754207 : expand_return (tree retval)
    4113              : {
    4114       754207 :   rtx result_rtl;
    4115       754207 :   rtx val = 0;
    4116       754207 :   tree retval_rhs;
    4117              : 
    4118              :   /* If function wants no value, give it none.  */
    4119       754207 :   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    4120              :     {
    4121            0 :       expand_normal (retval);
    4122            0 :       expand_null_return ();
    4123            0 :       return;
    4124              :     }
    4125              : 
    4126       754207 :   if (retval == error_mark_node)
    4127              :     {
    4128              :       /* Treat this like a return of no value from a function that
    4129              :          returns a value.  */
    4130            0 :       expand_null_return ();
    4131            0 :       return;
    4132              :     }
    4133       754207 :   else if ((TREE_CODE (retval) == MODIFY_EXPR
    4134       754207 :             || TREE_CODE (retval) == INIT_EXPR)
    4135       754207 :            && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    4136       691672 :     retval_rhs = TREE_OPERAND (retval, 1);
    4137              :   else
    4138              :     retval_rhs = retval;
    4139              : 
    4140       754207 :   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
    4141              : 
    4142              :   /* If we are returning the RESULT_DECL, then the value has already
    4143              :      been stored into it, so we don't have to do anything special.  */
    4144       754207 :   if (TREE_CODE (retval_rhs) == RESULT_DECL)
    4145        62535 :     expand_value_return (result_rtl);
    4146              : 
    4147              :   /* If the result is an aggregate that is being returned in one (or more)
    4148              :      registers, load the registers here.  */
    4149              : 
    4150       691672 :   else if (retval_rhs != 0
    4151       691672 :            && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
    4152         4911 :            && REG_P (result_rtl))
    4153              :     {
    4154         3456 :       val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
    4155         3456 :       if (val)
    4156              :         {
    4157              :           /* Use the mode of the result value on the return register.  */
    4158         1224 :           PUT_MODE (result_rtl, GET_MODE (val));
    4159         1224 :           expand_value_return (val);
    4160              :         }
    4161              :       else
    4162         2232 :         expand_null_return ();
    4163              :     }
    4164       688216 :   else if (retval_rhs != 0
    4165       688216 :            && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
    4166       688216 :            && (REG_P (result_rtl)
    4167         5991 :                || (GET_CODE (result_rtl) == PARALLEL)))
    4168              :     {
    4169              :       /* Compute the return value into a temporary (usually a pseudo reg).  */
    4170       685226 :       val
    4171       685226 :         = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
    4172       685226 :       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
    4173       685226 :       val = force_not_mem (val);
    4174       685226 :       expand_value_return (val);
    4175              :     }
    4176              :   else
    4177              :     {
    4178              :       /* No hard reg used; calculate value into hard return reg.  */
    4179         2990 :       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
    4180         2990 :       expand_value_return (result_rtl);
    4181              :     }
    4182              : }
    4183              : 
    4184              : /* Expand a clobber of LHS.  If LHS is stored it in a multi-part
    4185              :    register, tell the rtl optimizers that its value is no longer
    4186              :    needed.  */
    4187              : 
    4188              : static void
    4189      1298709 : expand_clobber (tree lhs)
    4190              : {
    4191      1298709 :   if (DECL_P (lhs))
    4192              :     {
    4193      1274175 :       rtx decl_rtl = DECL_RTL_IF_SET (lhs);
    4194      1274084 :       if (decl_rtl && REG_P (decl_rtl))
    4195              :         {
    4196       173767 :           machine_mode decl_mode = GET_MODE (decl_rtl);
    4197       347534 :           if (maybe_gt (GET_MODE_SIZE (decl_mode),
    4198              :                         REGMODE_NATURAL_SIZE (decl_mode)))
    4199       137730 :             emit_clobber (decl_rtl);
    4200              :         }
    4201              :     }
    4202      1298709 : }
    4203              : 
    4204              : /* A subroutine of expand_gimple_stmt, expanding one gimple statement
    4205              :    STMT that doesn't require special handling for outgoing edges.  That
    4206              :    is no tailcalls and no GIMPLE_COND.  */
    4207              : 
    4208              : static void
    4209     31477366 : expand_gimple_stmt_1 (gimple *stmt)
    4210              : {
    4211     31477366 :   tree op0;
    4212              : 
    4213     31477366 :   set_curr_insn_location (gimple_location (stmt));
    4214              : 
    4215     31477366 :   switch (gimple_code (stmt))
    4216              :     {
    4217          405 :     case GIMPLE_GOTO:
    4218          405 :       op0 = gimple_goto_dest (stmt);
    4219          405 :       if (TREE_CODE (op0) == LABEL_DECL)
    4220            0 :         expand_goto (op0);
    4221              :       else
    4222          405 :         expand_computed_goto (op0);
    4223              :       break;
    4224       633579 :     case GIMPLE_LABEL:
    4225       633579 :       expand_label (gimple_label_label (as_a <glabel *> (stmt)));
    4226       633579 :       break;
    4227              :     case GIMPLE_NOP:
    4228              :     case GIMPLE_PREDICT:
    4229              :       break;
    4230         8212 :     case GIMPLE_SWITCH:
    4231         8212 :       {
    4232         8212 :         gswitch *swtch = as_a <gswitch *> (stmt);
    4233         8212 :         if (gimple_switch_num_labels (swtch) == 1)
    4234            0 :           expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
    4235              :         else
    4236         8212 :           expand_case (swtch);
    4237              :       }
    4238              :       break;
    4239       109936 :     case GIMPLE_ASM:
    4240       109936 :       expand_asm_stmt (as_a <gasm *> (stmt));
    4241       109936 :       break;
    4242      6823180 :     case GIMPLE_CALL:
    4243      6823180 :       expand_call_stmt (as_a <gcall *> (stmt));
    4244      6823180 :       break;
    4245              : 
    4246       820601 :     case GIMPLE_RETURN:
    4247       820601 :       {
    4248       820601 :         op0 = gimple_return_retval (as_a <greturn *> (stmt));
    4249              : 
    4250              :         /* If a return doesn't have a location, it very likely represents
    4251              :            multiple user returns so we cannot let it inherit the location
    4252              :            of the last statement of the previous basic block in RTL.  */
    4253       820601 :         if (!gimple_has_location (stmt))
    4254       214185 :           set_curr_insn_location (cfun->function_end_locus);
    4255              : 
    4256       820601 :         if (op0 && op0 != error_mark_node)
    4257              :           {
    4258       754207 :             tree result = DECL_RESULT (current_function_decl);
    4259              : 
    4260              :             /* If we are not returning the current function's RESULT_DECL,
    4261              :                build an assignment to it.  */
    4262       754207 :             if (op0 != result)
    4263              :               {
    4264              :                 /* I believe that a function's RESULT_DECL is unique.  */
    4265       691672 :                 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
    4266              : 
    4267              :                 /* ??? We'd like to use simply expand_assignment here,
    4268              :                    but this fails if the value is of BLKmode but the return
    4269              :                    decl is a register.  expand_return has special handling
    4270              :                    for this combination, which eventually should move
    4271              :                    to common code.  See comments there.  Until then, let's
    4272              :                    build a modify expression :-/  */
    4273       691672 :                 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
    4274              :                               result, op0);
    4275              :               }
    4276              :           }
    4277              : 
    4278       754207 :         if (!op0)
    4279        66394 :           expand_null_return ();
    4280              :         else
    4281       754207 :           expand_return (op0);
    4282              :       }
    4283              :       break;
    4284              : 
    4285     23035676 :     case GIMPLE_ASSIGN:
    4286     23035676 :       {
    4287     23035676 :         gassign *assign_stmt = as_a <gassign *> (stmt);
    4288     23035676 :         tree lhs = gimple_assign_lhs (assign_stmt);
    4289              : 
    4290              :         /* Tree expand used to fiddle with |= and &= of two bitfield
    4291              :            COMPONENT_REFs here.  This can't happen with gimple, the LHS
    4292              :            of binary assigns must be a gimple reg.  */
    4293              : 
    4294     23035676 :         if (TREE_CODE (lhs) != SSA_NAME
    4295     36713849 :             || gimple_assign_rhs_class (assign_stmt) == GIMPLE_SINGLE_RHS)
    4296              :           {
    4297     17143997 :             tree rhs = gimple_assign_rhs1 (assign_stmt);
    4298     17143997 :             gcc_assert (gimple_assign_rhs_class (assign_stmt)
    4299              :                         == GIMPLE_SINGLE_RHS);
    4300     32749980 :             if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
    4301              :                 /* Do not put locations on possibly shared trees.  */
    4302     23112678 :                 && !is_gimple_min_invariant (rhs))
    4303      4839984 :               SET_EXPR_LOCATION (rhs, gimple_location (stmt));
    4304     17143997 :             if (TREE_CLOBBER_P (rhs))
    4305              :               /* This is a clobber to mark the going out of scope for
    4306              :                  this LHS.  */
    4307      1298709 :               expand_clobber (lhs);
    4308              :             else
    4309     15845288 :               expand_assignment (lhs, rhs,
    4310     15845288 :                                  gimple_assign_nontemporal_move_p (
    4311              :                                    assign_stmt));
    4312              :           }
    4313              :         else
    4314              :           {
    4315      5891679 :             rtx target, temp;
    4316      5891679 :             gcc_assert (!gimple_assign_nontemporal_move_p (assign_stmt));
    4317      5891679 :             bool promoted = false;
    4318              : 
    4319      5891679 :             target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    4320      5891679 :             if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    4321              :               promoted = true;
    4322              : 
    4323              :            /* If we store into a promoted register, don't directly
    4324              :               expand to target.  */
    4325              :             temp = promoted ? NULL_RTX : target;
    4326     11783358 :             temp = expand_expr_real_gassign (assign_stmt, temp,
    4327      5891679 :                                              GET_MODE (target), EXPAND_NORMAL);
    4328              : 
    4329      5891679 :             if (temp == target)
    4330              :               ;
    4331       810181 :             else if (promoted)
    4332              :               {
    4333            3 :                 int unsignedp = SUBREG_PROMOTED_SIGN (target);
    4334              :                 /* If TEMP is a VOIDmode constant, use convert_modes to make
    4335              :                    sure that we properly convert it.  */
    4336            3 :                 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
    4337              :                   {
    4338            0 :                     temp = convert_modes (GET_MODE (target),
    4339            0 :                                           TYPE_MODE (TREE_TYPE (lhs)),
    4340              :                                           temp, unsignedp);
    4341            0 :                     temp = convert_modes (GET_MODE (SUBREG_REG (target)),
    4342            0 :                                           GET_MODE (target), temp, unsignedp);
    4343              :                   }
    4344              : 
    4345            3 :                 convert_move (SUBREG_REG (target), temp, unsignedp);
    4346              :               }
    4347              :             else
    4348              :               {
    4349       810178 :                 temp = force_operand (temp, target);
    4350       810178 :                 if (temp == target)
    4351              :                   ;
    4352       810178 :                 else if (GET_MODE (target) != BLKmode)
    4353       810177 :                   emit_move_insn (target, temp);
    4354              :                 else
    4355            1 :                   emit_block_move (target, temp, expr_size (lhs),
    4356              :                                    BLOCK_OP_NORMAL);
    4357              :               }
    4358              :           }
    4359              :       }
    4360              :       break;
    4361              : 
    4362            0 :     default:
    4363            0 :       gcc_unreachable ();
    4364              :     }
    4365     31477365 : }
    4366              : 
    4367              : /* Expand one gimple statement STMT and return the last RTL instruction
    4368              :    before any of the newly generated ones.
    4369              : 
    4370              :    In addition to generating the necessary RTL instructions this also
    4371              :    sets REG_EH_REGION notes if necessary and sets the current source
    4372              :    location for diagnostics.  */
    4373              : 
    4374              : static rtx_insn *
    4375     31477366 : expand_gimple_stmt (gimple *stmt)
    4376              : {
    4377     31477366 :   location_t saved_location = input_location;
    4378     31477366 :   rtx_insn *last = get_last_insn ();
    4379     31477366 :   int lp_nr;
    4380              : 
    4381     31477366 :   gcc_assert (cfun);
    4382              : 
    4383              :   /* We need to save and restore the current source location so that errors
    4384              :      discovered during expansion are emitted with the right location.  But
    4385              :      it would be better if the diagnostic routines used the source location
    4386              :      embedded in the tree nodes rather than globals.  */
    4387     31477366 :   if (gimple_has_location (stmt))
    4388     27644750 :     input_location = gimple_location (stmt);
    4389              : 
    4390     31477366 :   expand_gimple_stmt_1 (stmt);
    4391              : 
    4392              :   /* Free any temporaries used to evaluate this statement.  */
    4393     31477365 :   free_temp_slots ();
    4394              : 
    4395     31477365 :   input_location = saved_location;
    4396              : 
    4397              :   /* Mark all insns that may trap.  */
    4398     31477365 :   lp_nr = lookup_stmt_eh_lp (stmt);
    4399     31477365 :   if (lp_nr)
    4400              :     {
    4401       689682 :       rtx_insn *insn;
    4402      3953968 :       for (insn = next_real_insn (last); insn;
    4403      3264286 :            insn = next_real_insn (insn))
    4404              :         {
    4405      3264286 :           if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
    4406              :               /* If we want exceptions for non-call insns, any
    4407              :                  may_trap_p instruction may throw.  */
    4408      3264226 :               && GET_CODE (PATTERN (insn)) != CLOBBER
    4409      3264156 :               && GET_CODE (PATTERN (insn)) != USE
    4410      6528442 :               && insn_could_throw_p (insn))
    4411       747098 :             make_reg_eh_region_note (insn, 0, lp_nr);
    4412              :         }
    4413              :     }
    4414              : 
    4415     31477365 :   return last;
    4416              : }
    4417              : 
    4418              : /* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
    4419              :    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
    4420              :    generated a tail call (something that might be denied by the ABI
    4421              :    rules governing the call; see calls.cc).
    4422              : 
    4423              :    Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
    4424              :    can still reach the rest of BB.  The case here is __builtin_sqrt,
    4425              :    where the NaN result goes through the external function (with a
    4426              :    tailcall) and the normal result happens via a sqrt instruction.  */
    4427              : 
    4428              : static basic_block
    4429       176808 : expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru,
    4430              :                         rtx_insn *asan_epilog_seq)
    4431              : {
    4432       176808 :   rtx_insn *last2, *last, *first = get_last_insn ();
    4433       176808 :   edge e;
    4434       176808 :   edge_iterator ei;
    4435       176808 :   profile_probability probability;
    4436              : 
    4437       176808 :   last2 = last = expand_gimple_stmt (stmt);
    4438              : 
    4439       769784 :   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    4440       720791 :     if (CALL_P (last) && SIBLING_CALL_P (last))
    4441       127815 :       goto found;
    4442              : 
    4443        48993 :   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
    4444              : 
    4445        48993 :   *can_fallthru = true;
    4446        48993 :   return NULL;
    4447              : 
    4448       127815 :  found:
    4449              : 
    4450       127815 :   if (asan_epilog_seq)
    4451              :     {
    4452              :       /* We need to emit a copy of the asan_epilog_seq before
    4453              :          the insns emitted by expand_gimple_stmt above.  The sequence
    4454              :          can contain labels, which need to be remapped.  */
    4455          117 :       hash_map<rtx, rtx> label_map;
    4456          117 :       start_sequence ();
    4457          117 :       emit_note (NOTE_INSN_DELETED);
    4458         1650 :       for (rtx_insn *insn = asan_epilog_seq; insn; insn = NEXT_INSN (insn))
    4459         1533 :         switch (GET_CODE (insn))
    4460              :           {
    4461         1182 :           case INSN:
    4462         1182 :           case CALL_INSN:
    4463         1182 :           case JUMP_INSN:
    4464         1182 :             emit_copy_of_insn_after (insn, get_last_insn ());
    4465         1182 :             break;
    4466          234 :           case CODE_LABEL:
    4467          234 :             label_map.put ((rtx) insn, (rtx) emit_label (gen_label_rtx ()));
    4468          234 :             break;
    4469          117 :           case BARRIER:
    4470          117 :             emit_barrier ();
    4471          117 :             break;
    4472            0 :           default:
    4473            0 :             gcc_unreachable ();
    4474              :           }
    4475         1767 :       for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    4476         1650 :         if (JUMP_P (insn))
    4477              :           {
    4478          234 :             subrtx_ptr_iterator::array_type array;
    4479         1521 :             FOR_EACH_SUBRTX_PTR (iter, array, &PATTERN (insn), ALL)
    4480              :               {
    4481         1287 :                 rtx *loc = *iter;
    4482         1287 :                 if (LABEL_REF_P (*loc))
    4483              :                   {
    4484          234 :                     rtx *lab = label_map.get ((rtx) label_ref_label (*loc));
    4485          234 :                     gcc_assert (lab);
    4486          234 :                     set_label_ref_label (*loc, as_a <rtx_insn *> (*lab));
    4487              :                   }
    4488              :               }
    4489          234 :             if (JUMP_LABEL (insn))
    4490              :               {
    4491          234 :                 rtx *lab = label_map.get (JUMP_LABEL (insn));
    4492          234 :                 gcc_assert (lab);
    4493          234 :                 JUMP_LABEL (insn) = *lab;
    4494              :               }
    4495          234 :           }
    4496          117 :       asan_epilog_seq = NEXT_INSN (get_insns ());
    4497          117 :       end_sequence ();
    4498          117 :       emit_insn_before (asan_epilog_seq, NEXT_INSN (first));
    4499          117 :     }
    4500              : 
    4501              :   /* ??? Wouldn't it be better to just reset any pending stack adjust?
    4502              :      Any instructions emitted here are about to be deleted.  */
    4503       127815 :   do_pending_stack_adjust ();
    4504              : 
    4505              :   /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
    4506              :   /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
    4507              :      EH or abnormal edges, we shouldn't have created a tail call in
    4508              :      the first place.  So it seems to me we should just be removing
    4509              :      all edges here, or redirecting the existing fallthru edge to
    4510              :      the exit block.  */
    4511              : 
    4512       127815 :   probability = profile_probability::never ();
    4513              : 
    4514       255624 :   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    4515              :     {
    4516       127809 :       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
    4517              :         {
    4518       127803 :           if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    4519        61150 :             e->dest->count -= e->count ();
    4520       127803 :           probability += e->probability;
    4521       127803 :           expand_remove_edge (e);
    4522              :         }
    4523              :       else
    4524            6 :         ei_next (&ei);
    4525              :     }
    4526              : 
    4527              :   /* This is somewhat ugly: the call_expr expander often emits instructions
    4528              :      after the sibcall (to perform the function return).  These confuse the
    4529              :      find_many_sub_basic_blocks code, so we need to get rid of these.  */
    4530       127815 :   last = NEXT_INSN (last);
    4531       127815 :   gcc_assert (BARRIER_P (last));
    4532              : 
    4533       127815 :   *can_fallthru = false;
    4534       159042 :   while (NEXT_INSN (last))
    4535              :     {
    4536              :       /* For instance an sqrt builtin expander expands if with
    4537              :          sibcall in the then and label for `else`.  */
    4538        31227 :       if (LABEL_P (NEXT_INSN (last)))
    4539              :         {
    4540            0 :           *can_fallthru = true;
    4541            0 :           break;
    4542              :         }
    4543        31227 :       delete_insn (NEXT_INSN (last));
    4544              :     }
    4545              : 
    4546       127815 :   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
    4547              :                  | EDGE_SIBCALL);
    4548       127815 :   e->probability = probability;
    4549       127815 :   head_end_for_bb[bb->index].second = last;
    4550       255630 :   update_bb_for_insn_chain (head_end_for_bb[bb->index].first,
    4551       127815 :                             head_end_for_bb[bb->index].second, bb);
    4552              : 
    4553       127815 :   if (NEXT_INSN (last))
    4554              :     {
    4555            0 :       bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
    4556              : 
    4557            0 :       last = BB_END (bb);
    4558            0 :       if (BARRIER_P (last))
    4559            0 :         BB_END (bb) = PREV_INSN (last);
    4560              :     }
    4561              : 
    4562       127815 :   maybe_dump_rtl_for_gimple_stmt (stmt, last2);
    4563              : 
    4564       127815 :   return bb;
    4565              : }
    4566              : 
    4567              : /* Return the difference between the floor and the truncated result of
    4568              :    a signed division by OP1 with remainder MOD.  */
    4569              : static rtx
    4570            4 : floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
    4571              : {
    4572              :   /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
    4573            4 :   return gen_rtx_IF_THEN_ELSE
    4574              :     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
    4575              :      gen_rtx_IF_THEN_ELSE
    4576              :      (mode, gen_rtx_LT (BImode,
    4577              :                         gen_rtx_DIV (mode, op1, mod),
    4578              :                         const0_rtx),
    4579              :       constm1_rtx, const0_rtx),
    4580              :      const0_rtx);
    4581              : }
    4582              : 
    4583              : /* Return the difference between the ceil and the truncated result of
    4584              :    a signed division by OP1 with remainder MOD.  */
    4585              : static rtx
    4586            3 : ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
    4587              : {
    4588              :   /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
    4589            3 :   return gen_rtx_IF_THEN_ELSE
    4590              :     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
    4591              :      gen_rtx_IF_THEN_ELSE
    4592              :      (mode, gen_rtx_GT (BImode,
    4593              :                         gen_rtx_DIV (mode, op1, mod),
    4594              :                         const0_rtx),
    4595              :       const1_rtx, const0_rtx),
    4596              :      const0_rtx);
    4597              : }
    4598              : 
    4599              : /* Return the difference between the ceil and the truncated result of
    4600              :    an unsigned division by OP1 with remainder MOD.  */
    4601              : static rtx
    4602            0 : ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
    4603              : {
    4604              :   /* (mod != 0 ? 1 : 0) */
    4605            0 :   return gen_rtx_IF_THEN_ELSE
    4606              :     (mode, gen_rtx_NE (BImode, mod, const0_rtx),
    4607              :      const1_rtx, const0_rtx);
    4608              : }
    4609              : 
    4610              : /* Return the difference between the rounded and the truncated result
    4611              :    of a signed division by OP1 with remainder MOD.  Halfway cases are
    4612              :    rounded away from zero, rather than to the nearest even number.  */
    4613              : static rtx
    4614            0 : round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
    4615              : {
    4616              :   /* (abs (mod) >= abs (op1) - abs (mod)
    4617              :       ? (op1 / mod > 0 ? 1 : -1)
    4618              :       : 0) */
    4619            0 :   return gen_rtx_IF_THEN_ELSE
    4620              :     (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
    4621              :                        gen_rtx_MINUS (mode,
    4622              :                                       gen_rtx_ABS (mode, op1),
    4623              :                                       gen_rtx_ABS (mode, mod))),
    4624              :      gen_rtx_IF_THEN_ELSE
    4625              :      (mode, gen_rtx_GT (BImode,
    4626              :                         gen_rtx_DIV (mode, op1, mod),
    4627              :                         const0_rtx),
    4628              :       const1_rtx, constm1_rtx),
    4629              :      const0_rtx);
    4630              : }
    4631              : 
    4632              : /* Return the difference between the rounded and the truncated result
    4633              :    of a unsigned division by OP1 with remainder MOD.  Halfway cases
    4634              :    are rounded away from zero, rather than to the nearest even
    4635              :    number.  */
    4636              : static rtx
    4637            0 : round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
    4638              : {
    4639              :   /* (mod >= op1 - mod ? 1 : 0) */
    4640            0 :   return gen_rtx_IF_THEN_ELSE
    4641              :     (mode, gen_rtx_GE (BImode, mod,
    4642              :                        gen_rtx_MINUS (mode, op1, mod)),
    4643              :      const1_rtx, const0_rtx);
    4644              : }
    4645              : 
    4646              : /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
    4647              :    any rtl.  */
    4648              : 
    4649              : static rtx
    4650      6941626 : convert_debug_memory_address (scalar_int_mode mode, rtx x,
    4651              :                               addr_space_t as)
    4652              : {
    4653              : #ifndef POINTERS_EXTEND_UNSIGNED
    4654              :   gcc_assert (mode == Pmode
    4655              :               || mode == targetm.addr_space.address_mode (as));
    4656              :   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
    4657              : #else
    4658      6941626 :   rtx temp;
    4659              : 
    4660      6941626 :   gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
    4661              : 
    4662      6941626 :   if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    4663              :     return x;
    4664              : 
    4665              :   /* X must have some form of address mode already.  */
    4666            0 :   scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
    4667            0 :   if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    4668            0 :     x = lowpart_subreg (mode, x, xmode);
    4669            0 :   else if (POINTERS_EXTEND_UNSIGNED > 0)
    4670            0 :     x = gen_rtx_ZERO_EXTEND (mode, x);
    4671              :   else if (!POINTERS_EXTEND_UNSIGNED)
    4672              :     x = gen_rtx_SIGN_EXTEND (mode, x);
    4673              :   else
    4674              :     {
    4675              :       switch (GET_CODE (x))
    4676              :         {
    4677              :         case SUBREG:
    4678              :           if ((SUBREG_PROMOTED_VAR_P (x)
    4679              :                || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
    4680              :                || (GET_CODE (SUBREG_REG (x)) == PLUS
    4681              :                    && REG_P (XEXP (SUBREG_REG (x), 0))
    4682              :                    && REG_POINTER (XEXP (SUBREG_REG (x), 0))
    4683              :                    && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
    4684              :               && GET_MODE (SUBREG_REG (x)) == mode)
    4685              :             return SUBREG_REG (x);
    4686              :           break;
    4687              :         case LABEL_REF:
    4688              :           temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
    4689              :           LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
    4690              :           return temp;
    4691              :         case SYMBOL_REF:
    4692              :           temp = shallow_copy_rtx (x);
    4693              :           PUT_MODE (temp, mode);
    4694              :           return temp;
    4695              :         case CONST:
    4696              :           temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
    4697              :           if (temp)
    4698              :             temp = gen_rtx_CONST (mode, temp);
    4699              :           return temp;
    4700              :         case PLUS:
    4701              :         case MINUS:
    4702              :           if (CONST_INT_P (XEXP (x, 1)))
    4703              :             {
    4704              :               temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
    4705              :               if (temp)
    4706              :                 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
    4707              :             }
    4708              :           break;
    4709              :         default:
    4710              :           break;
    4711              :         }
    4712              :       /* Don't know how to express ptr_extend as operation in debug info.  */
    4713              :       return NULL;
    4714              :     }
    4715              : #endif /* POINTERS_EXTEND_UNSIGNED */
    4716              : 
    4717              :   return x;
    4718              : }
    4719              : 
    4720              : /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
    4721              :    by avoid_deep_ter_for_debug.  */
    4722              : 
    4723              : static hash_map<tree, tree> *deep_ter_debug_map;
    4724              : 
    4725              : /* Split too deep TER chains for debug stmts using debug temporaries.  */
    4726              : 
    4727              : static void
    4728     39171466 : avoid_deep_ter_for_debug (gimple *stmt, int depth)
    4729              : {
    4730     39171466 :   use_operand_p use_p;
    4731     39171466 :   ssa_op_iter iter;
    4732     48854287 :   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    4733              :     {
    4734      9682821 :       tree use = USE_FROM_PTR (use_p);
    4735      9682821 :       if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
    4736      8133700 :         continue;
    4737      8346748 :       gimple *g = get_gimple_for_ssa_name (use);
    4738      8346748 :       if (g == NULL)
    4739      6796914 :         continue;
    4740      1549834 :       if ((depth > 6 || !is_gimple_assign (g)) && !stmt_ends_bb_p (g))
    4741              :         {
    4742         3360 :           if (deep_ter_debug_map == NULL)
    4743          627 :             deep_ter_debug_map = new hash_map<tree, tree>;
    4744              : 
    4745         3360 :           tree &vexpr = deep_ter_debug_map->get_or_insert (use);
    4746         3360 :           if (vexpr != NULL)
    4747          713 :             continue;
    4748         2647 :           vexpr = build_debug_expr_decl (TREE_TYPE (use));
    4749         2647 :           gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
    4750         2647 :           gimple_stmt_iterator gsi = gsi_for_stmt (g);
    4751         2647 :           gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
    4752         2647 :           avoid_deep_ter_for_debug (def_temp, 0);
    4753              :         }
    4754              :       else
    4755      1546474 :         avoid_deep_ter_for_debug (g, depth + 1);
    4756              :     }
    4757     39171466 : }
    4758              : 
    4759              : /* Return an RTX equivalent to the value of the parameter DECL.  */
    4760              : 
    4761              : static rtx
    4762       172035 : expand_debug_parm_decl (tree decl)
    4763              : {
    4764       172035 :   rtx incoming = DECL_INCOMING_RTL (decl);
    4765              : 
    4766       172035 :   if (incoming
    4767        19694 :       && GET_MODE (incoming) != BLKmode
    4768       191725 :       && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
    4769         1735 :           || (MEM_P (incoming)
    4770         1649 :               && REG_P (XEXP (incoming, 0))
    4771          703 :               && HARD_REGISTER_P (XEXP (incoming, 0)))))
    4772              :     {
    4773        17983 :       rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
    4774              : 
    4775              : #ifdef HAVE_window_save
    4776              :       /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
    4777              :          If the target machine has an explicit window save instruction, the
    4778              :          actual entry value is the corresponding OUTGOING_REGNO instead.  */
    4779              :       if (REG_P (incoming)
    4780              :           && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
    4781              :         incoming
    4782              :           = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
    4783              :                                 OUTGOING_REGNO (REGNO (incoming)), 0);
    4784              :       else if (MEM_P (incoming))
    4785              :         {
    4786              :           rtx reg = XEXP (incoming, 0);
    4787              :           if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
    4788              :             {
    4789              :               reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
    4790              :               incoming = replace_equiv_address_nv (incoming, reg);
    4791              :             }
    4792              :           else
    4793              :             incoming = copy_rtx (incoming);
    4794              :         }
    4795              : #endif
    4796              : 
    4797        17983 :       ENTRY_VALUE_EXP (rtl) = incoming;
    4798        17983 :       return rtl;
    4799              :     }
    4800              : 
    4801       154052 :   if (incoming
    4802         1711 :       && GET_MODE (incoming) != BLKmode
    4803         1707 :       && !TREE_ADDRESSABLE (decl)
    4804         1707 :       && MEM_P (incoming)
    4805         1621 :       && (XEXP (incoming, 0) == virtual_incoming_args_rtx
    4806          946 :           || (GET_CODE (XEXP (incoming, 0)) == PLUS
    4807          946 :               && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
    4808          931 :               && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    4809         1606 :     return copy_rtx (incoming);
    4810              : 
    4811              :   return NULL_RTX;
    4812              : }
    4813              : 
    4814              : /* Return an RTX equivalent to the value of the tree expression EXP.  */
    4815              : 
    4816              : static rtx
    4817     39586669 : expand_debug_expr (tree exp)
    4818              : {
    4819     39586669 :   rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
    4820     39586669 :   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
    4821     39586669 :   machine_mode inner_mode = VOIDmode;
    4822     39586669 :   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
    4823     39586669 :   addr_space_t as;
    4824     39586669 :   scalar_int_mode op0_mode, op1_mode, addr_mode;
    4825              : 
    4826     39586669 :   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    4827              :     {
    4828      6557309 :     case tcc_expression:
    4829      6557309 :       switch (TREE_CODE (exp))
    4830              :         {
    4831         8496 :         case COND_EXPR:
    4832         8496 :         case DOT_PROD_EXPR:
    4833         8496 :         case SAD_EXPR:
    4834         8496 :         case WIDEN_MULT_PLUS_EXPR:
    4835         8496 :         case WIDEN_MULT_MINUS_EXPR:
    4836         8496 :           goto ternary;
    4837              : 
    4838            0 :         case TRUTH_ANDIF_EXPR:
    4839            0 :         case TRUTH_ORIF_EXPR:
    4840            0 :         case TRUTH_AND_EXPR:
    4841            0 :         case TRUTH_OR_EXPR:
    4842            0 :         case TRUTH_XOR_EXPR:
    4843            0 :           goto binary;
    4844              : 
    4845            0 :         case TRUTH_NOT_EXPR:
    4846            0 :           goto unary;
    4847              : 
    4848              :         default:
    4849              :           break;
    4850              :         }
    4851              :       break;
    4852              : 
    4853         8496 :     ternary:
    4854         8496 :       op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
    4855         8496 :       if (!op2)
    4856              :         return NULL_RTX;
    4857              :       /* Fall through.  */
    4858              : 
    4859         8496 :     binary:
    4860      1371337 :     case tcc_binary:
    4861      1371337 :       if (mode == BLKmode)
    4862              :         return NULL_RTX;
    4863      1371327 :       op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
    4864      1371327 :       if (!op1)
    4865              :         return NULL_RTX;
    4866      1370775 :       switch (TREE_CODE (exp))
    4867              :         {
    4868        58294 :         case LSHIFT_EXPR:
    4869        58294 :         case RSHIFT_EXPR:
    4870        58294 :         case LROTATE_EXPR:
    4871        58294 :         case RROTATE_EXPR:
    4872        58294 :         case WIDEN_LSHIFT_EXPR:
    4873              :           /* Ensure second operand isn't wider than the first one.  */
    4874        58294 :           inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
    4875        58294 :           if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
    4876        58286 :               && (GET_MODE_UNIT_PRECISION (mode)
    4877        58286 :                   < GET_MODE_PRECISION (op1_mode)))
    4878         5238 :             op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
    4879              :           break;
    4880              :         default:
    4881              :           break;
    4882              :         }
    4883              :       /* Fall through.  */
    4884              : 
    4885      1936965 :     unary:
    4886      1936965 :     case tcc_unary:
    4887      1936965 :       if (mode == BLKmode)
    4888              :         return NULL_RTX;
    4889      1936963 :       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
    4890      1936963 :       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
    4891      1936963 :       if (!op0)
    4892              :         return NULL_RTX;
    4893              :       break;
    4894              : 
    4895        58770 :     case tcc_comparison:
    4896        58770 :       unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
    4897        58770 :       goto binary;
    4898              : 
    4899            0 :     case tcc_type:
    4900            0 :     case tcc_statement:
    4901            0 :       gcc_unreachable ();
    4902              : 
    4903              :     case tcc_constant:
    4904              :     case tcc_exceptional:
    4905              :     case tcc_declaration:
    4906              :     case tcc_reference:
    4907              :     case tcc_vl_exp:
    4908              :       break;
    4909              :     }
    4910              : 
    4911     39584557 :   switch (TREE_CODE (exp))
    4912              :     {
    4913       295742 :     case STRING_CST:
    4914       295742 :       if (!lookup_constant_def (exp))
    4915              :         {
    4916       120766 :           if (strlen (TREE_STRING_POINTER (exp)) + 1
    4917       120766 :               != (size_t) TREE_STRING_LENGTH (exp))
    4918              :             return NULL_RTX;
    4919       120741 :           op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
    4920       120156 :           op0 = gen_rtx_MEM (BLKmode, op0);
    4921       120156 :           set_mem_attributes (op0, exp, 0);
    4922       120156 :           return op0;
    4923              :         }
    4924              :       /* Fall through.  */
    4925              : 
    4926      5321180 :     case INTEGER_CST:
    4927      5321180 :       if (TREE_CODE (TREE_TYPE (exp)) == BITINT_TYPE
    4928      5321180 :           && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
    4929              :         return NULL;
    4930              :       /* FALLTHRU */
    4931      5383681 :     case REAL_CST:
    4932      5383681 :     case FIXED_CST:
    4933      5383681 :       op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
    4934      5383681 :       return op0;
    4935              : 
    4936            0 :     case POLY_INT_CST:
    4937            0 :       return immed_wide_int_const (poly_int_cst_value (exp), mode);
    4938              : 
    4939         4066 :     case COMPLEX_CST:
    4940         4066 :       gcc_assert (COMPLEX_MODE_P (mode));
    4941         4066 :       op0 = expand_debug_expr (TREE_REALPART (exp));
    4942         4066 :       op1 = expand_debug_expr (TREE_IMAGPART (exp));
    4943         4066 :       return gen_rtx_CONCAT (mode, op0, op1);
    4944              : 
    4945      5510013 :     case DEBUG_EXPR_DECL:
    4946      5510013 :       op0 = DECL_RTL_IF_SET (exp);
    4947              : 
    4948      4366362 :       if (op0)
    4949              :         {
    4950      4366362 :           if (GET_MODE (op0) != mode)
    4951            0 :             gcc_assert (VECTOR_TYPE_P (TREE_TYPE (exp)));
    4952              :           else
    4953              :             return op0;
    4954              :         }
    4955              : 
    4956      1143651 :       op0 = gen_rtx_DEBUG_EXPR (mode);
    4957      1143651 :       DEBUG_EXPR_TREE_DECL (op0) = exp;
    4958      1143651 :       SET_DECL_RTL (exp, op0);
    4959              : 
    4960      1143651 :       return op0;
    4961              : 
    4962      5133285 :     case VAR_DECL:
    4963      5133285 :     case PARM_DECL:
    4964      5133285 :     case FUNCTION_DECL:
    4965      5133285 :     case LABEL_DECL:
    4966      5133285 :     case CONST_DECL:
    4967      5133285 :     case RESULT_DECL:
    4968      5133285 :       op0 = DECL_RTL_IF_SET (exp);
    4969              : 
    4970              :       /* This decl was probably optimized away.  */
    4971      3583394 :       if (!op0
    4972              :           /* At least label RTXen are sometimes replaced by
    4973              :              NOTE_INSN_DELETED_LABEL.  Any notes here are not
    4974              :              handled by copy_rtx.  */
    4975      3583394 :           || NOTE_P (op0))
    4976              :         {
    4977      1549892 :           if (!VAR_P (exp)
    4978      1539014 :               || DECL_EXTERNAL (exp)
    4979      1538487 :               || !TREE_STATIC (exp)
    4980       100381 :               || !DECL_NAME (exp)
    4981       100381 :               || DECL_HARD_REGISTER (exp)
    4982       100381 :               || DECL_IN_CONSTANT_POOL (exp)
    4983       100381 :               || mode == VOIDmode
    4984      1650273 :               || symtab_node::get (exp) == NULL)
    4985              :             return NULL;
    4986              : 
    4987        15355 :           op0 = make_decl_rtl_for_debug (exp);
    4988        15355 :           if (!MEM_P (op0)
    4989        15355 :               || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
    4990        30710 :               || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
    4991              :             return NULL;
    4992              :         }
    4993      3583393 :       else if (VAR_P (exp)
    4994      3475222 :                && is_global_var (exp)
    4995      3820323 :                && symtab_node::get (exp) == NULL)
    4996              :         return NULL;
    4997              :       else
    4998      3583393 :         op0 = copy_rtx (op0);
    4999              : 
    5000      3598748 :       if (GET_MODE (op0) == BLKmode
    5001              :           /* If op0 is not BLKmode, but mode is, adjust_mode
    5002              :              below would ICE.  While it is likely a FE bug,
    5003              :              try to be robust here.  See PR43166.  */
    5004       468371 :           || mode == BLKmode
    5005       468371 :           || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
    5006              :         {
    5007      3130377 :           gcc_assert (MEM_P (op0));
    5008      3130377 :           op0 = adjust_address_nv (op0, mode, 0);
    5009      3130377 :           return op0;
    5010              :         }
    5011              : 
    5012              :       /* Fall through.  */
    5013              : 
    5014       468371 :     adjust_mode:
    5015     10320926 :     case PAREN_EXPR:
    5016     10320926 :     CASE_CONVERT:
    5017     10320926 :       {
    5018     10320926 :         inner_mode = GET_MODE (op0);
    5019              : 
    5020     10320926 :         if (mode == inner_mode)
    5021              :           return op0;
    5022              : 
    5023       141705 :         if (inner_mode == VOIDmode)
    5024              :           {
    5025         5786 :             if (TREE_CODE (exp) == SSA_NAME)
    5026           31 :               inner_mode = TYPE_MODE (TREE_TYPE (exp));
    5027              :             else
    5028         5755 :               inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
    5029         5786 :             if (mode == inner_mode)
    5030              :               return op0;
    5031              :           }
    5032              : 
    5033       138562 :         if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    5034              :           {
    5035         6155 :             if (GET_MODE_UNIT_BITSIZE (mode)
    5036        12310 :                 == GET_MODE_UNIT_BITSIZE (inner_mode))
    5037            4 :               op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
    5038         6151 :             else if (GET_MODE_UNIT_BITSIZE (mode)
    5039        12302 :                      < GET_MODE_UNIT_BITSIZE (inner_mode))
    5040         3524 :               op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
    5041              :             else
    5042         2627 :               op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    5043              :           }
    5044       132407 :         else if (FLOAT_MODE_P (mode))
    5045              :           {
    5046            0 :             gcc_assert (TREE_CODE (exp) != SSA_NAME);
    5047            0 :             if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
    5048            0 :               op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
    5049              :             else
    5050            0 :               op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
    5051              :           }
    5052       132407 :         else if (FLOAT_MODE_P (inner_mode))
    5053              :           {
    5054            0 :             if (unsignedp)
    5055            0 :               op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
    5056              :             else
    5057            0 :               op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    5058              :           }
    5059       264814 :         else if (GET_MODE_UNIT_PRECISION (mode)
    5060       132407 :                  == GET_MODE_UNIT_PRECISION (inner_mode))
    5061            0 :           op0 = lowpart_subreg (mode, op0, inner_mode);
    5062       264814 :         else if (GET_MODE_UNIT_PRECISION (mode)
    5063       132407 :                  < GET_MODE_UNIT_PRECISION (inner_mode))
    5064        62037 :           op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
    5065        70370 :         else if (UNARY_CLASS_P (exp)
    5066        70370 :                  ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
    5067            0 :                  : unsignedp)
    5068        48246 :           op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
    5069              :         else
    5070        22124 :           op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
    5071              : 
    5072              :         return op0;
    5073              :       }
    5074              : 
    5075      2077640 :     case MEM_REF:
    5076      2077640 :       if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    5077              :         {
    5078       828895 :           tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
    5079              :                                      TREE_OPERAND (exp, 0),
    5080              :                                      TREE_OPERAND (exp, 1));
    5081       828895 :           if (newexp)
    5082            0 :             return expand_debug_expr (newexp);
    5083              :         }
    5084              :       /* FALLTHROUGH */
    5085      2077640 :     case INDIRECT_REF:
    5086      2077640 :       inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
    5087      2077640 :       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
    5088      2077640 :       if (!op0)
    5089              :         return NULL;
    5090              : 
    5091      2074010 :       if (TREE_CODE (exp) == MEM_REF)
    5092              :         {
    5093      2074010 :           if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
    5094      2029927 :               || (GET_CODE (op0) == PLUS
    5095       291988 :                   && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
    5096              :             /* (mem (debug_implicit_ptr)) might confuse aliasing.
    5097              :                Instead just use get_inner_reference.  */
    5098        44083 :             goto component_ref;
    5099              : 
    5100      2029927 :           op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
    5101      2029927 :           poly_int64 offset;
    5102      2029927 :           if (!op1 || !poly_int_rtx_p (op1, &offset))
    5103            0 :             return NULL;
    5104              : 
    5105      2029927 :           op0 = plus_constant (inner_mode, op0, offset);
    5106              :         }
    5107              : 
    5108      2029927 :       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
    5109              : 
    5110      2029927 :       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
    5111              :                                           op0, as);
    5112      2029927 :       if (op0 == NULL_RTX)
    5113              :         return NULL;
    5114              : 
    5115      2029927 :       op0 = gen_rtx_MEM (mode, op0);
    5116      2029927 :       set_mem_attributes (op0, exp, 0);
    5117      2029927 :       if (TREE_CODE (exp) == MEM_REF
    5118      2029927 :           && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    5119       828895 :         set_mem_expr (op0, NULL_TREE);
    5120      2029927 :       set_mem_addr_space (op0, as);
    5121              : 
    5122      2029927 :       return op0;
    5123              : 
    5124        59640 :     case TARGET_MEM_REF:
    5125        59640 :       if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
    5126        59640 :           && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
    5127              :         return NULL;
    5128              : 
    5129        59630 :       op0 = expand_debug_expr
    5130        59630 :             (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
    5131        59630 :       if (!op0)
    5132              :         return NULL;
    5133              : 
    5134        59630 :       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
    5135        59630 :       op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
    5136              :                                           op0, as);
    5137        59630 :       if (op0 == NULL_RTX)
    5138              :         return NULL;
    5139              : 
    5140        59630 :       op0 = gen_rtx_MEM (mode, op0);
    5141              : 
    5142        59630 :       set_mem_attributes (op0, exp, 0);
    5143        59630 :       set_mem_addr_space (op0, as);
    5144              : 
    5145        59630 :       return op0;
    5146              : 
    5147      3519044 :     component_ref:
    5148      3519044 :     case ARRAY_REF:
    5149      3519044 :     case ARRAY_RANGE_REF:
    5150      3519044 :     case COMPONENT_REF:
    5151      3519044 :     case BIT_FIELD_REF:
    5152      3519044 :     case REALPART_EXPR:
    5153      3519044 :     case IMAGPART_EXPR:
    5154      3519044 :     case VIEW_CONVERT_EXPR:
    5155      3519044 :       {
    5156      3519044 :         machine_mode mode1;
    5157      3519044 :         poly_int64 bitsize, bitpos;
    5158      3519044 :         tree offset;
    5159      3519044 :         int reversep, volatilep = 0;
    5160      3519044 :         tree tem
    5161      3519044 :           = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
    5162              :                                  &unsignedp, &reversep, &volatilep);
    5163      3519044 :         rtx orig_op0;
    5164              : 
    5165      3519044 :         if (known_eq (bitsize, 0))
    5166              :           return NULL;
    5167              : 
    5168      3468599 :         orig_op0 = op0 = expand_debug_expr (tem);
    5169              : 
    5170      3468599 :         if (!op0)
    5171              :           return NULL;
    5172              : 
    5173      3161691 :         if (offset)
    5174              :           {
    5175        12583 :             machine_mode addrmode, offmode;
    5176              : 
    5177        12583 :             if (!MEM_P (op0))
    5178              :               return NULL;
    5179              : 
    5180        12583 :             op0 = XEXP (op0, 0);
    5181        12583 :             addrmode = GET_MODE (op0);
    5182        12583 :             if (addrmode == VOIDmode)
    5183            0 :               addrmode = Pmode;
    5184              : 
    5185        12583 :             op1 = expand_debug_expr (offset);
    5186        12583 :             if (!op1)
    5187              :               return NULL;
    5188              : 
    5189        12581 :             offmode = GET_MODE (op1);
    5190        12581 :             if (offmode == VOIDmode)
    5191            6 :               offmode = TYPE_MODE (TREE_TYPE (offset));
    5192              : 
    5193        12581 :             if (addrmode != offmode)
    5194            0 :               op1 = lowpart_subreg (addrmode, op1, offmode);
    5195              : 
    5196              :             /* Don't use offset_address here, we don't need a
    5197              :                recognizable address, and we don't want to generate
    5198              :                code.  */
    5199        12581 :             op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
    5200              :                                                           op0, op1));
    5201              :           }
    5202              : 
    5203      3161689 :         if (MEM_P (op0))
    5204              :           {
    5205      3034887 :             if (mode1 == VOIDmode)
    5206              :               {
    5207         4006 :                 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
    5208       357397 :                   return NULL;
    5209              :                 /* Bitfield.  */
    5210         4006 :                 mode1 = smallest_int_mode_for_size (bitsize).require ();
    5211              :               }
    5212      3034887 :             poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
    5213      3034887 :             if (maybe_ne (bytepos, 0))
    5214              :               {
    5215      1303016 :                 op0 = adjust_address_nv (op0, mode1, bytepos);
    5216      1303016 :                 bitpos = num_trailing_bits (bitpos);
    5217              :               }
    5218      1731871 :             else if (known_eq (bitpos, 0)
    5219      3462981 :                      && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
    5220       468675 :               op0 = adjust_address_nv (op0, mode, 0);
    5221      1263196 :             else if (GET_MODE (op0) != mode1)
    5222         3005 :               op0 = adjust_address_nv (op0, mode1, 0);
    5223              :             else
    5224      1260191 :               op0 = copy_rtx (op0);
    5225      3034887 :             if (op0 == orig_op0)
    5226       131728 :               op0 = shallow_copy_rtx (op0);
    5227      3034887 :             if (TREE_CODE (tem) != SSA_NAME)
    5228      3032523 :               set_mem_attributes (op0, exp, 0);
    5229              :           }
    5230              : 
    5231      3161689 :         if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
    5232              :           return op0;
    5233              : 
    5234       101137 :         if (maybe_lt (bitpos, 0))
    5235              :           return NULL;
    5236              : 
    5237       101135 :         if (GET_MODE (op0) == BLKmode || mode == BLKmode)
    5238              :           return NULL;
    5239              : 
    5240       101119 :         poly_int64 bytepos;
    5241       101838 :         if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
    5242       197548 :             && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
    5243              :           {
    5244        98079 :             machine_mode opmode = GET_MODE (op0);
    5245              : 
    5246        98079 :             if (opmode == VOIDmode)
    5247            0 :               opmode = TYPE_MODE (TREE_TYPE (tem));
    5248              : 
    5249              :             /* This condition may hold if we're expanding the address
    5250              :                right past the end of an array that turned out not to
    5251              :                be addressable (i.e., the address was only computed in
    5252              :                debug stmts).  The gen_subreg below would rightfully
    5253              :                crash, and the address doesn't really exist, so just
    5254              :                drop it.  */
    5255       196158 :             if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
    5256              :               return NULL;
    5257              : 
    5258       196110 :             if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
    5259        98031 :               return simplify_gen_subreg (mode, op0, opmode, bytepos);
    5260              :           }
    5261              : 
    5262         6128 :         return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
    5263         2585 :                                      && TYPE_UNSIGNED (TREE_TYPE (exp))
    5264              :                                      ? SIGN_EXTRACT
    5265              :                                      : ZERO_EXTRACT, mode,
    5266         3064 :                                      GET_MODE (op0) != VOIDmode
    5267              :                                      ? GET_MODE (op0)
    5268            0 :                                      : TYPE_MODE (TREE_TYPE (tem)),
    5269              :                                      op0, gen_int_mode (bitsize, word_mode),
    5270         3064 :                                      gen_int_mode (bitpos, word_mode));
    5271              :       }
    5272              : 
    5273         1392 :     case ABS_EXPR:
    5274         1392 :     case ABSU_EXPR:
    5275         1392 :       return simplify_gen_unary (ABS, mode, op0, mode);
    5276              : 
    5277        13721 :     case NEGATE_EXPR:
    5278        13721 :       return simplify_gen_unary (NEG, mode, op0, mode);
    5279              : 
    5280        17482 :     case BIT_NOT_EXPR:
    5281        17482 :       return simplify_gen_unary (NOT, mode, op0, mode);
    5282              : 
    5283         2359 :     case FLOAT_EXPR:
    5284         2359 :       return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
    5285              :                                                                          0)))
    5286              :                                  ? UNSIGNED_FLOAT : FLOAT, mode, op0,
    5287         2359 :                                  inner_mode);
    5288              : 
    5289          400 :     case FIX_TRUNC_EXPR:
    5290          400 :       return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
    5291          400 :                                  inner_mode);
    5292              : 
    5293       230316 :     case POINTER_PLUS_EXPR:
    5294              :       /* For the rare target where pointers are not the same size as
    5295              :          size_t, we need to check for mis-matched modes and correct
    5296              :          the addend.  */
    5297       230316 :       if (op0 && op1
    5298       230316 :           && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
    5299       230231 :           && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
    5300       359283 :           && op0_mode != op1_mode)
    5301              :         {
    5302            0 :           if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
    5303              :               /* If OP0 is a partial mode, then we must truncate, even
    5304              :                  if it has the same bitsize as OP1 as GCC's
    5305              :                  representation of partial modes is opaque.  */
    5306            0 :               || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
    5307            0 :                   && (GET_MODE_BITSIZE (op0_mode)
    5308            0 :                       == GET_MODE_BITSIZE (op1_mode))))
    5309            0 :             op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
    5310              :           else
    5311              :             /* We always sign-extend, regardless of the signedness of
    5312              :                the operand, because the operand is always unsigned
    5313              :                here even if the original C expression is signed.  */
    5314            0 :             op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
    5315              :         }
    5316              :       /* Fall through.  */
    5317       717750 :     case PLUS_EXPR:
    5318       717750 :       return simplify_gen_binary (PLUS, mode, op0, op1);
    5319              : 
    5320       111864 :     case MINUS_EXPR:
    5321       111864 :     case POINTER_DIFF_EXPR:
    5322       111864 :       return simplify_gen_binary (MINUS, mode, op0, op1);
    5323              : 
    5324       126270 :     case MULT_EXPR:
    5325       126270 :       return simplify_gen_binary (MULT, mode, op0, op1);
    5326              : 
    5327         3190 :     case RDIV_EXPR:
    5328         3190 :       gcc_assert (FLOAT_MODE_P (mode)
    5329              :                   || ALL_FIXED_POINT_MODE_P (mode));
    5330              :       /* Fall through.  */
    5331        77801 :     case TRUNC_DIV_EXPR:
    5332        77801 :     case EXACT_DIV_EXPR:
    5333        77801 :       if (unsignedp)
    5334         9817 :         return simplify_gen_binary (UDIV, mode, op0, op1);
    5335              :       else
    5336        67984 :         return simplify_gen_binary (DIV, mode, op0, op1);
    5337              : 
    5338         3455 :     case TRUNC_MOD_EXPR:
    5339         6910 :       return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
    5340              : 
    5341            3 :     case FLOOR_DIV_EXPR:
    5342            3 :       if (unsignedp)
    5343            0 :         return simplify_gen_binary (UDIV, mode, op0, op1);
    5344              :       else
    5345              :         {
    5346            3 :           rtx div = simplify_gen_binary (DIV, mode, op0, op1);
    5347            3 :           rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
    5348            3 :           rtx adj = floor_sdiv_adjust (mode, mod, op1);
    5349            3 :           return simplify_gen_binary (PLUS, mode, div, adj);
    5350              :         }
    5351              : 
    5352            1 :     case FLOOR_MOD_EXPR:
    5353            1 :       if (unsignedp)
    5354            0 :         return simplify_gen_binary (UMOD, mode, op0, op1);
    5355              :       else
    5356              :         {
    5357            1 :           rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
    5358            1 :           rtx adj = floor_sdiv_adjust (mode, mod, op1);
    5359            1 :           adj = simplify_gen_unary (NEG, mode,
    5360              :                                     simplify_gen_binary (MULT, mode, adj, op1),
    5361              :                                     mode);
    5362            1 :           return simplify_gen_binary (PLUS, mode, mod, adj);
    5363              :         }
    5364              : 
    5365            3 :     case CEIL_DIV_EXPR:
    5366            3 :       if (unsignedp)
    5367              :         {
    5368            0 :           rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
    5369            0 :           rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
    5370            0 :           rtx adj = ceil_udiv_adjust (mode, mod, op1);
    5371            0 :           return simplify_gen_binary (PLUS, mode, div, adj);
    5372              :         }
    5373              :       else
    5374              :         {
    5375            3 :           rtx div = simplify_gen_binary (DIV, mode, op0, op1);
    5376            3 :           rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
    5377            3 :           rtx adj = ceil_sdiv_adjust (mode, mod, op1);
    5378            3 :           return simplify_gen_binary (PLUS, mode, div, adj);
    5379              :         }
    5380              : 
    5381            0 :     case CEIL_MOD_EXPR:
    5382            0 :       if (unsignedp)
    5383              :         {
    5384            0 :           rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
    5385            0 :           rtx adj = ceil_udiv_adjust (mode, mod, op1);
    5386            0 :           adj = simplify_gen_unary (NEG, mode,
    5387              :                                     simplify_gen_binary (MULT, mode, adj, op1),
    5388              :                                     mode);
    5389            0 :           return simplify_gen_binary (PLUS, mode, mod, adj);
    5390              :         }
    5391              :       else
    5392              :         {
    5393            0 :           rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
    5394            0 :           rtx adj = ceil_sdiv_adjust (mode, mod, op1);
    5395            0 :           adj = simplify_gen_unary (NEG, mode,
    5396              :                                     simplify_gen_binary (MULT, mode, adj, op1),
    5397              :                                     mode);
    5398            0 :           return simplify_gen_binary (PLUS, mode, mod, adj);
    5399              :         }
    5400              : 
    5401            0 :     case ROUND_DIV_EXPR:
    5402            0 :       if (unsignedp)
    5403              :         {
    5404            0 :           rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
    5405            0 :           rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
    5406            0 :           rtx adj = round_udiv_adjust (mode, mod, op1);
    5407            0 :           return simplify_gen_binary (PLUS, mode, div, adj);
    5408              :         }
    5409              :       else
    5410              :         {
    5411            0 :           rtx div = simplify_gen_binary (DIV, mode, op0, op1);
    5412            0 :           rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
    5413            0 :           rtx adj = round_sdiv_adjust (mode, mod, op1);
    5414            0 :           return simplify_gen_binary (PLUS, mode, div, adj);
    5415              :         }
    5416              : 
    5417            0 :     case ROUND_MOD_EXPR:
    5418            0 :       if (unsignedp)
    5419              :         {
    5420            0 :           rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
    5421            0 :           rtx adj = round_udiv_adjust (mode, mod, op1);
    5422            0 :           adj = simplify_gen_unary (NEG, mode,
    5423              :                                     simplify_gen_binary (MULT, mode, adj, op1),
    5424              :                                     mode);
    5425            0 :           return simplify_gen_binary (PLUS, mode, mod, adj);
    5426              :         }
    5427              :       else
    5428              :         {
    5429            0 :           rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
    5430            0 :           rtx adj = round_sdiv_adjust (mode, mod, op1);
    5431            0 :           adj = simplify_gen_unary (NEG, mode,
    5432              :                                     simplify_gen_binary (MULT, mode, adj, op1),
    5433              :                                     mode);
    5434            0 :           return simplify_gen_binary (PLUS, mode, mod, adj);
    5435              :         }
    5436              : 
    5437        13243 :     case LSHIFT_EXPR:
    5438        13243 :       return simplify_gen_binary (ASHIFT, mode, op0, op1);
    5439              : 
    5440        44091 :     case RSHIFT_EXPR:
    5441        44091 :       if (unsignedp)
    5442        41212 :         return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
    5443              :       else
    5444         2879 :         return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
    5445              : 
    5446            2 :     case LROTATE_EXPR:
    5447            2 :       return simplify_gen_binary (ROTATE, mode, op0, op1);
    5448              : 
    5449          701 :     case RROTATE_EXPR:
    5450          701 :       return simplify_gen_binary (ROTATERT, mode, op0, op1);
    5451              : 
    5452        32218 :     case MIN_EXPR:
    5453        64436 :       return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
    5454              : 
    5455        37204 :     case MAX_EXPR:
    5456        74408 :       return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
    5457              : 
    5458        75266 :     case BIT_AND_EXPR:
    5459        75266 :     case TRUTH_AND_EXPR:
    5460        75266 :       return simplify_gen_binary (AND, mode, op0, op1);
    5461              : 
    5462        40071 :     case BIT_IOR_EXPR:
    5463        40071 :     case TRUTH_OR_EXPR:
    5464        40071 :       return simplify_gen_binary (IOR, mode, op0, op1);
    5465              : 
    5466         7869 :     case BIT_XOR_EXPR:
    5467         7869 :     case TRUTH_XOR_EXPR:
    5468         7869 :       return simplify_gen_binary (XOR, mode, op0, op1);
    5469              : 
    5470            0 :     case TRUTH_ANDIF_EXPR:
    5471            0 :       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
    5472              : 
    5473            0 :     case TRUTH_ORIF_EXPR:
    5474            0 :       return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
    5475              : 
    5476            0 :     case TRUTH_NOT_EXPR:
    5477            0 :       return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
    5478              : 
    5479         4223 :     case LT_EXPR:
    5480         4223 :       return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
    5481         4223 :                                       op0, op1);
    5482              : 
    5483         4839 :     case LE_EXPR:
    5484         4839 :       return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
    5485         4839 :                                       op0, op1);
    5486              : 
    5487         9519 :     case GT_EXPR:
    5488         9519 :       return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
    5489         9519 :                                       op0, op1);
    5490              : 
    5491         2073 :     case GE_EXPR:
    5492         2073 :       return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
    5493         2073 :                                       op0, op1);
    5494              : 
    5495        22889 :     case EQ_EXPR:
    5496        22889 :       return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
    5497              : 
    5498        15202 :     case NE_EXPR:
    5499        15202 :       return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
    5500              : 
    5501            2 :     case UNORDERED_EXPR:
    5502            2 :       return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
    5503              : 
    5504            0 :     case ORDERED_EXPR:
    5505            0 :       return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
    5506              : 
    5507            0 :     case UNLT_EXPR:
    5508            0 :       return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
    5509              : 
    5510            1 :     case UNLE_EXPR:
    5511            1 :       return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
    5512              : 
    5513            0 :     case UNGT_EXPR:
    5514            0 :       return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
    5515              : 
    5516            0 :     case UNGE_EXPR:
    5517            0 :       return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
    5518              : 
    5519            0 :     case UNEQ_EXPR:
    5520            0 :       return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
    5521              : 
    5522            0 :     case LTGT_EXPR:
    5523            0 :       return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
    5524              : 
    5525         8496 :     case COND_EXPR:
    5526         8496 :       return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
    5527              : 
    5528        10308 :     case COMPLEX_EXPR:
    5529        10308 :       gcc_assert (COMPLEX_MODE_P (mode));
    5530        10308 :       if (GET_MODE (op0) == VOIDmode)
    5531            0 :         op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
    5532        10308 :       if (GET_MODE (op1) == VOIDmode)
    5533            0 :         op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
    5534        10308 :       return gen_rtx_CONCAT (mode, op0, op1);
    5535              : 
    5536            0 :     case CONJ_EXPR:
    5537            0 :       if (GET_CODE (op0) == CONCAT)
    5538            0 :         return gen_rtx_CONCAT (mode, XEXP (op0, 0),
    5539              :                                simplify_gen_unary (NEG, GET_MODE_INNER (mode),
    5540              :                                                    XEXP (op0, 1),
    5541              :                                                    GET_MODE_INNER (mode)));
    5542              :       else
    5543              :         {
    5544            0 :           scalar_mode imode = GET_MODE_INNER (mode);
    5545            0 :           rtx re, im;
    5546              : 
    5547            0 :           if (MEM_P (op0))
    5548              :             {
    5549            0 :               re = adjust_address_nv (op0, imode, 0);
    5550            0 :               im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
    5551              :             }
    5552              :           else
    5553              :             {
    5554            0 :               scalar_int_mode ifmode;
    5555            0 :               scalar_int_mode ihmode;
    5556            0 :               rtx halfsize;
    5557            0 :               if (!int_mode_for_mode (mode).exists (&ifmode)
    5558            0 :                   || !int_mode_for_mode (imode).exists (&ihmode))
    5559            0 :                 return NULL;
    5560            0 :               halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
    5561            0 :               re = op0;
    5562            0 :               if (mode != ifmode)
    5563            0 :                 re = gen_rtx_SUBREG (ifmode, re, 0);
    5564            0 :               re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
    5565            0 :               if (imode != ihmode)
    5566            0 :                 re = gen_rtx_SUBREG (imode, re, 0);
    5567            0 :               im = copy_rtx (op0);
    5568            0 :               if (mode != ifmode)
    5569            0 :                 im = gen_rtx_SUBREG (ifmode, im, 0);
    5570            0 :               im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
    5571            0 :               if (imode != ihmode)
    5572            0 :                 im = gen_rtx_SUBREG (imode, im, 0);
    5573              :             }
    5574            0 :           im = gen_rtx_NEG (imode, im);
    5575            0 :           return gen_rtx_CONCAT (mode, re, im);
    5576              :         }
    5577              : 
    5578      6545730 :     case ADDR_EXPR:
    5579      6545730 :       op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
    5580      6545730 :       if (!op0 || !MEM_P (op0))
    5581              :         {
    5582      1693661 :           if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
    5583       379000 :                || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
    5584       334269 :                || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
    5585      1738392 :               && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
    5586       140560 :                   || target_for_debug_bind (TREE_OPERAND (exp, 0))))
    5587      1227100 :             return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
    5588              : 
    5589       466561 :           if (handled_component_p (TREE_OPERAND (exp, 0)))
    5590              :             {
    5591       314402 :               poly_int64 bitoffset, bitsize, maxsize, byteoffset;
    5592       314402 :               bool reverse;
    5593       314402 :               tree decl
    5594       314402 :                 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
    5595              :                                            &bitsize, &maxsize, &reverse);
    5596       314402 :               if ((VAR_P (decl)
    5597              :                    || TREE_CODE (decl) == PARM_DECL
    5598              :                    || TREE_CODE (decl) == RESULT_DECL)
    5599       287769 :                   && (!TREE_ADDRESSABLE (decl)
    5600        24937 :                       || target_for_debug_bind (decl))
    5601        66942 :                   && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
    5602       262832 :                   && known_gt (bitsize, 0)
    5603       247462 :                   && known_eq (bitsize, maxsize))
    5604              :                 {
    5605       247460 :                   rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
    5606       247460 :                   return plus_constant (mode, base, byteoffset);
    5607              :                 }
    5608              :             }
    5609              : 
    5610       219101 :           if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
    5611       219101 :               && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
    5612              :                  == ADDR_EXPR)
    5613              :             {
    5614         8554 :               op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
    5615              :                                                      0));
    5616         8554 :               if (op0 != NULL
    5617         5204 :                   && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
    5618            0 :                       || (GET_CODE (op0) == PLUS
    5619            0 :                           && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
    5620            0 :                           && CONST_INT_P (XEXP (op0, 1)))))
    5621              :                 {
    5622         5204 :                   op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
    5623              :                                                          1));
    5624         5204 :                   poly_int64 offset;
    5625        10408 :                   if (!op1 || !poly_int_rtx_p (op1, &offset))
    5626              :                     return NULL;
    5627              : 
    5628         5204 :                   return plus_constant (mode, op0, offset);
    5629              :                 }
    5630              :             }
    5631              : 
    5632       213897 :           return NULL;
    5633              :         }
    5634              : 
    5635      4852069 :       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
    5636      4852069 :       addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
    5637      4852069 :       op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
    5638              : 
    5639      4852069 :       return op0;
    5640              : 
    5641         5207 :     case VECTOR_CST:
    5642         5207 :       {
    5643         5207 :         unsigned HOST_WIDE_INT i, nelts;
    5644              : 
    5645         5207 :         if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
    5646              :           return NULL;
    5647              : 
    5648         5207 :         op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
    5649              : 
    5650        29672 :         for (i = 0; i < nelts; ++i)
    5651              :           {
    5652        24465 :             op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
    5653        24465 :             if (!op1)
    5654              :               return NULL;
    5655        24465 :             XVECEXP (op0, 0, i) = op1;
    5656              :           }
    5657              : 
    5658              :         return op0;
    5659              :       }
    5660              : 
    5661          558 :     case CONSTRUCTOR:
    5662          558 :       if (TREE_CLOBBER_P (exp))
    5663              :         return NULL;
    5664          558 :       else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
    5665              :         {
    5666          558 :           unsigned i;
    5667          558 :           poly_uint64 elts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp));
    5668          558 :           unsigned HOST_WIDE_INT nelts = constant_lower_bound (elts);
    5669          558 :           tree val;
    5670              : 
    5671          558 :           op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
    5672              : 
    5673         2471 :           FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
    5674              :             {
    5675         1984 :               op1 = expand_debug_expr (val);
    5676         1984 :               if (!op1)
    5677              :                 return NULL;
    5678         1913 :               XVECEXP (op0, 0, i) = op1;
    5679              :             }
    5680              : 
    5681          487 :           if (i < nelts)
    5682              :             {
    5683           63 :               op1 = expand_debug_expr
    5684           63 :                 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
    5685              : 
    5686           63 :               if (!op1)
    5687              :                 return NULL;
    5688              : 
    5689          264 :               for (; i < nelts; i++)
    5690          201 :                 XVECEXP (op0, 0, i) = op1;
    5691              :             }
    5692              : 
    5693          487 :           return op0;
    5694              :         }
    5695              :       else
    5696            0 :         goto flag_unsupported;
    5697              : 
    5698              :     case CALL_EXPR:
    5699              :       /* ??? Maybe handle some builtins?  */
    5700              :       return NULL;
    5701              : 
    5702      9330494 :     case SSA_NAME:
    5703      9330494 :       {
    5704      9330494 :         gimple *g = get_gimple_for_ssa_name (exp);
    5705      9330494 :         if (g)
    5706              :           {
    5707      1385305 :             tree t = NULL_TREE;
    5708      1385305 :             if (deep_ter_debug_map)
    5709              :               {
    5710        65261 :                 tree *slot = deep_ter_debug_map->get (exp);
    5711        65261 :                 if (slot)
    5712         6788 :                   t = *slot;
    5713              :               }
    5714         6788 :             if (t == NULL_TREE)
    5715              :               {
    5716      1381164 :                 if (is_gimple_assign (g))
    5717      1380922 :                   t = gimple_assign_rhs_to_tree (g);
    5718              :                 else
    5719              :                   /* expand_debug_expr doesn't handle CALL_EXPR right now.  */
    5720              :                   return NULL;
    5721              :               }
    5722      1385063 :             op0 = expand_debug_expr (t);
    5723      1385063 :             if (!op0)
    5724              :               return NULL;
    5725              :           }
    5726              :         else
    5727              :           {
    5728              :             /* If this is a reference to an incoming value of
    5729              :                parameter that is never used in the code or where the
    5730              :                incoming value is never used in the code, use
    5731              :                PARM_DECL's DECL_RTL if set.  */
    5732      7945189 :             if (SSA_NAME_IS_DEFAULT_DEF (exp)
    5733      1307767 :                 && SSA_NAME_VAR (exp)
    5734      1307767 :                 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
    5735      9195408 :                 && has_zero_uses (exp))
    5736              :               {
    5737        18813 :                 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
    5738        18813 :                 if (op0)
    5739        18723 :                   goto adjust_mode;
    5740           90 :                 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
    5741           90 :                 if (op0)
    5742           90 :                   goto adjust_mode;
    5743              :               }
    5744              : 
    5745      7926376 :             int part = var_to_partition (SA.map, exp);
    5746              : 
    5747      7926376 :             if (part == NO_PARTITION)
    5748              :               return NULL;
    5749              : 
    5750      7921986 :             gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
    5751              : 
    5752      7921986 :             op0 = copy_rtx (SA.partition_to_pseudo[part]);
    5753              :           }
    5754      9303672 :         goto adjust_mode;
    5755              :       }
    5756              : 
    5757              :     case ERROR_MARK:
    5758              :       return NULL;
    5759              : 
    5760              :     /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    5761              :     case REALIGN_LOAD_EXPR:
    5762              :     case VEC_COND_EXPR:
    5763              :     case VEC_PACK_FIX_TRUNC_EXPR:
    5764              :     case VEC_PACK_FLOAT_EXPR:
    5765              :     case VEC_PACK_SAT_EXPR:
    5766              :     case VEC_PACK_TRUNC_EXPR:
    5767              :     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    5768              :     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    5769              :     case VEC_UNPACK_FLOAT_HI_EXPR:
    5770              :     case VEC_UNPACK_FLOAT_LO_EXPR:
    5771              :     case VEC_UNPACK_HI_EXPR:
    5772              :     case VEC_UNPACK_LO_EXPR:
    5773              :     case VEC_WIDEN_MULT_HI_EXPR:
    5774              :     case VEC_WIDEN_MULT_LO_EXPR:
    5775              :     case VEC_WIDEN_MULT_EVEN_EXPR:
    5776              :     case VEC_WIDEN_MULT_ODD_EXPR:
    5777              :     case VEC_WIDEN_LSHIFT_HI_EXPR:
    5778              :     case VEC_WIDEN_LSHIFT_LO_EXPR:
    5779              :     case VEC_PERM_EXPR:
    5780              :     case VEC_DUPLICATE_EXPR:
    5781              :     case VEC_SERIES_EXPR:
    5782              :     case SAD_EXPR:
    5783              :       return NULL;
    5784              : 
    5785              :     /* Misc codes.  */
    5786              :     case ADDR_SPACE_CONVERT_EXPR:
    5787              :     case FIXED_CONVERT_EXPR:
    5788              :     case OBJ_TYPE_REF:
    5789              :     case WITH_SIZE_EXPR:
    5790              :     case BIT_INSERT_EXPR:
    5791              :       return NULL;
    5792              : 
    5793            0 :     case DOT_PROD_EXPR:
    5794            0 :       if (SCALAR_INT_MODE_P (GET_MODE (op0))
    5795            0 :           && SCALAR_INT_MODE_P (mode))
    5796              :         {
    5797            0 :           op0
    5798            0 :             = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
    5799              :                                                                           0)))
    5800              :                                   ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
    5801              :                                   inner_mode);
    5802            0 :           op1
    5803            0 :             = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
    5804              :                                                                           1)))
    5805              :                                   ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
    5806              :                                   inner_mode);
    5807            0 :           op0 = simplify_gen_binary (MULT, mode, op0, op1);
    5808            0 :           return simplify_gen_binary (PLUS, mode, op0, op2);
    5809              :         }
    5810              :       return NULL;
    5811              : 
    5812         4075 :     case WIDEN_MULT_EXPR:
    5813         4075 :     case WIDEN_MULT_PLUS_EXPR:
    5814         4075 :     case WIDEN_MULT_MINUS_EXPR:
    5815         4075 :       if (SCALAR_INT_MODE_P (GET_MODE (op0))
    5816         4075 :           && SCALAR_INT_MODE_P (mode))
    5817              :         {
    5818         4075 :           inner_mode = GET_MODE (op0);
    5819         4075 :           if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
    5820         3459 :             op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
    5821              :           else
    5822          616 :             op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
    5823         4075 :           if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
    5824         3459 :             op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
    5825              :           else
    5826          616 :             op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
    5827         4075 :           op0 = simplify_gen_binary (MULT, mode, op0, op1);
    5828         4075 :           if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
    5829              :             return op0;
    5830            0 :           else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
    5831            0 :             return simplify_gen_binary (PLUS, mode, op0, op2);
    5832              :           else
    5833            0 :             return simplify_gen_binary (MINUS, mode, op2, op0);
    5834              :         }
    5835              :       return NULL;
    5836              : 
    5837              :     case MULT_HIGHPART_EXPR:
    5838              :       /* ??? Similar to the above.  */
    5839              :       return NULL;
    5840              : 
    5841            0 :     case WIDEN_SUM_EXPR:
    5842            0 :     case WIDEN_LSHIFT_EXPR:
    5843            0 :       if (SCALAR_INT_MODE_P (GET_MODE (op0))
    5844            0 :           && SCALAR_INT_MODE_P (mode))
    5845              :         {
    5846            0 :           op0
    5847            0 :             = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
    5848              :                                                                           0)))
    5849              :                                   ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
    5850              :                                   inner_mode);
    5851            0 :           return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
    5852            0 :                                       ? ASHIFT : PLUS, mode, op0, op1);
    5853              :         }
    5854              :       return NULL;
    5855              : 
    5856            0 :     default:
    5857            0 :     flag_unsupported:
    5858            0 :       if (flag_checking)
    5859              :         {
    5860            0 :           debug_tree (exp);
    5861            0 :           gcc_unreachable ();
    5862              :         }
    5863              :       return NULL;
    5864              :     }
    5865              : }
    5866              : 
    5867              : /* Return an RTX equivalent to the source bind value of the tree expression
    5868              :    EXP.  */
    5869              : 
    5870              : static rtx
    5871       153831 : expand_debug_source_expr (tree exp)
    5872              : {
    5873       291617 :   rtx op0 = NULL_RTX;
    5874       291617 :   machine_mode mode = VOIDmode, inner_mode;
    5875              : 
    5876       291617 :   switch (TREE_CODE (exp))
    5877              :     {
    5878       137786 :     case VAR_DECL:
    5879       137786 :       if (DECL_ABSTRACT_ORIGIN (exp))
    5880       137786 :         return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
    5881              :       break;
    5882       153222 :     case PARM_DECL:
    5883       153222 :       {
    5884       153222 :         mode = DECL_MODE (exp);
    5885       153222 :         op0 = expand_debug_parm_decl (exp);
    5886       153222 :         if (op0)
    5887              :            break;
    5888              :         /* See if this isn't an argument that has been completely
    5889              :            optimized out.  */
    5890       152356 :         if (!DECL_RTL_SET_P (exp)
    5891       152341 :             && !DECL_INCOMING_RTL (exp)
    5892       304697 :             && DECL_ABSTRACT_ORIGIN (current_function_decl))
    5893              :           {
    5894        49469 :             tree aexp = DECL_ORIGIN (exp);
    5895        49469 :             if (DECL_CONTEXT (aexp)
    5896        49469 :                 == DECL_ABSTRACT_ORIGIN (current_function_decl))
    5897              :               {
    5898        18162 :                 vec<tree, va_gc> **debug_args;
    5899        18162 :                 unsigned int ix;
    5900        18162 :                 tree ddecl;
    5901        18162 :                 debug_args = decl_debug_args_lookup (current_function_decl);
    5902        18162 :                 if (debug_args != NULL)
    5903              :                   {
    5904        22619 :                     for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
    5905         4461 :                          ix += 2)
    5906        22611 :                       if (ddecl == aexp)
    5907        18150 :                         return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
    5908              :                   }
    5909              :               }
    5910              :           }
    5911              :         break;
    5912              :       }
    5913              :     default:
    5914              :       break;
    5915              :     }
    5916              : 
    5917          866 :   if (op0 == NULL_RTX)
    5918       134815 :     return NULL_RTX;
    5919              : 
    5920          866 :   inner_mode = GET_MODE (op0);
    5921          866 :   if (mode == inner_mode)
    5922              :     return op0;
    5923              : 
    5924           21 :   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    5925              :     {
    5926            7 :       if (GET_MODE_UNIT_BITSIZE (mode)
    5927           14 :           == GET_MODE_UNIT_BITSIZE (inner_mode))
    5928            0 :         op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
    5929            7 :       else if (GET_MODE_UNIT_BITSIZE (mode)
    5930           14 :                < GET_MODE_UNIT_BITSIZE (inner_mode))
    5931            7 :         op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
    5932              :       else
    5933            0 :         op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    5934              :     }
    5935           14 :   else if (FLOAT_MODE_P (mode))
    5936            0 :     gcc_unreachable ();
    5937           14 :   else if (FLOAT_MODE_P (inner_mode))
    5938              :     {
    5939            0 :       if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    5940            0 :         op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
    5941              :       else
    5942            0 :         op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    5943              :     }
    5944           28 :   else if (GET_MODE_UNIT_PRECISION (mode)
    5945           14 :            == GET_MODE_UNIT_PRECISION (inner_mode))
    5946            0 :     op0 = lowpart_subreg (mode, op0, inner_mode);
    5947           28 :   else if (GET_MODE_UNIT_PRECISION (mode)
    5948           14 :            < GET_MODE_UNIT_PRECISION (inner_mode))
    5949           14 :     op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
    5950            0 :   else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    5951            0 :     op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
    5952              :   else
    5953            0 :     op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
    5954              : 
    5955              :   return op0;
    5956              : }
    5957              : 
    5958              : /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
    5959              :    Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
    5960              :    deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
    5961              : 
    5962              : static void
    5963     48921906 : avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
    5964              : {
    5965     48921906 :   rtx exp = *exp_p;
    5966              : 
    5967     48921906 :   if (exp == NULL_RTX)
    5968              :     return;
    5969              : 
    5970     48921906 :   if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    5971              :     return;
    5972              : 
    5973      6340593 :   if (depth == 4)
    5974              :     {
    5975              :       /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
    5976        19174 :       rtx dval = make_debug_expr_from_rtl (exp);
    5977              : 
    5978              :       /* Emit a debug bind insn before INSN.  */
    5979        19174 :       rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
    5980              :                                        DEBUG_EXPR_TREE_DECL (dval), exp,
    5981              :                                        VAR_INIT_STATUS_INITIALIZED);
    5982              : 
    5983        19174 :       emit_debug_insn_before (bind, insn);
    5984        19174 :       *exp_p = dval;
    5985        19174 :       return;
    5986              :     }
    5987              : 
    5988      6321419 :   const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
    5989      6321419 :   int i, j;
    5990     18862537 :   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    5991     12541118 :     switch (*format_ptr++)
    5992              :       {
    5993     11507619 :       case 'e':
    5994     11507619 :         avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
    5995     11507619 :         break;
    5996              : 
    5997              :       case 'E':
    5998              :       case 'V':
    5999            0 :         for (j = 0; j < XVECLEN (exp, i); j++)
    6000            0 :           avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
    6001              :         break;
    6002              : 
    6003              :       default:
    6004              :         break;
    6005              :       }
    6006              : }
    6007              : 
    6008              : /* Expand the _LOCs in debug insns.  We run this after expanding all
    6009              :    regular insns, so that any variables referenced in the function
    6010              :    will have their DECL_RTLs set.  */
    6011              : 
    6012              : static void
    6013       496176 : expand_debug_locations (void)
    6014              : {
    6015       496176 :   rtx_insn *insn;
    6016       496176 :   rtx_insn *last = get_last_insn ();
    6017       496176 :   int save_strict_alias = flag_strict_aliasing;
    6018              : 
    6019              :   /* New alias sets while setting up memory attributes cause
    6020              :      -fcompare-debug failures, even though it doesn't bring about any
    6021              :      codegen changes.  */
    6022       496176 :   flag_strict_aliasing = 0;
    6023              : 
    6024    103512272 :   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    6025    103016096 :     if (DEBUG_BIND_INSN_P (insn))
    6026              :       {
    6027     37395113 :         tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
    6028     37395113 :         rtx val;
    6029     37395113 :         rtx_insn *prev_insn, *insn2;
    6030     37395113 :         machine_mode mode;
    6031              : 
    6032     37395113 :         if (value == NULL_TREE)
    6033              :           val = NULL_RTX;
    6034              :         else
    6035              :           {
    6036     20717778 :             if (INSN_VAR_LOCATION_STATUS (insn)
    6037              :                 == VAR_INIT_STATUS_UNINITIALIZED)
    6038       153831 :               val = expand_debug_source_expr (value);
    6039              :             /* The avoid_deep_ter_for_debug function inserts
    6040              :                debug bind stmts after SSA_NAME definition, with the
    6041              :                SSA_NAME as the whole bind location.  Disable temporarily
    6042              :                expansion of that SSA_NAME into the DEBUG_EXPR_DECL
    6043              :                being defined in this DEBUG_INSN.  */
    6044     20563947 :             else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
    6045              :               {
    6046       112891 :                 tree *slot = deep_ter_debug_map->get (value);
    6047       112891 :                 if (slot)
    6048              :                   {
    6049         2974 :                     if (*slot == INSN_VAR_LOCATION_DECL (insn))
    6050         2647 :                       *slot = NULL_TREE;
    6051              :                     else
    6052              :                       slot = NULL;
    6053              :                   }
    6054       112891 :                 val = expand_debug_expr (value);
    6055       112891 :                 if (slot)
    6056         2647 :                   *slot = INSN_VAR_LOCATION_DECL (insn);
    6057              :               }
    6058              :             else
    6059     20451056 :               val = expand_debug_expr (value);
    6060     20717778 :             gcc_assert (last == get_last_insn ());
    6061              :           }
    6062              : 
    6063     20717778 :         if (!val)
    6064     17082927 :           val = gen_rtx_UNKNOWN_VAR_LOC ();
    6065              :         else
    6066              :           {
    6067     20312186 :             mode = GET_MODE (INSN_VAR_LOCATION (insn));
    6068              : 
    6069     20312186 :             gcc_assert (mode == GET_MODE (val)
    6070              :                         || (GET_MODE (val) == VOIDmode
    6071              :                             && (CONST_SCALAR_INT_P (val)
    6072              :                                 || GET_CODE (val) == CONST_FIXED
    6073              :                                 || GET_CODE (val) == LABEL_REF)));
    6074              :           }
    6075              : 
    6076     37395113 :         INSN_VAR_LOCATION_LOC (insn) = val;
    6077     37395113 :         prev_insn = PREV_INSN (insn);
    6078     74809400 :         for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
    6079     37414287 :           avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
    6080              :       }
    6081              : 
    6082       496176 :   flag_strict_aliasing = save_strict_alias;
    6083       496176 : }
    6084              : 
    6085              : /* Performs swapping operands of commutative operations to expand
    6086              :    the expensive one first.  */
    6087              : 
    6088              : static void
    6089      9279237 : reorder_operands (basic_block bb)
    6090              : {
    6091      9279237 :   unsigned int *lattice;  /* Hold cost of each statement.  */
    6092      9279237 :   unsigned int i = 0, n = 0;
    6093      9279237 :   gimple_stmt_iterator gsi;
    6094      9279237 :   gimple_seq stmts;
    6095      9279237 :   gimple *stmt;
    6096      9279237 :   bool swap;
    6097      9279237 :   tree op0, op1;
    6098      9279237 :   ssa_op_iter iter;
    6099      9279237 :   use_operand_p use_p;
    6100      9279237 :   gimple *def0, *def1;
    6101              : 
    6102              :   /* Compute cost of each statement using estimate_num_insns.  */
    6103      9279237 :   stmts = bb_seq (bb);
    6104     91729623 :   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    6105              :     {
    6106     82450386 :       stmt = gsi_stmt (gsi);
    6107     82450386 :       if (!is_gimple_debug (stmt))
    6108     33692778 :         gimple_set_uid (stmt, n++);
    6109              :     }
    6110      9279237 :   lattice = XNEWVEC (unsigned int, n);
    6111     91729623 :   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    6112              :     {
    6113     82450386 :       unsigned cost;
    6114     82450386 :       stmt = gsi_stmt (gsi);
    6115     82450386 :       if (is_gimple_debug (stmt))
    6116     48757608 :         continue;
    6117     33692778 :       cost = estimate_num_insns (stmt, &eni_size_weights);
    6118     33692778 :       lattice[i] = cost;
    6119     64163462 :       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    6120              :         {
    6121     30470684 :           tree use = USE_FROM_PTR (use_p);
    6122     30470684 :           gimple *def_stmt;
    6123     30470684 :           if (TREE_CODE (use) != SSA_NAME)
    6124            0 :             continue;
    6125     30470684 :           def_stmt = get_gimple_for_ssa_name (use);
    6126     30470684 :           if (!def_stmt)
    6127     21764324 :             continue;
    6128      8706360 :           lattice[i] += lattice[gimple_uid (def_stmt)];
    6129              :         }
    6130     33692778 :       i++;
    6131     33692778 :       if (!is_gimple_assign (stmt)
    6132     33692778 :           || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
    6133     29581743 :         continue;
    6134      4111035 :       op0 = gimple_op (stmt, 1);
    6135      4111035 :       op1 = gimple_op (stmt, 2);
    6136      4111035 :       if (TREE_CODE (op0) != SSA_NAME
    6137      4110810 :           || TREE_CODE (op1) != SSA_NAME)
    6138      2628187 :         continue;
    6139              :       /* Swap operands if the second one is more expensive.  */
    6140      1482848 :       def0 = get_gimple_for_ssa_name (op0);
    6141      1482848 :       def1 = get_gimple_for_ssa_name (op1);
    6142      1482848 :       if (!def1)
    6143       822844 :         continue;
    6144       660004 :       swap = false;
    6145       660004 :       if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
    6146       313210 :         swap = true;
    6147       313210 :       if (swap)
    6148              :         {
    6149       313210 :           if (dump_file && (dump_flags & TDF_DETAILS))
    6150              :             {
    6151            2 :               fprintf (dump_file, "Swap operands in stmt:\n");
    6152            2 :               print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    6153            2 :               fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
    6154            1 :                        def0 ? lattice[gimple_uid (def0)] : 0,
    6155            2 :                        lattice[gimple_uid (def1)]);
    6156              :             }
    6157       313210 :           swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
    6158              :                              gimple_assign_rhs2_ptr (stmt));
    6159              :         }
    6160              :     }
    6161      9279237 :   XDELETE (lattice);
    6162      9279237 : }
    6163              : 
    6164              : /* Expand basic block BB from GIMPLE trees to RTL.  */
    6165              : 
    6166              : static basic_block
    6167     12592639 : expand_gimple_basic_block (basic_block bb, rtx_insn *asan_epilog_seq)
    6168              : {
    6169     12592639 :   gimple_stmt_iterator gsi;
    6170     12592639 :   gimple *stmt = NULL;
    6171     12592639 :   rtx_note *note = NULL;
    6172     12592639 :   rtx_insn *last;
    6173     12592639 :   edge e;
    6174     12592639 :   edge_iterator ei;
    6175     12592639 :   bool nondebug_stmt_seen = false;
    6176              : 
    6177     12592639 :   if (dump_file)
    6178          774 :     fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
    6179              :              bb->index);
    6180              : 
    6181              :   /* Note that since we are now transitioning from GIMPLE to RTL, we
    6182              :      cannot use the gsi_*_bb() routines because they expect the basic
    6183              :      block to be in GIMPLE, instead of RTL.  Therefore, we need to
    6184              :      access the BB sequence directly.  */
    6185     12592639 :   if (optimize)
    6186      9279237 :     reorder_operands (bb);
    6187     12592639 :   rtl_profile_for_bb (bb);
    6188              : 
    6189              :   /* Remove the RETURN_EXPR if we may fall though to the exit
    6190              :      instead.  */
    6191     12592639 :   gsi = gsi_last_bb (bb);
    6192     12592639 :   if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    6193              :     {
    6194      1449459 :       greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
    6195              : 
    6196      1449459 :       gcc_assert (single_succ_p (bb));
    6197      1449459 :       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
    6198              : 
    6199      1449459 :       if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
    6200      1449459 :           && !gimple_return_retval (ret_stmt))
    6201              :         {
    6202       612357 :           gsi_remove (&gsi, false);
    6203       612357 :           single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
    6204              :         }
    6205              :     }
    6206              : 
    6207     12592639 :   gsi = gsi_start_bb (bb);
    6208     12592639 :   if (!gsi_end_p (gsi))
    6209              :     {
    6210     12052814 :       stmt = gsi_stmt (gsi);
    6211     12052814 :       if (gimple_code (stmt) != GIMPLE_LABEL)
    6212     11974794 :         stmt = NULL;
    6213              :     }
    6214              : 
    6215     12592639 :   rtx_code_label **elt = lab_rtx_for_bb->get (bb);
    6216     12592639 :   if ((unsigned) bb->index >= head_end_for_bb.length ())
    6217     10659475 :     head_end_for_bb.safe_grow_cleared (bb->index + 1);
    6218              : 
    6219     12592639 :   if (stmt || elt)
    6220              :     {
    6221      5218213 :       gcc_checking_assert (!note);
    6222      5218213 :       last = get_last_insn ();
    6223              : 
    6224      5218213 :       if (stmt)
    6225              :         {
    6226       617845 :           expand_gimple_stmt (stmt);
    6227       617845 :           gsi_next (&gsi);
    6228              :         }
    6229              : 
    6230      5218213 :       if (elt)
    6231      4600368 :         emit_label (*elt);
    6232              : 
    6233      5218213 :       head_end_for_bb[bb->index].first = NEXT_INSN (last);
    6234      5218213 :       if (NOTE_P (head_end_for_bb[bb->index].first))
    6235            0 :         head_end_for_bb[bb->index].first
    6236            0 :           = NEXT_INSN (head_end_for_bb[bb->index].first);
    6237      5218213 :       gcc_assert (LABEL_P (head_end_for_bb[bb->index].first));
    6238     10436426 :       note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
    6239      5218213 :                               head_end_for_bb[bb->index].first);
    6240              : 
    6241      5218213 :       maybe_dump_rtl_for_gimple_stmt (stmt, last);
    6242              :     }
    6243              :   else
    6244      7374426 :     head_end_for_bb[bb->index].first = note = emit_note (NOTE_INSN_BASIC_BLOCK);
    6245              : 
    6246     12592639 :   if (note)
    6247     12592639 :     NOTE_BASIC_BLOCK (note) = bb;
    6248              : 
    6249     65304065 :   for (; !gsi_end_p (gsi); gsi_next (&gsi))
    6250              :     {
    6251     53242332 :       basic_block new_bb;
    6252              : 
    6253     53242332 :       stmt = gsi_stmt (gsi);
    6254     53242332 :       if (!is_gimple_debug (stmt))
    6255     45242041 :         nondebug_stmt_seen = true;
    6256              : 
    6257              :       /* If this statement is a non-debug one, and we generate debug
    6258              :          insns, then this one might be the last real use of a TERed
    6259              :          SSA_NAME, but where there are still some debug uses further
    6260              :          down.  Expanding the current SSA name in such further debug
    6261              :          uses by their RHS might lead to wrong debug info, as coalescing
    6262              :          might make the operands of such RHS be placed into the same
    6263              :          pseudo as something else.  Like so:
    6264              :            a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
    6265              :            use(a_1);
    6266              :            a_2 = ...
    6267              :            #DEBUG ... => a_1
    6268              :          As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
    6269              :          If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
    6270              :          the write to a_2 would actually have clobbered the place which
    6271              :          formerly held a_0.
    6272              : 
    6273              :          So, instead of that, we recognize the situation, and generate
    6274              :          debug temporaries at the last real use of TERed SSA names:
    6275              :            a_1 = a_0 + 1;
    6276              :            #DEBUG #D1 => a_1
    6277              :            use(a_1);
    6278              :            a_2 = ...
    6279              :            #DEBUG ... => #D1
    6280              :          */
    6281     53242332 :       if (MAY_HAVE_DEBUG_BIND_INSNS
    6282     28608069 :           && SA.values
    6283     80630592 :           && !is_gimple_debug (stmt))
    6284              :         {
    6285     19778629 :           ssa_op_iter iter;
    6286     19778629 :           tree op;
    6287     19778629 :           gimple *def;
    6288              : 
    6289     19778629 :           location_t sloc = curr_insn_location ();
    6290              : 
    6291              :           /* Look for SSA names that have their last use here (TERed
    6292              :              names always have only one real use).  */
    6293     38021605 :           FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    6294     18242976 :             if ((def = get_gimple_for_ssa_name (op))
    6295     18242976 :                 && is_gimple_assign (def))
    6296              :               {
    6297      4890719 :                 imm_use_iterator imm_iter;
    6298      4890719 :                 use_operand_p use_p;
    6299      4890719 :                 bool have_debug_uses = false;
    6300              : 
    6301     14646202 :                 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    6302              :                   {
    6303      4912011 :                     if (gimple_debug_bind_p (USE_STMT (use_p)))
    6304              :                       {
    6305              :                         have_debug_uses = true;
    6306              :                         break;
    6307              :                       }
    6308      4890719 :                   }
    6309              : 
    6310      4890719 :                 if (have_debug_uses)
    6311              :                   {
    6312              :                     /* OP is a TERed SSA name, with DEF its defining
    6313              :                        statement, and where OP is used in further debug
    6314              :                        instructions.  Generate a debug temporary, and
    6315              :                        replace all uses of OP in debug insns with that
    6316              :                        temporary.  */
    6317        47247 :                     gimple *debugstmt;
    6318        47247 :                     tree value = gimple_assign_rhs_to_tree (def);
    6319        47247 :                     tree vexpr = build_debug_expr_decl (TREE_TYPE (value));
    6320        47247 :                     rtx val;
    6321        47247 :                     machine_mode mode;
    6322              : 
    6323        47247 :                     set_curr_insn_location (gimple_location (def));
    6324              : 
    6325        47247 :                     if (DECL_P (value))
    6326          364 :                       mode = DECL_MODE (value);
    6327              :                     else
    6328        46883 :                       mode = TYPE_MODE (TREE_TYPE (value));
    6329              :                     /* FIXME: Is setting the mode really necessary? */
    6330        47247 :                     SET_DECL_MODE (vexpr, mode);
    6331              : 
    6332        47247 :                     val = gen_rtx_VAR_LOCATION
    6333        47247 :                         (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
    6334              : 
    6335        47247 :                     emit_debug_insn (val);
    6336              : 
    6337       219680 :                     FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
    6338              :                       {
    6339       125186 :                         if (!gimple_debug_bind_p (debugstmt))
    6340        47247 :                           continue;
    6341              : 
    6342       237055 :                         FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
    6343        79558 :                           SET_USE (use_p, vexpr);
    6344              : 
    6345        77939 :                         update_stmt (debugstmt);
    6346        47247 :                       }
    6347              :                   }
    6348              :               }
    6349     19778629 :           set_curr_insn_location (sloc);
    6350              :         }
    6351              : 
    6352     53242332 :       currently_expanding_gimple_stmt = stmt;
    6353              : 
    6354              :       /* Expand this statement, then evaluate the resulting RTL and
    6355              :          fixup the CFG accordingly.  */
    6356     53242332 :       if (gimple_code (stmt) == GIMPLE_COND)
    6357              :         {
    6358      5676272 :           new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
    6359      5676272 :           if (new_bb)
    6360              :             {
    6361       403090 :               currently_expanding_gimple_stmt = NULL;
    6362       403090 :               return new_bb;
    6363              :             }
    6364              :         }
    6365     47566060 :       else if (is_gimple_debug (stmt))
    6366              :         {
    6367      8000291 :           location_t sloc = curr_insn_location ();
    6368      8000291 :           gimple_stmt_iterator nsi = gsi;
    6369              : 
    6370     48623108 :           for (;;)
    6371              :             {
    6372     48623108 :               tree var;
    6373     48623108 :               tree value = NULL_TREE;
    6374     48623108 :               rtx val = NULL_RTX;
    6375     48623108 :               machine_mode mode;
    6376              : 
    6377     48623108 :               if (!gimple_debug_nonbind_marker_p (stmt))
    6378              :                 {
    6379     37657110 :                   if (gimple_debug_bind_p (stmt))
    6380              :                     {
    6381     37503279 :                       var = gimple_debug_bind_get_var (stmt);
    6382              : 
    6383     37503279 :                       if (TREE_CODE (var) != DEBUG_EXPR_DECL
    6384     34868653 :                           && TREE_CODE (var) != LABEL_DECL
    6385     72362242 :                           && !target_for_debug_bind (var))
    6386       309244 :                         goto delink_debug_stmt;
    6387              : 
    6388     37194035 :                       if (DECL_P (var) && !VECTOR_TYPE_P (TREE_TYPE (var)))
    6389     37153898 :                         mode = DECL_MODE (var);
    6390              :                       else
    6391        40137 :                         mode = TYPE_MODE (TREE_TYPE (var));
    6392              : 
    6393     37194035 :                       if (gimple_debug_bind_has_value_p (stmt))
    6394     20516700 :                         value = gimple_debug_bind_get_value (stmt);
    6395              : 
    6396     37194035 :                       val = gen_rtx_VAR_LOCATION
    6397     37194035 :                         (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
    6398              :                     }
    6399       153831 :                   else if (gimple_debug_source_bind_p (stmt))
    6400              :                     {
    6401       153831 :                       var = gimple_debug_source_bind_get_var (stmt);
    6402              : 
    6403       153831 :                       value = gimple_debug_source_bind_get_value (stmt);
    6404              : 
    6405       153831 :                       if (!VECTOR_TYPE_P (TREE_TYPE (var)))
    6406       153831 :                         mode = DECL_MODE (var);
    6407              :                       else
    6408            0 :                         mode = TYPE_MODE (TREE_TYPE (var));
    6409              : 
    6410       153831 :                       val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
    6411              :                                                   VAR_INIT_STATUS_UNINITIALIZED);
    6412              :                     }
    6413              :                   else
    6414            0 :                     gcc_unreachable ();
    6415              :                 }
    6416              :               /* If this function was first compiled with markers
    6417              :                  enabled, but they're now disable (e.g. LTO), drop
    6418              :                  them on the floor.  */
    6419     10965998 :               else if (gimple_debug_nonbind_marker_p (stmt)
    6420     10965998 :                        && !MAY_HAVE_DEBUG_MARKER_INSNS)
    6421            0 :                 goto delink_debug_stmt;
    6422     10965998 :               else if (gimple_debug_begin_stmt_p (stmt))
    6423      3971569 :                 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
    6424      6994429 :               else if (gimple_debug_inline_entry_p (stmt))
    6425      6994429 :                 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
    6426              :               else
    6427              :                 gcc_unreachable ();
    6428              : 
    6429     48313864 :               last = get_last_insn ();
    6430              : 
    6431     48313864 :               set_curr_insn_location (gimple_location (stmt));
    6432              : 
    6433     48313864 :               emit_debug_insn (val);
    6434              : 
    6435     48313864 :               if (dump_file && (dump_flags & TDF_DETAILS))
    6436              :                 {
    6437              :                   /* We can't dump the insn with a TREE where an RTX
    6438              :                      is expected.  */
    6439            1 :                   if (GET_CODE (val) == VAR_LOCATION)
    6440              :                     {
    6441            0 :                       gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
    6442            0 :                       PAT_VAR_LOCATION_LOC (val) = const0_rtx;
    6443              :                     }
    6444            1 :                   maybe_dump_rtl_for_gimple_stmt (stmt, last);
    6445            1 :                   if (GET_CODE (val) == VAR_LOCATION)
    6446            0 :                     PAT_VAR_LOCATION_LOC (val) = (rtx)value;
    6447              :                 }
    6448              : 
    6449     48623108 :             delink_debug_stmt:
    6450              :               /* In order not to generate too many debug temporaries,
    6451              :                  we delink all uses of debug statements we already expanded.
    6452              :                  Therefore debug statements between definition and real
    6453              :                  use of TERed SSA names will continue to use the SSA name,
    6454              :                  and not be replaced with debug temps.  */
    6455     48623108 :               delink_stmt_imm_use (stmt);
    6456              : 
    6457     48623108 :               gsi = nsi;
    6458     48623108 :               gsi_next (&nsi);
    6459     48623108 :               if (gsi_end_p (nsi))
    6460              :                 break;
    6461     47978029 :               stmt = gsi_stmt (nsi);
    6462     47978029 :               if (!is_gimple_debug (stmt))
    6463              :                 break;
    6464              :             }
    6465              : 
    6466      8000291 :           set_curr_insn_location (sloc);
    6467              :         }
    6468              :       else
    6469              :         {
    6470     39565769 :           gcall *call_stmt = dyn_cast <gcall *> (stmt);
    6471     39565769 :           if (call_stmt
    6472     39565769 :               && asan_epilog_seq
    6473        19581 :               && gimple_call_tail_p (call_stmt)
    6474     39565903 :               && !gimple_call_must_tail_p (call_stmt))
    6475           17 :             gimple_call_set_tail (call_stmt, false);
    6476              : 
    6477     39565769 :           if (call_stmt && gimple_call_tail_p (call_stmt))
    6478              :             {
    6479       176808 :               bool can_fallthru;
    6480       176808 :               new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru,
    6481              :                                                asan_epilog_seq);
    6482       176808 :               if (new_bb)
    6483              :                 {
    6484       127815 :                   if (can_fallthru)
    6485            0 :                     bb = new_bb;
    6486              :                   else
    6487              :                     {
    6488       127815 :                       currently_expanding_gimple_stmt = NULL;
    6489       127815 :                       return new_bb;
    6490              :                     }
    6491              :                 }
    6492              :             }
    6493              :           else
    6494              :             {
    6495     39388961 :               def_operand_p def_p;
    6496     39388961 :               def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
    6497              : 
    6498     39388961 :               if (def_p != NULL)
    6499              :                 {
    6500              :                   /* Ignore this stmt if it is in the list of
    6501              :                      replaceable expressions.  */
    6502     33244518 :                   if (SA.values
    6503     41295262 :                       && bitmap_bit_p (SA.values,
    6504     16756992 :                                        SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
    6505      8706248 :                     continue;
    6506              :                 }
    6507     30682713 :               last = expand_gimple_stmt (stmt);
    6508     30682712 :               maybe_dump_rtl_for_gimple_stmt (stmt, last);
    6509              :             }
    6510              :         }
    6511              :     }
    6512              : 
    6513     12061733 :   currently_expanding_gimple_stmt = NULL;
    6514              : 
    6515              :   /* Expand implicit goto and convert goto_locus.  */
    6516     29312689 :   FOR_EACH_EDGE (e, ei, bb->succs)
    6517              :     {
    6518     17250956 :       if (e->goto_locus != UNKNOWN_LOCATION || !nondebug_stmt_seen)
    6519      3393046 :         set_curr_insn_location (e->goto_locus);
    6520     17250956 :       if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
    6521              :         {
    6522      1607501 :           emit_jump (label_rtx_for_bb (e->dest));
    6523      1607501 :           e->flags &= ~EDGE_FALLTHRU;
    6524              :         }
    6525              :     }
    6526              : 
    6527              :   /* Expanded RTL can create a jump in the last instruction of block.
    6528              :      This later might be assumed to be a jump to successor and break edge insertion.
    6529              :      We need to insert dummy move to prevent this. PR41440. */
    6530     12061733 :   if (single_succ_p (bb)
    6531      5439877 :       && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
    6532      3090089 :       && (last = get_last_insn ())
    6533     14977292 :       && (JUMP_P (last)
    6534      3090041 :           || (DEBUG_INSN_P (last)
    6535       364496 :               && JUMP_P (prev_nondebug_insn (last)))))
    6536              :     {
    6537           59 :       rtx dummy = gen_reg_rtx (SImode);
    6538           59 :       emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    6539              :     }
    6540              : 
    6541              :   /* A __builtin_unreachable () will insert a barrier that should end
    6542              :      the basic block.  In gimple, any code after it will have already
    6543              :      deleted, even without optimization.  If we emit additional code
    6544              :      here, as we would to adjust the stack after a call, it should be
    6545              :      eventually deleted, but it confuses internal checkers (PR118006)
    6546              :      and optimizers before it does, because we don't expect to find
    6547              :      barriers inside basic blocks.  */
    6548     12061733 :   if (!BARRIER_P (get_last_insn ()))
    6549      8823659 :     do_pending_stack_adjust ();
    6550              :   else
    6551      3238074 :     discard_pending_stack_adjust ();
    6552              : 
    6553              :   /* Find the block tail.  The last insn in the block is the insn
    6554              :      before a barrier and/or table jump insn.  */
    6555     12061733 :   last = get_last_insn ();
    6556     12061733 :   if (BARRIER_P (last))
    6557      3238074 :     last = PREV_INSN (last);
    6558     12061733 :   if (JUMP_TABLE_DATA_P (last))
    6559         8212 :     last = PREV_INSN (PREV_INSN (last));
    6560     12061733 :   if (BARRIER_P (last))
    6561         7232 :     last = PREV_INSN (last);
    6562     12061733 :   head_end_for_bb[bb->index].second = last;
    6563              : 
    6564     24123466 :   update_bb_for_insn_chain (head_end_for_bb[bb->index].first,
    6565     12061733 :                             head_end_for_bb[bb->index].second, bb);
    6566              : 
    6567     12061733 :   return bb;
    6568              : }
    6569              : 
    6570              : 
    6571              : /* Create a basic block for initialization code.  */
    6572              : 
    6573              : static basic_block
    6574      1481726 : construct_init_block (void)
    6575              : {
    6576      1481726 :   basic_block init_block;
    6577      1481726 :   edge e = NULL;
    6578      1481726 :   int flags;
    6579              : 
    6580              :   /* Multiple entry points not supported yet.  */
    6581      1481726 :   gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
    6582      1481726 :   init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
    6583      1481726 :   init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
    6584      1481726 :   ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
    6585      1481726 :   EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
    6586              : 
    6587      1481726 :   e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
    6588              : 
    6589              :   /* When entry edge points to first basic block, we don't need jump,
    6590              :      otherwise we have to jump into proper target.  */
    6591      1481726 :   if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    6592              :     {
    6593            0 :       tree label = gimple_block_label (e->dest);
    6594              : 
    6595            0 :       emit_jump (jump_target_rtx (label));
    6596            0 :       flags = 0;
    6597            0 :     }
    6598              :   else
    6599              :     flags = EDGE_FALLTHRU;
    6600              : 
    6601      1481726 :   init_block = create_basic_block (NEXT_INSN (get_insns ()),
    6602              :                                    get_last_insn (),
    6603      1481726 :                                    ENTRY_BLOCK_PTR_FOR_FN (cfun));
    6604      1481726 :   init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
    6605      1481726 :   add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
    6606      1481726 :   if (e)
    6607      1481726 :     expand_split_edge (e, init_block, flags);
    6608              :   else
    6609            0 :     make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
    6610              :                            EDGE_FALLTHRU);
    6611              : 
    6612      1481726 :   update_bb_for_insn (init_block);
    6613      1481726 :   return init_block;
    6614              : }
    6615              : 
    6616              : /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
    6617              :    found in the block tree.  */
    6618              : 
    6619              : static void
    6620     17552643 : set_block_levels (tree block, int level)
    6621              : {
    6622     33623561 :   while (block)
    6623              :     {
    6624     16070918 :       BLOCK_NUMBER (block) = level;
    6625     16070918 :       set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
    6626     16070918 :       block = BLOCK_CHAIN (block);
    6627              :     }
    6628     17552643 : }
    6629              : 
    6630              : /* Create a block containing landing pads and similar stuff.  */
    6631              : 
    6632              : static void
    6633      1481725 : construct_exit_block (void)
    6634              : {
    6635      1481725 :   rtx_insn *head = get_last_insn ();
    6636      1481725 :   rtx_insn *end;
    6637      1481725 :   basic_block exit_block;
    6638      1481725 :   edge e, e2;
    6639      1481725 :   unsigned ix;
    6640      1481725 :   edge_iterator ei;
    6641      1481725 :   basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
    6642      1481725 :   rtx_insn *orig_end = BB_END (prev_bb);
    6643              : 
    6644      1481725 :   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
    6645              : 
    6646              :   /* Make sure the locus is set to the end of the function, so that
    6647              :      epilogue line numbers and warnings are set properly.  */
    6648      1481725 :   if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    6649      1164480 :     input_location = cfun->function_end_locus;
    6650              : 
    6651              :   /* Generate rtl for function exit.  */
    6652      1481725 :   expand_function_end ();
    6653              : 
    6654      1481725 :   end = get_last_insn ();
    6655      1481725 :   if (head == end)
    6656            0 :     return;
    6657              :   /* While emitting the function end we could move end of the last basic
    6658              :      block.  */
    6659      1481725 :   BB_END (prev_bb) = orig_end;
    6660      1481725 :   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    6661              :     head = NEXT_INSN (head);
    6662              :   /* But make sure exit_block starts with RETURN_LABEL, otherwise the
    6663              :      bb count counting will be confused.  Any instructions before that
    6664              :      label are emitted for the case where PREV_BB falls through into the
    6665              :      exit block, so append those instructions to prev_bb in that case.  */
    6666      1481725 :   if (NEXT_INSN (head) != return_label)
    6667              :     {
    6668        14915 :       while (NEXT_INSN (head) != return_label)
    6669              :         {
    6670         9903 :           if (!NOTE_P (NEXT_INSN (head)))
    6671         9903 :             BB_END (prev_bb) = NEXT_INSN (head);
    6672         9903 :           head = NEXT_INSN (head);
    6673              :         }
    6674              :     }
    6675      1481725 :   exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
    6676      1481725 :   exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
    6677      1481725 :   add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
    6678              : 
    6679      1481725 :   ix = 0;
    6680      4474450 :   while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    6681              :     {
    6682      1511000 :       e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
    6683      1511000 :       if (!(e->flags & EDGE_ABNORMAL))
    6684      1383185 :         redirect_edge_succ (e, exit_block);
    6685              :       else
    6686       127815 :         ix++;
    6687              :     }
    6688              : 
    6689      1481725 :   e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
    6690              :                              EDGE_FALLTHRU);
    6691      3091265 :   FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    6692      1609540 :     if (e2 != e)
    6693              :       {
    6694       127815 :         exit_block->count -= e2->count ();
    6695              :       }
    6696      1481725 :   update_bb_for_insn (exit_block);
    6697              : }
    6698              : 
    6699              : /* Helper function for discover_nonconstant_array_refs.
    6700              :    Look for ARRAY_REF nodes with non-constant indexes and mark them
    6701              :    addressable.  */
    6702              : 
    6703              : static tree
    6704    162386318 : discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
    6705              :                                    void *data)
    6706              : {
    6707    162386318 :   tree t = *tp;
    6708    162386318 :   bitmap forced_stack_vars = (bitmap)((walk_stmt_info *)data)->info;
    6709              : 
    6710    162386318 :   if (IS_TYPE_OR_DECL_P (t))
    6711     33275382 :     *walk_subtrees = 0;
    6712    129110936 :   else if (REFERENCE_CLASS_P (t) && TREE_THIS_VOLATILE (t))
    6713              :     {
    6714        71037 :       t = get_base_address (t);
    6715        71037 :       if (t && DECL_P (t)
    6716        58522 :           && DECL_MODE (t) != BLKmode
    6717       107762 :           && !TREE_ADDRESSABLE (t))
    6718        32683 :         bitmap_set_bit (forced_stack_vars, DECL_UID (t));
    6719        71037 :       *walk_subtrees = 0;
    6720              :     }
    6721    129039899 :   else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    6722              :     {
    6723      6186813 :       while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    6724      2972793 :               && is_gimple_min_invariant (TREE_OPERAND (t, 1))
    6725      2498311 :               && (!TREE_OPERAND (t, 2)
    6726        37259 :                   || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
    6727      3722634 :              || (TREE_CODE (t) == COMPONENT_REF
    6728       910189 :                  && (!TREE_OPERAND (t,2)
    6729            0 :                      || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
    6730      2812445 :              || TREE_CODE (t) == BIT_FIELD_REF
    6731              :              || TREE_CODE (t) == REALPART_EXPR
    6732              :              || TREE_CODE (t) == IMAGPART_EXPR
    6733              :              || TREE_CODE (t) == VIEW_CONVERT_EXPR
    6734      6186813 :              || CONVERT_EXPR_P (t))
    6735      3384256 :         t = TREE_OPERAND (t, 0);
    6736              : 
    6737      2802557 :       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    6738              :         {
    6739       508614 :           t = get_base_address (t);
    6740       508614 :           if (t && DECL_P (t)
    6741       409581 :               && DECL_MODE (t) != BLKmode
    6742       532176 :               && !TREE_ADDRESSABLE (t))
    6743        14885 :             bitmap_set_bit (forced_stack_vars, DECL_UID (t));
    6744              :         }
    6745              : 
    6746      2802557 :       *walk_subtrees = 0;
    6747              :     }
    6748              :   /* References of size POLY_INT_CST to a fixed-size object must go
    6749              :      through memory.  It's more efficient to force that here than
    6750              :      to create temporary slots on the fly.
    6751              :      RTL expansion expectes TARGET_MEM_REF to always address actual memory.
    6752              :      Also, force to stack non-BLKmode vars accessed through VIEW_CONVERT_EXPR
    6753              :      to BLKmode type.  */
    6754    126237342 :   else if (TREE_CODE (t) == TARGET_MEM_REF
    6755              :            || (TREE_CODE (t) == MEM_REF
    6756    125385966 :                && TYPE_SIZE (TREE_TYPE (t))
    6757              :                && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
    6758    251623308 :            || (TREE_CODE (t) == VIEW_CONVERT_EXPR
    6759       324509 :                && TYPE_MODE (TREE_TYPE (t)) == BLKmode))
    6760              :     {
    6761       929900 :       tree base = get_base_address (t);
    6762       929900 :       if (base
    6763       929900 :           && DECL_P (base)
    6764       270566 :           && !TREE_ADDRESSABLE (base)
    6765       124781 :           && DECL_MODE (base) != BLKmode
    6766       935724 :           && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
    6767         5824 :         bitmap_set_bit (forced_stack_vars, DECL_UID (base));
    6768       929900 :       *walk_subtrees = 0;
    6769              :     }
    6770              : 
    6771    162386318 :   return NULL_TREE;
    6772              : }
    6773              : 
    6774              : /* If there's a chance to get a pseudo for t then if it would be of float mode
    6775              :    and the actual access is via an integer mode (lowered memcpy or similar
    6776              :    access) then avoid the register expansion if the mode likely is not storage
    6777              :    suitable for raw bits processing (like XFmode on i?86).  */
    6778              : 
    6779              : static void
    6780      6576236 : avoid_type_punning_on_regs (tree t, bitmap forced_stack_vars)
    6781              : {
    6782      6576236 :   machine_mode access_mode = TYPE_MODE (TREE_TYPE (t));
    6783      6576236 :   if (access_mode != BLKmode
    6784      6410017 :       && !SCALAR_INT_MODE_P (access_mode))
    6785              :     return;
    6786      5605318 :   tree base = get_base_address (t);
    6787      5605318 :   if (DECL_P (base)
    6788      4139883 :       && !TREE_ADDRESSABLE (base)
    6789      1231899 :       && FLOAT_MODE_P (DECL_MODE (base))
    6790           72 :       && maybe_lt (GET_MODE_PRECISION (DECL_MODE (base)),
    6791          216 :                    GET_MODE_BITSIZE (GET_MODE_INNER (DECL_MODE (base))))
    6792              :       /* Double check in the expensive way we really would get a pseudo.  */
    6793      5605323 :       && use_register_for_decl (base))
    6794            5 :     bitmap_set_bit (forced_stack_vars, DECL_UID (base));
    6795              : }
    6796              : 
    6797              : /* RTL expansion is not able to compile array references with variable
    6798              :    offsets for arrays stored in single register.  Discover such
    6799              :    expressions and mark variables as addressable to avoid this
    6800              :    scenario.  */
    6801              : 
    6802              : static void
    6803      1481726 : discover_nonconstant_array_refs (bitmap forced_stack_vars)
    6804              : {
    6805      1481726 :   basic_block bb;
    6806      1481726 :   gimple_stmt_iterator gsi;
    6807              : 
    6808      1481726 :   walk_stmt_info wi = {};
    6809      1481726 :   wi.info = forced_stack_vars;
    6810     14071727 :   FOR_EACH_BB_FN (bb, cfun)
    6811    120429528 :     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    6812              :       {
    6813     95249526 :         gimple *stmt = gsi_stmt (gsi);
    6814     95249526 :         if (!is_gimple_debug (stmt))
    6815              :           {
    6816     46491902 :             walk_gimple_op (stmt, discover_nonconstant_array_refs_r, &wi);
    6817     46491902 :             gcall *call = dyn_cast <gcall *> (stmt);
    6818      6859951 :             if (call && gimple_call_internal_p (call))
    6819              :               {
    6820       222987 :                 tree cand = NULL_TREE;
    6821       222987 :                 switch (gimple_call_internal_fn (call))
    6822              :                   {
    6823            0 :                   case IFN_LOAD_LANES:
    6824              :                     /* The source must be a MEM.  */
    6825            0 :                     cand = gimple_call_arg (call, 0);
    6826            0 :                     break;
    6827            0 :                   case IFN_STORE_LANES:
    6828              :                     /* The destination must be a MEM.  */
    6829            0 :                     cand = gimple_call_lhs (call);
    6830            0 :                     break;
    6831              :                   default:
    6832              :                     break;
    6833              :                   }
    6834            0 :                 if (cand)
    6835            0 :                   cand = get_base_address (cand);
    6836            0 :                 if (cand
    6837            0 :                     && DECL_P (cand)
    6838            0 :                     && use_register_for_decl (cand))
    6839            0 :                   bitmap_set_bit (forced_stack_vars, DECL_UID (cand));
    6840              :               }
    6841    150774217 :             if (gimple_vdef (stmt))
    6842              :               {
    6843     15348617 :                 tree t = gimple_get_lhs (stmt);
    6844     15348617 :                 if (t && REFERENCE_CLASS_P (t))
    6845      6576236 :                   avoid_type_punning_on_regs (t, forced_stack_vars);
    6846              :               }
    6847              :           }
    6848              :       }
    6849      1481726 : }
    6850              : 
    6851              : /* This function sets crtl->args.internal_arg_pointer to a virtual
    6852              :    register if DRAP is needed.  Local register allocator will replace
    6853              :    virtual_incoming_args_rtx with the virtual register.  */
    6854              : 
    6855              : static void
    6856      1481725 : expand_stack_alignment (void)
    6857              : {
    6858      1481725 :   rtx drap_rtx;
    6859      1481725 :   unsigned int preferred_stack_boundary;
    6860              : 
    6861      1481725 :   if (! SUPPORTS_STACK_ALIGNMENT)
    6862              :     return;
    6863              : 
    6864      1481725 :   if (cfun->calls_alloca
    6865      1466091 :       || cfun->has_nonlocal_label
    6866      1465289 :       || crtl->has_nonlocal_goto)
    6867        16839 :     crtl->need_drap = true;
    6868              : 
    6869              :   /* Call update_stack_boundary here again to update incoming stack
    6870              :      boundary.  It may set incoming stack alignment to a different
    6871              :      value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
    6872              :      use the minimum incoming stack alignment to check if it is OK
    6873              :      to perform sibcall optimization since sibcall optimization will
    6874              :      only align the outgoing stack to incoming stack boundary.  */
    6875      1481725 :   if (targetm.calls.update_stack_boundary)
    6876      1481725 :     targetm.calls.update_stack_boundary ();
    6877              : 
    6878              :   /* The incoming stack frame has to be aligned at least at
    6879              :      parm_stack_boundary.  */
    6880      1481725 :   gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
    6881              : 
    6882              :   /* Update crtl->stack_alignment_estimated and use it later to align
    6883              :      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
    6884              :      exceptions since callgraph doesn't collect incoming stack alignment
    6885              :      in this case.  */
    6886      1481725 :   if (cfun->can_throw_non_call_exceptions
    6887       262872 :       && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    6888              :     preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    6889              :   else
    6890      1453774 :     preferred_stack_boundary = crtl->preferred_stack_boundary;
    6891      1481725 :   if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    6892       716678 :     crtl->stack_alignment_estimated = preferred_stack_boundary;
    6893      1481725 :   if (preferred_stack_boundary > crtl->stack_alignment_needed)
    6894       668851 :     crtl->stack_alignment_needed = preferred_stack_boundary;
    6895              : 
    6896      1481725 :   gcc_assert (crtl->stack_alignment_needed
    6897              :               <= crtl->stack_alignment_estimated);
    6898              : 
    6899      1481725 :   crtl->stack_realign_needed
    6900      1481725 :     = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
    6901      1481725 :   crtl->stack_realign_tried = crtl->stack_realign_needed;
    6902              : 
    6903      1481725 :   crtl->stack_realign_processed = true;
    6904              : 
    6905              :   /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
    6906              :      alignment.  */
    6907      1481725 :   gcc_assert (targetm.calls.get_drap_rtx != NULL);
    6908      1481725 :   drap_rtx = targetm.calls.get_drap_rtx ();
    6909              : 
    6910              :   /* stack_realign_drap and drap_rtx must match.  */
    6911      1481725 :   gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
    6912              : 
    6913              :   /* Do nothing if NULL is returned, which means DRAP is not needed.  */
    6914      1481725 :   if (drap_rtx != NULL)
    6915              :     {
    6916         7296 :       crtl->args.internal_arg_pointer = drap_rtx;
    6917              : 
    6918              :       /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
    6919              :          needed. */
    6920         7296 :       fixup_tail_calls ();
    6921              :     }
    6922              : }
    6923              : 
    6924              : 
    6925              : static void
    6926            0 : expand_main_function (void)
    6927              : {
    6928              : #if (defined(INVOKE__main)                              \
    6929              :      || (!defined(HAS_INIT_SECTION)                     \
    6930              :          && !defined(INIT_SECTION_ASM_OP)               \
    6931              :          && !defined(INIT_ARRAY_SECTION_ASM_OP)))
    6932              :   emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
    6933              : #endif
    6934            0 : }
    6935              : 
    6936              : 
    6937              : /* Expand code to initialize the stack_protect_guard.  This is invoked at
    6938              :    the beginning of a function to be protected.  */
    6939              : 
    6940              : static void
    6941          250 : stack_protect_prologue (void)
    6942              : {
    6943          250 :   tree guard_decl = targetm.stack_protect_guard ();
    6944          250 :   rtx x, y;
    6945              : 
    6946          250 :   crtl->stack_protect_guard_decl = guard_decl;
    6947          250 :   x = expand_normal (crtl->stack_protect_guard);
    6948              : 
    6949          250 :   if (targetm.have_stack_protect_combined_set () && guard_decl)
    6950              :     {
    6951            0 :       gcc_assert (DECL_P (guard_decl));
    6952            0 :       y = DECL_RTL (guard_decl);
    6953              : 
    6954              :       /* Allow the target to compute address of Y and copy it to X without
    6955              :          leaking Y into a register.  This combined address + copy pattern
    6956              :          allows the target to prevent spilling of any intermediate results by
    6957              :          splitting it after register allocator.  */
    6958            0 :       if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
    6959              :         {
    6960            0 :           emit_insn (insn);
    6961            0 :           return;
    6962              :         }
    6963              :     }
    6964              : 
    6965          250 :   if (guard_decl)
    6966          250 :     y = expand_normal (guard_decl);
    6967              :   else
    6968            0 :     y = const0_rtx;
    6969              : 
    6970              :   /* Allow the target to copy from Y to X without leaking Y into a
    6971              :      register.  */
    6972          250 :   if (targetm.have_stack_protect_set ())
    6973          250 :     if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
    6974              :       {
    6975          250 :         emit_insn (insn);
    6976          250 :         return;
    6977              :       }
    6978              : 
    6979              :   /* Otherwise do a straight move.  */
    6980            0 :   emit_move_insn (x, y);
    6981              : }
    6982              : 
    6983              : /* Translate the intermediate representation contained in the CFG
    6984              :    from GIMPLE trees to RTL.
    6985              : 
    6986              :    We do conversion per basic block and preserve/update the tree CFG.
    6987              :    This implies we have to do some magic as the CFG can simultaneously
    6988              :    consist of basic blocks containing RTL and GIMPLE trees.  This can
    6989              :    confuse the CFG hooks, so be careful to not manipulate CFG during
    6990              :    the expansion.  */
    6991              : 
    6992              : namespace {
    6993              : 
    6994              : const pass_data pass_data_expand =
    6995              : {
    6996              :   RTL_PASS, /* type */
    6997              :   "expand", /* name */
    6998              :   OPTGROUP_NONE, /* optinfo_flags */
    6999              :   TV_EXPAND, /* tv_id */
    7000              :   ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    7001              :     | PROP_gimple_lcx
    7002              :     | PROP_gimple_lvec
    7003              :     | PROP_gimple_lva), /* properties_required */
    7004              :   PROP_rtl, /* properties_provided */
    7005              :   ( PROP_ssa | PROP_gimple ), /* properties_destroyed */
    7006              :   0, /* todo_flags_start */
    7007              :   0, /* todo_flags_finish */
    7008              : };
    7009              : 
    7010              : class pass_expand : public rtl_opt_pass
    7011              : {
    7012              : public:
    7013       287876 :   pass_expand (gcc::context *ctxt)
    7014       575752 :     : rtl_opt_pass (pass_data_expand, ctxt)
    7015              :   {}
    7016              : 
    7017              :   /* opt_pass methods: */
    7018              :   unsigned int execute (function *) final override;
    7019              : 
    7020              : }; // class pass_expand
    7021              : 
    7022              : unsigned int
    7023      1481726 : pass_expand::execute (function *fun)
    7024              : {
    7025      1481726 :   basic_block bb, init_block;
    7026      1481726 :   edge_iterator ei;
    7027      1481726 :   edge e;
    7028      1481726 :   rtx_insn *var_seq, *var_ret_seq;
    7029      1481726 :   unsigned i;
    7030              : 
    7031      1481726 :   timevar_push (TV_OUT_OF_SSA);
    7032      1481726 :   rewrite_out_of_ssa (&SA);
    7033      1481726 :   timevar_pop (TV_OUT_OF_SSA);
    7034      1481726 :   SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
    7035              : 
    7036      1481726 :   if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
    7037              :     {
    7038       496151 :       gimple_stmt_iterator gsi;
    7039      6734303 :       FOR_EACH_BB_FN (bb, cfun)
    7040     82273116 :         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    7041    107419157 :           if (gimple_debug_bind_p (gsi_stmt (gsi)))
    7042     37622345 :             avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    7043              :     }
    7044              : 
    7045              :   /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
    7046      1481726 :   auto_bitmap forced_stack_vars;
    7047      1481726 :   discover_nonconstant_array_refs (forced_stack_vars);
    7048              : 
    7049              :   /* Make sure all values used by the optimization passes have sane
    7050              :      defaults.  */
    7051      1481726 :   reg_renumber = 0;
    7052              : 
    7053              :   /* Some backends want to know that we are expanding to RTL.  */
    7054      1481726 :   currently_expanding_to_rtl = 1;
    7055              :   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
    7056      1481726 :   free_dominance_info (CDI_DOMINATORS);
    7057              : 
    7058      1481726 :   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
    7059              : 
    7060      1481726 :   insn_locations_init ();
    7061      1481726 :   if (!DECL_IS_UNDECLARED_BUILTIN (current_function_decl))
    7062              :     {
    7063              :       /* Eventually, all FEs should explicitly set function_start_locus.  */
    7064      1358500 :       if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
    7065       636804 :         set_curr_insn_location
    7066       636804 :           (DECL_SOURCE_LOCATION (current_function_decl));
    7067              :       else
    7068       721696 :         set_curr_insn_location (fun->function_start_locus);
    7069              :     }
    7070              :   else
    7071       123226 :     set_curr_insn_location (UNKNOWN_LOCATION);
    7072      1481726 :   prologue_location = curr_insn_location ();
    7073              : 
    7074              : #ifdef INSN_SCHEDULING
    7075      1481726 :   init_sched_attrs ();
    7076              : #endif
    7077              : 
    7078              :   /* Make sure first insn is a note even if we don't want linenums.
    7079              :      This makes sure the first insn will never be deleted.
    7080              :      Also, final expects a note to appear there.  */
    7081      1481726 :   emit_note (NOTE_INSN_DELETED);
    7082              : 
    7083      1481726 :   targetm.expand_to_rtl_hook ();
    7084      1481726 :   crtl->init_stack_alignment ();
    7085      1481726 :   fun->cfg->max_jumptable_ents = 0;
    7086              : 
    7087              :   /* Resovle the function section.  Some targets, like ARM EABI rely on knowledge
    7088              :      of the function section at exapnsion time to predict distance of calls.  */
    7089      1481726 :   resolve_unique_section (current_function_decl, 0, flag_function_sections);
    7090              : 
    7091              :   /* Expand the variables recorded during gimple lowering.  */
    7092      1481726 :   timevar_push (TV_VAR_EXPAND);
    7093      1481726 :   start_sequence ();
    7094              : 
    7095      1481726 :   var_ret_seq = expand_used_vars (forced_stack_vars);
    7096              : 
    7097      1481726 :   var_seq = end_sequence ();
    7098      1481726 :   timevar_pop (TV_VAR_EXPAND);
    7099              : 
    7100              :   /* Honor stack protection warnings.  */
    7101      1481726 :   if (warn_stack_protect)
    7102              :     {
    7103            0 :       if (fun->calls_alloca)
    7104            0 :         warning (OPT_Wstack_protector,
    7105              :                  "stack protector not protecting local variables: "
    7106              :                  "variable length buffer");
    7107            0 :       if (has_short_buffer && !crtl->stack_protect_guard)
    7108            0 :         warning (OPT_Wstack_protector,
    7109              :                  "stack protector not protecting function: "
    7110              :                  "all local arrays are less than %d bytes long",
    7111            0 :                  (int) param_ssp_buffer_size);
    7112              :     }
    7113              : 
    7114              :   /* Temporarily mark PARM_DECLs and RESULT_DECLs we need to expand to
    7115              :      memory addressable so expand_function_start can emit the required
    7116              :      copies.  */
    7117      1481726 :   auto_vec<tree, 16> marked_parms;
    7118      4598621 :   for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
    7119      3116895 :        parm = DECL_CHAIN (parm))
    7120      3116895 :     if (!TREE_ADDRESSABLE (parm)
    7121      3116895 :         && bitmap_bit_p (forced_stack_vars, DECL_UID (parm)))
    7122              :       {
    7123          108 :         TREE_ADDRESSABLE (parm) = 1;
    7124          108 :         marked_parms.safe_push (parm);
    7125              :       }
    7126      1481726 :   if (DECL_RESULT (current_function_decl)
    7127      1481726 :       && !TREE_ADDRESSABLE (DECL_RESULT (current_function_decl))
    7128      2956693 :       && bitmap_bit_p (forced_stack_vars,
    7129      1474967 :                        DECL_UID (DECL_RESULT (current_function_decl))))
    7130              :     {
    7131            0 :       TREE_ADDRESSABLE (DECL_RESULT (current_function_decl)) = 1;
    7132            0 :       marked_parms.safe_push (DECL_RESULT (current_function_decl));
    7133              :     }
    7134              : 
    7135              :   /* Set up parameters and prepare for return, for the function.  */
    7136      1481726 :   expand_function_start (current_function_decl);
    7137              : 
    7138              :   /* Clear TREE_ADDRESSABLE again.  */
    7139      2963560 :   while (!marked_parms.is_empty ())
    7140          108 :     TREE_ADDRESSABLE (marked_parms.pop ()) = 0;
    7141              : 
    7142              :   /* If we emitted any instructions for setting up the variables,
    7143              :      emit them before the FUNCTION_START note.  */
    7144      1481726 :   if (var_seq)
    7145              :     {
    7146         1693 :       emit_insn_before (var_seq, parm_birth_insn);
    7147              : 
    7148              :       /* In expand_function_end we'll insert the alloca save/restore
    7149              :          before parm_birth_insn.  We've just insertted an alloca call.
    7150              :          Adjust the pointer to match.  */
    7151         1693 :       parm_birth_insn = var_seq;
    7152              :     }
    7153              : 
    7154              :   /* Now propagate the RTL assignment of each partition to the
    7155              :      underlying var of each SSA_NAME.  */
    7156              :   tree name;
    7157              : 
    7158     72844031 :   FOR_EACH_SSA_NAME (i, name, cfun)
    7159              :     {
    7160              :       /* We might have generated new SSA names in
    7161              :          update_alias_info_with_stack_vars.  They will have a NULL
    7162              :          defining statements, and won't be part of the partitioning,
    7163              :          so ignore those.  */
    7164     48554772 :       if (!SSA_NAME_DEF_STMT (name))
    7165        64979 :         continue;
    7166              : 
    7167     48489793 :       adjust_one_expanded_partition_var (name);
    7168              :     }
    7169              : 
    7170              :   /* Clean up RTL of variables that straddle across multiple
    7171              :      partitions, and check that the rtl of any PARM_DECLs that are not
    7172              :      cleaned up is that of their default defs.  */
    7173     72844031 :   FOR_EACH_SSA_NAME (i, name, cfun)
    7174              :     {
    7175     48554772 :       int part;
    7176              : 
    7177              :       /* We might have generated new SSA names in
    7178              :          update_alias_info_with_stack_vars.  They will have a NULL
    7179              :          defining statements, and won't be part of the partitioning,
    7180              :          so ignore those.  */
    7181     48554772 :       if (!SSA_NAME_DEF_STMT (name))
    7182        64979 :         continue;
    7183     48489793 :       part = var_to_partition (SA.map, name);
    7184     48489793 :       if (part == NO_PARTITION)
    7185     17232404 :         continue;
    7186              : 
    7187              :       /* If this decl was marked as living in multiple places, reset
    7188              :          this now to NULL.  */
    7189     31257389 :       tree var = SSA_NAME_VAR (name);
    7190     10452089 :       if (var && DECL_RTL_IF_SET (var) == pc_rtx)
    7191       240720 :         SET_DECL_RTL (var, NULL);
    7192              :       /* Check that the pseudos chosen by assign_parms are those of
    7193              :          the corresponding default defs.  */
    7194     31016669 :       else if (SSA_NAME_IS_DEFAULT_DEF (name)
    7195     31016669 :                && (TREE_CODE (var) == PARM_DECL
    7196      3677840 :                    || TREE_CODE (var) == RESULT_DECL))
    7197              :         {
    7198      3620008 :           rtx in = DECL_RTL_IF_SET (var);
    7199      3620008 :           gcc_assert (in);
    7200      3620008 :           rtx out = SA.partition_to_pseudo[part];
    7201      3620008 :           gcc_assert (in == out);
    7202              : 
    7203              :           /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
    7204              :              those expected by debug backends for each parm and for
    7205              :              the result.  This is particularly important for stabs,
    7206              :              whose register elimination from parm's DECL_RTL may cause
    7207              :              -fcompare-debug differences as SET_DECL_RTL changes reg's
    7208              :              attrs.  So, make sure the RTL already has the parm as the
    7209              :              EXPR, so that it won't change.  */
    7210      3620008 :           SET_DECL_RTL (var, NULL_RTX);
    7211      3620008 :           if (MEM_P (in))
    7212       775364 :             set_mem_attributes (in, var, true);
    7213      3620008 :           SET_DECL_RTL (var, in);
    7214              :         }
    7215              :     }
    7216              : 
    7217              :   /* If this function is `main', emit a call to `__main'
    7218              :      to run global initializers, etc.  */
    7219      1481726 :   if (DECL_NAME (current_function_decl)
    7220      1481726 :       && MAIN_NAME_P (DECL_NAME (current_function_decl))
    7221      1593133 :       && DECL_FILE_SCOPE_P (current_function_decl))
    7222      1481726 :     expand_main_function ();
    7223              : 
    7224              :   /* Initialize the stack_protect_guard field.  This must happen after the
    7225              :      call to __main (if any) so that the external decl is initialized.  */
    7226      1481726 :   if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    7227          250 :     stack_protect_prologue ();
    7228              : 
    7229      1481726 :   expand_phi_nodes (&SA);
    7230              : 
    7231              :   /* Release any stale SSA redirection data.  */
    7232      1481726 :   redirect_edge_var_map_empty ();
    7233              : 
    7234              :   /* Register rtl specific functions for cfg.  */
    7235      1481726 :   rtl_register_cfg_hooks ();
    7236              : 
    7237      1481726 :   init_block = construct_init_block ();
    7238              : 
    7239              :   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
    7240              :      remaining edges later.  */
    7241      2963452 :   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    7242      1481726 :     e->flags &= ~EDGE_EXECUTABLE;
    7243              : 
    7244              :   /* If the function has too many markers, drop them while expanding.  */
    7245      1481726 :   if (cfun->debug_marker_count
    7246      1481726 :       >= param_max_debug_marker_count)
    7247            1 :     cfun->debug_nonbind_markers = false;
    7248              : 
    7249      1481726 :   enable_ranger (fun);
    7250      1481726 :   lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
    7251      1481726 :   head_end_for_bb.create (last_basic_block_for_fn (fun));
    7252     14074364 :   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
    7253              :                   next_bb)
    7254     12592639 :     bb = expand_gimple_basic_block (bb, var_ret_seq);
    7255      1481725 :   disable_ranger (fun);
    7256     14477449 :   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
    7257              :                   next_bb)
    7258              :     {
    7259     12995724 :       if ((bb->flags & BB_RTL) == 0)
    7260              :         {
    7261     12592634 :           bb->il.gimple.seq = NULL;
    7262     12592634 :           bb->il.gimple.phi_nodes = NULL;
    7263     12592634 :           init_rtl_bb_info (bb);
    7264     12592634 :           bb->flags |= BB_RTL;
    7265     12592634 :           BB_HEAD (bb) = head_end_for_bb[bb->index].first;
    7266     12592634 :           BB_END (bb) = head_end_for_bb[bb->index].second;
    7267              :         }
    7268              :       /* These flags have no purpose in RTL land.  */
    7269     12995724 :       if (EDGE_COUNT (bb->succs) == 2)
    7270              :         {
    7271      6268064 :           EDGE_SUCC (bb, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
    7272      6268064 :           EDGE_SUCC (bb, 1)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
    7273              :         }
    7274     18966498 :       else if (single_succ_p (bb))
    7275      5970774 :         single_succ_edge (bb)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
    7276              :     }
    7277      1481725 :   head_end_for_bb.release ();
    7278              : 
    7279      1481725 :   if (MAY_HAVE_DEBUG_BIND_INSNS)
    7280       496176 :     expand_debug_locations ();
    7281              : 
    7282      1481725 :   if (deep_ter_debug_map)
    7283              :     {
    7284          627 :       delete deep_ter_debug_map;
    7285          627 :       deep_ter_debug_map = NULL;
    7286              :     }
    7287              : 
    7288              :   /* Free stuff we no longer need after GIMPLE optimizations.  */
    7289      1481725 :   free_dominance_info (CDI_DOMINATORS);
    7290      1481725 :   free_dominance_info (CDI_POST_DOMINATORS);
    7291      1481725 :   delete_tree_cfg_annotations (fun);
    7292              : 
    7293      1481725 :   timevar_push (TV_OUT_OF_SSA);
    7294      1481725 :   finish_out_of_ssa (&SA);
    7295      1481725 :   timevar_pop (TV_OUT_OF_SSA);
    7296              : 
    7297      1481725 :   timevar_push (TV_POST_EXPAND);
    7298              :   /* We are no longer in SSA form.  */
    7299      1481725 :   fun->gimple_df->in_ssa_p = false;
    7300      1481725 :   loops_state_clear (LOOP_CLOSED_SSA);
    7301              : 
    7302              :   /* Expansion is used by optimization passes too, set maybe_hot_insn_p
    7303              :      conservatively to true until they are all profile aware.  */
    7304      2963450 :   delete lab_rtx_for_bb;
    7305      1481725 :   free_histograms (fun);
    7306              : 
    7307      1481725 :   construct_exit_block ();
    7308      1481725 :   insn_locations_finalize ();
    7309              : 
    7310      1481725 :   if (var_ret_seq)
    7311              :     {
    7312         1837 :       rtx_insn *after = return_label;
    7313         1837 :       rtx_insn *next = NEXT_INSN (after);
    7314         1837 :       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
    7315         1837 :         after = next;
    7316         1837 :       emit_insn_after (var_ret_seq, after);
    7317              :     }
    7318              : 
    7319      1481725 :   if (hwassist_sanitize_stack_p ())
    7320          398 :     hwasan_maybe_emit_frame_base_init ();
    7321              : 
    7322              :   /* Zap the tree EH table.  */
    7323      1481725 :   set_eh_throw_stmt_table (fun, NULL);
    7324              : 
    7325              :   /* We need JUMP_LABEL be set in order to redirect jumps, and hence
    7326              :      split edges which edge insertions might do.  */
    7327      1481725 :   rebuild_jump_labels (get_insns ());
    7328              : 
    7329              :   /* If we have a single successor to the entry block, put the pending insns
    7330              :      after parm birth, but before NOTE_INSNS_FUNCTION_BEG.  */
    7331      1481725 :   if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
    7332              :     {
    7333      1481725 :       edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
    7334      1481725 :       if (e->insns.r)
    7335              :         {
    7336           11 :           rtx_insn *insns = e->insns.r;
    7337           11 :           e->insns.r = NULL;
    7338           11 :           rebuild_jump_labels_chain (insns);
    7339           11 :           if (NOTE_P (parm_birth_insn)
    7340           11 :               && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
    7341           11 :             emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
    7342              :           else
    7343            0 :             emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
    7344              :         }
    7345              :     }
    7346              : 
    7347              :   /* Otherwise, as well as for other edges, take the usual way.  */
    7348      1481725 :   commit_edge_insertions ();
    7349              : 
    7350              :   /* We're done expanding trees to RTL.  */
    7351      1481725 :   currently_expanding_to_rtl = 0;
    7352              : 
    7353      1481725 :   flush_mark_addressable_queue ();
    7354              : 
    7355     18061391 :   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
    7356              :                   EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    7357              :     {
    7358     16579666 :       edge e;
    7359     16579666 :       edge_iterator ei;
    7360     38751649 :       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    7361              :         {
    7362              :           /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
    7363     22171983 :           e->flags &= ~EDGE_EXECUTABLE;
    7364              : 
    7365              :           /* At the moment not all abnormal edges match the RTL
    7366              :              representation.  It is safe to remove them here as
    7367              :              find_many_sub_basic_blocks will rediscover them.
    7368              :              In the future we should get this fixed properly.  */
    7369     22171983 :           if ((e->flags & EDGE_ABNORMAL)
    7370       137997 :               && !(e->flags & EDGE_SIBCALL))
    7371        10182 :             remove_edge (e);
    7372              :           else
    7373     22161801 :             ei_next (&ei);
    7374              :         }
    7375              :     }
    7376              : 
    7377      1481725 :   auto_sbitmap blocks (last_basic_block_for_fn (fun));
    7378      1481725 :   bitmap_ones (blocks);
    7379      1481725 :   find_many_sub_basic_blocks (blocks);
    7380      1481725 :   purge_all_dead_edges ();
    7381              : 
    7382              :   /* After initial rtl generation, call back to finish generating
    7383              :      exception support code.  We need to do this before cleaning up
    7384              :      the CFG as the code does not expect dead landing pads.  */
    7385      1481725 :   if (fun->eh->region_tree != NULL)
    7386        61925 :     finish_eh_generation ();
    7387              : 
    7388              :   /* Call expand_stack_alignment after finishing all
    7389              :      updates to crtl->preferred_stack_boundary.  */
    7390      1481725 :   expand_stack_alignment ();
    7391              : 
    7392              :   /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
    7393              :      function.  */
    7394      1481725 :   if (crtl->tail_call_emit)
    7395       115297 :     fixup_tail_calls ();
    7396              : 
    7397      1481725 :   HOST_WIDE_INT patch_area_size, patch_area_entry;
    7398      1481725 :   parse_and_check_patch_area (flag_patchable_function_entry, false,
    7399              :                               &patch_area_size, &patch_area_entry);
    7400              : 
    7401      1481725 :   tree patchable_function_entry_attr
    7402      1481725 :     = lookup_attribute ("patchable_function_entry",
    7403      1481725 :                         DECL_ATTRIBUTES (cfun->decl));
    7404      1481725 :   if (patchable_function_entry_attr)
    7405              :     {
    7406           16 :       tree pp_val = TREE_VALUE (patchable_function_entry_attr);
    7407           16 :       tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
    7408              : 
    7409           16 :       patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
    7410           16 :       patch_area_entry = 0;
    7411           16 :       if (TREE_CHAIN (pp_val) != NULL_TREE)
    7412              :         {
    7413            8 :           tree patchable_function_entry_value2
    7414            8 :             = TREE_VALUE (TREE_CHAIN (pp_val));
    7415            8 :           patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
    7416              :         }
    7417              :     }
    7418              : 
    7419      1481725 :   if (patch_area_entry > patch_area_size)
    7420              :     {
    7421            0 :       if (patch_area_size > 0)
    7422            0 :         warning (OPT_Wattributes,
    7423              :                  "patchable function entry %wu exceeds size %wu",
    7424              :                  patch_area_entry, patch_area_size);
    7425            0 :       patch_area_entry = 0;
    7426              :     }
    7427              : 
    7428      1481725 :   crtl->patch_area_size = patch_area_size;
    7429      1481725 :   crtl->patch_area_entry = patch_area_entry;
    7430              : 
    7431              :   /* BB subdivision may have created basic blocks that are only reachable
    7432              :      from unlikely bbs but not marked as such in the profile.  */
    7433      1481725 :   if (optimize)
    7434      1040564 :     propagate_unlikely_bbs_forward ();
    7435              : 
    7436              :   /* Remove unreachable blocks, otherwise we cannot compute dominators
    7437              :      which are needed for loop state verification.  As a side-effect
    7438              :      this also compacts blocks.
    7439              :      ???  We cannot remove trivially dead insns here as for example
    7440              :      the DRAP reg on i?86 is not magically live at this point.
    7441              :      gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
    7442      1481725 :   cleanup_cfg (CLEANUP_NO_INSN_DEL);
    7443              : 
    7444      1481725 :   checking_verify_flow_info ();
    7445              : 
    7446              :   /* Initialize pseudos allocated for hard registers.  */
    7447      1481725 :   emit_initial_value_sets ();
    7448              : 
    7449              :   /* And finally unshare all RTL.  */
    7450      1481725 :   unshare_all_rtl ();
    7451              : 
    7452              :   /* There's no need to defer outputting this function any more; we
    7453              :      know we want to output it.  */
    7454      1481725 :   DECL_DEFER_OUTPUT (current_function_decl) = 0;
    7455              : 
    7456              :   /* Now that we're done expanding trees to RTL, we shouldn't have any
    7457              :      more CONCATs anywhere.  */
    7458      1481725 :   generating_concat_p = 0;
    7459              : 
    7460      1481725 :   if (dump_file)
    7461              :     {
    7462          366 :       fprintf (dump_file,
    7463              :                "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
    7464              :       /* And the pass manager will dump RTL for us.  */
    7465              :     }
    7466              : 
    7467              :   /* If we're emitting a nested function, make sure its parent gets
    7468              :      emitted as well.  Doing otherwise confuses debug info.  */
    7469      1481725 :     {
    7470      1481725 :       tree parent;
    7471      1481725 :       for (parent = DECL_CONTEXT (current_function_decl);
    7472      3170205 :            parent != NULL_TREE;
    7473      1688480 :            parent = get_containing_scope (parent))
    7474      1688480 :         if (TREE_CODE (parent) == FUNCTION_DECL)
    7475        74213 :           TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
    7476              :     }
    7477              : 
    7478      1481725 :   TREE_ASM_WRITTEN (current_function_decl) = 1;
    7479              : 
    7480              :   /* After expanding, the return labels are no longer needed. */
    7481      1481725 :   return_label = NULL;
    7482      1481725 :   naked_return_label = NULL;
    7483              : 
    7484              :   /* After expanding, the tm_restart map is no longer needed.  */
    7485      1481725 :   if (fun->gimple_df->tm_restart)
    7486          265 :     fun->gimple_df->tm_restart = NULL;
    7487              : 
    7488              :   /* Tag the blocks with a depth number so that change_scope can find
    7489              :      the common parent easily.  */
    7490      1481725 :   set_block_levels (DECL_INITIAL (fun->decl), 0);
    7491      1481725 :   default_rtl_profile ();
    7492              : 
    7493              :   /* For -dx discard loops now, otherwise IL verify in clean_state will
    7494              :      ICE.  */
    7495      1481725 :   if (rtl_dump_and_exit)
    7496              :     {
    7497           79 :       cfun->curr_properties &= ~PROP_loops;
    7498           79 :       loop_optimizer_finalize ();
    7499              :     }
    7500              : 
    7501      1481725 :   timevar_pop (TV_POST_EXPAND);
    7502              : 
    7503      1481725 :   return 0;
    7504      1481725 : }
    7505              : 
    7506              : } // anon namespace
    7507              : 
    7508              : rtl_opt_pass *
    7509       287876 : make_pass_expand (gcc::context *ctxt)
    7510              : {
    7511       287876 :   return new pass_expand (ctxt);
    7512              : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.