LCOV - code coverage report
Current view: top level - gcc - emit-rtl.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 83.4 % 2817 2349
Test Date: 2026-02-28 14:20:25 Functions: 86.7 % 264 229
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Emit RTL for the GCC expander.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : 
      21              : /* Middle-to-low level generation of rtx code and insns.
      22              : 
      23              :    This file contains support functions for creating rtl expressions
      24              :    and manipulating them in the doubly-linked chain of insns.
      25              : 
      26              :    The patterns of the insns are created by machine-dependent
      27              :    routines in insn-emit.cc, which is generated automatically from
      28              :    the machine description.  These routines make the individual rtx's
      29              :    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
      30              :    which are automatically generated from rtl.def; what is machine
      31              :    dependent is the kind of rtx's they make and what arguments they
      32              :    use.  */
      33              : 
      34              : #include "config.h"
      35              : #include "system.h"
      36              : #include "coretypes.h"
      37              : #include "memmodel.h"
      38              : #include "backend.h"
      39              : #include "target.h"
      40              : #include "rtl.h"
      41              : #include "tree.h"
      42              : #include "df.h"
      43              : #include "tm_p.h"
      44              : #include "stringpool.h"
      45              : #include "insn-config.h"
      46              : #include "regs.h"
      47              : #include "emit-rtl.h"
      48              : #include "recog.h"
      49              : #include "diagnostic-core.h"
      50              : #include "alias.h"
      51              : #include "fold-const.h"
      52              : #include "varasm.h"
      53              : #include "cfgrtl.h"
      54              : #include "tree-eh.h"
      55              : #include "explow.h"
      56              : #include "expr.h"
      57              : #include "builtins.h"
      58              : #include "rtl-iter.h"
      59              : #include "stor-layout.h"
      60              : #include "opts.h"
      61              : #include "optabs.h"
      62              : #include "predict.h"
      63              : #include "rtx-vector-builder.h"
      64              : #include "gimple.h"
      65              : #include "gimple-ssa.h"
      66              : #include "gimplify.h"
      67              : #include "bbitmap.h"
      68              : 
      69              : struct target_rtl default_target_rtl;
      70              : #if SWITCHABLE_TARGET
      71              : struct target_rtl *this_target_rtl = &default_target_rtl;
      72              : #endif
      73              : 
      74              : #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
      75              : 
      76              : /* Commonly used modes.  */
      77              : 
      78              : scalar_int_mode byte_mode;      /* Mode whose width is BITS_PER_UNIT.  */
      79              : scalar_int_mode word_mode;      /* Mode whose width is BITS_PER_WORD.  */
      80              : scalar_int_mode ptr_mode;       /* Mode whose width is POINTER_SIZE.  */
      81              : 
      82              : /* Datastructures maintained for currently processed function in RTL form.  */
      83              : 
      84              : struct rtl_data x_rtl;
      85              : 
      86              : /* Indexed by pseudo register number, gives the rtx for that pseudo.
      87              :    Allocated in parallel with regno_pointer_align.
      88              :    FIXME: We could put it into emit_status struct, but gengtype is not able to deal
      89              :    with length attribute nested in top level structures.  */
      90              : 
      91              : rtx * regno_reg_rtx;
      92              : 
      93              : /* This is *not* reset after each function.  It gives each CODE_LABEL
      94              :    in the entire compilation a unique label number.  */
      95              : 
      96              : static GTY(()) int label_num = 1;
      97              : 
      98              : /* We record floating-point CONST_DOUBLEs in each floating-point mode for
      99              :    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
     100              :    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
     101              :    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
     102              : 
     103              : rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
     104              : 
     105              : rtx const_true_rtx;
     106              : 
     107              : REAL_VALUE_TYPE dconst0;
     108              : REAL_VALUE_TYPE dconst1;
     109              : REAL_VALUE_TYPE dconst2;
     110              : REAL_VALUE_TYPE dconstm0;
     111              : REAL_VALUE_TYPE dconstm1;
     112              : REAL_VALUE_TYPE dconsthalf;
     113              : REAL_VALUE_TYPE dconstinf;
     114              : REAL_VALUE_TYPE dconstninf;
     115              : 
     116              : /* Record fixed-point constant 0 and 1.  */
     117              : FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
     118              : FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
     119              : 
     120              : /* We make one copy of (const_int C) where C is in
     121              :    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
     122              :    to save space during the compilation and simplify comparisons of
     123              :    integers.  */
     124              : 
     125              : rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
     126              : 
     127              : /* Standard pieces of rtx, to be substituted directly into things.  */
     128              : rtx pc_rtx;
     129              : rtx ret_rtx;
     130              : rtx simple_return_rtx;
     131              : 
     132              : /* Marker used for denoting an INSN, which should never be accessed (i.e.,
     133              :    this pointer should normally never be dereferenced), but is required to be
     134              :    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
     135              : rtx_insn *invalid_insn_rtx;
     136              : 
     137              : /* A hash table storing CONST_INTs whose absolute value is greater
     138              :    than MAX_SAVED_CONST_INT.  */
     139              : 
     140              : struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
     141              : {
     142              :   typedef HOST_WIDE_INT compare_type;
     143              : 
     144              :   static hashval_t hash (rtx i);
     145              :   static bool equal (rtx i, HOST_WIDE_INT h);
     146              : };
     147              : 
     148              : static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
     149              : 
     150              : struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
     151              : {
     152              :   static hashval_t hash (rtx x);
     153              :   static bool equal (rtx x, rtx y);
     154              : };
     155              : 
     156              : static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
     157              : 
     158              : struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
     159              : {
     160              :   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
     161              : 
     162              :   static hashval_t hash (rtx x);
     163              :   static bool equal (rtx x, const compare_type &y);
     164              : };
     165              : 
     166              : static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
     167              : 
     168              : /* A hash table storing register attribute structures.  */
     169              : struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
     170              : {
     171              :   static hashval_t hash (reg_attrs *x);
     172              :   static bool equal (reg_attrs *a, reg_attrs *b);
     173              : };
     174              : 
     175              : static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
     176              : 
     177              : /* A hash table storing all CONST_DOUBLEs.  */
     178              : struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
     179              : {
     180              :   static hashval_t hash (rtx x);
     181              :   static bool equal (rtx x, rtx y);
     182              : };
     183              : 
     184              : static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
     185              : 
     186              : /* A hash table storing all CONST_FIXEDs.  */
     187              : struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
     188              : {
     189              :   static hashval_t hash (rtx x);
     190              :   static bool equal (rtx x, rtx y);
     191              : };
     192              : 
     193              : static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
     194              : 
     195              : #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
     196              : #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
     197              : #define first_label_num (crtl->emit.x_first_label_num)
     198              : 
     199              : static void set_used_decls (tree);
     200              : static void mark_label_nuses (rtx);
     201              : #if TARGET_SUPPORTS_WIDE_INT
     202              : static rtx lookup_const_wide_int (rtx);
     203              : #endif
     204              : static rtx lookup_const_double (rtx);
     205              : static rtx lookup_const_fixed (rtx);
     206              : static rtx gen_const_vector (machine_mode, int);
     207              : static void copy_rtx_if_shared_1 (rtx *orig);
     208              : 
     209              : /* Probability of the conditional branch currently proceeded by try_split.  */
     210              : profile_probability split_branch_probability;
     211              : 
     212              : /* Returns a hash code for X (which is a really a CONST_INT).  */
     213              : 
     214              : hashval_t
     215   3367004765 : const_int_hasher::hash (rtx x)
     216              : {
     217   3367004765 :   return (hashval_t) INTVAL (x);
     218              : }
     219              : 
     220              : /* Returns true if the value represented by X (which is really a
     221              :    CONST_INT) is the same as that given by Y (which is really a
     222              :    HOST_WIDE_INT *).  */
     223              : 
     224              : bool
     225   4238590936 : const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
     226              : {
     227   4238590936 :   return (INTVAL (x) == y);
     228              : }
     229              : 
     230              : #if TARGET_SUPPORTS_WIDE_INT
     231              : /* Returns a hash code for X (which is a really a CONST_WIDE_INT).  */
     232              : 
     233              : hashval_t
     234      1450840 : const_wide_int_hasher::hash (rtx x)
     235              : {
     236      1450840 :   int i;
     237      1450840 :   unsigned HOST_WIDE_INT hash = 0;
     238      1450840 :   const_rtx xr = x;
     239              : 
     240      4378837 :   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
     241      2927997 :     hash += CONST_WIDE_INT_ELT (xr, i);
     242              : 
     243      1450840 :   return (hashval_t) hash;
     244              : }
     245              : 
     246              : /* Returns true if the value represented by X (which is really a
     247              :    CONST_WIDE_INT) is the same as that given by Y (which is really a
     248              :    CONST_WIDE_INT).  */
     249              : 
     250              : bool
     251      1477145 : const_wide_int_hasher::equal (rtx x, rtx y)
     252              : {
     253      1477145 :   int i;
     254      1477145 :   const_rtx xr = x;
     255      1477145 :   const_rtx yr = y;
     256      1477145 :   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
     257              :     return false;
     258              : 
     259      2544480 :   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
     260      2039625 :     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
     261              :       return false;
     262              : 
     263              :   return true;
     264              : }
     265              : #endif
     266              : 
     267              : /* Returns a hash code for CONST_POLY_INT X.  */
     268              : 
     269              : hashval_t
     270            0 : const_poly_int_hasher::hash (rtx x)
     271              : {
     272            0 :   inchash::hash h;
     273            0 :   h.add_int (GET_MODE (x));
     274            0 :   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
     275            0 :     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
     276            0 :   return h.end ();
     277              : }
     278              : 
     279              : /* Returns true if CONST_POLY_INT X is an rtx representation of Y.  */
     280              : 
     281              : bool
     282            0 : const_poly_int_hasher::equal (rtx x, const compare_type &y)
     283              : {
     284            0 :   if (GET_MODE (x) != y.first)
     285              :     return false;
     286            0 :   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
     287            0 :     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
     288              :       return false;
     289              :   return true;
     290              : }
     291              : 
     292              : /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
     293              : hashval_t
     294     18374275 : const_double_hasher::hash (rtx x)
     295              : {
     296     18374275 :   const_rtx const value = x;
     297     18374275 :   hashval_t h;
     298              : 
     299     18374275 :   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
     300              :     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
     301              :   else
     302              :     {
     303     18374275 :       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
     304              :       /* MODE is used in the comparison, so it should be in the hash.  */
     305     18374275 :       h ^= GET_MODE (value);
     306              :     }
     307     18374275 :   return h;
     308              : }
     309              : 
     310              : /* Returns true if the value represented by X (really a ...)
     311              :    is the same as that represented by Y (really a ...) */
     312              : bool
     313     12707831 : const_double_hasher::equal (rtx x, rtx y)
     314              : {
     315     12707831 :   const_rtx const a = x, b = y;
     316              : 
     317     12707831 :   if (GET_MODE (a) != GET_MODE (b))
     318              :     return false;
     319      4597979 :   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
     320              :     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
     321              :             && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
     322              :   else
     323      4597979 :     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
     324      4597979 :                            CONST_DOUBLE_REAL_VALUE (b));
     325              : }
     326              : 
     327              : /* Returns a hash code for X (which is really a CONST_FIXED).  */
     328              : 
     329              : hashval_t
     330     11710273 : const_fixed_hasher::hash (rtx x)
     331              : {
     332     11710273 :   const_rtx const value = x;
     333     11710273 :   hashval_t h;
     334              : 
     335     11710273 :   h = fixed_hash (CONST_FIXED_VALUE (value));
     336              :   /* MODE is used in the comparison, so it should be in the hash.  */
     337     11710273 :   h ^= GET_MODE (value);
     338     11710273 :   return h;
     339              : }
     340              : 
     341              : /* Returns true if the value represented by X is the same as that
     342              :    represented by Y.  */
     343              : 
     344              : bool
     345      5858758 : const_fixed_hasher::equal (rtx x, rtx y)
     346              : {
     347      5858758 :   const_rtx const a = x, b = y;
     348              : 
     349      5858758 :   if (GET_MODE (a) != GET_MODE (b))
     350              :     return false;
     351       557444 :   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
     352              : }
     353              : 
     354              : /* Return true if the given memory attributes are equal.  */
     355              : 
     356              : bool
     357    226602791 : mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
     358              : {
     359    226602791 :   if (p == q)
     360              :     return true;
     361    223082213 :   if (!p || !q)
     362              :     return false;
     363    222543307 :   return (p->alias == q->alias
     364    140879464 :           && p->offset_known_p == q->offset_known_p
     365    111665672 :           && (!p->offset_known_p || known_eq (p->offset, q->offset))
     366     96318540 :           && p->size_known_p == q->size_known_p
     367     93017437 :           && (!p->size_known_p || known_eq (p->size, q->size))
     368     79965186 :           && p->align == q->align
     369     65572938 :           && p->addrspace == q->addrspace
     370    287833884 :           && (p->expr == q->expr
     371     30420824 :               || (p->expr != NULL_TREE && q->expr != NULL_TREE
     372     23611578 :                   && operand_equal_p (p->expr, q->expr, 0))));
     373              : }
     374              : 
     375              : /* Set MEM's memory attributes so that they are the same as ATTRS.  */
     376              : 
     377              : static void
     378     99293542 : set_mem_attrs (rtx mem, mem_attrs *attrs)
     379              : {
     380              :   /* If everything is the default, we can just clear the attributes.  */
     381     99293542 :   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
     382              :     {
     383      3623771 :       MEM_ATTRS (mem) = 0;
     384      3623771 :       return;
     385              :     }
     386              : 
     387     95669771 :   if (!MEM_ATTRS (mem)
     388     95669771 :       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
     389              :     {
     390     73048656 :       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
     391     73048656 :       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
     392              :     }
     393              : }
     394              : 
     395              : /* Returns a hash code for X (which is a really a reg_attrs *).  */
     396              : 
     397              : hashval_t
     398    313822984 : reg_attr_hasher::hash (reg_attrs *x)
     399              : {
     400    313822984 :   const reg_attrs *const p = x;
     401              : 
     402    313822984 :   inchash::hash h;
     403    313822984 :   h.add_ptr (p->decl);
     404    313822984 :   h.add_poly_hwi (p->offset);
     405    313822984 :   return h.end ();
     406              : }
     407              : 
     408              : /* Returns true if the value represented by X  is the same as that given by
     409              :    Y.  */
     410              : 
     411              : bool
     412    303524352 : reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
     413              : {
     414    303524352 :   const reg_attrs *const p = x;
     415    303524352 :   const reg_attrs *const q = y;
     416              : 
     417    303524352 :   return (p->decl == q->decl && known_eq (p->offset, q->offset));
     418              : }
     419              : /* Allocate a new reg_attrs structure and insert it into the hash table if
     420              :    one identical to it is not already in the table.  We are doing this for
     421              :    MEM of mode MODE.  */
     422              : 
     423              : static reg_attrs *
     424     64520117 : get_reg_attrs (tree decl, poly_int64 offset)
     425              : {
     426     64520117 :   reg_attrs attrs;
     427              : 
     428              :   /* If everything is the default, we can just return zero.  */
     429     64520117 :   if (decl == 0 && known_eq (offset, 0))
     430              :     return 0;
     431              : 
     432     58958670 :   attrs.decl = decl;
     433     58958670 :   attrs.offset = offset;
     434              : 
     435     58958670 :   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
     436     58958670 :   if (*slot == 0)
     437              :     {
     438     29706048 :       *slot = ggc_alloc<reg_attrs> ();
     439     29706048 :       memcpy (*slot, &attrs, sizeof (reg_attrs));
     440              :     }
     441              : 
     442     58958670 :   return *slot;
     443              : }
     444              : 
     445              : 
     446              : #if !HAVE_blockage
     447              : /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
     448              :    and to block register equivalences to be seen across this insn.  */
     449              : 
     450              : rtx
     451              : gen_blockage (void)
     452              : {
     453              :   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
     454              :   MEM_VOLATILE_P (x) = true;
     455              :   return x;
     456              : }
     457              : #endif
     458              : 
     459              : 
     460              : /* Set the mode and register number of X to MODE and REGNO.  */
     461              : 
     462              : void
     463   1674755977 : set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
     464              : {
     465   1674755977 :   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
     466   1674755977 :                         ? hard_regno_nregs (regno, mode)
     467   1255207369 :                         : 1);
     468   1674755977 :   PUT_MODE_RAW (x, mode);
     469   1674755977 :   set_regno_raw (x, regno, nregs);
     470   1674755977 : }
     471              : 
     472              : /* Initialize a fresh REG rtx with mode MODE and register REGNO.  */
     473              : 
     474              : rtx
     475    351517216 : init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
     476              : {
     477    351517216 :   set_mode_and_regno (x, mode, regno);
     478    351517216 :   REG_ATTRS (x) = NULL;
     479    351517216 :   ORIGINAL_REGNO (x) = regno;
     480    351517216 :   return x;
     481              : }
     482              : 
     483              : /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
     484              :    don't attempt to share with the various global pieces of rtl (such as
     485              :    frame_pointer_rtx).  */
     486              : 
     487              : rtx
     488    349559984 : gen_raw_REG (machine_mode mode, unsigned int regno)
     489              : {
     490    349559984 :   rtx x = rtx_alloc (REG MEM_STAT_INFO);
     491    349559984 :   init_raw_REG (x, mode, regno);
     492    349559984 :   return x;
     493              : }
     494              : 
     495              : /* There are some RTL codes that require special attention; the generation
     496              :    functions do the raw handling.  If you add to this list, modify
     497              :    special_rtx in gengenrtl.cc as well.  */
     498              : 
     499              : rtx_expr_list *
     500    205707775 : gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
     501              : {
     502    205707775 :   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
     503    205707775 :                                                  expr_list));
     504              : }
     505              : 
     506              : rtx_insn_list *
     507     94304429 : gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
     508              : {
     509     94304429 :   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
     510     94304429 :                                                  insn_list));
     511              : }
     512              : 
     513              : rtx_insn *
     514       818089 : gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
     515              :               basic_block bb, rtx pattern, location_t location, int code,
     516              :               rtx reg_notes)
     517              : {
     518       818089 :   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeLie (INSN, mode,
     519              :                                                  prev_insn, next_insn,
     520              :                                                  bb, pattern, location, code,
     521       818089 :                                                  reg_notes));
     522              : }
     523              : 
     524              : rtx
     525   1287996988 : gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
     526              : {
     527   1287996988 :   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
     528    829617673 :     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
     529              : 
     530              : #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
     531              :   if (const_true_rtx && arg == STORE_FLAG_VALUE)
     532              :     return const_true_rtx;
     533              : #endif
     534              : 
     535              :   /* Look up the CONST_INT in the hash table.  */
     536    458379315 :   rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
     537              :                                                    INSERT);
     538    458379315 :   if (*slot == 0)
     539     34701314 :     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
     540              : 
     541    458379315 :   return *slot;
     542              : }
     543              : 
     544              : rtx
     545   1204763456 : gen_int_mode (poly_int64 c, machine_mode mode)
     546              : {
     547   1204763456 :   c = trunc_int_for_mode (c, mode);
     548   1204763456 :   if (c.is_constant ())
     549   1204763456 :     return GEN_INT (c.coeffs[0]);
     550              :   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
     551              :   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
     552              : }
     553              : 
     554              : /* CONST_DOUBLEs might be created from pairs of integers, or from
     555              :    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
     556              :    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
     557              : 
     558              : /* Determine whether REAL, a CONST_DOUBLE, already exists in the
     559              :    hash table.  If so, return its counterpart; otherwise add it
     560              :    to the hash table and return it.  */
     561              : static rtx
     562      9706326 : lookup_const_double (rtx real)
     563              : {
     564      9706326 :   rtx *slot = const_double_htab->find_slot (real, INSERT);
     565      9706326 :   if (*slot == 0)
     566      7726446 :     *slot = real;
     567              : 
     568      9706326 :   return *slot;
     569              : }
     570              : 
     571              : /* Return a CONST_DOUBLE rtx for a floating-point value specified by
     572              :    VALUE in mode MODE.  */
     573              : rtx
     574      9706326 : const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
     575              : {
     576      9706326 :   rtx real = rtx_alloc (CONST_DOUBLE);
     577      9706326 :   PUT_MODE (real, mode);
     578              : 
     579      9706326 :   real->u.rv = value;
     580              : 
     581      9706326 :   return lookup_const_double (real);
     582              : }
     583              : 
     584              : /* Determine whether FIXED, a CONST_FIXED, already exists in the
     585              :    hash table.  If so, return its counterpart; otherwise add it
     586              :    to the hash table and return it.  */
     587              : 
     588              : static rtx
     589      7244666 : lookup_const_fixed (rtx fixed)
     590              : {
     591      7244666 :   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
     592      7244666 :   if (*slot == 0)
     593      7244666 :     *slot = fixed;
     594              : 
     595      7244666 :   return *slot;
     596              : }
     597              : 
     598              : /* Return a CONST_FIXED rtx for a fixed-point value specified by
     599              :    VALUE in mode MODE.  */
     600              : 
     601              : rtx
     602      7244666 : const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
     603              : {
     604      7244666 :   rtx fixed = rtx_alloc (CONST_FIXED);
     605      7244666 :   PUT_MODE (fixed, mode);
     606              : 
     607      7244666 :   fixed->u.fv = value;
     608              : 
     609      7244666 :   return lookup_const_fixed (fixed);
     610              : }
     611              : 
     612              : #if TARGET_SUPPORTS_WIDE_INT == 0
     613              : /* Constructs double_int from rtx CST.  */
     614              : 
     615              : double_int
     616              : rtx_to_double_int (const_rtx cst)
     617              : {
     618              :   double_int r;
     619              : 
     620              :   if (CONST_INT_P (cst))
     621              :       r = double_int::from_shwi (INTVAL (cst));
     622              :   else if (CONST_DOUBLE_AS_INT_P (cst))
     623              :     {
     624              :       r.low = CONST_DOUBLE_LOW (cst);
     625              :       r.high = CONST_DOUBLE_HIGH (cst);
     626              :     }
     627              :   else
     628              :     gcc_unreachable ();
     629              : 
     630              :   return r;
     631              : }
     632              : #endif
     633              : 
     634              : #if TARGET_SUPPORTS_WIDE_INT
     635              : /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
     636              :    If so, return its counterpart; otherwise add it to the hash table and
     637              :    return it.  */
     638              : 
     639              : static rtx
     640       562077 : lookup_const_wide_int (rtx wint)
     641              : {
     642       562077 :   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
     643       562077 :   if (*slot == 0)
     644        57222 :     *slot = wint;
     645              : 
     646       562077 :   return *slot;
     647              : }
     648              : #endif
     649              : 
     650              : /* Return an rtx constant for V, given that the constant has mode MODE.
     651              :    The returned rtx will be a CONST_INT if V fits, otherwise it will be
     652              :    a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
     653              :    (if TARGET_SUPPORTS_WIDE_INT).  */
     654              : 
     655              : static rtx
     656    615634373 : immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
     657              : {
     658    615634373 :   unsigned int len = v.get_len ();
     659              :   /* Not scalar_int_mode because we also allow pointer bound modes.  */
     660    615634373 :   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
     661              : 
     662              :   /* Allow truncation but not extension since we do not know if the
     663              :      number is signed or unsigned.  */
     664    615634373 :   gcc_assert (prec <= v.get_precision ());
     665              : 
     666    615634373 :   if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
     667    615072296 :     return gen_int_mode (v.elt (0), mode);
     668              : 
     669              : #if TARGET_SUPPORTS_WIDE_INT
     670       562077 :   {
     671       562077 :     unsigned int i;
     672       562077 :     rtx value;
     673       562077 :     unsigned int blocks_needed
     674       562077 :       = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
     675              : 
     676       562077 :     if (len > blocks_needed)
     677              :       len = blocks_needed;
     678              : 
     679       562077 :     value = const_wide_int_alloc (len);
     680              : 
     681              :     /* It is so tempting to just put the mode in here.  Must control
     682              :        myself ... */
     683       562077 :     PUT_MODE (value, VOIDmode);
     684       562077 :     CWI_PUT_NUM_ELEM (value, len);
     685              : 
     686      1693928 :     for (i = 0; i < len; i++)
     687      1131851 :       CONST_WIDE_INT_ELT (value, i) = v.elt (i);
     688              : 
     689       562077 :     return lookup_const_wide_int (value);
     690              :   }
     691              : #else
     692              :   return immed_double_const (v.elt (0), v.elt (1), mode);
     693              : #endif
     694              : }
     695              : 
     696              : #if TARGET_SUPPORTS_WIDE_INT == 0
     697              : /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
     698              :    of ints: I0 is the low-order word and I1 is the high-order word.
     699              :    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
     700              :    implied upper bits are copies of the high bit of i1.  The value
     701              :    itself is neither signed nor unsigned.  Do not use this routine for
     702              :    non-integer modes; convert to REAL_VALUE_TYPE and use
     703              :    const_double_from_real_value.  */
     704              : 
     705              : rtx
     706              : immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
     707              : {
     708              :   rtx value;
     709              :   unsigned int i;
     710              : 
     711              :   /* There are the following cases (note that there are no modes with
     712              :      HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
     713              : 
     714              :      1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
     715              :         gen_int_mode.
     716              :      2) If the value of the integer fits into HOST_WIDE_INT anyway
     717              :         (i.e., i1 consists only from copies of the sign bit, and sign
     718              :         of i0 and i1 are the same), then we return a CONST_INT for i0.
     719              :      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
     720              :   scalar_mode smode;
     721              :   if (is_a <scalar_mode> (mode, &smode)
     722              :       && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
     723              :     return gen_int_mode (i0, mode);
     724              : 
     725              :   /* If this integer fits in one word, return a CONST_INT.  */
     726              :   if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
     727              :     return GEN_INT (i0);
     728              : 
     729              :   /* We use VOIDmode for integers.  */
     730              :   value = rtx_alloc (CONST_DOUBLE);
     731              :   PUT_MODE (value, VOIDmode);
     732              : 
     733              :   CONST_DOUBLE_LOW (value) = i0;
     734              :   CONST_DOUBLE_HIGH (value) = i1;
     735              : 
     736              :   for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
     737              :     XWINT (value, i) = 0;
     738              : 
     739              :   return lookup_const_double (value);
     740              : }
     741              : #endif
     742              : 
     743              : /* Return an rtx representation of C in mode MODE.  */
     744              : 
     745              : rtx
     746    615634373 : immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
     747              : {
     748    615634373 :   if (c.is_constant ())
     749    615634373 :     return immed_wide_int_const_1 (c.coeffs[0], mode);
     750              : 
     751              :   /* Not scalar_int_mode because we also allow pointer bound modes.  */
     752              :   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
     753              : 
     754              :   /* Allow truncation but not extension since we do not know if the
     755              :      number is signed or unsigned.  */
     756              :   gcc_assert (prec <= c.coeffs[0].get_precision ());
     757              :   poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
     758              : 
     759              :   /* See whether we already have an rtx for this constant.  */
     760              :   inchash::hash h;
     761              :   h.add_int (mode);
     762              :   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
     763              :     h.add_wide_int (newc.coeffs[i]);
     764              :   const_poly_int_hasher::compare_type typed_value (mode, newc);
     765              :   rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
     766              :                                                         h.end (), INSERT);
     767              :   rtx x = *slot;
     768              :   if (x)
     769              :     return x;
     770              : 
     771              :   /* Create a new rtx.  There's a choice to be made here between installing
     772              :      the actual mode of the rtx or leaving it as VOIDmode (for consistency
     773              :      with CONST_INT).  In practice the handling of the codes is different
     774              :      enough that we get no benefit from using VOIDmode, and various places
     775              :      assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     776              :      the right long-term direction anyway.  */
     777              :   typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
     778              :   size_t extra_size = twi::extra_size (prec);
     779              :   x = rtx_alloc_v (CONST_POLY_INT,
     780              :                    sizeof (struct const_poly_int_def) + extra_size);
     781              :   PUT_MODE (x, mode);
     782              :   CONST_POLY_INT_COEFFS (x).set_precision (prec);
     783              :   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
     784              :     CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
     785              : 
     786              :   *slot = x;
     787              :   return x;
     788              : }
     789              : 
     790              : rtx
     791    195016927 : gen_rtx_REG (machine_mode mode, unsigned int regno)
     792              : {
     793              :   /* In case the MD file explicitly references the frame pointer, have
     794              :      all such references point to the same frame pointer.  This is
     795              :      used during frame pointer elimination to distinguish the explicit
     796              :      references to these registers from pseudos that happened to be
     797              :      assigned to them.
     798              : 
     799              :      If we have eliminated the frame pointer or arg pointer, we will
     800              :      be using it as a normal register, for example as a spill
     801              :      register.  In such cases, we might be accessing it in a mode that
     802              :      is not Pmode and therefore cannot use the pre-allocated rtx.
     803              : 
     804              :      Also don't do this when we are making new REGs in reload, since
     805              :      we don't want to get confused with the real pointers.  */
     806              : 
     807    209424574 :   if (mode == Pmode && !reload_in_progress && !lra_in_progress)
     808              :     {
     809     65157948 :       if (regno == FRAME_POINTER_REGNUM
     810      2949646 :           && (!reload_completed || frame_pointer_needed))
     811      2949646 :         return frame_pointer_rtx;
     812              : 
     813     62208302 :       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
     814              :           && regno == HARD_FRAME_POINTER_REGNUM
     815      4218032 :           && (!reload_completed || frame_pointer_needed))
     816      3702307 :         return hard_frame_pointer_rtx;
     817              : #if !HARD_FRAME_POINTER_IS_ARG_POINTER
     818     58505995 :       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
     819              :           && regno == ARG_POINTER_REGNUM)
     820      2952763 :         return arg_pointer_rtx;
     821              : #endif
     822              : #ifdef RETURN_ADDRESS_POINTER_REGNUM
     823              :       if (regno == RETURN_ADDRESS_POINTER_REGNUM)
     824              :         return return_address_pointer_rtx;
     825              : #endif
     826     55553232 :       if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
     827            0 :           && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
     828     55553232 :           && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
     829            0 :         return pic_offset_table_rtx;
     830     55553232 :       if (regno == STACK_POINTER_REGNUM)
     831      3969040 :         return stack_pointer_rtx;
     832              :     }
     833              : 
     834              : #if 0
     835              :   /* If the per-function register table has been set up, try to re-use
     836              :      an existing entry in that table to avoid useless generation of RTL.
     837              : 
     838              :      This code is disabled for now until we can fix the various backends
     839              :      which depend on having non-shared hard registers in some cases.   Long
     840              :      term we want to re-enable this code as it can significantly cut down
     841              :      on the amount of useless RTL that gets generated.
     842              : 
     843              :      We'll also need to fix some code that runs after reload that wants to
     844              :      set ORIGINAL_REGNO.  */
     845              : 
     846              :   if (cfun
     847              :       && cfun->emit
     848              :       && regno_reg_rtx
     849              :       && regno < FIRST_PSEUDO_REGISTER
     850              :       && reg_raw_mode[regno] == mode)
     851              :     return regno_reg_rtx[regno];
     852              : #endif
     853              : 
     854    181443171 :   return gen_raw_REG (mode, regno);
     855              : }
     856              : 
     857              : rtx
     858    248086003 : gen_rtx_MEM (machine_mode mode, rtx addr)
     859              : {
     860    248086003 :   rtx rt = gen_rtx_raw_MEM (mode, addr);
     861              : 
     862              :   /* This field is not cleared by the mere allocation of the rtx, so
     863              :      we clear it here.  */
     864    248086003 :   MEM_ATTRS (rt) = 0;
     865              : 
     866    248086003 :   return rt;
     867              : }
     868              : 
     869              : /* Generate a memory referring to non-trapping constant memory.  */
     870              : 
     871              : rtx
     872      2090129 : gen_const_mem (machine_mode mode, rtx addr)
     873              : {
     874      2090129 :   rtx mem = gen_rtx_MEM (mode, addr);
     875      2090129 :   MEM_READONLY_P (mem) = 1;
     876      2090129 :   MEM_NOTRAP_P (mem) = 1;
     877      2090129 :   return mem;
     878              : }
     879              : 
     880              : /* Generate a MEM referring to fixed portions of the frame, e.g., register
     881              :    save areas.  */
     882              : 
     883              : rtx
     884      1271379 : gen_frame_mem (machine_mode mode, rtx addr)
     885              : {
     886      1271379 :   rtx mem = gen_rtx_MEM (mode, addr);
     887      1271379 :   MEM_NOTRAP_P (mem) = 1;
     888      1271379 :   set_mem_alias_set (mem, get_frame_alias_set ());
     889      1271379 :   return mem;
     890              : }
     891              : 
     892              : /* Generate a MEM referring to a temporary use of the stack, not part
     893              :     of the fixed stack frame.  For example, something which is pushed
     894              :     by a target splitter.  */
     895              : rtx
     896            0 : gen_tmp_stack_mem (machine_mode mode, rtx addr)
     897              : {
     898            0 :   rtx mem = gen_rtx_MEM (mode, addr);
     899            0 :   MEM_NOTRAP_P (mem) = 1;
     900            0 :   if (!cfun->calls_alloca)
     901            0 :     set_mem_alias_set (mem, get_frame_alias_set ());
     902            0 :   return mem;
     903              : }
     904              : 
     905              : /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
     906              :    this construct would be valid, and false otherwise.  */
     907              : 
     908              : bool
     909     47228695 : validate_subreg (machine_mode omode, machine_mode imode,
     910              :                  const_rtx reg, poly_uint64 offset)
     911              : {
     912     94457390 :   poly_uint64 isize = GET_MODE_SIZE (imode);
     913     94457390 :   poly_uint64 osize = GET_MODE_SIZE (omode);
     914              : 
     915              :   /* The sizes must be ordered, so that we know whether the subreg
     916              :      is partial, paradoxical or complete.  */
     917     47228695 :   if (!ordered_p (isize, osize))
     918              :     return false;
     919              : 
     920              :   /* All subregs must be aligned.  */
     921     47306133 :   if (!multiple_p (offset, osize))
     922              :     return false;
     923              : 
     924              :   /* The subreg offset cannot be outside the inner object.  */
     925     47228675 :   if (maybe_ge (offset, isize))
     926              :     return false;
     927              : 
     928     47228675 :   poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
     929              : 
     930              :   /* ??? This should not be here.  Temporarily continue to allow word_mode
     931              :      subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     932              :      Generally, backends are doing something sketchy but it'll take time to
     933              :      fix them all.  */
     934     47228675 :   if (omode == word_mode)
     935              :     ;
     936              :   /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     937              :      is the culprit here, and not the backends.  */
     938     28404000 :   else if (known_ge (osize, regsize) && known_ge (isize, osize))
     939              :     ;
     940              :   /* Allow component subregs of complex and vector.  Though given the below
     941              :      extraction rules, it's not always clear what that means.  */
     942     22479825 :   else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
     943     23321733 :            && GET_MODE_INNER (imode) == omode)
     944              :     ;
     945              :   /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     946              :      i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     947              :      surely isn't the cleanest way to represent this.  It's questionable
     948              :      if this ought to be represented at all -- why can't this all be hidden
     949              :      in post-reload splitters that make arbitrarily mode changes to the
     950              :      registers themselves.  */
     951     21112800 :   else if (VECTOR_MODE_P (omode)
     952     23339720 :            && GET_MODE_UNIT_SIZE (omode) == GET_MODE_UNIT_SIZE (imode))
     953              :     ;
     954              :   /* Subregs involving floating point modes are not allowed to
     955              :      change size unless it's an insert into a complex mode.
     956              :      Therefore (subreg:DI (reg:DF) 0) and (subreg:CS (reg:SF) 0) are fine, but
     957              :      (subreg:SI (reg:DF) 0) isn't.  */
     958     21011417 :   else if ((FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
     959       262474 :            && !COMPLEX_MODE_P (omode))
     960              :     {
     961       262314 :       if (! (known_eq (isize, osize)
     962              :              /* LRA can use subreg to store a floating point value in
     963              :                 an integer mode.  Although the floating point and the
     964              :                 integer modes need the same number of hard registers,
     965              :                 the size of floating point mode can be less than the
     966              :                 integer mode.  LRA also uses subregs for a register
     967              :                 should be used in different mode in on insn.  */
     968        75242 :              || lra_in_progress))
     969              :         return false;
     970              :     }
     971              : 
     972              :   /* Paradoxical subregs must have offset zero.  */
     973     47155238 :   if (maybe_gt (osize, isize) && !known_eq (offset, 0U))
     974              :     return false;
     975              : 
     976              :   /* Verify that the offset is representable.  */
     977              : 
     978              :   /* Ensure that subregs of hard registers can be folded.  In other words,
     979              :      the hardware register must be valid in the subreg's outer mode,
     980              :      and consequently the subreg can be replaced with a hardware register.  */
     981     47155238 :   if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
     982              :     {
     983       347248 :       unsigned int regno = REGNO (reg);
     984              : 
     985       347248 :       if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
     986       673969 :           && GET_MODE_INNER (imode) == omode)
     987              :         ;
     988       122522 :       else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
     989              :         return false;
     990              : 
     991              :       /* Pass true to allow_stack_regs because targets like x86
     992              :          expect to be able to take subregs of the stack pointer.  */
     993       347161 :       return simplify_subreg_regno (regno, imode, offset, omode, true) >= 0;
     994              :     }
     995              :   /* Do not allow normal SUBREG with stricter alignment than the inner MEM.
     996              : 
     997              :      PR120329: Combine can create paradoxical mem subregs even for
     998              :      strict-alignment targets.  Allow it until combine is fixed.  */
     999     46807990 :   else if (reg && MEM_P (reg) && STRICT_ALIGNMENT
    1000              :            && MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (omode)
    1001              :            && known_le (osize, isize))
    1002              :     return false;
    1003              : 
    1004              :   /* If ISIZE is greater than REGSIZE, the inner value is split into blocks
    1005              :      of size REGSIZE.  The outer size must then be ordered wrt REGSIZE,
    1006              :      otherwise we wouldn't know at compile time how many blocks the
    1007              :      outer mode occupies.  */
    1008     46807990 :   if (maybe_gt (isize, regsize) && !ordered_p (osize, regsize))
    1009              :     return false;
    1010              : 
    1011              :   /* For normal pseudo registers, we want most of the same checks.  Namely:
    1012              : 
    1013              :      Assume that the pseudo register will be allocated to hard registers
    1014              :      that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
    1015              :      the remainder must correspond to the lowpart of the containing hard
    1016              :      register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
    1017              :      otherwise it is at the lowest offset.
    1018              : 
    1019              :      Given that we've already checked the mode and offset alignment,
    1020              :      we only have to check subblock subregs here.
    1021              : 
    1022              :      For paradoxical little-endian registers, this check is redundant.  The
    1023              :      offset has already been validated to be zero.
    1024              : 
    1025              :      For paradoxical big-endian registers, this check is not valid
    1026              :      because the offset is zero.  */
    1027     46807990 :   if (maybe_lt (osize, regsize)
    1028     21491481 :       && known_le (osize, isize)
    1029     60815593 :       && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    1030              :     {
    1031              :       /* It is invalid for the target to pick a register size for a mode
    1032              :          that isn't ordered wrt to the size of that mode.  */
    1033     13992629 :       poly_uint64 block_size = ordered_min (isize, regsize);
    1034     13992629 :       unsigned int start_reg;
    1035     13992629 :       poly_uint64 offset_within_reg;
    1036     13992629 :       if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
    1037     13992629 :           || (BYTES_BIG_ENDIAN
    1038              :               ? maybe_ne (offset_within_reg, block_size - osize)
    1039     13992629 :               : maybe_ne (offset_within_reg, 0U)))
    1040        77458 :         return false;
    1041              :     }
    1042              :   return true;
    1043              : }
    1044              : 
    1045              : rtx
    1046     23516409 : gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
    1047              : {
    1048     23516409 :   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
    1049     23516409 :   return gen_rtx_raw_SUBREG (mode, reg, offset);
    1050              : }
    1051              : 
    1052              : /* Generate a SUBREG representing the least-significant part of REG if MODE
    1053              :    is smaller than mode of REG, otherwise paradoxical SUBREG.  */
    1054              : 
    1055              : rtx
    1056       590831 : gen_lowpart_SUBREG (machine_mode mode, rtx reg)
    1057              : {
    1058       590831 :   machine_mode inmode;
    1059              : 
    1060       590831 :   inmode = GET_MODE (reg);
    1061       590831 :   if (inmode == VOIDmode)
    1062            0 :     inmode = mode;
    1063       590831 :   return gen_rtx_SUBREG (mode, reg,
    1064       590831 :                          subreg_lowpart_offset (mode, inmode));
    1065              : }
    1066              : 
    1067              : rtx
    1068     97262409 : gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
    1069              :                       enum var_init_status status)
    1070              : {
    1071     97262409 :   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
    1072     97262409 :   PAT_VAR_LOCATION_STATUS (x) = status;
    1073     97262409 :   return x;
    1074              : }
    1075              : 
    1076              : 
    1077              : /* Create an rtvec and stores within it the RTXen passed in the arguments.  */
    1078              : 
    1079              : rtvec
    1080     24070075 : gen_rtvec (int n, ...)
    1081              : {
    1082     24070075 :   int i;
    1083     24070075 :   rtvec rt_val;
    1084     24070075 :   va_list p;
    1085              : 
    1086     24070075 :   va_start (p, n);
    1087              : 
    1088              :   /* Don't allocate an empty rtvec...  */
    1089     24070075 :   if (n == 0)
    1090              :     {
    1091            0 :       va_end (p);
    1092            0 :       return NULL_RTVEC;
    1093              :     }
    1094              : 
    1095     24070075 :   rt_val = rtvec_alloc (n);
    1096              : 
    1097     91889625 :   for (i = 0; i < n; i++)
    1098     43749475 :     rt_val->elem[i] = va_arg (p, rtx);
    1099              : 
    1100     24070075 :   va_end (p);
    1101     24070075 :   return rt_val;
    1102              : }
    1103              : 
    1104              : rtvec
    1105       465002 : gen_rtvec_v (int n, rtx *argp)
    1106              : {
    1107       465002 :   int i;
    1108       465002 :   rtvec rt_val;
    1109              : 
    1110              :   /* Don't allocate an empty rtvec...  */
    1111       465002 :   if (n == 0)
    1112              :     return NULL_RTVEC;
    1113              : 
    1114       465002 :   rt_val = rtvec_alloc (n);
    1115              : 
    1116      2126331 :   for (i = 0; i < n; i++)
    1117      1196327 :     rt_val->elem[i] = *argp++;
    1118              : 
    1119              :   return rt_val;
    1120              : }
    1121              : 
    1122              : rtvec
    1123            0 : gen_rtvec_v (int n, rtx_insn **argp)
    1124              : {
    1125            0 :   int i;
    1126            0 :   rtvec rt_val;
    1127              : 
    1128              :   /* Don't allocate an empty rtvec...  */
    1129            0 :   if (n == 0)
    1130              :     return NULL_RTVEC;
    1131              : 
    1132            0 :   rt_val = rtvec_alloc (n);
    1133              : 
    1134            0 :   for (i = 0; i < n; i++)
    1135            0 :     rt_val->elem[i] = *argp++;
    1136              : 
    1137              :   return rt_val;
    1138              : }
    1139              : 
    1140              : 
    1141              : /* Return the number of bytes between the start of an OUTER_MODE
    1142              :    in-memory value and the start of an INNER_MODE in-memory value,
    1143              :    given that the former is a lowpart of the latter.  It may be a
    1144              :    paradoxical lowpart, in which case the offset will be negative
    1145              :    on big-endian targets.  */
    1146              : 
    1147              : poly_int64
    1148     72863030 : byte_lowpart_offset (machine_mode outer_mode,
    1149              :                      machine_mode inner_mode)
    1150              : {
    1151     72863030 :   if (paradoxical_subreg_p (outer_mode, inner_mode))
    1152      3790704 :     return -subreg_lowpart_offset (inner_mode, outer_mode);
    1153              :   else
    1154     69072326 :     return subreg_lowpart_offset (outer_mode, inner_mode);
    1155              : }
    1156              : 
    1157              : /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
    1158              :    from address X.  For paradoxical big-endian subregs this is a
    1159              :    negative value, otherwise it's the same as OFFSET.  */
    1160              : 
    1161              : poly_int64
    1162     11859672 : subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
    1163              :                       poly_uint64 offset)
    1164              : {
    1165     11859672 :   if (paradoxical_subreg_p (outer_mode, inner_mode))
    1166              :     {
    1167      3376646 :       gcc_assert (known_eq (offset, 0U));
    1168      3376646 :       return -subreg_lowpart_offset (inner_mode, outer_mode);
    1169              :     }
    1170      8483026 :   return offset;
    1171              : }
    1172              : 
    1173              : /* As above, but return the offset that existing subreg X would have
    1174              :    if SUBREG_REG (X) were stored in memory.  The only significant thing
    1175              :    about the current SUBREG_REG is its mode.  */
    1176              : 
    1177              : poly_int64
    1178       582517 : subreg_memory_offset (const_rtx x)
    1179              : {
    1180      1165034 :   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
    1181       582517 :                                SUBREG_BYTE (x));
    1182              : }
    1183              : 
    1184              : /* Generate a REG rtx for a new pseudo register of mode MODE.
    1185              :    This pseudo is assigned the next sequential register number.  */
    1186              : 
    1187              : rtx
    1188     66892029 : gen_reg_rtx (machine_mode mode)
    1189              : {
    1190     66892029 :   rtx val;
    1191     66892029 :   unsigned int align = GET_MODE_ALIGNMENT (mode);
    1192              : 
    1193     66892029 :   gcc_assert (can_create_pseudo_p ());
    1194              : 
    1195              :   /* If a virtual register with bigger mode alignment is generated,
    1196              :      increase stack alignment estimation because it might be spilled
    1197              :      to stack later.  */
    1198     66892029 :   if (SUPPORTS_STACK_ALIGNMENT
    1199     66892029 :       && crtl->stack_alignment_estimated < align
    1200       350435 :       && !crtl->stack_realign_processed)
    1201              :     {
    1202       349361 :       unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
    1203       349361 :       if (crtl->stack_alignment_estimated < min_align)
    1204       349361 :         crtl->stack_alignment_estimated = min_align;
    1205              :     }
    1206              : 
    1207     66892029 :   if (generating_concat_p
    1208     50409206 :       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
    1209     50409206 :           || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    1210              :     {
    1211              :       /* For complex modes, don't make a single pseudo.
    1212              :          Instead, make a CONCAT of two pseudos.
    1213              :          This allows noncontiguous allocation of the real and imaginary parts,
    1214              :          which makes much better code.  Besides, allocating DCmode
    1215              :          pseudos overstrains reload on some machines like the 386.  */
    1216       204631 :       rtx realpart, imagpart;
    1217       204631 :       machine_mode partmode = GET_MODE_INNER (mode);
    1218              : 
    1219       204631 :       realpart = gen_reg_rtx (partmode);
    1220       204631 :       imagpart = gen_reg_rtx (partmode);
    1221       204631 :       return gen_rtx_CONCAT (mode, realpart, imagpart);
    1222              :     }
    1223              : 
    1224              :   /* Do not call gen_reg_rtx with uninitialized crtl.  */
    1225     66687398 :   gcc_assert (crtl->emit.regno_pointer_align_length);
    1226              : 
    1227     66687398 :   crtl->emit.ensure_regno_capacity ();
    1228     66687398 :   gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
    1229              : 
    1230     66687398 :   val = gen_raw_REG (mode, reg_rtx_no);
    1231     66687398 :   regno_reg_rtx[reg_rtx_no++] = val;
    1232     66687398 :   return val;
    1233              : }
    1234              : 
    1235              : /* Make sure m_regno_pointer_align, and regno_reg_rtx are large
    1236              :    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
    1237              : 
    1238              : void
    1239     66688624 : emit_status::ensure_regno_capacity ()
    1240              : {
    1241     66688624 :   int old_size = regno_pointer_align_length;
    1242              : 
    1243     66688624 :   if (reg_rtx_no < old_size)
    1244              :     return;
    1245              : 
    1246       175675 :   int new_size = old_size * 2;
    1247       175675 :   while (reg_rtx_no >= new_size)
    1248            0 :     new_size *= 2;
    1249              : 
    1250       175675 :   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
    1251       175675 :   memset (tmp + old_size, 0, new_size - old_size);
    1252       175675 :   regno_pointer_align = (unsigned char *) tmp;
    1253              : 
    1254       175675 :   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
    1255       175675 :   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
    1256       175675 :   regno_reg_rtx = new1;
    1257              : 
    1258       175675 :   crtl->emit.regno_pointer_align_length = new_size;
    1259              : }
    1260              : 
    1261              : /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
    1262              : 
    1263              : bool
    1264        10201 : reg_is_parm_p (rtx reg)
    1265              : {
    1266        10201 :   tree decl;
    1267              : 
    1268        10201 :   gcc_assert (REG_P (reg));
    1269        10201 :   decl = REG_EXPR (reg);
    1270         7771 :   return (decl && TREE_CODE (decl) == PARM_DECL);
    1271              : }
    1272              : 
    1273              : /* Update NEW with the same attributes as REG, but with OFFSET added
    1274              :    to the REG_OFFSET.  */
    1275              : 
    1276              : static void
    1277     18053579 : update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
    1278              : {
    1279     18053579 :   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
    1280     18053579 :                                        REG_OFFSET (reg) + offset);
    1281     18053579 : }
    1282              : 
    1283              : /* Generate a register with same attributes as REG, but with OFFSET
    1284              :    added to the REG_OFFSET.  */
    1285              : 
    1286              : rtx
    1287     10612666 : gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
    1288              :                     poly_int64 offset)
    1289              : {
    1290              :   /* Use gen_raw_REG rather than gen_rtx_REG, because otherwise we'd
    1291              :      overwrite REG_ATTRS (and in the callers often ORIGINAL_REGNO too)
    1292              :      of the shared REG rtxes like stack_pointer_rtx etc.  This should
    1293              :      happen only for SUBREGs from DEBUG_INSNs, RA should ensure
    1294              :      multi-word registers don't overlap the special registers like
    1295              :      stack pointer.  */
    1296     10612666 :   rtx new_rtx = gen_raw_REG (mode, regno);
    1297              : 
    1298     10612666 :   update_reg_offset (new_rtx, reg, offset);
    1299     10612666 :   return new_rtx;
    1300              : }
    1301              : 
    1302              : /* Generate a new pseudo-register with the same attributes as REG, but
    1303              :    with OFFSET added to the REG_OFFSET.  */
    1304              : 
    1305              : rtx
    1306       603808 : gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
    1307              : {
    1308       603808 :   rtx new_rtx = gen_reg_rtx (mode);
    1309              : 
    1310       603808 :   update_reg_offset (new_rtx, reg, offset);
    1311       603808 :   return new_rtx;
    1312              : }
    1313              : 
    1314              : /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
    1315              :    new register is a (possibly paradoxical) lowpart of the old one.  */
    1316              : 
    1317              : void
    1318      2842601 : adjust_reg_mode (rtx reg, machine_mode mode)
    1319              : {
    1320      2842601 :   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
    1321      2842601 :   PUT_MODE (reg, mode);
    1322      2842601 : }
    1323              : 
    1324              : /* Copy REG's attributes from X, if X has any attributes.  If REG and X
    1325              :    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
    1326              : 
    1327              : void
    1328     73730794 : set_reg_attrs_from_value (rtx reg, rtx x)
    1329              : {
    1330     73730794 :   poly_int64 offset;
    1331     73730794 :   bool can_be_reg_pointer = true;
    1332              : 
    1333              :   /* Don't call mark_reg_pointer for incompatible pointer sign
    1334              :      extension.  */
    1335     73730794 :   while (GET_CODE (x) == SIGN_EXTEND
    1336              :          || GET_CODE (x) == ZERO_EXTEND
    1337     74687466 :          || GET_CODE (x) == TRUNCATE
    1338     74687466 :          || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    1339              :     {
    1340              : #if defined(POINTERS_EXTEND_UNSIGNED)
    1341       956672 :       if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
    1342              :            || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
    1343       375177 :            || (paradoxical_subreg_p (x)
    1344        18106 :                && ! (SUBREG_PROMOTED_VAR_P (x)
    1345            0 :                      && SUBREG_CHECK_PROMOTED_SIGN (x,
    1346              :                                                     POINTERS_EXTEND_UNSIGNED))))
    1347       974778 :           && !targetm.have_ptr_extend ())
    1348              :         can_be_reg_pointer = false;
    1349              : #endif
    1350       956672 :       x = XEXP (x, 0);
    1351              :     }
    1352              : 
    1353              :   /* Hard registers can be reused for multiple purposes within the same
    1354              :      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
    1355              :      on them is wrong.  */
    1356     73730794 :   if (HARD_REGISTER_P (reg))
    1357     47752791 :     return;
    1358              : 
    1359     25978003 :   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
    1360     25978003 :   if (MEM_P (x))
    1361              :     {
    1362      5911442 :       if (MEM_OFFSET_KNOWN_P (x))
    1363      2661698 :         REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
    1364      2661698 :                                          MEM_OFFSET (x) + offset);
    1365      5296540 :       if (can_be_reg_pointer && MEM_POINTER (x))
    1366       642945 :         mark_reg_pointer (reg, 0);
    1367              :     }
    1368     20681463 :   else if (REG_P (x))
    1369              :     {
    1370      6651602 :       if (REG_ATTRS (x))
    1371      3994504 :         update_reg_offset (reg, x, offset);
    1372      6651602 :       if (can_be_reg_pointer && REG_POINTER (x))
    1373      1874275 :         mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    1374              :     }
    1375              : }
    1376              : 
    1377              : /* Generate a REG rtx for a new pseudo register, copying the mode
    1378              :    and attributes from X.  */
    1379              : 
    1380              : rtx
    1381       561294 : gen_reg_rtx_and_attrs (rtx x)
    1382              : {
    1383       561294 :   rtx reg = gen_reg_rtx (GET_MODE (x));
    1384       561294 :   set_reg_attrs_from_value (reg, x);
    1385       561294 :   return reg;
    1386              : }
    1387              : 
    1388              : /* Set the register attributes for registers contained in PARM_RTX.
    1389              :    Use needed values from memory attributes of MEM.  */
    1390              : 
    1391              : void
    1392       110193 : set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
    1393              : {
    1394       110193 :   if (REG_P (parm_rtx))
    1395       110193 :     set_reg_attrs_from_value (parm_rtx, mem);
    1396            0 :   else if (GET_CODE (parm_rtx) == PARALLEL)
    1397              :     {
    1398              :       /* Check for a NULL entry in the first slot, used to indicate that the
    1399              :          parameter goes both on the stack and in registers.  */
    1400            0 :       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
    1401            0 :       for (; i < XVECLEN (parm_rtx, 0); i++)
    1402              :         {
    1403            0 :           rtx x = XVECEXP (parm_rtx, 0, i);
    1404            0 :           if (REG_P (XEXP (x, 0)))
    1405            0 :             REG_ATTRS (XEXP (x, 0))
    1406            0 :               = get_reg_attrs (MEM_EXPR (mem),
    1407            0 :                                INTVAL (XEXP (x, 1)));
    1408              :         }
    1409              :     }
    1410       110193 : }
    1411              : 
    1412              : /* Set the REG_ATTRS for registers in value X, given that X represents
    1413              :    decl T.  */
    1414              : 
    1415              : void
    1416     65679805 : set_reg_attrs_for_decl_rtl (tree t, rtx x)
    1417              : {
    1418     65679805 :   if (!t)
    1419              :     return;
    1420     65679611 :   tree tdecl = t;
    1421     65679611 :   if (GET_CODE (x) == SUBREG)
    1422              :     {
    1423          522 :       gcc_assert (subreg_lowpart_p (x));
    1424          522 :       x = SUBREG_REG (x);
    1425              :     }
    1426     65679611 :   if (REG_P (x))
    1427     43272820 :     REG_ATTRS (x)
    1428     43272820 :       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
    1429     43272820 :                                                DECL_P (tdecl)
    1430     21301348 :                                                ? DECL_MODE (tdecl)
    1431     21971472 :                                                : TYPE_MODE (TREE_TYPE (tdecl))));
    1432     65679611 :   if (GET_CODE (x) == CONCAT)
    1433              :     {
    1434       209934 :       if (REG_P (XEXP (x, 0)))
    1435       209934 :         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
    1436       209934 :       if (REG_P (XEXP (x, 1)))
    1437       209934 :         REG_ATTRS (XEXP (x, 1))
    1438       419868 :           = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    1439              :     }
    1440     65679611 :   if (GET_CODE (x) == PARALLEL)
    1441              :     {
    1442        60045 :       int i, start;
    1443              : 
    1444              :       /* Check for a NULL entry, used to indicate that the parameter goes
    1445              :          both on the stack and in registers.  */
    1446        60045 :       if (XEXP (XVECEXP (x, 0, 0), 0))
    1447              :         start = 0;
    1448              :       else
    1449            0 :         start = 1;
    1450              : 
    1451       172197 :       for (i = start; i < XVECLEN (x, 0); i++)
    1452              :         {
    1453       112152 :           rtx y = XVECEXP (x, 0, i);
    1454       112152 :           if (REG_P (XEXP (y, 0)))
    1455       112152 :             REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
    1456              :         }
    1457              :     }
    1458              : }
    1459              : 
    1460              : /* Assign the RTX X to declaration T.  */
    1461              : 
    1462              : void
    1463    671076736 : set_decl_rtl (tree t, rtx x)
    1464              : {
    1465    671076736 :   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
    1466    671076736 :   if (x)
    1467     33152503 :     set_reg_attrs_for_decl_rtl (t, x);
    1468    671076736 : }
    1469              : 
    1470              : /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
    1471              :    if the ABI requires the parameter to be passed by reference.  */
    1472              : 
    1473              : void
    1474      3191400 : set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
    1475              : {
    1476      3191400 :   DECL_INCOMING_RTL (t) = x;
    1477      3191400 :   if (x && !by_reference_p)
    1478      3186470 :     set_reg_attrs_for_decl_rtl (t, x);
    1479      3191400 : }
    1480              : 
    1481              : /* Identify REG (which may be a CONCAT) as a user register.  */
    1482              : 
    1483              : void
    1484      6302652 : mark_user_reg (rtx reg)
    1485              : {
    1486      6302652 :   if (GET_CODE (reg) == CONCAT)
    1487              :     {
    1488         3589 :       REG_USERVAR_P (XEXP (reg, 0)) = 1;
    1489         3589 :       REG_USERVAR_P (XEXP (reg, 1)) = 1;
    1490              :     }
    1491              :   else
    1492              :     {
    1493      6299063 :       gcc_assert (REG_P (reg));
    1494      6299063 :       REG_USERVAR_P (reg) = 1;
    1495              :     }
    1496      6302652 : }
    1497              : 
    1498              : /* Identify REG as a probable pointer register and show its alignment
    1499              :    as ALIGN, if nonzero.  */
    1500              : 
    1501              : void
    1502     17002161 : mark_reg_pointer (rtx reg, int align)
    1503              : {
    1504     17002161 :   if (! REG_POINTER (reg))
    1505              :     {
    1506     10750941 :       REG_POINTER (reg) = 1;
    1507              : 
    1508     10750941 :       if (align)
    1509      9337467 :         REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    1510              :     }
    1511      6251220 :   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    1512              :     /* We can no-longer be sure just how aligned this pointer is.  */
    1513      1293054 :     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    1514     17002161 : }
    1515              : 
    1516              : /* Return 1 plus largest pseudo reg number used in the current function.  */
    1517              : 
    1518              : int
    1519   6939244893 : max_reg_num (void)
    1520              : {
    1521   6939244893 :   return reg_rtx_no;
    1522              : }
    1523              : 
    1524              : /* Return 1 + the largest label number used so far in the current function.  */
    1525              : 
    1526              : int
    1527      3997003 : max_label_num (void)
    1528              : {
    1529      3997003 :   return label_num;
    1530              : }
    1531              : 
    1532              : /* Return first label number used in this function (if any were used).  */
    1533              : 
    1534              : int
    1535      2523872 : get_first_label_num (void)
    1536              : {
    1537      2523872 :   return first_label_num;
    1538              : }
    1539              : 
    1540              : /* If the rtx for label was created during the expansion of a nested
    1541              :    function, then first_label_num won't include this label number.
    1542              :    Fix this now so that array indices work later.  */
    1543              : 
    1544              : void
    1545        26407 : maybe_set_first_label_num (rtx_code_label *x)
    1546              : {
    1547        26407 :   if (CODE_LABEL_NUMBER (x) < first_label_num)
    1548          389 :     first_label_num = CODE_LABEL_NUMBER (x);
    1549        26407 : }
    1550              : 
    1551              : /* For use by the RTL function loader, when mingling with normal
    1552              :    functions.
    1553              :    Ensure that label_num is greater than the label num of X, to avoid
    1554              :    duplicate labels in the generated assembler.  */
    1555              : 
    1556              : void
    1557           28 : maybe_set_max_label_num (rtx_code_label *x)
    1558              : {
    1559           28 :   if (CODE_LABEL_NUMBER (x) >= label_num)
    1560           24 :     label_num = CODE_LABEL_NUMBER (x) + 1;
    1561           28 : }
    1562              : 
    1563              : 
    1564              : /* Return a value representing some low-order bits of X, where the number
    1565              :    of low-order bits is given by MODE.  Note that no conversion is done
    1566              :    between floating-point and fixed-point values, rather, the bit
    1567              :    representation is returned.
    1568              : 
    1569              :    This function handles the cases in common between gen_lowpart, below,
    1570              :    and two variants in cse.cc and combine.cc.  These are the cases that can
    1571              :    be safely handled at all points in the compilation.
    1572              : 
    1573              :    If this is not a case we can handle, return 0.  */
    1574              : 
    1575              : rtx
    1576     50639360 : gen_lowpart_common (machine_mode mode, rtx x)
    1577              : {
    1578    101295692 :   poly_uint64 msize = GET_MODE_SIZE (mode);
    1579     50647846 :   machine_mode innermode;
    1580              : 
    1581              :   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
    1582              :      so we have to make one up.  Yuk.  */
    1583     50647846 :   innermode = GET_MODE (x);
    1584     50647846 :   if (CONST_INT_P (x)
    1585     50647846 :       && known_le (msize * BITS_PER_UNIT,
    1586              :                    (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    1587     16124649 :     innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
    1588     34523197 :   else if (innermode == VOIDmode)
    1589       192935 :     innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
    1590              : 
    1591     50647846 :   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
    1592              : 
    1593     50647846 :   if (innermode == mode)
    1594              :     return x;
    1595              : 
    1596              :   /* The size of the outer and inner modes must be ordered.  */
    1597     57105788 :   poly_uint64 xsize = GET_MODE_SIZE (innermode);
    1598     28552894 :   if (!ordered_p (msize, xsize))
    1599              :     return 0;
    1600              : 
    1601     28552894 :   if (SCALAR_FLOAT_MODE_P (mode))
    1602              :     {
    1603              :       /* Don't allow paradoxical FLOAT_MODE subregs.  */
    1604       260200 :       if (maybe_gt (msize, xsize))
    1605              :         return 0;
    1606              :     }
    1607              :   else
    1608              :     {
    1609              :       /* MODE must occupy no more of the underlying registers than X.  */
    1610     28292694 :       poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
    1611     28292694 :       unsigned int mregs, xregs;
    1612     44331464 :       if (!can_div_away_from_zero_p (msize, regsize, &mregs)
    1613     28292694 :           || !can_div_away_from_zero_p (xsize, regsize, &xregs)
    1614     28292694 :           || mregs > xregs)
    1615      8413969 :         return 0;
    1616              :     }
    1617              : 
    1618     28551554 :   scalar_int_mode int_mode, int_innermode, from_mode;
    1619     28551554 :   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
    1620       151933 :       && is_a <scalar_int_mode> (mode, &int_mode)
    1621     28551554 :       && is_a <scalar_int_mode> (innermode, &int_innermode)
    1622     28703435 :       && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    1623              :     {
    1624              :       /* If we are getting the low-order part of something that has been
    1625              :          sign- or zero-extended, we can either just use the object being
    1626              :          extended or make a narrower extension.  If we want an even smaller
    1627              :          piece than the size of the object being extended, call ourselves
    1628              :          recursively.
    1629              : 
    1630              :          This case is used mostly by combine and cse.  */
    1631              : 
    1632       151881 :       if (from_mode == int_mode)
    1633              :         return XEXP (x, 0);
    1634        35178 :       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
    1635              :         return gen_lowpart_common (int_mode, XEXP (x, 0));
    1636         9720 :       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
    1637         2965 :         return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    1638              :     }
    1639     28399673 :   else if (GET_CODE (x) == SUBREG || REG_P (x)
    1640              :            || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
    1641     15788098 :            || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
    1642              :            || CONST_POLY_INT_P (x))
    1643     19987319 :     return lowpart_subreg (mode, x, innermode);
    1644              : 
    1645              :   /* Otherwise, we can't do this.  */
    1646              :   return 0;
    1647              : }
    1648              : 
    1649              : rtx
    1650         8702 : gen_highpart (machine_mode mode, rtx x)
    1651              : {
    1652        17404 :   poly_uint64 msize = GET_MODE_SIZE (mode);
    1653         8702 :   rtx result;
    1654              : 
    1655              :   /* This case loses if X is a subreg.  To catch bugs early,
    1656              :      complain if an invalid MODE is used even in other cases.  */
    1657        10408 :   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
    1658              :               || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
    1659              : 
    1660              :   /* gen_lowpart_common handles a lot of special cases due to needing to handle
    1661              :      paradoxical subregs; it only calls simplify_gen_subreg when certain that
    1662              :      it will produce something meaningful.  The only case we need to handle
    1663              :      specially here is MEM.  */
    1664         8702 :   if (MEM_P (x))
    1665              :     {
    1666          139 :       poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
    1667          139 :       return adjust_address (x, mode, offset);
    1668              :     }
    1669              : 
    1670         8563 :   result = simplify_gen_subreg (mode, x, GET_MODE (x),
    1671         8563 :                                 subreg_highpart_offset (mode, GET_MODE (x)));
    1672              :   /* Since we handle MEM directly above, we should never get a MEM back
    1673              :      from simplify_gen_subreg.  */
    1674         8563 :   gcc_assert (result && !MEM_P (result));
    1675              : 
    1676              :   return result;
    1677              : }
    1678              : 
    1679              : /* Like gen_highpart, but accept mode of EXP operand in case EXP can
    1680              :    be VOIDmode constant.  */
    1681              : rtx
    1682            0 : gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
    1683              : {
    1684            0 :   if (GET_MODE (exp) != VOIDmode)
    1685              :     {
    1686            0 :       gcc_assert (GET_MODE (exp) == innermode);
    1687            0 :       return gen_highpart (outermode, exp);
    1688              :     }
    1689            0 :   return simplify_gen_subreg (outermode, exp, innermode,
    1690            0 :                               subreg_highpart_offset (outermode, innermode));
    1691              : }
    1692              : 
    1693              : /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
    1694              :    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
    1695              : 
    1696              : poly_uint64
    1697    218707819 : subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
    1698              : {
    1699    218707819 :   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
    1700    218707819 :   if (maybe_gt (outer_bytes, inner_bytes))
    1701              :     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    1702     50293522 :     return 0;
    1703              : 
    1704    168414297 :   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    1705              :     return inner_bytes - outer_bytes;
    1706    168414297 :   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    1707    168414297 :     return 0;
    1708              :   else
    1709              :     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
    1710              : }
    1711              : 
    1712              : /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
    1713              :    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
    1714              : 
    1715              : poly_uint64
    1716        43845 : subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
    1717              : {
    1718        43845 :   gcc_assert (known_ge (inner_bytes, outer_bytes));
    1719              : 
    1720              :   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    1721              :     return 0;
    1722              :   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    1723        43845 :     return inner_bytes - outer_bytes;
    1724              :   else
    1725              :     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
    1726              :                                         (inner_bytes - outer_bytes)
    1727              :                                         * BITS_PER_UNIT);
    1728              : }
    1729              : 
    1730              : /* Return true iff X, assumed to be a SUBREG,
    1731              :    refers to the least significant part of its containing reg.
    1732              :    If X is not a SUBREG, always return true (it is its own low part!).  */
    1733              : 
    1734              : bool
    1735     50357842 : subreg_lowpart_p (const_rtx x)
    1736              : {
    1737     50357842 :   if (GET_CODE (x) != SUBREG)
    1738              :     return true;
    1739     50357842 :   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    1740              :     return false;
    1741              : 
    1742     50357842 :   return known_eq (subreg_lowpart_offset (GET_MODE (x),
    1743              :                                           GET_MODE (SUBREG_REG (x))),
    1744              :                    SUBREG_BYTE (x));
    1745              : }
    1746              : 
    1747              : /* Return subword OFFSET of operand OP.
    1748              :    The word number, OFFSET, is interpreted as the word number starting
    1749              :    at the low-order address.  OFFSET 0 is the low-order word if not
    1750              :    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
    1751              : 
    1752              :    If we cannot extract the required word, we return zero.  Otherwise,
    1753              :    an rtx corresponding to the requested word will be returned.
    1754              : 
    1755              :    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
    1756              :    reload has completed, a valid address will always be returned.  After
    1757              :    reload, if a valid address cannot be returned, we return zero.
    1758              : 
    1759              :    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
    1760              :    it is the responsibility of the caller.
    1761              : 
    1762              :    MODE is the mode of OP in case it is a CONST_INT.
    1763              : 
    1764              :    ??? This is still rather broken for some cases.  The problem for the
    1765              :    moment is that all callers of this thing provide no 'goal mode' to
    1766              :    tell us to work with.  This exists because all callers were written
    1767              :    in a word based SUBREG world.
    1768              :    Now use of this function can be deprecated by simplify_subreg in most
    1769              :    cases.
    1770              :  */
    1771              : 
    1772              : rtx
    1773        83137 : operand_subword (rtx op, poly_uint64 offset, int validate_address,
    1774              :                  machine_mode mode)
    1775              : {
    1776        83137 :   if (mode == VOIDmode)
    1777         2888 :     mode = GET_MODE (op);
    1778              : 
    1779         2888 :   gcc_assert (mode != VOIDmode);
    1780              : 
    1781              :   /* If OP is narrower than a word, fail.  */
    1782        83137 :   if (mode != BLKmode
    1783       179796 :       && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    1784              :     return 0;
    1785              : 
    1786              :   /* If we want a word outside OP, return zero.  */
    1787        83137 :   if (mode != BLKmode
    1788       179796 :       && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    1789            0 :     return const0_rtx;
    1790              : 
    1791              :   /* Form a new MEM at the requested address.  */
    1792        83137 :   if (MEM_P (op))
    1793              :     {
    1794        10926 :       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
    1795              : 
    1796         9662 :       if (! validate_address)
    1797              :         return new_rtx;
    1798              : 
    1799         9662 :       else if (reload_completed)
    1800              :         {
    1801            0 :           if (! strict_memory_address_addr_space_p (word_mode,
    1802              :                                                     XEXP (new_rtx, 0),
    1803            0 :                                                     MEM_ADDR_SPACE (op)))
    1804              :             return 0;
    1805              :         }
    1806              :       else
    1807         9662 :         return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    1808              :     }
    1809              : 
    1810              :   /* Rest can be handled by simplify_subreg.  */
    1811        88837 :   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
    1812              : }
    1813              : 
    1814              : /* Similar to `operand_subword', but never return 0.  If we can't
    1815              :    extract the required subword, put OP into a register and try again.
    1816              :    The second attempt must succeed.  We always validate the address in
    1817              :    this case.
    1818              : 
    1819              :    MODE is the mode of OP, in case it is CONST_INT.  */
    1820              : 
    1821              : rtx
    1822        62746 : operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
    1823              : {
    1824        62746 :   rtx result = operand_subword (op, offset, 1, mode);
    1825              : 
    1826        62746 :   if (result)
    1827              :     return result;
    1828              : 
    1829            0 :   if (mode != BLKmode && mode != VOIDmode)
    1830              :     {
    1831              :       /* If this is a register which cannot be accessed by words, copy it
    1832              :          to a pseudo register.  */
    1833            0 :       if (REG_P (op))
    1834            0 :         op = copy_to_reg (op);
    1835              :       else
    1836            0 :         op = force_reg (mode, op);
    1837              :     }
    1838              : 
    1839            0 :   result = operand_subword (op, offset, 1, mode);
    1840            0 :   gcc_assert (result);
    1841              : 
    1842              :   return result;
    1843              : }
    1844              : 
    1845     31117741 : mem_attrs::mem_attrs ()
    1846     31117741 :   : expr (NULL_TREE),
    1847     31117741 :     offset (0),
    1848     31117741 :     size (0),
    1849     31117741 :     alias (0),
    1850     31117741 :     align (0),
    1851     31117741 :     addrspace (ADDR_SPACE_GENERIC),
    1852     31117741 :     offset_known_p (false),
    1853     31117741 :     size_known_p (false)
    1854     31117741 : {}
    1855              : 
    1856              : /* Returns true if both MEM_EXPR can be considered equal
    1857              :    and false otherwise.  */
    1858              : 
    1859              : bool
    1860        47629 : mem_expr_equal_p (const_tree expr1, const_tree expr2)
    1861              : {
    1862        47629 :   if (expr1 == expr2)
    1863              :     return true;
    1864              : 
    1865        46971 :   if (! expr1 || ! expr2)
    1866              :     return false;
    1867              : 
    1868        36628 :   if (TREE_CODE (expr1) != TREE_CODE (expr2))
    1869              :     return false;
    1870              : 
    1871        35161 :   return operand_equal_p (expr1, expr2, 0);
    1872              : }
    1873              : 
    1874              : /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
    1875              :    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
    1876              :    -1 if not known.  */
    1877              : 
    1878              : int
    1879           20 : get_mem_align_offset (rtx mem, unsigned int align)
    1880              : {
    1881           20 :   tree expr;
    1882           20 :   poly_uint64 offset;
    1883              : 
    1884              :   /* This function can't use
    1885              :      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
    1886              :          || (MAX (MEM_ALIGN (mem),
    1887              :                   MAX (align, get_object_alignment (MEM_EXPR (mem))))
    1888              :              < align))
    1889              :        return -1;
    1890              :      else
    1891              :        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
    1892              :      for two reasons:
    1893              :      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
    1894              :        for <variable>.  get_inner_reference doesn't handle it and
    1895              :        even if it did, the alignment in that case needs to be determined
    1896              :        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
    1897              :      - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
    1898              :        isn't sufficiently aligned, the object it is in might be.  */
    1899           20 :   gcc_assert (MEM_P (mem));
    1900           20 :   expr = MEM_EXPR (mem);
    1901           20 :   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    1902              :     return -1;
    1903              : 
    1904           19 :   offset = MEM_OFFSET (mem);
    1905           19 :   if (DECL_P (expr))
    1906              :     {
    1907            2 :       if (DECL_ALIGN (expr) < align)
    1908              :         return -1;
    1909              :     }
    1910           17 :   else if (INDIRECT_REF_P (expr))
    1911              :     {
    1912            0 :       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
    1913              :         return -1;
    1914              :     }
    1915           17 :   else if (TREE_CODE (expr) == COMPONENT_REF)
    1916              :     {
    1917            0 :       while (1)
    1918              :         {
    1919            0 :           tree inner = TREE_OPERAND (expr, 0);
    1920            0 :           tree field = TREE_OPERAND (expr, 1);
    1921            0 :           tree byte_offset = component_ref_field_offset (expr);
    1922            0 :           tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
    1923              : 
    1924            0 :           poly_uint64 suboffset;
    1925            0 :           if (!byte_offset
    1926            0 :               || !poly_int_tree_p (byte_offset, &suboffset)
    1927            0 :               || !tree_fits_uhwi_p (bit_offset))
    1928            0 :             return -1;
    1929              : 
    1930            0 :           offset += suboffset;
    1931            0 :           offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
    1932              : 
    1933            0 :           if (inner == NULL_TREE)
    1934              :             {
    1935            0 :               if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
    1936              :                   < (unsigned int) align)
    1937              :                 return -1;
    1938            0 :               break;
    1939              :             }
    1940            0 :           else if (DECL_P (inner))
    1941              :             {
    1942            0 :               if (DECL_ALIGN (inner) < align)
    1943              :                 return -1;
    1944              :               break;
    1945              :             }
    1946            0 :           else if (TREE_CODE (inner) != COMPONENT_REF)
    1947              :             return -1;
    1948            0 :           expr = inner;
    1949            0 :         }
    1950              :     }
    1951              :   else
    1952              :     return -1;
    1953              : 
    1954            0 :   HOST_WIDE_INT misalign;
    1955            0 :   if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    1956              :     return -1;
    1957            0 :   return misalign;
    1958              : }
    1959              : 
    1960              : /* Given REF (a MEM) and T, either the type of X or the expression
    1961              :    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
    1962              :    if we are making a new object of this type.  BITPOS is nonzero if
    1963              :    there is an offset outstanding on T that will be applied later.  */
    1964              : 
    1965              : void
    1966     31117741 : set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
    1967              :                                  poly_int64 bitpos)
    1968              : {
    1969     31117741 :   poly_int64 apply_bitpos = 0;
    1970     31117741 :   tree type;
    1971     31117741 :   class mem_attrs attrs, *defattrs, *refattrs;
    1972     31117741 :   addr_space_t as;
    1973              : 
    1974              :   /* It can happen that type_for_mode was given a mode for which there
    1975              :      is no language-level type.  In which case it returns NULL, which
    1976              :      we can see here.  */
    1977     31117741 :   if (t == NULL_TREE)
    1978            0 :     return;
    1979              : 
    1980     31117741 :   type = TYPE_P (t) ? t : TREE_TYPE (t);
    1981     31117741 :   if (type == error_mark_node)
    1982              :     return;
    1983              : 
    1984              :   /* If we have already set DECL_RTL = ref, get_alias_set will get the
    1985              :      wrong answer, as it assumes that DECL_RTL already has the right alias
    1986              :      info.  Callers should not set DECL_RTL until after the call to
    1987              :      set_mem_attributes.  */
    1988     31117741 :   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
    1989              : 
    1990              :   /* Get the alias set from the expression or type (perhaps using a
    1991              :      front-end routine) and use it.  */
    1992     31117741 :   attrs.alias = get_alias_set (t);
    1993              : 
    1994     31117741 :   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
    1995     31117741 :   MEM_POINTER (ref) = POINTER_TYPE_P (type);
    1996              : 
    1997              :   /* Default values from pre-existing memory attributes if present.  */
    1998     31117741 :   refattrs = MEM_ATTRS (ref);
    1999     31117741 :   if (refattrs)
    2000              :     {
    2001              :       /* ??? Can this ever happen?  Calling this routine on a MEM that
    2002              :          already carries memory attributes should probably be invalid.  */
    2003     12401354 :       attrs.expr = refattrs->expr;
    2004     12401354 :       attrs.offset_known_p = refattrs->offset_known_p;
    2005     12401354 :       attrs.offset = refattrs->offset;
    2006     12401354 :       attrs.size_known_p = refattrs->size_known_p;
    2007     12401354 :       attrs.size = refattrs->size;
    2008     12401354 :       attrs.align = refattrs->align;
    2009              :     }
    2010              : 
    2011              :   /* Otherwise, default values from the mode of the MEM reference.  */
    2012              :   else
    2013              :     {
    2014     18716387 :       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
    2015     18716387 :       gcc_assert (!defattrs->expr);
    2016     18716387 :       gcc_assert (!defattrs->offset_known_p);
    2017              : 
    2018              :       /* Respect mode size.  */
    2019     18716387 :       attrs.size_known_p = defattrs->size_known_p;
    2020     18716387 :       attrs.size = defattrs->size;
    2021              :       /* ??? Is this really necessary?  We probably should always get
    2022              :          the size from the type below.  */
    2023              : 
    2024              :       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
    2025              :          if T is an object, always compute the object alignment below.  */
    2026     18716387 :       if (TYPE_P (t))
    2027      2028288 :         attrs.align = defattrs->align;
    2028              :       else
    2029     16688099 :         attrs.align = BITS_PER_UNIT;
    2030              :       /* ??? If T is a type, respecting mode alignment may *also* be wrong
    2031              :          e.g. if the type carries an alignment attribute.  Should we be
    2032              :          able to simply always use TYPE_ALIGN?  */
    2033              :     }
    2034              : 
    2035              :   /* We can set the alignment from the type if we are making an object or if
    2036              :      this is an INDIRECT_REF.  */
    2037     31117741 :   if (objectp || TREE_CODE (t) == INDIRECT_REF)
    2038      9772941 :     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    2039              : 
    2040              :   /* If the size is known, we can set that.  */
    2041     31117741 :   tree new_size = TYPE_SIZE_UNIT (type);
    2042              : 
    2043              :   /* The address-space is that of the type.  */
    2044     31117741 :   as = TYPE_ADDR_SPACE (type);
    2045              : 
    2046              :   /* If T is not a type, we may be able to deduce some more information about
    2047              :      the expression.  */
    2048     31117741 :   if (! TYPE_P (t))
    2049              :     {
    2050     29083223 :       tree base;
    2051              : 
    2052     29083223 :       if (TREE_THIS_VOLATILE (t))
    2053       177615 :         MEM_VOLATILE_P (ref) = 1;
    2054              : 
    2055              :       /* Now remove any conversions: they don't change what the underlying
    2056              :          object is.  Likewise for SAVE_EXPR.  */
    2057     29087729 :       while (CONVERT_EXPR_P (t)
    2058              :              || TREE_CODE (t) == VIEW_CONVERT_EXPR
    2059     29087729 :              || TREE_CODE (t) == SAVE_EXPR)
    2060         4506 :         t = TREE_OPERAND (t, 0);
    2061              : 
    2062              :       /* Note whether this expression can trap.  */
    2063     29083223 :       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
    2064              : 
    2065     29083223 :       base = get_base_address (t);
    2066     29083223 :       if (base)
    2067              :         {
    2068     29083223 :           if (DECL_P (base)
    2069     16856173 :               && TREE_READONLY (base)
    2070      2175018 :               && (TREE_STATIC (base) || DECL_EXTERNAL (base))
    2071     30936900 :               && !TREE_THIS_VOLATILE (base))
    2072      1853302 :             MEM_READONLY_P (ref) = 1;
    2073              : 
    2074              :           /* Mark static const strings readonly as well.  */
    2075     29083223 :           if (TREE_CODE (base) == STRING_CST
    2076       326107 :               && TREE_READONLY (base)
    2077     29384973 :               && TREE_STATIC (base))
    2078       301750 :             MEM_READONLY_P (ref) = 1;
    2079              : 
    2080              :           /* Address-space information is on the base object.  */
    2081     29083223 :           if (TREE_CODE (base) == MEM_REF
    2082     29083223 :               || TREE_CODE (base) == TARGET_MEM_REF)
    2083     11900931 :             as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
    2084              :                                                                       0))));
    2085              :           else
    2086     17182292 :             as = TYPE_ADDR_SPACE (TREE_TYPE (base));
    2087              :         }
    2088              : 
    2089              :       /* If this expression uses it's parent's alias set, mark it such
    2090              :          that we won't change it.  */
    2091     29083223 :       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
    2092      1159114 :         MEM_KEEP_ALIAS_SET_P (ref) = 1;
    2093              : 
    2094              :       /* If this is a decl, set the attributes of the MEM from it.  */
    2095     29083223 :       if (DECL_P (t))
    2096              :         {
    2097      7771226 :           attrs.expr = t;
    2098      7771226 :           attrs.offset_known_p = true;
    2099      7771226 :           attrs.offset = 0;
    2100      7771226 :           apply_bitpos = bitpos;
    2101      7771226 :           new_size = DECL_SIZE_UNIT (t);
    2102              :         }
    2103              : 
    2104              :       /* ???  If we end up with a constant or a descriptor do not
    2105              :          record a MEM_EXPR.  */
    2106     21311997 :       else if (CONSTANT_CLASS_P (t)
    2107     21183460 :                || TREE_CODE (t) == CONSTRUCTOR)
    2108              :         ;
    2109              : 
    2110              :       /* If this is a field reference, record it.  */
    2111     21183460 :       else if (TREE_CODE (t) == COMPONENT_REF)
    2112              :         {
    2113      9521767 :           attrs.expr = t;
    2114      9521767 :           attrs.offset_known_p = true;
    2115      9521767 :           attrs.offset = 0;
    2116      9521767 :           apply_bitpos = bitpos;
    2117      9521767 :           if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
    2118        67415 :             new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
    2119              :         }
    2120              : 
    2121              :       /* Else record it.  */
    2122              :       else
    2123              :         {
    2124     11661693 :           gcc_assert (handled_component_p (t)
    2125              :                       || TREE_CODE (t) == MEM_REF
    2126              :                       || TREE_CODE (t) == TARGET_MEM_REF);
    2127     11661693 :           attrs.expr = t;
    2128     11661693 :           attrs.offset_known_p = true;
    2129     11661693 :           attrs.offset = 0;
    2130     11661693 :           apply_bitpos = bitpos;
    2131              :         }
    2132              : 
    2133              :       /* If this is a reference based on a partitioned decl replace the
    2134              :          base with a MEM_REF of the pointer representative we created
    2135              :          during stack slot partitioning.  */
    2136     29083223 :       if (attrs.expr
    2137     28954686 :           && VAR_P (base)
    2138     13816806 :           && ! is_global_var (base)
    2139     37539048 :           && cfun->gimple_df->decls_to_pointers != NULL)
    2140              :         {
    2141      3836063 :           tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
    2142      3836063 :           if (namep)
    2143              :             {
    2144      2219925 :               attrs.expr = unshare_expr (attrs.expr);
    2145      2219925 :               tree *orig_base = &attrs.expr;
    2146      4706295 :               while (handled_component_p (*orig_base))
    2147      2486370 :                 orig_base = &TREE_OPERAND (*orig_base, 0);
    2148      2219925 :               if (TREE_CODE (*orig_base) == MEM_REF
    2149      2219925 :                   || TREE_CODE (*orig_base) == TARGET_MEM_REF)
    2150       661820 :                 TREE_OPERAND (*orig_base, 0) = *namep;
    2151              :               else
    2152              :                 {
    2153      1558105 :                   tree aptrt = reference_alias_ptr_type (*orig_base);
    2154      1558105 :                   *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base),
    2155              :                                        *namep, build_int_cst (aptrt, 0));
    2156              :                 }
    2157              :             }
    2158              :         }
    2159              : 
    2160              :       /* Compute the alignment.  */
    2161     29083223 :       unsigned int obj_align;
    2162     29083223 :       unsigned HOST_WIDE_INT obj_bitpos;
    2163     29083223 :       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
    2164     29083223 :       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
    2165     29083223 :       if (diff_align != 0)
    2166      3841760 :         obj_align = MIN (obj_align, diff_align);
    2167     29083223 :       attrs.align = MAX (attrs.align, obj_align);
    2168              :     }
    2169              : 
    2170     31117741 :   poly_uint64 const_size;
    2171     31117741 :   if (poly_int_tree_p (new_size, &const_size))
    2172              :     {
    2173     30525628 :       attrs.size_known_p = true;
    2174     30525628 :       attrs.size = const_size;
    2175              :     }
    2176              : 
    2177              :   /* If we modified OFFSET based on T, then subtract the outstanding
    2178              :      bit position offset.  Similarly, increase the size of the accessed
    2179              :      object to contain the negative offset.  */
    2180     31117741 :   if (maybe_ne (apply_bitpos, 0))
    2181              :     {
    2182      2714281 :       gcc_assert (attrs.offset_known_p);
    2183      2714281 :       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
    2184      2714281 :       attrs.offset -= bytepos;
    2185      2714281 :       if (attrs.size_known_p)
    2186     31117741 :         attrs.size += bytepos;
    2187              :     }
    2188              : 
    2189              :   /* Now set the attributes we computed above.  */
    2190     31117741 :   attrs.addrspace = as;
    2191     31117741 :   set_mem_attrs (ref, &attrs);
    2192              : }
    2193              : 
    2194              : void
    2195     27129542 : set_mem_attributes (rtx ref, tree t, int objectp)
    2196              : {
    2197     27129542 :   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
    2198     27129542 : }
    2199              : 
    2200              : /* Set the alias set of MEM to SET.  */
    2201              : 
    2202              : void
    2203      7672575 : set_mem_alias_set (rtx mem, alias_set_type set)
    2204              : {
    2205              :   /* If the new and old alias sets don't conflict, something is wrong.  */
    2206     10785037 :   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
    2207      7672575 :   mem_attrs attrs (*get_mem_attrs (mem));
    2208      7672575 :   attrs.alias = set;
    2209      7672575 :   set_mem_attrs (mem, &attrs);
    2210      7672575 : }
    2211              : 
    2212              : /* Set the address space of MEM to ADDRSPACE (target-defined).  */
    2213              : 
    2214              : void
    2215      9908863 : set_mem_addr_space (rtx mem, addr_space_t addrspace)
    2216              : {
    2217      9908863 :   mem_attrs attrs (*get_mem_attrs (mem));
    2218      9908863 :   attrs.addrspace = addrspace;
    2219      9908863 :   set_mem_attrs (mem, &attrs);
    2220      9908863 : }
    2221              : 
    2222              : /* Set the alignment of MEM to ALIGN bits.  */
    2223              : 
    2224              : void
    2225     12573378 : set_mem_align (rtx mem, unsigned int align)
    2226              : {
    2227     12573378 :   mem_attrs attrs (*get_mem_attrs (mem));
    2228     12573378 :   attrs.align = align;
    2229     12573378 :   set_mem_attrs (mem, &attrs);
    2230     12573378 : }
    2231              : 
    2232              : /* Set the expr for MEM to EXPR.  */
    2233              : 
    2234              : void
    2235      6743195 : set_mem_expr (rtx mem, tree expr)
    2236              : {
    2237      6743195 :   mem_attrs attrs (*get_mem_attrs (mem));
    2238      6743195 :   attrs.expr = expr;
    2239      6743195 :   set_mem_attrs (mem, &attrs);
    2240      6743195 : }
    2241              : 
    2242              : /* Set the offset of MEM to OFFSET.  */
    2243              : 
    2244              : void
    2245          134 : set_mem_offset (rtx mem, poly_int64 offset)
    2246              : {
    2247          134 :   mem_attrs attrs (*get_mem_attrs (mem));
    2248          134 :   attrs.offset_known_p = true;
    2249          134 :   attrs.offset = offset;
    2250          134 :   set_mem_attrs (mem, &attrs);
    2251          134 : }
    2252              : 
    2253              : /* Clear the offset of MEM.  */
    2254              : 
    2255              : void
    2256        95815 : clear_mem_offset (rtx mem)
    2257              : {
    2258        95815 :   mem_attrs attrs (*get_mem_attrs (mem));
    2259        95815 :   attrs.offset_known_p = false;
    2260        95815 :   set_mem_attrs (mem, &attrs);
    2261        95815 : }
    2262              : 
    2263              : /* Set the size of MEM to SIZE.  */
    2264              : 
    2265              : void
    2266      2471934 : set_mem_size (rtx mem, poly_int64 size)
    2267              : {
    2268      2471934 :   mem_attrs attrs (*get_mem_attrs (mem));
    2269      2471934 :   attrs.size_known_p = true;
    2270      2471934 :   attrs.size = size;
    2271      2471934 :   set_mem_attrs (mem, &attrs);
    2272      2471934 : }
    2273              : 
    2274              : /* Clear the size of MEM.  */
    2275              : 
    2276              : void
    2277          543 : clear_mem_size (rtx mem)
    2278              : {
    2279          543 :   mem_attrs attrs (*get_mem_attrs (mem));
    2280          543 :   attrs.size_known_p = false;
    2281          543 :   set_mem_attrs (mem, &attrs);
    2282          543 : }
    2283              : 
    2284              : /* Return a memory reference like MEMREF, but with its mode changed to MODE
    2285              :    and its address changed to ADDR.  (VOIDmode means don't change the mode.
    2286              :    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
    2287              :    returned memory location is required to be valid.  INPLACE is true if any
    2288              :    changes can be made directly to MEMREF or false if MEMREF must be treated
    2289              :    as immutable.
    2290              : 
    2291              :    The memory attributes are not changed.  */
    2292              : 
    2293              : static rtx
    2294    496874359 : change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
    2295              :                   bool inplace)
    2296              : {
    2297    496874359 :   addr_space_t as;
    2298    496874359 :   rtx new_rtx;
    2299              : 
    2300    496874359 :   gcc_assert (MEM_P (memref));
    2301    496874359 :   as = MEM_ADDR_SPACE (memref);
    2302    496874359 :   if (mode == VOIDmode)
    2303    470586300 :     mode = GET_MODE (memref);
    2304    496874359 :   if (addr == 0)
    2305           60 :     addr = XEXP (memref, 0);
    2306    476870394 :   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
    2307    835616331 :       && (!validate || memory_address_addr_space_p (mode, addr, as)))
    2308    338617316 :     return memref;
    2309              : 
    2310              :   /* Don't validate address for LRA.  LRA can make the address valid
    2311              :      by itself in most efficient way.  */
    2312    158257043 :   if (validate && !lra_in_progress)
    2313              :     {
    2314     22929676 :       if (reload_in_progress || reload_completed)
    2315      3800455 :         gcc_assert (memory_address_addr_space_p (mode, addr, as));
    2316              :       else
    2317     19129221 :         addr = memory_address_addr_space (mode, addr, as);
    2318              :     }
    2319              : 
    2320    158257043 :   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    2321              :     return memref;
    2322              : 
    2323    154253673 :   if (inplace)
    2324              :     {
    2325       258926 :       XEXP (memref, 0) = addr;
    2326       258926 :       return memref;
    2327              :     }
    2328              : 
    2329    153994747 :   new_rtx = gen_rtx_MEM (mode, addr);
    2330    153994747 :   MEM_COPY_ATTRIBUTES (new_rtx, memref);
    2331    153994747 :   return new_rtx;
    2332              : }
    2333              : 
    2334              : /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
    2335              :    way we are changing MEMREF, so we only preserve the alias set.  */
    2336              : 
    2337              : rtx
    2338       306688 : change_address (rtx memref, machine_mode mode, rtx addr)
    2339              : {
    2340       306688 :   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
    2341       306688 :   machine_mode mmode = GET_MODE (new_rtx);
    2342       306688 :   class mem_attrs *defattrs;
    2343              : 
    2344       306688 :   mem_attrs attrs (*get_mem_attrs (memref));
    2345       306688 :   defattrs = mode_mem_attrs[(int) mmode];
    2346       306688 :   attrs.expr = NULL_TREE;
    2347       306688 :   attrs.offset_known_p = false;
    2348       306688 :   attrs.size_known_p = defattrs->size_known_p;
    2349       306688 :   attrs.size = defattrs->size;
    2350       306688 :   attrs.align = defattrs->align;
    2351              : 
    2352              :   /* If there are no changes, just return the original memory reference.  */
    2353       306688 :   if (new_rtx == memref)
    2354              :     {
    2355        70105 :       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
    2356              :         return new_rtx;
    2357              : 
    2358        56763 :       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
    2359        56763 :       MEM_COPY_ATTRIBUTES (new_rtx, memref);
    2360              :     }
    2361              : 
    2362       293346 :   set_mem_attrs (new_rtx, &attrs);
    2363       293346 :   return new_rtx;
    2364              : }
    2365              : 
    2366              : /* Return a memory reference like MEMREF, but with its mode changed
    2367              :    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
    2368              :    nonzero, the memory address is forced to be valid.
    2369              :    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
    2370              :    and the caller is responsible for adjusting MEMREF base register.
    2371              :    If ADJUST_OBJECT is zero, the underlying object associated with the
    2372              :    memory reference is left unchanged and the caller is responsible for
    2373              :    dealing with it.  Otherwise, if the new memory reference is outside
    2374              :    the underlying object, even partially, then the object is dropped.
    2375              :    SIZE, if nonzero, is the size of an access in cases where MODE
    2376              :    has no inherent size.  */
    2377              : 
    2378              : rtx
    2379     34772918 : adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
    2380              :                   int validate, int adjust_address, int adjust_object,
    2381              :                   poly_int64 size)
    2382              : {
    2383     34772918 :   rtx addr = XEXP (memref, 0);
    2384     34772918 :   rtx new_rtx;
    2385     34772918 :   scalar_int_mode address_mode;
    2386     34772918 :   class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
    2387     34772918 :   unsigned HOST_WIDE_INT max_align;
    2388              : #ifdef POINTERS_EXTEND_UNSIGNED
    2389     34772918 :   scalar_int_mode pointer_mode
    2390     34772918 :     = targetm.addr_space.pointer_mode (attrs.addrspace);
    2391              : #endif
    2392              : 
    2393              :   /* VOIDmode means no mode change for change_address_1.  */
    2394     34772918 :   if (mode == VOIDmode)
    2395        16335 :     mode = GET_MODE (memref);
    2396              : 
    2397              :   /* Take the size of non-BLKmode accesses from the mode.  */
    2398     34772918 :   defattrs = mode_mem_attrs[(int) mode];
    2399     34772918 :   if (defattrs->size_known_p)
    2400     28864935 :     size = defattrs->size;
    2401              : 
    2402              :   /* If there are no changes, just return the original memory reference.  */
    2403     34772918 :   if (mode == GET_MODE (memref)
    2404     14918552 :       && known_eq (offset, 0)
    2405      8792178 :       && (known_eq (size, 0)
    2406      3713006 :           || (attrs.size_known_p && known_eq (attrs.size, size)))
    2407     48422787 :       && (!validate || memory_address_addr_space_p (mode, addr,
    2408      4858316 :                                                     attrs.addrspace)))
    2409      8791547 :     return memref;
    2410              : 
    2411              :   /* ??? Prefer to create garbage instead of creating shared rtl.
    2412              :      This may happen even if offset is nonzero -- consider
    2413              :      (plus (plus reg reg) const_int) -- so do this always.  */
    2414     25981371 :   addr = copy_rtx (addr);
    2415              : 
    2416              :   /* Convert a possibly large offset to a signed value within the
    2417              :      range of the target address space.  */
    2418     25981371 :   address_mode = get_address_mode (memref);
    2419     25981371 :   offset = trunc_int_for_mode (offset, address_mode);
    2420              : 
    2421     25981371 :   if (adjust_address)
    2422              :     {
    2423              :       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
    2424              :          object, we can merge it into the LO_SUM.  */
    2425     25956242 :       if (GET_MODE (memref) != BLKmode
    2426     11900770 :           && GET_CODE (addr) == LO_SUM
    2427     25956242 :           && known_in_range_p (offset,
    2428            0 :                                0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
    2429            0 :                                    / BITS_PER_UNIT)))
    2430            0 :         addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
    2431              :                                plus_constant (address_mode,
    2432              :                                               XEXP (addr, 1), offset));
    2433              : #ifdef POINTERS_EXTEND_UNSIGNED
    2434              :       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
    2435              :          in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
    2436              :          the fact that pointers are not allowed to overflow.  */
    2437     25956242 :       else if (POINTERS_EXTEND_UNSIGNED > 0
    2438     25956242 :                && GET_CODE (addr) == ZERO_EXTEND
    2439           12 :                && GET_MODE (XEXP (addr, 0)) == pointer_mode
    2440     25956244 :                && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
    2441            2 :         addr = gen_rtx_ZERO_EXTEND (address_mode,
    2442              :                                     plus_constant (pointer_mode,
    2443              :                                                    XEXP (addr, 0), offset));
    2444              : #endif
    2445              :       else
    2446     25956240 :         addr = plus_constant (address_mode, addr, offset);
    2447              :     }
    2448              : 
    2449     25981371 :   new_rtx = change_address_1 (memref, mode, addr, validate, false);
    2450              : 
    2451              :   /* If the address is a REG, change_address_1 rightfully returns memref,
    2452              :      but this would destroy memref's MEM_ATTRS.  */
    2453     25981371 :   if (new_rtx == memref && maybe_ne (offset, 0))
    2454        25084 :     new_rtx = copy_rtx (new_rtx);
    2455              : 
    2456              :   /* Conservatively drop the object if we don't know where we start from.  */
    2457     25981371 :   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    2458              :     {
    2459         4309 :       attrs.expr = NULL_TREE;
    2460         4309 :       attrs.alias = 0;
    2461              :     }
    2462              : 
    2463              :   /* Compute the new values of the memory attributes due to this adjustment.
    2464              :      We add the offsets and update the alignment.  */
    2465     25981371 :   if (attrs.offset_known_p)
    2466              :     {
    2467     22135934 :       attrs.offset += offset;
    2468              : 
    2469              :       /* Drop the object if the new left end is not within its bounds.  */
    2470     22135934 :       if (adjust_object && maybe_lt (attrs.offset, 0))
    2471              :         {
    2472        26074 :           attrs.expr = NULL_TREE;
    2473        26074 :           attrs.alias = 0;
    2474              :         }
    2475              :     }
    2476              : 
    2477              :   /* Compute the new alignment by taking the MIN of the alignment and the
    2478              :      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
    2479              :      if zero.  */
    2480     25981371 :   if (maybe_ne (offset, 0))
    2481              :     {
    2482     11886266 :       max_align = known_alignment (offset) * BITS_PER_UNIT;
    2483     11886266 :       attrs.align = MIN (attrs.align, max_align);
    2484              :     }
    2485              : 
    2486     25981371 :   if (maybe_ne (size, 0))
    2487              :     {
    2488              :       /* Drop the object if the new right end is not within its bounds.  */
    2489     25317454 :       if (adjust_object && maybe_gt (offset + size, attrs.size))
    2490              :         {
    2491        65716 :           attrs.expr = NULL_TREE;
    2492        65716 :           attrs.alias = 0;
    2493              :         }
    2494     25317454 :       attrs.size_known_p = true;
    2495     25317454 :       attrs.size = size;
    2496              :     }
    2497       663917 :   else if (attrs.size_known_p)
    2498              :     {
    2499       657551 :       gcc_assert (!adjust_object);
    2500     25981371 :       attrs.size -= offset;
    2501              :       /* ??? The store_by_pieces machinery generates negative sizes,
    2502              :          so don't assert for that here.  */
    2503              :     }
    2504              : 
    2505     25981371 :   set_mem_attrs (new_rtx, &attrs);
    2506              : 
    2507     25981371 :   return new_rtx;
    2508              : }
    2509              : 
    2510              : /* Return a memory reference like MEMREF, but with its mode changed
    2511              :    to MODE and its address changed to ADDR, which is assumed to be
    2512              :    MEMREF offset by OFFSET bytes.  If VALIDATE is
    2513              :    nonzero, the memory address is forced to be valid.  */
    2514              : 
    2515              : rtx
    2516        48509 : adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
    2517              :                              poly_int64 offset, int validate)
    2518              : {
    2519        48509 :   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
    2520        48509 :   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
    2521              : }
    2522              : 
    2523              : /* Return a memory reference like MEMREF, but whose address is changed by
    2524              :    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
    2525              :    known to be in OFFSET (possibly 1).  */
    2526              : 
    2527              : rtx
    2528       984126 : offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
    2529              : {
    2530       984126 :   rtx new_rtx, addr = XEXP (memref, 0);
    2531       984126 :   machine_mode address_mode;
    2532       984126 :   class mem_attrs *defattrs;
    2533              : 
    2534       984126 :   mem_attrs attrs (*get_mem_attrs (memref));
    2535       984126 :   address_mode = get_address_mode (memref);
    2536       984126 :   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    2537              : 
    2538              :   /* At this point we don't know _why_ the address is invalid.  It
    2539              :      could have secondary memory references, multiplies or anything.
    2540              : 
    2541              :      However, if we did go and rearrange things, we can wind up not
    2542              :      being able to recognize the magic around pic_offset_table_rtx.
    2543              :      This stuff is fragile, and is yet another example of why it is
    2544              :      bad to expose PIC machinery too early.  */
    2545      1966512 :   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
    2546       984126 :                                      attrs.addrspace)
    2547       150909 :       && GET_CODE (addr) == PLUS
    2548      1082793 :       && XEXP (addr, 0) == pic_offset_table_rtx)
    2549              :     {
    2550         1740 :       addr = force_reg (GET_MODE (addr), addr);
    2551         1740 :       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    2552              :     }
    2553              : 
    2554       984126 :   update_temp_slot_address (XEXP (memref, 0), new_rtx);
    2555       984126 :   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
    2556              : 
    2557              :   /* If there are no changes, just return the original memory reference.  */
    2558       984126 :   if (new_rtx == memref)
    2559              :     return new_rtx;
    2560              : 
    2561              :   /* Update the alignment to reflect the offset.  Reset the offset, which
    2562              :      we don't know.  */
    2563       984126 :   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
    2564       984126 :   attrs.offset_known_p = false;
    2565       984126 :   attrs.size_known_p = defattrs->size_known_p;
    2566       984126 :   attrs.size = defattrs->size;
    2567       984126 :   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
    2568       984126 :   set_mem_attrs (new_rtx, &attrs);
    2569       984126 :   return new_rtx;
    2570              : }
    2571              : 
    2572              : /* Return a memory reference like MEMREF, but with its address changed to
    2573              :    ADDR.  The caller is asserting that the actual piece of memory pointed
    2574              :    to is the same, just the form of the address is being changed, such as
    2575              :    by putting something into a register.  INPLACE is true if any changes
    2576              :    can be made directly to MEMREF or false if MEMREF must be treated as
    2577              :    immutable.  */
    2578              : 
    2579              : rtx
    2580     12626031 : replace_equiv_address (rtx memref, rtx addr, bool inplace)
    2581              : {
    2582              :   /* change_address_1 copies the memory attribute structure without change
    2583              :      and that's exactly what we want here.  */
    2584     12626031 :   update_temp_slot_address (XEXP (memref, 0), addr);
    2585     12626031 :   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
    2586              : }
    2587              : 
    2588              : /* Likewise, but the reference is not required to be valid.  */
    2589              : 
    2590              : rtx
    2591    456927634 : replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
    2592              : {
    2593    456927634 :   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
    2594              : }
    2595              : 
    2596              : 
    2597              : /* Emit insns to reload VALUE into a new register.  VALUE is an
    2598              :    auto-increment or auto-decrement RTX whose operand is a register or
    2599              :    memory location; so reloading involves incrementing that location.
    2600              : 
    2601              :    INC_AMOUNT is the number to increment or decrement by (always
    2602              :    positive and ignored for POST_MODIFY/PRE_MODIFY).
    2603              : 
    2604              :    Return a pseudo containing the result.  */
    2605              : rtx
    2606            0 : address_reload_context::emit_autoinc (rtx value, poly_int64 inc_amount)
    2607              : {
    2608              :   /* Since we're going to call recog, and might be called within recog,
    2609              :      we need to ensure we save and restore recog_data.  */
    2610            0 :   recog_data_saver recog_save;
    2611              : 
    2612              :   /* REG or MEM to be copied and incremented.  */
    2613            0 :   rtx incloc = XEXP (value, 0);
    2614              : 
    2615            0 :   const rtx_code code = GET_CODE (value);
    2616            0 :   const bool post_p
    2617            0 :     = code == POST_DEC || code == POST_INC || code == POST_MODIFY;
    2618              : 
    2619            0 :   bool plus_p = true;
    2620            0 :   rtx inc;
    2621            0 :   if (code == PRE_MODIFY || code == POST_MODIFY)
    2622              :     {
    2623            0 :       gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS
    2624              :                   || GET_CODE (XEXP (value, 1)) == MINUS);
    2625            0 :       gcc_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
    2626            0 :       plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
    2627            0 :       inc = XEXP (XEXP (value, 1), 1);
    2628              :     }
    2629              :   else
    2630              :     {
    2631            0 :       if (code == PRE_DEC || code == POST_DEC)
    2632            0 :         inc_amount = -inc_amount;
    2633              : 
    2634            0 :       inc = gen_int_mode (inc_amount, GET_MODE (value));
    2635              :     }
    2636              : 
    2637            0 :   rtx result;
    2638            0 :   if (!post_p && REG_P (incloc))
    2639              :     result = incloc;
    2640              :   else
    2641              :     {
    2642            0 :       result = get_reload_reg ();
    2643              :       /* First copy the location to the result register.  */
    2644            0 :       emit_insn (gen_move_insn (result, incloc));
    2645              :     }
    2646              : 
    2647              :   /* See if we can directly increment INCLOC.  */
    2648            0 :   rtx_insn *last = get_last_insn ();
    2649            0 :   rtx_insn *add_insn = emit_insn (plus_p
    2650            0 :                                   ? gen_add2_insn (incloc, inc)
    2651            0 :                                   : gen_sub2_insn (incloc, inc));
    2652            0 :   const int icode = recog_memoized (add_insn);
    2653            0 :   if (icode >= 0)
    2654              :     {
    2655            0 :       if (!post_p && result != incloc)
    2656            0 :         emit_insn (gen_move_insn (result, incloc));
    2657            0 :       return result;
    2658              :     }
    2659            0 :   delete_insns_since (last);
    2660              : 
    2661              :   /* If couldn't do the increment directly, must increment in RESULT.
    2662              :      The way we do this depends on whether this is pre- or
    2663              :      post-increment.  For pre-increment, copy INCLOC to the reload
    2664              :      register, increment it there, then save back.  */
    2665            0 :   if (!post_p)
    2666              :     {
    2667            0 :       if (incloc != result)
    2668            0 :         emit_insn (gen_move_insn (result, incloc));
    2669            0 :       if (plus_p)
    2670            0 :         emit_insn (gen_add2_insn (result, inc));
    2671              :       else
    2672            0 :         emit_insn (gen_sub2_insn (result, inc));
    2673            0 :       if (incloc != result)
    2674            0 :         emit_insn (gen_move_insn (incloc, result));
    2675              :     }
    2676              :   else
    2677              :     {
    2678              :       /* Post-increment.
    2679              : 
    2680              :          Because this might be a jump insn or a compare, and because
    2681              :          RESULT may not be available after the insn in an input
    2682              :          reload, we must do the incrementing before the insn being
    2683              :          reloaded for.
    2684              : 
    2685              :          We have already copied INCLOC to RESULT.  Increment the copy in
    2686              :          RESULT, save that back, then decrement RESULT so it has
    2687              :          the original value.  */
    2688            0 :       if (plus_p)
    2689            0 :         emit_insn (gen_add2_insn (result, inc));
    2690              :       else
    2691            0 :         emit_insn (gen_sub2_insn (result, inc));
    2692            0 :       emit_insn (gen_move_insn (incloc, result));
    2693              :       /* Restore non-modified value for the result.  We prefer this
    2694              :          way because it does not require an additional hard
    2695              :          register.  */
    2696            0 :       if (plus_p)
    2697              :         {
    2698            0 :           poly_int64 offset;
    2699            0 :           if (poly_int_rtx_p (inc, &offset))
    2700            0 :             emit_insn (gen_add2_insn (result,
    2701              :                                       gen_int_mode (-offset,
    2702            0 :                                                     GET_MODE (result))));
    2703              :           else
    2704            0 :             emit_insn (gen_sub2_insn (result, inc));
    2705              :         }
    2706              :       else
    2707            0 :         emit_insn (gen_add2_insn (result, inc));
    2708              :     }
    2709              :   return result;
    2710            0 : }
    2711              : 
    2712              : /* Return a memory reference like MEM, but with the address reloaded into a
    2713              :    pseudo register.  */
    2714              : 
    2715              : rtx
    2716            0 : force_reload_address (rtx mem)
    2717              : {
    2718            0 :   rtx addr = XEXP (mem, 0);
    2719            0 :   if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
    2720              :     {
    2721            0 :       const auto size = GET_MODE_SIZE (GET_MODE (mem));
    2722            0 :       addr = address_reload_context ().emit_autoinc (addr, size);
    2723              :     }
    2724              :   else
    2725            0 :     addr = force_reg (Pmode, addr);
    2726              : 
    2727            0 :   return replace_equiv_address (mem, addr);
    2728              : }
    2729              : 
    2730              : /* Return a memory reference like MEMREF, but with its mode widened to
    2731              :    MODE and offset by OFFSET.  This would be used by targets that e.g.
    2732              :    cannot issue QImode memory operations and have to use SImode memory
    2733              :    operations plus masking logic.  */
    2734              : 
    2735              : rtx
    2736            0 : widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
    2737              : {
    2738            0 :   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
    2739            0 :   poly_uint64 size = GET_MODE_SIZE (mode);
    2740              : 
    2741              :   /* If there are no changes, just return the original memory reference.  */
    2742            0 :   if (new_rtx == memref)
    2743              :     return new_rtx;
    2744              : 
    2745            0 :   mem_attrs attrs (*get_mem_attrs (new_rtx));
    2746              : 
    2747              :   /* If we don't know what offset we were at within the expression, then
    2748              :      we can't know if we've overstepped the bounds.  */
    2749            0 :   if (! attrs.offset_known_p)
    2750            0 :     attrs.expr = NULL_TREE;
    2751              : 
    2752            0 :   while (attrs.expr)
    2753              :     {
    2754            0 :       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
    2755              :         {
    2756            0 :           tree field = TREE_OPERAND (attrs.expr, 1);
    2757            0 :           tree offset = component_ref_field_offset (attrs.expr);
    2758              : 
    2759            0 :           if (! DECL_SIZE_UNIT (field))
    2760              :             {
    2761            0 :               attrs.expr = NULL_TREE;
    2762            0 :               break;
    2763              :             }
    2764              : 
    2765              :           /* Is the field at least as large as the access?  If so, ok,
    2766              :              otherwise strip back to the containing structure.  */
    2767            0 :           if (poly_int_tree_p (DECL_SIZE_UNIT (field))
    2768            0 :               && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
    2769            0 :               && known_ge (attrs.offset, 0))
    2770              :             break;
    2771              : 
    2772            0 :           poly_uint64 suboffset;
    2773            0 :           if (!poly_int_tree_p (offset, &suboffset))
    2774              :             {
    2775            0 :               attrs.expr = NULL_TREE;
    2776            0 :               break;
    2777              :             }
    2778              : 
    2779            0 :           attrs.expr = TREE_OPERAND (attrs.expr, 0);
    2780            0 :           attrs.offset += suboffset;
    2781            0 :           attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
    2782            0 :                            / BITS_PER_UNIT);
    2783              :         }
    2784              :       /* Similarly for the decl.  */
    2785            0 :       else if (DECL_P (attrs.expr)
    2786            0 :                && DECL_SIZE_UNIT (attrs.expr)
    2787            0 :                && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
    2788            0 :                && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
    2789              :                            size)
    2790            0 :                && known_ge (attrs.offset, 0))
    2791              :         break;
    2792              :       else
    2793              :         {
    2794              :           /* The widened memory access overflows the expression, which means
    2795              :              that it could alias another expression.  Zap it.  */
    2796            0 :           attrs.expr = NULL_TREE;
    2797            0 :           break;
    2798              :         }
    2799              :     }
    2800              : 
    2801            0 :   if (! attrs.expr)
    2802            0 :     attrs.offset_known_p = false;
    2803              : 
    2804              :   /* The widened memory may alias other stuff, so zap the alias set.  */
    2805              :   /* ??? Maybe use get_alias_set on any remaining expression.  */
    2806            0 :   attrs.alias = 0;
    2807            0 :   attrs.size_known_p = true;
    2808            0 :   attrs.size = size;
    2809            0 :   set_mem_attrs (new_rtx, &attrs);
    2810            0 :   return new_rtx;
    2811              : }
    2812              : 
    2813              : /* A fake decl that is used as the MEM_EXPR of spill slots.  */
    2814              : static GTY(()) tree spill_slot_decl;
    2815              : 
    2816              : tree
    2817    807235497 : get_spill_slot_decl (bool force_build_p)
    2818              : {
    2819    807235497 :   tree d = spill_slot_decl;
    2820    807235497 :   rtx rd;
    2821              : 
    2822    807235497 :   if (d || !force_build_p)
    2823              :     return d;
    2824              : 
    2825        30068 :   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
    2826              :                   VAR_DECL, get_identifier ("%sfp"), void_type_node);
    2827        30068 :   DECL_ARTIFICIAL (d) = 1;
    2828        30068 :   DECL_IGNORED_P (d) = 1;
    2829        30068 :   TREE_USED (d) = 1;
    2830        30068 :   spill_slot_decl = d;
    2831              : 
    2832        30068 :   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
    2833        30068 :   MEM_NOTRAP_P (rd) = 1;
    2834        30068 :   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
    2835        30068 :   attrs.alias = new_alias_set ();
    2836        30068 :   attrs.expr = d;
    2837        30068 :   set_mem_attrs (rd, &attrs);
    2838        30068 :   SET_DECL_RTL (d, rd);
    2839              : 
    2840        30068 :   return d;
    2841              : }
    2842              : 
    2843              : /* Given MEM, a result from assign_stack_local, fill in the memory
    2844              :    attributes as appropriate for a register allocator spill slot.
    2845              :    These slots are not aliasable by other memory.  We arrange for
    2846              :    them all to use a single MEM_EXPR, so that the aliasing code can
    2847              :    work properly in the case of shared spill slots.  */
    2848              : 
    2849              : void
    2850      1420453 : set_mem_attrs_for_spill (rtx mem)
    2851              : {
    2852      1420453 :   rtx addr;
    2853              : 
    2854      1420453 :   mem_attrs attrs (*get_mem_attrs (mem));
    2855      1420453 :   attrs.expr = get_spill_slot_decl (true);
    2856      1420453 :   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
    2857      1420453 :   attrs.addrspace = ADDR_SPACE_GENERIC;
    2858              : 
    2859              :   /* We expect the incoming memory to be of the form:
    2860              :         (mem:MODE (plus (reg sfp) (const_int offset)))
    2861              :      with perhaps the plus missing for offset = 0.  */
    2862      1420453 :   addr = XEXP (mem, 0);
    2863      1420453 :   attrs.offset_known_p = true;
    2864      1420453 :   strip_offset (addr, &attrs.offset);
    2865              : 
    2866      1420453 :   set_mem_attrs (mem, &attrs);
    2867      1420453 :   MEM_NOTRAP_P (mem) = 1;
    2868      1420453 : }
    2869              : 
    2870              : /* Return a newly created CODE_LABEL rtx with a unique label number.  */
    2871              : 
    2872              : rtx_code_label *
    2873     15331701 : gen_label_rtx (void)
    2874              : {
    2875     15331701 :   return as_a <rtx_code_label *> (
    2876              :             gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
    2877     15331701 :                                 NULL, label_num++, NULL));
    2878              : }
    2879              : 
    2880              : /* For procedure integration.  */
    2881              : 
    2882              : /* Install new pointers to the first and last insns in the chain.
    2883              :    Also, set cur_insn_uid to one higher than the last in use.
    2884              :    Used for an inline-procedure after copying the insn chain.  */
    2885              : 
    2886              : void
    2887            8 : set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
    2888              : {
    2889            8 :   rtx_insn *insn;
    2890              : 
    2891            8 :   set_first_insn (first);
    2892            8 :   set_last_insn (last);
    2893            8 :   cur_insn_uid = 0;
    2894              : 
    2895            8 :   if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
    2896              :     {
    2897            0 :       int debug_count = 0;
    2898              : 
    2899            0 :       cur_insn_uid = param_min_nondebug_insn_uid - 1;
    2900            0 :       cur_debug_insn_uid = 0;
    2901              : 
    2902            0 :       for (insn = first; insn; insn = NEXT_INSN (insn))
    2903            0 :         if (INSN_UID (insn) < param_min_nondebug_insn_uid)
    2904            0 :           cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
    2905              :         else
    2906              :           {
    2907            0 :             cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
    2908            0 :             if (DEBUG_INSN_P (insn))
    2909            0 :               debug_count++;
    2910              :           }
    2911              : 
    2912            0 :       if (debug_count)
    2913            0 :         cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
    2914              :       else
    2915            0 :         cur_debug_insn_uid++;
    2916              :     }
    2917              :   else
    2918            8 :     for (insn = first; insn; insn = NEXT_INSN (insn))
    2919            0 :       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
    2920              : 
    2921            8 :   cur_insn_uid++;
    2922            8 : }
    2923              : 
    2924              : /* Go through all the RTL insn bodies and copy any invalid shared
    2925              :    structure.  This routine should only be called once.  */
    2926              : 
    2927              : static void
    2928      2943502 : unshare_all_rtl_1 (rtx_insn *insn)
    2929              : {
    2930              :   /* Unshare just about everything else.  */
    2931      2943502 :   unshare_all_rtl_in_chain (insn);
    2932              : 
    2933              :   /* Make sure the addresses of stack slots found outside the insn chain
    2934              :      (such as, in DECL_RTL of a variable) are not shared
    2935              :      with the insn chain.
    2936              : 
    2937              :      This special care is necessary when the stack slot MEM does not
    2938              :      actually appear in the insn chain.  If it does appear, its address
    2939              :      is unshared from all else at that point.  */
    2940      2943502 :   unsigned int i;
    2941      2943502 :   rtx temp;
    2942      9083642 :   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    2943      3196638 :     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
    2944      2943502 : }
    2945              : 
    2946              : /* Go through all the RTL insn bodies and copy any invalid shared
    2947              :    structure, again.  This is a fairly expensive thing to do so it
    2948              :    should be done sparingly.  */
    2949              : 
    2950              : void
    2951      1471362 : unshare_all_rtl_again (rtx_insn *insn)
    2952              : {
    2953      1471362 :   rtx_insn *p;
    2954      1471362 :   tree decl;
    2955              : 
    2956    167663445 :   for (p = insn; p; p = NEXT_INSN (p))
    2957    166192083 :     if (INSN_P (p))
    2958              :       {
    2959    131125717 :         reset_used_flags (PATTERN (p));
    2960    131125717 :         reset_used_flags (REG_NOTES (p));
    2961    131125717 :         if (CALL_P (p))
    2962      5946193 :           reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
    2963              :       }
    2964              : 
    2965              :   /* Make sure that virtual stack slots are not shared.  */
    2966      1471362 :   set_used_decls (DECL_INITIAL (cfun->decl));
    2967              : 
    2968              :   /* Make sure that virtual parameters are not shared.  */
    2969      4573643 :   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    2970      3102281 :     set_used_flags (DECL_RTL (decl));
    2971              : 
    2972              :   rtx temp;
    2973              :   unsigned int i;
    2974      3843173 :   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    2975      2371811 :     reset_used_flags (temp);
    2976              : 
    2977      1471362 :   unshare_all_rtl_1 (insn);
    2978      1471362 : }
    2979              : 
    2980              : void
    2981      1472140 : unshare_all_rtl (void)
    2982              : {
    2983      1472140 :   unshare_all_rtl_1 (get_insns ());
    2984              : 
    2985      4574980 :   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    2986              :     {
    2987      3102840 :       if (DECL_RTL_SET_P (decl))
    2988      3102840 :         SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
    2989      3102840 :       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    2990              :     }
    2991      1472140 : }
    2992              : 
    2993              : 
    2994              : /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
    2995              :    Recursively does the same for subexpressions.  */
    2996              : 
    2997              : static void
    2998  55538861652 : verify_rtx_sharing (rtx orig, rtx insn)
    2999              : {
    3000  55538861652 :   rtx x = orig;
    3001  55538861652 :   int i;
    3002  55538861652 :   enum rtx_code code;
    3003  55538861652 :   const char *format_ptr;
    3004              : 
    3005  55538861652 :   if (x == 0)
    3006              :     return;
    3007              : 
    3008  46082654362 :   code = GET_CODE (x);
    3009              : 
    3010              :   /* These types may be freely shared.  */
    3011              : 
    3012  46082654362 :   switch (code)
    3013              :     {
    3014              :     case REG:
    3015              :     case DEBUG_EXPR:
    3016              :     case VALUE:
    3017              :     CASE_CONST_ANY:
    3018              :     case SYMBOL_REF:
    3019              :     case LABEL_REF:
    3020              :     case CODE_LABEL:
    3021              :     case PC:
    3022              :     case RETURN:
    3023              :     case SIMPLE_RETURN:
    3024              :     case SCRATCH:
    3025              :       /* SCRATCH must be shared because they represent distinct values.  */
    3026              :       return;
    3027   1994818646 :     case CLOBBER:
    3028              :       /* Share clobbers of hard registers, but do not share pseudo reg
    3029              :          clobbers or clobbers of hard registers that originated as pseudos.
    3030              :          This is needed to allow safe register renaming.  */
    3031   1994818646 :       if (REG_P (XEXP (x, 0))
    3032    766790610 :           && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
    3033   2758696283 :           && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
    3034              :         return;
    3035              :       break;
    3036              : 
    3037     38262698 :     case CONST:
    3038     38262698 :       if (shared_const_p (orig))
    3039              :         return;
    3040              :       break;
    3041              : 
    3042   2754320024 :     case MEM:
    3043              :       /* A MEM is allowed to be shared if its address is constant.  */
    3044   2754320024 :       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
    3045   2754320024 :           || reload_completed || reload_in_progress)
    3046              :         return;
    3047              : 
    3048              :       break;
    3049              : 
    3050              :     default:
    3051              :       break;
    3052              :     }
    3053              : 
    3054              :   /* This rtx may not be shared.  If it has already been seen,
    3055              :      replace it with a copy of itself.  */
    3056  22324309858 :   if (flag_checking && RTX_FLAG (x, used))
    3057              :     {
    3058            0 :       error ("invalid rtl sharing found in the insn");
    3059            0 :       debug_rtx (insn);
    3060            0 :       error ("shared rtx");
    3061            0 :       debug_rtx (x);
    3062            0 :       internal_error ("internal consistency failure");
    3063              :     }
    3064  22324309858 :   gcc_assert (!RTX_FLAG (x, used));
    3065              : 
    3066  22324309858 :   RTX_FLAG (x, used) = 1;
    3067              : 
    3068              :   /* Now scan the subexpressions recursively.  */
    3069              : 
    3070  22324309858 :   format_ptr = GET_RTX_FORMAT (code);
    3071              : 
    3072  62723777108 :   for (i = 0; i < GET_RTX_LENGTH (code); i++)
    3073              :     {
    3074  40399467250 :       switch (*format_ptr++)
    3075              :         {
    3076  35318900631 :         case 'e':
    3077  35318900631 :           verify_rtx_sharing (XEXP (x, i), insn);
    3078  35318900631 :           break;
    3079              : 
    3080    899326894 :         case 'E':
    3081    899326894 :           if (XVEC (x, i) != NULL)
    3082              :             {
    3083    899326894 :               int j;
    3084    899326894 :               int len = XVECLEN (x, i);
    3085              : 
    3086   2711661725 :               for (j = 0; j < len; j++)
    3087              :                 {
    3088              :                   /* We allow sharing of ASM_OPERANDS inside single
    3089              :                      instruction.  */
    3090   1812334831 :                   if (j && GET_CODE (XVECEXP (x, i, j)) == SET
    3091     53703389 :                       && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
    3092              :                           == ASM_OPERANDS))
    3093      3057809 :                     verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
    3094              :                   else
    3095   1809277022 :                     verify_rtx_sharing (XVECEXP (x, i, j), insn);
    3096              :                 }
    3097              :             }
    3098              :           break;
    3099              :         }
    3100              :     }
    3101              : }
    3102              : 
    3103              : /* Reset used-flags for INSN.  */
    3104              : 
    3105              : static void
    3106  18000426102 : reset_insn_used_flags (rtx insn)
    3107              : {
    3108  18000426102 :   gcc_assert (INSN_P (insn));
    3109  18000426102 :   reset_used_flags (PATTERN (insn));
    3110  18000426102 :   reset_used_flags (REG_NOTES (insn));
    3111  18000426102 :   if (CALL_P (insn))
    3112    814400176 :     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
    3113  18000426102 : }
    3114              : 
    3115              : /* Go through all the RTL insn bodies and clear all the USED bits.  */
    3116              : 
    3117              : static void
    3118    187800232 : reset_all_used_flags (void)
    3119              : {
    3120    187800232 :   rtx_insn *p;
    3121              : 
    3122  24338312700 :   for (p = get_insns (); p; p = NEXT_INSN (p))
    3123  24150512468 :     if (INSN_P (p))
    3124              :       {
    3125  18000426102 :         rtx pat = PATTERN (p);
    3126  18000426102 :         if (GET_CODE (pat) != SEQUENCE)
    3127  18000426102 :           reset_insn_used_flags (p);
    3128              :         else
    3129              :           {
    3130            0 :             gcc_assert (REG_NOTES (p) == NULL);
    3131            0 :             for (int i = 0; i < XVECLEN (pat, 0); i++)
    3132              :               {
    3133            0 :                 rtx insn = XVECEXP (pat, 0, i);
    3134            0 :                 if (INSN_P (insn))
    3135            0 :                   reset_insn_used_flags (insn);
    3136              :               }
    3137              :           }
    3138              :       }
    3139    187800232 : }
    3140              : 
    3141              : /* Verify sharing in INSN.  */
    3142              : 
    3143              : static void
    3144   9000213051 : verify_insn_sharing (rtx insn)
    3145              : {
    3146   9000213051 :   gcc_assert (INSN_P (insn));
    3147   9000213051 :   verify_rtx_sharing (PATTERN (insn), insn);
    3148   9000213051 :   verify_rtx_sharing (REG_NOTES (insn), insn);
    3149   9000213051 :   if (CALL_P (insn))
    3150    407200088 :     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
    3151   9000213051 : }
    3152              : 
    3153              : /* Go through all the RTL insn bodies and check that there is no unexpected
    3154              :    sharing in between the subexpressions.  */
    3155              : 
    3156              : DEBUG_FUNCTION void
    3157     93900116 : verify_rtl_sharing (void)
    3158              : {
    3159     93900116 :   rtx_insn *p;
    3160              : 
    3161     93900116 :   timevar_push (TV_VERIFY_RTL_SHARING);
    3162              : 
    3163     93900116 :   reset_all_used_flags ();
    3164              : 
    3165  12169156350 :   for (p = get_insns (); p; p = NEXT_INSN (p))
    3166  12075256234 :     if (INSN_P (p))
    3167              :       {
    3168   9000213051 :         rtx pat = PATTERN (p);
    3169   9000213051 :         if (GET_CODE (pat) != SEQUENCE)
    3170   9000213051 :           verify_insn_sharing (p);
    3171              :         else
    3172            0 :           for (int i = 0; i < XVECLEN (pat, 0); i++)
    3173              :               {
    3174            0 :                 rtx insn = XVECEXP (pat, 0, i);
    3175            0 :                 if (INSN_P (insn))
    3176            0 :                   verify_insn_sharing (insn);
    3177              :               }
    3178              :       }
    3179              : 
    3180     93900116 :   reset_all_used_flags ();
    3181              : 
    3182     93900116 :   timevar_pop (TV_VERIFY_RTL_SHARING);
    3183     93900116 : }
    3184              : 
    3185              : /* Go through all the RTL insn bodies and copy any invalid shared structure.
    3186              :    Assumes the mark bits are cleared at entry.  */
    3187              : 
    3188              : void
    3189      9427503 : unshare_all_rtl_in_chain (rtx_insn *insn)
    3190              : {
    3191    357462750 :   for (; insn; insn = NEXT_INSN (insn))
    3192    348035247 :     if (INSN_P (insn))
    3193              :       {
    3194    283860866 :         PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
    3195    283860866 :         REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
    3196    283860866 :         if (CALL_P (insn))
    3197     11884388 :           CALL_INSN_FUNCTION_USAGE (insn)
    3198     11884388 :             = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
    3199              :       }
    3200      9427503 : }
    3201              : 
    3202              : /* Go through all virtual stack slots of a function and mark them as
    3203              :    shared.  We never replace the DECL_RTLs themselves with a copy,
    3204              :    but expressions mentioned into a DECL_RTL cannot be shared with
    3205              :    expressions in the instruction stream.
    3206              : 
    3207              :    Note that reload may convert pseudo registers into memories in-place.
    3208              :    Pseudo registers are always shared, but MEMs never are.  Thus if we
    3209              :    reset the used flags on MEMs in the instruction stream, we must set
    3210              :    them again on MEMs that appear in DECL_RTLs.  */
    3211              : 
    3212              : static void
    3213     16098747 : set_used_decls (tree blk)
    3214              : {
    3215     16098747 :   tree t;
    3216              : 
    3217              :   /* Mark decls.  */
    3218     34631562 :   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    3219     18532815 :     if (DECL_RTL_SET_P (t))
    3220      2270099 :       set_used_flags (DECL_RTL (t));
    3221              : 
    3222              :   /* Now process sub-blocks.  */
    3223     30726132 :   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    3224     14627385 :     set_used_decls (t);
    3225     16098747 : }
    3226              : 
    3227              : /* Mark ORIG as in use, and return a copy of it if it was already in use.
    3228              :    Recursively does the same for subexpressions.  Uses
    3229              :    copy_rtx_if_shared_1 to reduce stack space.  */
    3230              : 
    3231              : rtx
    3232    616423980 : copy_rtx_if_shared (rtx orig)
    3233              : {
    3234    616423980 :   copy_rtx_if_shared_1 (&orig);
    3235    616423980 :   return orig;
    3236              : }
    3237              : 
    3238              : /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
    3239              :    use.  Recursively does the same for subexpressions.  */
    3240              : 
    3241              : static void
    3242   1082939132 : copy_rtx_if_shared_1 (rtx *orig1)
    3243              : {
    3244   1082939132 :   rtx x;
    3245   1082939132 :   int i;
    3246   1082939132 :   enum rtx_code code;
    3247   1082939132 :   rtx *last_ptr;
    3248   1082939132 :   const char *format_ptr;
    3249   1082939132 :   int copied = 0;
    3250   1790438883 :   int length;
    3251              : 
    3252              :   /* Repeat is used to turn tail-recursion into iteration.  */
    3253   1790438883 : repeat:
    3254   1790438883 :   x = *orig1;
    3255              : 
    3256   1790438883 :   if (x == 0)
    3257              :     return;
    3258              : 
    3259   1474185315 :   code = GET_CODE (x);
    3260              : 
    3261              :   /* These types may be freely shared.  */
    3262              : 
    3263   1474185315 :   switch (code)
    3264              :     {
    3265              :     case REG:
    3266              :     case DEBUG_EXPR:
    3267              :     case VALUE:
    3268              :     CASE_CONST_ANY:
    3269              :     case SYMBOL_REF:
    3270              :     case LABEL_REF:
    3271              :     case CODE_LABEL:
    3272              :     case PC:
    3273              :     case RETURN:
    3274              :     case SIMPLE_RETURN:
    3275              :     case SCRATCH:
    3276              :       /* SCRATCH must be shared because they represent distinct values.  */
    3277              :       return;
    3278     61742724 :     case CLOBBER:
    3279              :       /* Share clobbers of hard registers, but do not share pseudo reg
    3280              :          clobbers or clobbers of hard registers that originated as pseudos.
    3281              :          This is needed to allow safe register renaming.  */
    3282     61742724 :       if (REG_P (XEXP (x, 0))
    3283     27350455 :           && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
    3284     88929375 :           && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
    3285              :         return;
    3286              :       break;
    3287              : 
    3288      5693551 :     case CONST:
    3289      5693551 :       if (shared_const_p (x))
    3290              :         return;
    3291              :       break;
    3292              : 
    3293              :     case DEBUG_INSN:
    3294              :     case INSN:
    3295              :     case JUMP_INSN:
    3296              :     case CALL_INSN:
    3297              :     case NOTE:
    3298              :     case BARRIER:
    3299              :       /* The chain of insns is not being copied.  */
    3300              :       return;
    3301              : 
    3302              :     default:
    3303              :       break;
    3304              :     }
    3305              : 
    3306              :   /* This rtx may not be shared.  If it has already been seen,
    3307              :      replace it with a copy of itself.  */
    3308              : 
    3309    733402615 :   if (RTX_FLAG (x, used))
    3310              :     {
    3311      4743738 :       x = shallow_copy_rtx (x);
    3312      4743738 :       copied = 1;
    3313              :     }
    3314    733402615 :   RTX_FLAG (x, used) = 1;
    3315              : 
    3316              :   /* Now scan the subexpressions recursively.
    3317              :      We can store any replaced subexpressions directly into X
    3318              :      since we know X is not shared!  Any vectors in X
    3319              :      must be copied if X was copied.  */
    3320              : 
    3321    733402615 :   format_ptr = GET_RTX_FORMAT (code);
    3322    733402615 :   length = GET_RTX_LENGTH (code);
    3323    733402615 :   last_ptr = NULL;
    3324              : 
    3325   2069826876 :   for (i = 0; i < length; i++)
    3326              :     {
    3327   1336424261 :       switch (*format_ptr++)
    3328              :         {
    3329   1112511529 :         case 'e':
    3330   1112511529 :           if (last_ptr)
    3331    435673382 :             copy_rtx_if_shared_1 (last_ptr);
    3332   1112511529 :           last_ptr = &XEXP (x, i);
    3333   1112511529 :           break;
    3334              : 
    3335     31392915 :         case 'E':
    3336     31392915 :           if (XVEC (x, i) != NULL)
    3337              :             {
    3338     31392915 :               int j;
    3339     31392915 :               int len = XVECLEN (x, i);
    3340              : 
    3341              :               /* Copy the vector iff I copied the rtx and the length
    3342              :                  is nonzero.  */
    3343     31392915 :               if (copied && len > 0)
    3344         1491 :                 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
    3345              : 
    3346              :               /* Call recursively on all inside the vector.  */
    3347     92896289 :               for (j = 0; j < len; j++)
    3348              :                 {
    3349     61503374 :                   if (last_ptr)
    3350     30841770 :                     copy_rtx_if_shared_1 (last_ptr);
    3351     61503374 :                   last_ptr = &XVECEXP (x, i, j);
    3352              :                 }
    3353              :             }
    3354              :           break;
    3355              :         }
    3356              :     }
    3357    733402615 :   *orig1 = x;
    3358    733402615 :   if (last_ptr)
    3359              :     {
    3360    707499751 :       orig1 = last_ptr;
    3361    707499751 :       goto repeat;
    3362              :     }
    3363              : }
    3364              : 
    3365              : /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
    3366              : 
    3367              : static void
    3368  78349146419 : mark_used_flags (rtx x, int flag)
    3369              : {
    3370  >12196*10^7 :   int i, j;
    3371  >12196*10^7 :   enum rtx_code code;
    3372  >12196*10^7 :   const char *format_ptr;
    3373  >12196*10^7 :   int length;
    3374              : 
    3375              :   /* Repeat is used to turn tail-recursion into iteration.  */
    3376  >12196*10^7 : repeat:
    3377  >12196*10^7 :   if (x == 0)
    3378              :     return;
    3379              : 
    3380  >10289*10^7 :   code = GET_CODE (x);
    3381              : 
    3382              :   /* These types may be freely shared so we needn't do any resetting
    3383              :      for them.  */
    3384              : 
    3385  >10289*10^7 :   switch (code)
    3386              :     {
    3387              :     case REG:
    3388              :     case DEBUG_EXPR:
    3389              :     case VALUE:
    3390              :     CASE_CONST_ANY:
    3391              :     case SYMBOL_REF:
    3392              :     case CODE_LABEL:
    3393              :     case PC:
    3394              :     case RETURN:
    3395              :     case SIMPLE_RETURN:
    3396              :       return;
    3397              : 
    3398              :     case DEBUG_INSN:
    3399              :     case INSN:
    3400              :     case JUMP_INSN:
    3401              :     case CALL_INSN:
    3402              :     case NOTE:
    3403              :     case LABEL_REF:
    3404              :     case BARRIER:
    3405              :       /* The chain of insns is not being copied.  */
    3406              :       return;
    3407              : 
    3408  53137001265 :     default:
    3409  53137001265 :       break;
    3410              :     }
    3411              : 
    3412  53137001265 :   RTX_FLAG (x, used) = flag;
    3413              : 
    3414  53137001265 :   format_ptr = GET_RTX_FORMAT (code);
    3415  53137001265 :   length = GET_RTX_LENGTH (code);
    3416              : 
    3417  >10487*10^7 :   for (i = 0; i < length; i++)
    3418              :     {
    3419  95352990774 :       switch (*format_ptr++)
    3420              :         {
    3421  81149844010 :         case 'e':
    3422  81149844010 :           if (i == length-1)
    3423              :             {
    3424  43612763924 :               x = XEXP (x, i);
    3425  43612763924 :               goto repeat;
    3426              :             }
    3427  37537080086 :           mark_used_flags (XEXP (x, i), flag);
    3428  37537080086 :           break;
    3429              : 
    3430              :         case 'E':
    3431   5547869750 :           for (j = 0; j < XVECLEN (x, i); j++)
    3432   3694217098 :             mark_used_flags (XVECEXP (x, i, j), flag);
    3433              :           break;
    3434              :         }
    3435              :     }
    3436              : }
    3437              : 
    3438              : /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
    3439              :    to look for shared sub-parts.  */
    3440              : 
    3441              : void
    3442  37110343630 : reset_used_flags (rtx x)
    3443              : {
    3444  37110343630 :   mark_used_flags (x, 0);
    3445  37110343630 : }
    3446              : 
    3447              : /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
    3448              :    to look for shared sub-parts.  */
    3449              : 
    3450              : void
    3451      7505605 : set_used_flags (rtx x)
    3452              : {
    3453      7505605 :   mark_used_flags (x, 1);
    3454      7505605 : }
    3455              : 
    3456              : /* Copy X if necessary so that it won't be altered by changes in OTHER.
    3457              :    Return X or the rtx for the pseudo reg the value of X was copied into.
    3458              :    OTHER must be valid as a SET_DEST.  */
    3459              : 
    3460              : rtx
    3461            0 : make_safe_from (rtx x, rtx other)
    3462              : {
    3463            0 :   while (1)
    3464            0 :     switch (GET_CODE (other))
    3465              :       {
    3466            0 :       case SUBREG:
    3467            0 :         other = SUBREG_REG (other);
    3468            0 :         break;
    3469            0 :       case STRICT_LOW_PART:
    3470            0 :       case SIGN_EXTEND:
    3471            0 :       case ZERO_EXTEND:
    3472            0 :         other = XEXP (other, 0);
    3473            0 :         break;
    3474            0 :       default:
    3475            0 :         goto done;
    3476              :       }
    3477            0 :  done:
    3478            0 :   if ((MEM_P (other)
    3479            0 :        && ! CONSTANT_P (x)
    3480            0 :        && !REG_P (x)
    3481            0 :        && GET_CODE (x) != SUBREG)
    3482            0 :       || (REG_P (other)
    3483            0 :           && (REGNO (other) < FIRST_PSEUDO_REGISTER
    3484            0 :               || reg_mentioned_p (other, x))))
    3485              :     {
    3486            0 :       rtx temp = gen_reg_rtx (GET_MODE (x));
    3487            0 :       emit_move_insn (temp, x);
    3488            0 :       return temp;
    3489              :     }
    3490              :   return x;
    3491              : }
    3492              : 
    3493              : /* Emission of insns (adding them to the doubly-linked list).  */
    3494              : 
    3495              : /* Return the last insn emitted, even if it is in a sequence now pushed.  */
    3496              : 
    3497              : rtx_insn *
    3498            0 : get_last_insn_anywhere (void)
    3499              : {
    3500            0 :   struct sequence_stack *seq;
    3501            0 :   for (seq = get_current_sequence (); seq; seq = seq->next)
    3502            0 :     if (seq->last != 0)
    3503              :       return seq->last;
    3504              :   return 0;
    3505              : }
    3506              : 
    3507              : /* Return the first nonnote insn emitted in current sequence or current
    3508              :    function.  This routine looks inside SEQUENCEs.  */
    3509              : 
    3510              : rtx_insn *
    3511            0 : get_first_nonnote_insn (void)
    3512              : {
    3513            0 :   rtx_insn *insn = get_insns ();
    3514              : 
    3515            0 :   if (insn)
    3516              :     {
    3517            0 :       if (NOTE_P (insn))
    3518            0 :         for (insn = next_insn (insn);
    3519            0 :              insn && NOTE_P (insn);
    3520            0 :              insn = next_insn (insn))
    3521            0 :           continue;
    3522              :       else
    3523              :         {
    3524            0 :           if (NONJUMP_INSN_P (insn)
    3525            0 :               && GET_CODE (PATTERN (insn)) == SEQUENCE)
    3526            0 :             insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    3527              :         }
    3528              :     }
    3529              : 
    3530            0 :   return insn;
    3531              : }
    3532              : 
    3533              : /* Return the last nonnote insn emitted in current sequence or current
    3534              :    function.  This routine looks inside SEQUENCEs.  */
    3535              : 
    3536              : rtx_insn *
    3537            0 : get_last_nonnote_insn (void)
    3538              : {
    3539            0 :   rtx_insn *insn = get_last_insn ();
    3540              : 
    3541            0 :   if (insn)
    3542              :     {
    3543            0 :       if (NOTE_P (insn))
    3544            0 :         for (insn = previous_insn (insn);
    3545            0 :              insn && NOTE_P (insn);
    3546            0 :              insn = previous_insn (insn))
    3547            0 :           continue;
    3548              :       else
    3549              :         {
    3550            0 :           if (NONJUMP_INSN_P (insn))
    3551            0 :             if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
    3552            0 :               insn = seq->insn (seq->len () - 1);
    3553              :         }
    3554              :     }
    3555              : 
    3556            0 :   return insn;
    3557              : }
    3558              : 
    3559              : /* Return the number of actual (non-debug) insns emitted in this
    3560              :    function.  */
    3561              : 
    3562              : int
    3563      2023017 : get_max_insn_count (void)
    3564              : {
    3565      2023017 :   int n = cur_insn_uid;
    3566              : 
    3567              :   /* The table size must be stable across -g, to avoid codegen
    3568              :      differences due to debug insns, and not be affected by
    3569              :      -fmin-insn-uid, to avoid excessive table size and to simplify
    3570              :      debugging of -fcompare-debug failures.  */
    3571      2023017 :   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    3572      2023017 :     n -= cur_debug_insn_uid;
    3573              :   else
    3574            0 :     n -= param_min_nondebug_insn_uid;
    3575              : 
    3576      2023017 :   return n;
    3577              : }
    3578              : 
    3579              : 
    3580              : /* Return the next insn.  If it is a SEQUENCE, return the first insn
    3581              :    of the sequence.  */
    3582              : 
    3583              : rtx_insn *
    3584    128019871 : next_insn (rtx_insn *insn)
    3585              : {
    3586    128019871 :   if (insn)
    3587              :     {
    3588    128019871 :       insn = NEXT_INSN (insn);
    3589    127715737 :       if (insn && NONJUMP_INSN_P (insn)
    3590    142258727 :           && GET_CODE (PATTERN (insn)) == SEQUENCE)
    3591            0 :         insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    3592              :     }
    3593              : 
    3594    128019871 :   return insn;
    3595              : }
    3596              : 
    3597              : /* Return the previous insn.  If it is a SEQUENCE, return the last insn
    3598              :    of the sequence.  */
    3599              : 
    3600              : rtx_insn *
    3601        71356 : previous_insn (rtx_insn *insn)
    3602              : {
    3603        71356 :   if (insn)
    3604              :     {
    3605        71356 :       insn = PREV_INSN (insn);
    3606        71356 :       if (insn && NONJUMP_INSN_P (insn))
    3607         8454 :         if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
    3608            0 :           insn = seq->insn (seq->len () - 1);
    3609              :     }
    3610              : 
    3611        71356 :   return insn;
    3612              : }
    3613              : 
    3614              : /* Return the next insn after INSN that is not a NOTE.  This routine does not
    3615              :    look inside SEQUENCEs.  */
    3616              : 
    3617              : rtx_insn *
    3618       556197 : next_nonnote_insn (rtx_insn *insn)
    3619              : {
    3620       580456 :   while (insn)
    3621              :     {
    3622       580456 :       insn = NEXT_INSN (insn);
    3623       580456 :       if (insn == 0 || !NOTE_P (insn))
    3624              :         break;
    3625              :     }
    3626              : 
    3627       556197 :   return insn;
    3628              : }
    3629              : 
    3630              : /* Return the next insn after INSN that is not a DEBUG_INSN.  This
    3631              :    routine does not look inside SEQUENCEs.  */
    3632              : 
    3633              : rtx_insn *
    3634      5975259 : next_nondebug_insn (rtx_insn *insn)
    3635              : {
    3636      8074433 :   while (insn)
    3637              :     {
    3638      8074433 :       insn = NEXT_INSN (insn);
    3639      8074433 :       if (insn == 0 || !DEBUG_INSN_P (insn))
    3640              :         break;
    3641              :     }
    3642              : 
    3643      5975259 :   return insn;
    3644              : }
    3645              : 
    3646              : /* Return the previous insn before INSN that is not a NOTE.  This routine does
    3647              :    not look inside SEQUENCEs.  */
    3648              : 
    3649              : rtx_insn *
    3650     90464359 : prev_nonnote_insn (rtx_insn *insn)
    3651              : {
    3652     95349171 :   while (insn)
    3653              :     {
    3654     95349171 :       insn = PREV_INSN (insn);
    3655     95349171 :       if (insn == 0 || !NOTE_P (insn))
    3656              :         break;
    3657              :     }
    3658              : 
    3659     90464359 :   return insn;
    3660              : }
    3661              : 
    3662              : /* Return the previous insn before INSN that is not a DEBUG_INSN.
    3663              :    This routine does not look inside SEQUENCEs.  */
    3664              : 
    3665              : rtx_insn *
    3666      2018660 : prev_nondebug_insn (rtx_insn *insn)
    3667              : {
    3668      4254497 :   while (insn)
    3669              :     {
    3670      4254497 :       insn = PREV_INSN (insn);
    3671      4254497 :       if (insn == 0 || !DEBUG_INSN_P (insn))
    3672              :         break;
    3673              :     }
    3674              : 
    3675      2018660 :   return insn;
    3676              : }
    3677              : 
    3678              : /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
    3679              :    This routine does not look inside SEQUENCEs.  */
    3680              : 
    3681              : rtx_insn *
    3682     51152110 : next_nonnote_nondebug_insn (rtx_insn *insn)
    3683              : {
    3684     69581568 :   while (insn)
    3685              :     {
    3686     69581568 :       insn = NEXT_INSN (insn);
    3687     69581568 :       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
    3688              :         break;
    3689              :     }
    3690              : 
    3691     51152110 :   return insn;
    3692              : }
    3693              : 
    3694              : /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
    3695              :    but stop the search before we enter another basic block.  This
    3696              :    routine does not look inside SEQUENCEs.
    3697              :    NOTE: This can potentially bleed into next BB. If current insn is
    3698              :          last insn of BB, followed by a code_label before the start of
    3699              :          the next BB, code_label will be returned. But this is the
    3700              :          behavior rest of gcc assumes/relies on e.g. get_last_bb_insn.  */
    3701              : 
    3702              : rtx_insn *
    3703      7364427 : next_nonnote_nondebug_insn_bb (rtx_insn *insn)
    3704              : {
    3705      7395523 :   while (insn)
    3706              :     {
    3707      7395523 :       insn = NEXT_INSN (insn);
    3708      7395523 :       if (insn == 0)
    3709              :         break;
    3710      7169789 :       if (DEBUG_INSN_P (insn))
    3711            0 :         continue;
    3712      7169789 :       if (!NOTE_P (insn))
    3713              :         break;
    3714      2084621 :       if (NOTE_INSN_BASIC_BLOCK_P (insn))
    3715              :         return NULL;
    3716              :     }
    3717              : 
    3718              :   return insn;
    3719              : }
    3720              : 
    3721              : /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
    3722              :    This routine does not look inside SEQUENCEs.  */
    3723              : 
    3724              : rtx_insn *
    3725    121534791 : prev_nonnote_nondebug_insn (rtx_insn *insn)
    3726              : {
    3727    225203150 :   while (insn)
    3728              :     {
    3729    225203150 :       insn = PREV_INSN (insn);
    3730    225203150 :       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
    3731              :         break;
    3732              :     }
    3733              : 
    3734    121534791 :   return insn;
    3735              : }
    3736              : 
    3737              : /* Return the previous insn before INSN that is not a NOTE nor
    3738              :    DEBUG_INSN, but stop the search before we enter another basic
    3739              :    block.  This routine does not look inside SEQUENCEs.  */
    3740              : 
    3741              : rtx_insn *
    3742     69395979 : prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
    3743              : {
    3744    134199393 :   while (insn)
    3745              :     {
    3746    134199393 :       insn = PREV_INSN (insn);
    3747    134199393 :       if (insn == 0)
    3748              :         break;
    3749    134198802 :       if (DEBUG_INSN_P (insn))
    3750     55377275 :         continue;
    3751     78821527 :       if (!NOTE_P (insn))
    3752              :         break;
    3753     21221311 :       if (NOTE_INSN_BASIC_BLOCK_P (insn))
    3754              :         return NULL;
    3755              :     }
    3756              : 
    3757              :   return insn;
    3758              : }
    3759              : 
    3760              : /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
    3761              :    or 0, if there is none.  This routine does not look inside
    3762              :    SEQUENCEs.  */
    3763              : 
    3764              : rtx_insn *
    3765      4056146 : next_real_insn (rtx_insn *insn)
    3766              : {
    3767      4114790 :   while (insn)
    3768              :     {
    3769      4114790 :       insn = NEXT_INSN (insn);
    3770      4114790 :       if (insn == 0 || INSN_P (insn))
    3771              :         break;
    3772              :     }
    3773              : 
    3774      4056146 :   return insn;
    3775              : }
    3776              : 
    3777              : /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
    3778              :    or 0, if there is none.  This routine does not look inside
    3779              :    SEQUENCEs.  */
    3780              : 
    3781              : rtx_insn *
    3782       981546 : prev_real_insn (rtx_insn *insn)
    3783              : {
    3784      2893387 :   while (insn)
    3785              :     {
    3786      2893387 :       insn = PREV_INSN (insn);
    3787      2893387 :       if (insn == 0 || INSN_P (insn))
    3788              :         break;
    3789              :     }
    3790              : 
    3791       981546 :   return insn;
    3792              : }
    3793              : 
    3794              : /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
    3795              :    or 0, if there is none.  This routine does not look inside
    3796              :    SEQUENCEs.  */
    3797              : 
    3798              : rtx_insn *
    3799            0 : next_real_nondebug_insn (rtx uncast_insn)
    3800              : {
    3801            0 :   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
    3802              : 
    3803            0 :   while (insn)
    3804              :     {
    3805            0 :       insn = NEXT_INSN (insn);
    3806            0 :       if (insn == 0 || NONDEBUG_INSN_P (insn))
    3807              :         break;
    3808              :     }
    3809              : 
    3810            0 :   return insn;
    3811              : }
    3812              : 
    3813              : /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
    3814              :    or 0, if there is none.  This routine does not look inside
    3815              :    SEQUENCEs.  */
    3816              : 
    3817              : rtx_insn *
    3818      4922097 : prev_real_nondebug_insn (rtx_insn *insn)
    3819              : {
    3820     14811570 :   while (insn)
    3821              :     {
    3822     14811570 :       insn = PREV_INSN (insn);
    3823     14811570 :       if (insn == 0 || NONDEBUG_INSN_P (insn))
    3824              :         break;
    3825              :     }
    3826              : 
    3827      4922097 :   return insn;
    3828              : }
    3829              : 
    3830              : /* Return the last CALL_INSN in the current list, or 0 if there is none.
    3831              :    This routine does not look inside SEQUENCEs.  */
    3832              : 
    3833              : rtx_call_insn *
    3834     11870525 : last_call_insn (void)
    3835              : {
    3836     11870525 :   rtx_insn *insn;
    3837              : 
    3838     11997193 :   for (insn = get_last_insn ();
    3839     11997193 :        insn && !CALL_P (insn);
    3840       126668 :        insn = PREV_INSN (insn))
    3841              :     ;
    3842              : 
    3843     11870525 :   return safe_as_a <rtx_call_insn *> (insn);
    3844              : }
    3845              : 
    3846              : bool
    3847   1137965333 : active_insn_p (const rtx_insn *insn)
    3848              : {
    3849   1137965333 :   return (CALL_P (insn) || JUMP_P (insn)
    3850   1137965333 :           || JUMP_TABLE_DATA_P (insn) /* FIXME */
    3851   1137965333 :           || (NONJUMP_INSN_P (insn)
    3852    679079291 :               && (! reload_completed
    3853    481862188 :                   || (GET_CODE (PATTERN (insn)) != USE
    3854    480258054 :                       && GET_CODE (PATTERN (insn)) != CLOBBER))));
    3855              : }
    3856              : 
    3857              : /* Find the next insn after INSN that really does something.  This routine
    3858              :    does not look inside SEQUENCEs.  After reload this also skips over
    3859              :    standalone USE and CLOBBER insn.  */
    3860              : 
    3861              : rtx_insn *
    3862    112252479 : next_active_insn (rtx_insn *insn)
    3863              : {
    3864    250074759 :   while (insn)
    3865              :     {
    3866    250074759 :       insn = NEXT_INSN (insn);
    3867    362327238 :       if (insn == 0 || active_insn_p (insn))
    3868              :         break;
    3869              :     }
    3870              : 
    3871    112252479 :   return insn;
    3872              : }
    3873              : 
    3874              : /* Find the last insn before INSN that really does something.  This routine
    3875              :    does not look inside SEQUENCEs.  After reload this also skips over
    3876              :    standalone USE and CLOBBER insn.  */
    3877              : 
    3878              : rtx_insn *
    3879        45109 : prev_active_insn (rtx_insn *insn)
    3880              : {
    3881       134455 :   while (insn)
    3882              :     {
    3883       134455 :       insn = PREV_INSN (insn);
    3884       179564 :       if (insn == 0 || active_insn_p (insn))
    3885              :         break;
    3886              :     }
    3887              : 
    3888        45109 :   return insn;
    3889              : }
    3890              : 
    3891              : /* Find a RTX_AUTOINC class rtx which matches DATA.  */
    3892              : 
    3893              : static int
    3894            0 : find_auto_inc (const_rtx x, const_rtx reg)
    3895              : {
    3896            0 :   subrtx_iterator::array_type array;
    3897            0 :   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    3898              :     {
    3899            0 :       const_rtx x = *iter;
    3900            0 :       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
    3901            0 :           && rtx_equal_p (reg, XEXP (x, 0)))
    3902            0 :         return true;
    3903              :     }
    3904            0 :   return false;
    3905            0 : }
    3906              : 
    3907              : /* Increment the label uses for all labels present in rtx.  */
    3908              : 
    3909              : static void
    3910     55599072 : mark_label_nuses (rtx x)
    3911              : {
    3912     55599072 :   enum rtx_code code;
    3913     55599072 :   int i, j;
    3914     55599072 :   const char *fmt;
    3915              : 
    3916     55599072 :   code = GET_CODE (x);
    3917     55599077 :   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    3918            1 :     LABEL_NUSES (label_ref_label (x))++;
    3919              : 
    3920     55599072 :   fmt = GET_RTX_FORMAT (code);
    3921    137270042 :   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    3922              :     {
    3923     81670970 :       if (fmt[i] == 'e')
    3924     43736593 :         mark_label_nuses (XEXP (x, i));
    3925     37934377 :       else if (fmt[i] == 'E')
    3926      3769407 :         for (j = XVECLEN (x, i) - 1; j >= 0; j--)
    3927      2608827 :           mark_label_nuses (XVECEXP (x, i, j));
    3928              :     }
    3929     55599072 : }
    3930              : 
    3931              : 
    3932              : /* Try splitting insns that can be split for better scheduling.
    3933              :    PAT is the pattern which might split.
    3934              :    TRIAL is the insn providing PAT.
    3935              :    LAST is nonzero if we should return the last insn of the sequence produced.
    3936              : 
    3937              :    If this routine succeeds in splitting, it returns the first or last
    3938              :    replacement insn depending on the value of LAST.  Otherwise, it
    3939              :    returns TRIAL.  If the insn to be returned can be split, it will be.  */
    3940              : 
    3941              : rtx_insn *
    3942    386872525 : try_split (rtx pat, rtx_insn *trial, int last)
    3943              : {
    3944    386872525 :   rtx_insn *before, *after;
    3945    386872525 :   rtx note;
    3946    386872525 :   rtx_insn *seq, *tem;
    3947    386872525 :   profile_probability probability;
    3948    386872525 :   rtx_insn *insn_last, *insn;
    3949    386872525 :   int njumps = 0;
    3950    386872525 :   rtx_insn *call_insn = NULL;
    3951              : 
    3952    386872525 :   if (any_condjump_p (trial)
    3953    386872525 :       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    3954     14816294 :     split_branch_probability
    3955     14816294 :       = profile_probability::from_reg_br_prob_note (XINT (note, 0));
    3956              :   else
    3957    372056231 :     split_branch_probability = profile_probability::uninitialized ();
    3958              : 
    3959    386872525 :   probability = split_branch_probability;
    3960              : 
    3961    386872525 :   seq = split_insns (pat, trial);
    3962              : 
    3963    386872525 :   split_branch_probability = profile_probability::uninitialized ();
    3964              : 
    3965    386872525 :   if (!seq)
    3966              :     return trial;
    3967              : 
    3968              :   int split_insn_count = 0;
    3969              :   /* Avoid infinite loop if any insn of the result matches
    3970              :      the original pattern.  */
    3971              :   insn_last = seq;
    3972      9269742 :   while (1)
    3973              :     {
    3974      9269742 :       if (INSN_P (insn_last)
    3975      9269742 :           && rtx_equal_p (PATTERN (insn_last), pat))
    3976              :         return trial;
    3977      9268301 :       split_insn_count++;
    3978      9268301 :       if (!NEXT_INSN (insn_last))
    3979              :         break;
    3980              :       insn_last = NEXT_INSN (insn_last);
    3981              :     }
    3982              : 
    3983              :   /* We're not good at redistributing frame information if
    3984              :      the split occurs before reload or if it results in more
    3985              :      than one insn.  */
    3986      6191902 :   if (RTX_FRAME_RELATED_P (trial))
    3987              :     {
    3988           17 :       if (!reload_completed || split_insn_count != 1)
    3989              :         return trial;
    3990              : 
    3991           17 :       rtx_insn *new_insn = seq;
    3992           17 :       rtx_insn *old_insn = trial;
    3993           17 :       copy_frame_info_to_split_insn (old_insn, new_insn);
    3994              :     }
    3995              : 
    3996              :   /* We will be adding the new sequence to the function.  The splitters
    3997              :      may have introduced invalid RTL sharing, so unshare the sequence now.  */
    3998      6191902 :   unshare_all_rtl_in_chain (seq);
    3999              : 
    4000              :   /* Mark labels and copy flags.  */
    4001     15460203 :   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    4002              :     {
    4003      9268301 :       if (JUMP_P (insn))
    4004              :         {
    4005         6429 :           if (JUMP_P (trial))
    4006         5597 :             CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
    4007         6429 :           mark_jump_label (PATTERN (insn), insn, 0);
    4008         6429 :           njumps++;
    4009         6429 :           if (probability.initialized_p ()
    4010         5567 :               && any_condjump_p (insn)
    4011        11996 :               && !find_reg_note (insn, REG_BR_PROB, 0))
    4012              :             {
    4013              :               /* We can preserve the REG_BR_PROB notes only if exactly
    4014              :                  one jump is created, otherwise the machine description
    4015              :                  is responsible for this step using
    4016              :                  split_branch_probability variable.  */
    4017         5567 :               gcc_assert (njumps == 1);
    4018         5567 :               add_reg_br_prob_note (insn, probability);
    4019              :             }
    4020              :         }
    4021              :     }
    4022              : 
    4023              :   /* If we are splitting a CALL_INSN, look for the CALL_INSN
    4024              :      in SEQ and copy any additional information across.  */
    4025      6191902 :   if (CALL_P (trial))
    4026              :     {
    4027            0 :       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    4028            0 :         if (CALL_P (insn))
    4029              :           {
    4030            0 :             gcc_assert (call_insn == NULL_RTX);
    4031            0 :             call_insn = insn;
    4032              : 
    4033              :             /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
    4034              :                target may have explicitly specified.  */
    4035            0 :             rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
    4036            0 :             while (*p)
    4037            0 :               p = &XEXP (*p, 1);
    4038            0 :             *p = CALL_INSN_FUNCTION_USAGE (trial);
    4039              : 
    4040              :             /* If the old call was a sibling call, the new one must
    4041              :                be too.  */
    4042            0 :             SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
    4043              :           }
    4044              :     }
    4045              : 
    4046              :   /* Copy notes, particularly those related to the CFG.  */
    4047      8005226 :   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    4048              :     {
    4049      1813324 :       switch (REG_NOTE_KIND (note))
    4050              :         {
    4051         2806 :         case REG_EH_REGION:
    4052         2806 :           copy_reg_eh_region_note_backward (note, insn_last, NULL);
    4053         2806 :           break;
    4054              : 
    4055              :         case REG_NORETURN:
    4056              :         case REG_SETJMP:
    4057              :         case REG_TM:
    4058              :         case REG_CALL_NOCF_CHECK:
    4059              :         case REG_CALL_ARG_LOCATION:
    4060            0 :           for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
    4061              :             {
    4062            0 :               if (CALL_P (insn))
    4063            0 :                 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
    4064              :             }
    4065              :           break;
    4066              : 
    4067              :         case REG_NON_LOCAL_GOTO:
    4068              :         case REG_LABEL_TARGET:
    4069            0 :           for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
    4070              :             {
    4071            0 :               if (JUMP_P (insn))
    4072            0 :                 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
    4073              :             }
    4074              :           break;
    4075              : 
    4076              :         case REG_INC:
    4077              :           if (!AUTO_INC_DEC)
    4078              :             break;
    4079              : 
    4080              :           for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
    4081              :             {
    4082              :               rtx reg = XEXP (note, 0);
    4083              :               if (!FIND_REG_INC_NOTE (insn, reg)
    4084              :                   && find_auto_inc (PATTERN (insn), reg))
    4085              :                 add_reg_note (insn, REG_INC, reg);
    4086              :             }
    4087              :           break;
    4088              : 
    4089       308424 :         case REG_ARGS_SIZE:
    4090       308424 :           fixup_args_size_notes (NULL, insn_last, get_args_size (note));
    4091       308424 :           break;
    4092              : 
    4093            0 :         case REG_CALL_DECL:
    4094            0 :         case REG_UNTYPED_CALL:
    4095            0 :           gcc_assert (call_insn != NULL_RTX);
    4096            0 :           add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
    4097            0 :           break;
    4098              : 
    4099              :         default:
    4100              :           break;
    4101              :         }
    4102              :     }
    4103              : 
    4104              :   /* If there are LABELS inside the split insns increment the
    4105              :      usage count so we don't delete the label.  */
    4106      6191902 :   if (INSN_P (trial))
    4107              :     {
    4108              :       insn = insn_last;
    4109     15460203 :       while (insn != NULL_RTX)
    4110              :         {
    4111              :           /* JUMP_P insns have already been "marked" above.  */
    4112      9268301 :           if (NONJUMP_INSN_P (insn))
    4113      9253652 :             mark_label_nuses (PATTERN (insn));
    4114              : 
    4115      9268301 :           insn = PREV_INSN (insn);
    4116              :         }
    4117              :     }
    4118              : 
    4119      6191902 :   before = PREV_INSN (trial);
    4120      6191902 :   after = NEXT_INSN (trial);
    4121              : 
    4122      6191902 :   emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
    4123              : 
    4124      6191902 :   delete_insn (trial);
    4125              : 
    4126              :   /* Recursively call try_split for each new insn created; by the
    4127              :      time control returns here that insn will be fully split, so
    4128              :      set LAST and continue from the insn after the one returned.
    4129              :      We can't use next_active_insn here since AFTER may be a note.
    4130              :      Ignore deleted insns, which can be occur if not optimizing.  */
    4131     21652105 :   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    4132      9268301 :     if (! tem->deleted () && INSN_P (tem))
    4133      9260081 :       tem = try_split (PATTERN (tem), tem, 1);
    4134              : 
    4135              :   /* Return either the first or the last insn, depending on which was
    4136              :      requested.  */
    4137      6191902 :   return last
    4138      6191902 :     ? (after ? PREV_INSN (after) : get_last_insn ())
    4139            0 :     : NEXT_INSN (before);
    4140              : }
    4141              : 
    4142              : /* Make and return an INSN rtx, initializing all its slots.
    4143              :    Store PATTERN in the pattern slots.  */
    4144              : 
    4145              : rtx_insn *
    4146    125550128 : make_insn_raw (rtx pattern)
    4147              : {
    4148    125550128 :   rtx_insn *insn;
    4149              : 
    4150    125550128 :   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
    4151              : 
    4152    125550128 :   INSN_UID (insn) = cur_insn_uid++;
    4153    125550128 :   PATTERN (insn) = pattern;
    4154    125550128 :   INSN_CODE (insn) = -1;
    4155    125550128 :   REG_NOTES (insn) = NULL;
    4156    125550128 :   INSN_LOCATION (insn) = curr_insn_location ();
    4157    125550128 :   BLOCK_FOR_INSN (insn) = NULL;
    4158              : 
    4159              : #ifdef ENABLE_RTL_CHECKING
    4160              :   if (insn
    4161              :       && INSN_P (insn)
    4162              :       && (returnjump_p (insn)
    4163              :           || (GET_CODE (insn) == SET
    4164              :               && SET_DEST (insn) == pc_rtx)))
    4165              :     {
    4166              :       warning (0, "ICE: %<emit_insn%> used where %<emit_jump_insn%> needed:");
    4167              :       debug_rtx (insn);
    4168              :     }
    4169              : #endif
    4170              : 
    4171    125550128 :   return insn;
    4172              : }
    4173              : 
    4174              : /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
    4175              : 
    4176              : static rtx_insn *
    4177     50611736 : make_debug_insn_raw (rtx pattern)
    4178              : {
    4179     50611736 :   rtx_debug_insn *insn;
    4180              : 
    4181     50611736 :   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
    4182     50611736 :   INSN_UID (insn) = cur_debug_insn_uid++;
    4183     50611736 :   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    4184     50611736 :     INSN_UID (insn) = cur_insn_uid++;
    4185              : 
    4186     50611736 :   PATTERN (insn) = pattern;
    4187     50611736 :   INSN_CODE (insn) = -1;
    4188     50611736 :   REG_NOTES (insn) = NULL;
    4189     50611736 :   INSN_LOCATION (insn) = curr_insn_location ();
    4190     50611736 :   BLOCK_FOR_INSN (insn) = NULL;
    4191              : 
    4192     50611736 :   return insn;
    4193              : }
    4194              : 
    4195              : /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
    4196              : 
    4197              : static rtx_insn *
    4198     18103924 : make_jump_insn_raw (rtx pattern)
    4199              : {
    4200     18103924 :   rtx_jump_insn *insn;
    4201              : 
    4202     18103924 :   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
    4203     18103924 :   INSN_UID (insn) = cur_insn_uid++;
    4204              : 
    4205     18103924 :   PATTERN (insn) = pattern;
    4206     18103924 :   INSN_CODE (insn) = -1;
    4207     18103924 :   REG_NOTES (insn) = NULL;
    4208     18103924 :   JUMP_LABEL (insn) = NULL;
    4209     18103924 :   INSN_LOCATION (insn) = curr_insn_location ();
    4210     18103924 :   BLOCK_FOR_INSN (insn) = NULL;
    4211              : 
    4212     18103924 :   return insn;
    4213              : }
    4214              : 
    4215              : /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
    4216              : 
    4217              : static rtx_insn *
    4218      6251525 : make_call_insn_raw (rtx pattern)
    4219              : {
    4220      6251525 :   rtx_call_insn *insn;
    4221              : 
    4222      6251525 :   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
    4223      6251525 :   INSN_UID (insn) = cur_insn_uid++;
    4224              : 
    4225      6251525 :   PATTERN (insn) = pattern;
    4226      6251525 :   INSN_CODE (insn) = -1;
    4227      6251525 :   REG_NOTES (insn) = NULL;
    4228      6251525 :   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
    4229      6251525 :   INSN_LOCATION (insn) = curr_insn_location ();
    4230      6251525 :   BLOCK_FOR_INSN (insn) = NULL;
    4231              : 
    4232      6251525 :   return insn;
    4233              : }
    4234              : 
    4235              : /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
    4236              : 
    4237              : static rtx_note *
    4238    164365661 : make_note_raw (enum insn_note subtype)
    4239              : {
    4240              :   /* Some notes are never created this way at all.  These notes are
    4241              :      only created by patching out insns.  */
    4242    164365661 :   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
    4243              :               && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
    4244              : 
    4245    164365661 :   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
    4246    164365661 :   INSN_UID (note) = cur_insn_uid++;
    4247    164365661 :   NOTE_KIND (note) = subtype;
    4248    164365661 :   BLOCK_FOR_INSN (note) = NULL;
    4249    164365661 :   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
    4250    164365661 :   return note;
    4251              : }
    4252              : 
    4253              : /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
    4254              :    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
    4255              :    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
    4256              : 
    4257              : static inline void
    4258    588948243 : link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
    4259              : {
    4260    588948243 :   SET_PREV_INSN (insn) = prev;
    4261    588948243 :   SET_NEXT_INSN (insn) = next;
    4262    588948243 :   if (prev != NULL)
    4263              :     {
    4264    421446719 :       SET_NEXT_INSN (prev) = insn;
    4265    421446719 :       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
    4266              :         {
    4267            0 :           rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
    4268            0 :           SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
    4269              :         }
    4270              :     }
    4271    588948243 :   if (next != NULL)
    4272              :     {
    4273    169225140 :       SET_PREV_INSN (next) = insn;
    4274    169225140 :       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
    4275              :         {
    4276            0 :           rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
    4277            0 :           SET_PREV_INSN (sequence->insn (0)) = insn;
    4278              :         }
    4279              :     }
    4280              : 
    4281    588948243 :   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    4282              :     {
    4283            0 :       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
    4284            0 :       SET_PREV_INSN (sequence->insn (0)) = prev;
    4285            0 :       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    4286              :     }
    4287    588948243 : }
    4288              : 
    4289              : /* Add INSN to the end of the doubly-linked list.
    4290              :    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
    4291              : 
    4292              : void
    4293    408624115 : add_insn (rtx_insn *insn)
    4294              : {
    4295    408624115 :   rtx_insn *prev = get_last_insn ();
    4296    408624115 :   link_insn_into_chain (insn, prev, NULL);
    4297    408624115 :   if (get_insns () == NULL)
    4298    167314387 :     set_first_insn (insn);
    4299    408624115 :   set_last_insn (insn);
    4300    408624115 : }
    4301              : 
    4302              : /* Add INSN into the doubly-linked list after insn AFTER.  */
    4303              : 
    4304              : static void
    4305     65497475 : add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
    4306              : {
    4307     65497475 :   rtx_insn *next = NEXT_INSN (after);
    4308              : 
    4309     65497475 :   gcc_assert (!optimize || !after->deleted ());
    4310              : 
    4311     65497475 :   link_insn_into_chain (insn, after, next);
    4312              : 
    4313     65497475 :   if (next == NULL)
    4314              :     {
    4315              :       struct sequence_stack *seq;
    4316              : 
    4317     11098988 :       for (seq = get_current_sequence (); seq; seq = seq->next)
    4318     11098988 :         if (after == seq->last)
    4319              :           {
    4320     11098988 :             seq->last = insn;
    4321     11098988 :             break;
    4322              :           }
    4323              :     }
    4324     65497475 : }
    4325              : 
    4326              : /* Add INSN into the doubly-linked list before insn BEFORE.  */
    4327              : 
    4328              : static void
    4329    114826653 : add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
    4330              : {
    4331    114826653 :   rtx_insn *prev = PREV_INSN (before);
    4332              : 
    4333    114826653 :   gcc_assert (!optimize || !before->deleted ());
    4334              : 
    4335    114826653 :   link_insn_into_chain (insn, prev, before);
    4336              : 
    4337    114826653 :   if (prev == NULL)
    4338              :     {
    4339              :       struct sequence_stack *seq;
    4340              : 
    4341       187137 :       for (seq = get_current_sequence (); seq; seq = seq->next)
    4342       187137 :         if (before == seq->first)
    4343              :           {
    4344       187137 :             seq->first = insn;
    4345       187137 :             break;
    4346              :           }
    4347              : 
    4348       187137 :       gcc_assert (seq);
    4349              :     }
    4350    114826653 : }
    4351              : 
    4352              : /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
    4353              :    If BB is NULL, an attempt is made to infer the bb from before.
    4354              : 
    4355              :    This and the next function should be the only functions called
    4356              :    to insert an insn once delay slots have been filled since only
    4357              :    they know how to update a SEQUENCE. */
    4358              : 
    4359              : void
    4360     64172409 : add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
    4361              : {
    4362     64172409 :   add_insn_after_nobb (insn, after);
    4363     64172409 :   if (!BARRIER_P (after)
    4364     64141269 :       && !BARRIER_P (insn)
    4365    120661891 :       && (bb = BLOCK_FOR_INSN (after)))
    4366              :     {
    4367     46037812 :       set_block_for_insn (insn, bb);
    4368     46037812 :       if (INSN_P (insn))
    4369       787511 :         df_insn_rescan (insn);
    4370              :       /* Should not happen as first in the BB is always
    4371              :          either NOTE or LABEL.  */
    4372     46037812 :       if (BB_END (bb) == after
    4373              :           /* Avoid clobbering of structure when creating new BB.  */
    4374       971123 :           && !BARRIER_P (insn)
    4375       971123 :           && !NOTE_INSN_BASIC_BLOCK_P (insn))
    4376       316474 :         BB_END (bb) = insn;
    4377              :     }
    4378     64172409 : }
    4379              : 
    4380              : /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
    4381              :    If BB is NULL, an attempt is made to infer the bb from before.
    4382              : 
    4383              :    This and the previous function should be the only functions called
    4384              :    to insert an insn once delay slots have been filled since only
    4385              :    they know how to update a SEQUENCE. */
    4386              : 
    4387              : void
    4388     92629747 : add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
    4389              : {
    4390     92629747 :   add_insn_before_nobb (insn, before);
    4391              : 
    4392     92629747 :   if (BARRIER_P (insn))
    4393              :     return;
    4394              : 
    4395     92629630 :   if (!bb
    4396     22540838 :       && !BARRIER_P (before))
    4397     22540837 :     bb = BLOCK_FOR_INSN (before);
    4398              : 
    4399     22540838 :   if (bb)
    4400              :     {
    4401     88999917 :       set_block_for_insn (insn, bb);
    4402     88999917 :       if (INSN_P (insn))
    4403     13202345 :         df_insn_rescan (insn);
    4404              :       /* Should not happen as first in the BB is always either NOTE or
    4405              :          LABEL.  */
    4406     88999917 :       gcc_assert (BB_HEAD (bb) != insn
    4407              :                   /* Avoid clobbering of structure when creating new BB.  */
    4408              :                   || BARRIER_P (insn)
    4409              :                   || NOTE_INSN_BASIC_BLOCK_P (insn));
    4410              :     }
    4411              : }
    4412              : 
    4413              : /* Replace insn with an deleted instruction note.  */
    4414              : 
    4415              : void
    4416     14181151 : set_insn_deleted (rtx_insn *insn)
    4417              : {
    4418     14181151 :   if (INSN_P (insn))
    4419     14181151 :     df_insn_delete (insn);
    4420     14181151 :   PUT_CODE (insn, NOTE);
    4421     14181151 :   NOTE_KIND (insn) = NOTE_INSN_DELETED;
    4422     14181151 : }
    4423              : 
    4424              : 
    4425              : /* Unlink INSN from the insn chain.
    4426              : 
    4427              :    This function knows how to handle sequences.
    4428              : 
    4429              :    This function does not invalidate data flow information associated with
    4430              :    INSN (i.e. does not call df_insn_delete).  That makes this function
    4431              :    usable for only disconnecting an insn from the chain, and re-emit it
    4432              :    elsewhere later.
    4433              : 
    4434              :    To later insert INSN elsewhere in the insn chain via add_insn and
    4435              :    similar functions, PREV_INSN and NEXT_INSN must be nullified by
    4436              :    the caller.  Nullifying them here breaks many insn chain walks.
    4437              : 
    4438              :    To really delete an insn and related DF information, use delete_insn.  */
    4439              : 
    4440              : void
    4441    135748304 : remove_insn (rtx_insn *insn)
    4442              : {
    4443    135748304 :   rtx_insn *next = NEXT_INSN (insn);
    4444    135748304 :   rtx_insn *prev = PREV_INSN (insn);
    4445    135748304 :   basic_block bb;
    4446              : 
    4447    135748304 :   if (prev)
    4448              :     {
    4449    135748043 :       SET_NEXT_INSN (prev) = next;
    4450    135748043 :       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
    4451              :         {
    4452            0 :           rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
    4453            0 :           SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    4454              :         }
    4455              :     }
    4456              :   else
    4457              :     {
    4458              :       struct sequence_stack *seq;
    4459              : 
    4460          261 :       for (seq = get_current_sequence (); seq; seq = seq->next)
    4461          261 :         if (insn == seq->first)
    4462              :           {
    4463          261 :             seq->first = next;
    4464          261 :             break;
    4465              :           }
    4466              : 
    4467          261 :       gcc_assert (seq);
    4468              :     }
    4469              : 
    4470    135748304 :   if (next)
    4471              :     {
    4472    133663681 :       SET_PREV_INSN (next) = prev;
    4473    133663681 :       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
    4474              :         {
    4475            0 :           rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
    4476            0 :           SET_PREV_INSN (sequence->insn (0)) = prev;
    4477              :         }
    4478              :     }
    4479              :   else
    4480              :     {
    4481              :       struct sequence_stack *seq;
    4482              : 
    4483      2084623 :       for (seq = get_current_sequence (); seq; seq = seq->next)
    4484      2084623 :         if (insn == seq->last)
    4485              :           {
    4486      2084623 :             seq->last = prev;
    4487      2084623 :             break;
    4488              :           }
    4489              : 
    4490      2084623 :       gcc_assert (seq);
    4491              :     }
    4492              : 
    4493              :   /* Fix up basic block boundaries, if necessary.  */
    4494    135748304 :   if (!BARRIER_P (insn)
    4495    135748304 :       && (bb = BLOCK_FOR_INSN (insn)))
    4496              :     {
    4497    132752173 :       if (BB_HEAD (bb) == insn)
    4498              :         {
    4499              :           /* Never ever delete the basic block note without deleting whole
    4500              :              basic block.  */
    4501      2476162 :           gcc_assert (!NOTE_P (insn));
    4502      2476162 :           BB_HEAD (bb) = next;
    4503              :         }
    4504    132752173 :       if (BB_END (bb) == insn)
    4505     22937179 :         BB_END (bb) = prev;
    4506              :     }
    4507    135748304 : }
    4508              : 
    4509              : /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
    4510              : 
    4511              : void
    4512      6195436 : add_function_usage_to (rtx call_insn, rtx call_fusage)
    4513              : {
    4514      6195436 :   gcc_assert (call_insn && CALL_P (call_insn));
    4515              : 
    4516              :   /* Put the register usage information on the CALL.  If there is already
    4517              :      some usage information, put ours at the end.  */
    4518      6195436 :   if (CALL_INSN_FUNCTION_USAGE (call_insn))
    4519              :     {
    4520              :       rtx link;
    4521              : 
    4522      1353205 :       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
    4523              :            link = XEXP (link, 1))
    4524              :         ;
    4525              : 
    4526       595756 :       XEXP (link, 1) = call_fusage;
    4527              :     }
    4528              :   else
    4529      5599680 :     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
    4530      6195436 : }
    4531              : 
    4532              : /* Delete all insns made since FROM.
    4533              :    FROM becomes the new last instruction.  */
    4534              : 
    4535              : void
    4536      1254809 : delete_insns_since (rtx_insn *from)
    4537              : {
    4538      1254809 :   if (from == 0)
    4539       148041 :     set_first_insn (0);
    4540              :   else
    4541      1106768 :     SET_NEXT_INSN (from) = 0;
    4542      1254809 :   set_last_insn (from);
    4543      1254809 : }
    4544              : 
    4545              : /* This function is deprecated, please use sequences instead.
    4546              : 
    4547              :    Move a consecutive bunch of insns to a different place in the chain.
    4548              :    The insns to be moved are those between FROM and TO.
    4549              :    They are moved to a new position after the insn AFTER.
    4550              :    AFTER must not be FROM or TO or any insn in between.
    4551              : 
    4552              :    This function does not know about SEQUENCEs and hence should not be
    4553              :    called after delay-slot filling has been done.  */
    4554              : 
    4555              : void
    4556      1828852 : reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
    4557              : {
    4558      1828852 :   if (flag_checking)
    4559              :     {
    4560      2463839 :       for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
    4561       635009 :         gcc_assert (after != x);
    4562      1828830 :       gcc_assert (after != to);
    4563              :     }
    4564              : 
    4565              :   /* Splice this bunch out of where it is now.  */
    4566      1828852 :   if (PREV_INSN (from))
    4567      1828852 :     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
    4568      1828852 :   if (NEXT_INSN (to))
    4569      1819331 :     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
    4570      1828852 :   if (get_last_insn () == to)
    4571         9521 :     set_last_insn (PREV_INSN (from));
    4572      1828852 :   if (get_insns () == from)
    4573            0 :     set_first_insn (NEXT_INSN (to));
    4574              : 
    4575              :   /* Make the new neighbors point to it and it to them.  */
    4576      1828852 :   if (NEXT_INSN (after))
    4577      1820524 :     SET_PREV_INSN (NEXT_INSN (after)) = to;
    4578              : 
    4579      1828852 :   SET_NEXT_INSN (to) = NEXT_INSN (after);
    4580      1828852 :   SET_PREV_INSN (from) = after;
    4581      1828852 :   SET_NEXT_INSN (after) = from;
    4582      1828852 :   if (after == get_last_insn ())
    4583         8328 :     set_last_insn (to);
    4584      1828852 : }
    4585              : 
    4586              : /* Same as function above, but take care to update BB boundaries.  */
    4587              : void
    4588      1186356 : reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
    4589              : {
    4590      1186356 :   rtx_insn *prev = PREV_INSN (from);
    4591      1186356 :   basic_block bb, bb2;
    4592              : 
    4593      1186356 :   reorder_insns_nobb (from, to, after);
    4594              : 
    4595      1186356 :   if (!BARRIER_P (after)
    4596      1186356 :       && (bb = BLOCK_FOR_INSN (after)))
    4597              :     {
    4598      1178328 :       rtx_insn *x;
    4599      1178328 :       df_set_bb_dirty (bb);
    4600              : 
    4601      1178328 :       if (!BARRIER_P (from)
    4602      1178328 :           && (bb2 = BLOCK_FOR_INSN (from)))
    4603              :         {
    4604      1178328 :           if (BB_END (bb2) == to)
    4605        17451 :             BB_END (bb2) = prev;
    4606      1178328 :           df_set_bb_dirty (bb2);
    4607              :         }
    4608              : 
    4609      1178328 :       if (BB_END (bb) == after)
    4610       335209 :         BB_END (bb) = to;
    4611              : 
    4612      2421537 :       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
    4613      1243209 :         if (!BARRIER_P (x))
    4614      1243209 :           df_insn_change_bb (x, bb);
    4615              :     }
    4616      1186356 : }
    4617              : 
    4618              : 
    4619              : /* Emit insn(s) of given code and pattern
    4620              :    at a specified place within the doubly-linked list.
    4621              : 
    4622              :    All of the emit_foo global entry points accept an object
    4623              :    X which is either an insn list or a PATTERN of a single
    4624              :    instruction.
    4625              : 
    4626              :    There are thus a few canonical ways to generate code and
    4627              :    emit it at a specific place in the instruction stream.  For
    4628              :    example, consider the instruction named SPOT and the fact that
    4629              :    we would like to emit some instructions before SPOT.  We might
    4630              :    do it like this:
    4631              : 
    4632              :         start_sequence ();
    4633              :         ... emit the new instructions ...
    4634              :         insns_head = end_sequence ();
    4635              : 
    4636              :         emit_insn_before (insns_head, SPOT);
    4637              : 
    4638              :    It used to be common to generate SEQUENCE rtl instead, but that
    4639              :    is a relic of the past which no longer occurs.  The reason is that
    4640              :    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
    4641              :    generated would almost certainly die right after it was created.  */
    4642              : 
    4643              : static rtx_insn *
    4644     11643168 : emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
    4645              :                            basic_block bb,
    4646              :                            rtx_insn *(*make_raw) (rtx))
    4647              : {
    4648     11643168 :   rtx_insn *insn;
    4649              : 
    4650     11643168 :   gcc_assert (before);
    4651              : 
    4652     11643168 :   if (x == NULL_RTX)
    4653              :     return last;
    4654              : 
    4655     11629091 :   switch (GET_CODE (x))
    4656              :     {
    4657     10135455 :     case DEBUG_INSN:
    4658     10135455 :     case INSN:
    4659     10135455 :     case JUMP_INSN:
    4660     10135455 :     case CALL_INSN:
    4661     10135455 :     case CODE_LABEL:
    4662     10135455 :     case BARRIER:
    4663     10135455 :     case NOTE:
    4664     10135455 :       insn = as_a <rtx_insn *> (x);
    4665     32628331 :       while (insn)
    4666              :         {
    4667     12357421 :           rtx_insn *next = NEXT_INSN (insn);
    4668     12357421 :           add_insn_before (insn, before, bb);
    4669     12357421 :           last = insn;
    4670     12357421 :           insn = next;
    4671              :         }
    4672              :       break;
    4673              : 
    4674              : #ifdef ENABLE_RTL_CHECKING
    4675              :     case SEQUENCE:
    4676              :       gcc_unreachable ();
    4677              :       break;
    4678              : #endif
    4679              : 
    4680      1493636 :     default:
    4681      1493636 :       last = (*make_raw) (x);
    4682      1493636 :       add_insn_before (last, before, bb);
    4683      1493636 :       break;
    4684              :     }
    4685              : 
    4686              :   return last;
    4687              : }
    4688              : 
    4689              : /* Make X be output before the instruction BEFORE.  */
    4690              : 
    4691              : rtx_insn *
    4692       477093 : emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
    4693              : {
    4694       477093 :   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
    4695              : }
    4696              : 
    4697              : /* Make an instruction with body X and code JUMP_INSN
    4698              :    and output it before the instruction BEFORE.  */
    4699              : 
    4700              : rtx_jump_insn *
    4701            0 : emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
    4702              : {
    4703            0 :   return as_a <rtx_jump_insn *> (
    4704              :                 emit_pattern_before_noloc (x, before, NULL, NULL,
    4705            0 :                                            make_jump_insn_raw));
    4706              : }
    4707              : 
    4708              : /* Make an instruction with body X and code CALL_INSN
    4709              :    and output it before the instruction BEFORE.  */
    4710              : 
    4711              : rtx_insn *
    4712            0 : emit_call_insn_before_noloc (rtx x, rtx_insn *before)
    4713              : {
    4714            0 :   return emit_pattern_before_noloc (x, before, NULL, NULL,
    4715            0 :                                     make_call_insn_raw);
    4716              : }
    4717              : 
    4718              : /* Make an instruction with body X and code DEBUG_INSN
    4719              :    and output it before the instruction BEFORE.  */
    4720              : 
    4721              : rtx_insn *
    4722            0 : emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
    4723              : {
    4724            0 :   return emit_pattern_before_noloc (x, before, NULL, NULL,
    4725            0 :                                     make_debug_insn_raw);
    4726              : }
    4727              : 
    4728              : /* Make an insn of code BARRIER
    4729              :    and output it before the insn BEFORE.  */
    4730              : 
    4731              : rtx_barrier *
    4732            0 : emit_barrier_before (rtx_insn *before)
    4733              : {
    4734            0 :   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
    4735              : 
    4736            0 :   INSN_UID (insn) = cur_insn_uid++;
    4737              : 
    4738            0 :   add_insn_before (insn, before, NULL);
    4739            0 :   return insn;
    4740              : }
    4741              : 
    4742              : /* Emit the label LABEL before the insn BEFORE.  */
    4743              : 
    4744              : rtx_code_label *
    4745      6625658 : emit_label_before (rtx_code_label *label, rtx_insn *before)
    4746              : {
    4747      6625658 :   gcc_checking_assert (INSN_UID (label) == 0);
    4748      6625658 :   INSN_UID (label) = cur_insn_uid++;
    4749      6625658 :   add_insn_before (label, before, NULL);
    4750      6625658 :   return label;
    4751              : }
    4752              : 
    4753              : /* Helper for emit_insn_after, handles lists of instructions
    4754              :    efficiently.  */
    4755              : 
    4756              : static rtx_insn *
    4757     24699300 : emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
    4758              : {
    4759     24699300 :   rtx_insn *last;
    4760     24699300 :   rtx_insn *after_after;
    4761     24699300 :   if (!bb && !BARRIER_P (after))
    4762     19885595 :     bb = BLOCK_FOR_INSN (after);
    4763              : 
    4764     19885595 :   if (bb)
    4765              :     {
    4766     24699230 :       df_set_bb_dirty (bb);
    4767     68046659 :       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
    4768     18648199 :         if (!BARRIER_P (last))
    4769              :           {
    4770     18646606 :             set_block_for_insn (last, bb);
    4771     18646606 :             df_insn_rescan (last);
    4772              :           }
    4773     24699230 :       if (!BARRIER_P (last))
    4774              :         {
    4775     24699230 :           set_block_for_insn (last, bb);
    4776     24699230 :           df_insn_rescan (last);
    4777              :         }
    4778     24699230 :       if (BB_END (bb) == after)
    4779      9331473 :         BB_END (bb) = last;
    4780              :     }
    4781              :   else
    4782           70 :     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
    4783            0 :       continue;
    4784              : 
    4785     24699300 :   after_after = NEXT_INSN (after);
    4786              : 
    4787     24699300 :   SET_NEXT_INSN (after) = first;
    4788     24699300 :   SET_PREV_INSN (first) = after;
    4789     24699300 :   SET_NEXT_INSN (last) = after_after;
    4790     24699300 :   if (after_after)
    4791     24587250 :     SET_PREV_INSN (after_after) = last;
    4792              : 
    4793     24699300 :   if (after == get_last_insn ())
    4794       112050 :     set_last_insn (last);
    4795              : 
    4796     24699300 :   return last;
    4797              : }
    4798              : 
    4799              : static rtx_insn *
    4800     29666223 : emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
    4801              :                           rtx_insn *(*make_raw)(rtx))
    4802              : {
    4803     29666223 :   rtx_insn *last = after;
    4804              : 
    4805     29666223 :   gcc_assert (after);
    4806              : 
    4807     29666223 :   if (x == NULL_RTX)
    4808              :     return last;
    4809              : 
    4810     29120112 :   switch (GET_CODE (x))
    4811              :     {
    4812     24699300 :     case DEBUG_INSN:
    4813     24699300 :     case INSN:
    4814     24699300 :     case JUMP_INSN:
    4815     24699300 :     case CALL_INSN:
    4816     24699300 :     case CODE_LABEL:
    4817     24699300 :     case BARRIER:
    4818     24699300 :     case NOTE:
    4819     24699300 :       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
    4820     24699300 :       break;
    4821              : 
    4822              : #ifdef ENABLE_RTL_CHECKING
    4823              :     case SEQUENCE:
    4824              :       gcc_unreachable ();
    4825              :       break;
    4826              : #endif
    4827              : 
    4828      4420812 :     default:
    4829      4420812 :       last = (*make_raw) (x);
    4830      4420812 :       add_insn_after (last, after, bb);
    4831      4420812 :       break;
    4832              :     }
    4833              : 
    4834              :   return last;
    4835              : }
    4836              : 
    4837              : /* Make X be output after the insn AFTER and set the BB of insn.  If
    4838              :    BB is NULL, an attempt is made to infer the BB from AFTER.  */
    4839              : 
    4840              : rtx_insn *
    4841      5037266 : emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
    4842              : {
    4843      5037266 :   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
    4844              : }
    4845              : 
    4846              : 
    4847              : /* Make an insn of code JUMP_INSN with body X
    4848              :    and output it after the insn AFTER.  */
    4849              : 
    4850              : rtx_jump_insn *
    4851         1999 : emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
    4852              : {
    4853         1999 :   return as_a <rtx_jump_insn *> (
    4854         1999 :                 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
    4855              : }
    4856              : 
    4857              : /* Make an instruction with body X and code CALL_INSN
    4858              :    and output it after the instruction AFTER.  */
    4859              : 
    4860              : rtx_insn *
    4861            0 : emit_call_insn_after_noloc (rtx x, rtx_insn *after)
    4862              : {
    4863            0 :   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
    4864              : }
    4865              : 
    4866              : /* Make an instruction with body X and code CALL_INSN
    4867              :    and output it after the instruction AFTER.  */
    4868              : 
    4869              : rtx_insn *
    4870            0 : emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
    4871              : {
    4872            0 :   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
    4873              : }
    4874              : 
    4875              : /* Make an insn of code BARRIER
    4876              :    and output it after the insn AFTER.  */
    4877              : 
    4878              : rtx_barrier *
    4879      7651787 : emit_barrier_after (rtx_insn *after)
    4880              : {
    4881      7651787 :   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
    4882              : 
    4883      7651787 :   INSN_UID (insn) = cur_insn_uid++;
    4884              : 
    4885      7651787 :   add_insn_after (insn, after, NULL);
    4886      7651787 :   return insn;
    4887              : }
    4888              : 
    4889              : /* Emit the label LABEL after the insn AFTER.  */
    4890              : 
    4891              : rtx_insn *
    4892            0 : emit_label_after (rtx_insn *label, rtx_insn *after)
    4893              : {
    4894            0 :   gcc_checking_assert (INSN_UID (label) == 0);
    4895            0 :   INSN_UID (label) = cur_insn_uid++;
    4896            0 :   add_insn_after (label, after, NULL);
    4897            0 :   return label;
    4898              : }
    4899              : 
    4900              : /* Notes require a bit of special handling: Some notes need to have their
    4901              :    BLOCK_FOR_INSN set, others should never have it set, and some should
    4902              :    have it set or clear depending on the context.   */
    4903              : 
    4904              : /* Return true iff a note of kind SUBTYPE should be emitted with routines
    4905              :    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
    4906              :    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
    4907              : 
    4908              : static bool
    4909    147749034 : note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
    4910              : {
    4911            0 :   switch (subtype)
    4912              :     {
    4913              :       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
    4914              :       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
    4915              :         return true;
    4916              : 
    4917              :       /* Notes for var tracking and EH region markers can appear between or
    4918              :          inside basic blocks.  If the caller is emitting on the basic block
    4919              :          boundary, do not set BLOCK_FOR_INSN on the new note.  */
    4920     59953212 :       case NOTE_INSN_VAR_LOCATION:
    4921     59953212 :       case NOTE_INSN_EH_REGION_BEG:
    4922     59953212 :       case NOTE_INSN_EH_REGION_END:
    4923            0 :         return on_bb_boundary_p;
    4924              : 
    4925              :       /* Otherwise, BLOCK_FOR_INSN must be set.  */
    4926            0 :       default:
    4927            0 :         return false;
    4928              :     }
    4929              : }
    4930              : 
    4931              : /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
    4932              : 
    4933              : rtx_note *
    4934     53399096 : emit_note_after (enum insn_note subtype, rtx_insn *after)
    4935              : {
    4936     53399096 :   rtx_note *note = make_note_raw (subtype);
    4937     53399096 :   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
    4938     53367956 :   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
    4939              : 
    4940     53399096 :   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    4941      1325066 :     add_insn_after_nobb (note, after);
    4942              :   else
    4943     52074030 :     add_insn_after (note, after, bb);
    4944     53399096 :   return note;
    4945              : }
    4946              : 
    4947              : /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
    4948              : 
    4949              : rtx_note *
    4950     94349938 : emit_note_before (enum insn_note subtype, rtx_insn *before)
    4951              : {
    4952     94349938 :   rtx_note *note = make_note_raw (subtype);
    4953     94349938 :   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
    4954     94349937 :   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
    4955              : 
    4956     94349938 :   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    4957     22196906 :     add_insn_before_nobb (note, before);
    4958              :   else
    4959     72153032 :     add_insn_before (note, before, bb);
    4960     94349938 :   return note;
    4961              : }
    4962              : 
    4963              : /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
    4964              :    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
    4965              : 
    4966              : static rtx_insn *
    4967     22498938 : emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
    4968              :                            rtx_insn *(*make_raw) (rtx))
    4969              : {
    4970     22498938 :   rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
    4971              : 
    4972     22498938 :   if (pattern == NULL_RTX || !loc)
    4973              :     return last;
    4974              : 
    4975     15850587 :   after = NEXT_INSN (after);
    4976      4245899 :   while (1)
    4977              :     {
    4978     20096486 :       if (active_insn_p (after)
    4979     18942002 :           && !JUMP_TABLE_DATA_P (after) /* FIXME */
    4980     39038488 :           && !INSN_LOCATION (after))
    4981     18935946 :         INSN_LOCATION (after) = loc;
    4982     20096486 :       if (after == last)
    4983              :         break;
    4984      4245899 :       after = NEXT_INSN (after);
    4985              :     }
    4986              :   return last;
    4987              : }
    4988              : 
    4989              : /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
    4990              :    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
    4991              :    any DEBUG_INSNs.  */
    4992              : 
    4993              : static rtx_insn *
    4994     10897278 : emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
    4995              :                     rtx_insn *(*make_raw) (rtx))
    4996              : {
    4997     10897278 :   rtx_insn *prev = after;
    4998              : 
    4999     10897278 :   if (skip_debug_insns)
    5000     11433721 :     while (DEBUG_INSN_P (prev))
    5001      2051447 :       prev = PREV_INSN (prev);
    5002              : 
    5003     10897278 :   if (INSN_P (prev))
    5004      8769258 :     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
    5005      8769258 :                                       make_raw);
    5006              :   else
    5007      2128020 :     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
    5008              : }
    5009              : 
    5010              : /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
    5011              : rtx_insn *
    5012      8446801 : emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
    5013              : {
    5014      8446801 :   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
    5015              : }
    5016              : 
    5017              : /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
    5018              : rtx_insn *
    5019      8756171 : emit_insn_after (rtx pattern, rtx_insn *after)
    5020              : {
    5021      8756171 :   return emit_pattern_after (pattern, after, true, make_insn_raw);
    5022              : }
    5023              : 
    5024              : /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
    5025              : rtx_jump_insn *
    5026      5282879 : emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
    5027              : {
    5028      5282879 :   return as_a <rtx_jump_insn *> (
    5029      5282879 :         emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
    5030              : }
    5031              : 
    5032              : /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
    5033              : rtx_jump_insn *
    5034       600654 : emit_jump_insn_after (rtx pattern, rtx_insn *after)
    5035              : {
    5036       600654 :   return as_a <rtx_jump_insn *> (
    5037       600654 :         emit_pattern_after (pattern, after, true, make_jump_insn_raw));
    5038              : }
    5039              : 
    5040              : /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
    5041              : rtx_insn *
    5042            0 : emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
    5043              : {
    5044            0 :   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
    5045              : }
    5046              : 
    5047              : /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
    5048              : rtx_insn *
    5049        25449 : emit_call_insn_after (rtx pattern, rtx_insn *after)
    5050              : {
    5051        25449 :   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
    5052              : }
    5053              : 
    5054              : /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
    5055              : rtx_insn *
    5056            0 : emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
    5057              : {
    5058            0 :   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
    5059              : }
    5060              : 
    5061              : /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
    5062              : rtx_insn *
    5063      1515004 : emit_debug_insn_after (rtx pattern, rtx_insn *after)
    5064              : {
    5065      1515004 :   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
    5066              : }
    5067              : 
    5068              : /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
    5069              :    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
    5070              :    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
    5071              :    CALL_INSN, etc.  */
    5072              : 
    5073              : static rtx_insn *
    5074     10576211 : emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
    5075              :                             bool insnp, rtx_insn *(*make_raw) (rtx))
    5076              : {
    5077     10576211 :   rtx_insn *first = PREV_INSN (before);
    5078     10917379 :   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
    5079              :                                               insnp ? before : NULL,
    5080              :                                               NULL, make_raw);
    5081              : 
    5082     10576211 :   if (pattern == NULL_RTX || !loc)
    5083              :     return last;
    5084              : 
    5085      9552987 :   if (!first)
    5086        82317 :     first = get_insns ();
    5087              :   else
    5088      9470670 :     first = NEXT_INSN (first);
    5089      1517582 :   while (1)
    5090              :     {
    5091     11070569 :       if (active_insn_p (first)
    5092     10513367 :           && !JUMP_TABLE_DATA_P (first) /* FIXME */
    5093     21583936 :           && !INSN_LOCATION (first))
    5094     10293603 :         INSN_LOCATION (first) = loc;
    5095     11070569 :       if (first == last)
    5096              :         break;
    5097      1517582 :       first = NEXT_INSN (first);
    5098              :     }
    5099              :   return last;
    5100              : }
    5101              : 
    5102              : /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
    5103              :    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
    5104              :    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
    5105              :    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
    5106              : 
    5107              : static rtx_insn *
    5108     10803729 : emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
    5109              :                      bool insnp, rtx_insn *(*make_raw) (rtx))
    5110              : {
    5111     10803729 :   rtx_insn *next = before;
    5112              : 
    5113     10803729 :   if (skip_debug_insns)
    5114     10465742 :     while (DEBUG_INSN_P (next))
    5115           31 :       next = PREV_INSN (next);
    5116              : 
    5117     10803729 :   if (INSN_P (next))
    5118     10213865 :     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
    5119     10213865 :                                        insnp, make_raw);
    5120              :   else
    5121       591145 :     return emit_pattern_before_noloc (pattern, before,
    5122              :                                       insnp ? before : NULL,
    5123       589864 :                                       NULL, make_raw);
    5124              : }
    5125              : 
    5126              : /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
    5127              : rtx_insn *
    5128       362346 : emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
    5129              : {
    5130       362346 :   return emit_pattern_before_setloc (pattern, before, loc, true,
    5131       362346 :                                      make_insn_raw);
    5132              : }
    5133              : 
    5134              : /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
    5135              : rtx_insn *
    5136     10461280 : emit_insn_before (rtx pattern, rtx_insn *before)
    5137              : {
    5138     10461280 :   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
    5139              : }
    5140              : 
    5141              : /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
    5142              : rtx_jump_insn *
    5143            0 : emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
    5144              : {
    5145            0 :   return as_a <rtx_jump_insn *> (
    5146              :         emit_pattern_before_setloc (pattern, before, loc, false,
    5147            0 :                                     make_jump_insn_raw));
    5148              : }
    5149              : 
    5150              : /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
    5151              : rtx_jump_insn *
    5152         4431 : emit_jump_insn_before (rtx pattern, rtx_insn *before)
    5153              : {
    5154         4431 :   return as_a <rtx_jump_insn *> (
    5155              :         emit_pattern_before (pattern, before, true, false,
    5156         4431 :                              make_jump_insn_raw));
    5157              : }
    5158              : 
    5159              : /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
    5160              : rtx_insn *
    5161            0 : emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
    5162              : {
    5163            0 :   return emit_pattern_before_setloc (pattern, before, loc, false,
    5164            0 :                                      make_call_insn_raw);
    5165              : }
    5166              : 
    5167              : /* Like emit_call_insn_before_noloc,
    5168              :    but set insn_location according to BEFORE.  */
    5169              : rtx_insn *
    5170            0 : emit_call_insn_before (rtx pattern, rtx_insn *before)
    5171              : {
    5172            0 :   return emit_pattern_before (pattern, before, true, false,
    5173            0 :                               make_call_insn_raw);
    5174              : }
    5175              : 
    5176              : /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
    5177              : rtx_insn *
    5178            0 : emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
    5179              : {
    5180            0 :   return emit_pattern_before_setloc (pattern, before, loc, false,
    5181            0 :                                      make_debug_insn_raw);
    5182              : }
    5183              : 
    5184              : /* Like emit_debug_insn_before_noloc,
    5185              :    but set insn_location according to BEFORE.  */
    5186              : rtx_insn *
    5187       338018 : emit_debug_insn_before (rtx pattern, rtx_insn *before)
    5188              : {
    5189       338018 :   return emit_pattern_before (pattern, before, false, false,
    5190       338018 :                               make_debug_insn_raw);
    5191              : }
    5192              : 
    5193              : /* Take X and emit it at the end of the doubly-linked
    5194              :    INSN list.
    5195              : 
    5196              :    Returns the last insn emitted.  */
    5197              : 
    5198              : rtx_insn *
    5199    243355337 : emit_insn (rtx x)
    5200              : {
    5201    243355337 :   rtx_insn *last = get_last_insn ();
    5202    243355337 :   rtx_insn *insn;
    5203              : 
    5204    243355337 :   if (x == NULL_RTX)
    5205              :     return last;
    5206              : 
    5207    238763931 :   switch (GET_CODE (x))
    5208              :     {
    5209    116865076 :     case DEBUG_INSN:
    5210    116865076 :     case INSN:
    5211    116865076 :     case JUMP_INSN:
    5212    116865076 :     case CALL_INSN:
    5213    116865076 :     case CODE_LABEL:
    5214    116865076 :     case BARRIER:
    5215    116865076 :     case NOTE:
    5216    116865076 :       insn = as_a <rtx_insn *> (x);
    5217    397237356 :       while (insn)
    5218              :         {
    5219    163507204 :           rtx_insn *next = NEXT_INSN (insn);
    5220    163507204 :           add_insn (insn);
    5221    163507204 :           last = insn;
    5222    163507204 :           insn = next;
    5223              :         }
    5224              :       break;
    5225              : 
    5226              : #ifdef ENABLE_RTL_CHECKING
    5227              :     case JUMP_TABLE_DATA:
    5228              :     case SEQUENCE:
    5229              :       gcc_unreachable ();
    5230              :       break;
    5231              : #endif
    5232              : 
    5233    121898855 :     default:
    5234    121898855 :       last = make_insn_raw (x);
    5235    121898855 :       add_insn (last);
    5236    121898855 :       break;
    5237              :     }
    5238              : 
    5239              :   return last;
    5240              : }
    5241              : 
    5242              : /* Make an insn of code DEBUG_INSN with pattern X
    5243              :    and add it to the end of the doubly-linked list.  */
    5244              : 
    5245              : rtx_insn *
    5246     48758714 : emit_debug_insn (rtx x)
    5247              : {
    5248     48758714 :   rtx_insn *last = get_last_insn ();
    5249     48758714 :   rtx_insn *insn;
    5250              : 
    5251     48758714 :   if (x == NULL_RTX)
    5252              :     return last;
    5253              : 
    5254     48758714 :   switch (GET_CODE (x))
    5255              :     {
    5256            0 :     case DEBUG_INSN:
    5257            0 :     case INSN:
    5258            0 :     case JUMP_INSN:
    5259            0 :     case CALL_INSN:
    5260            0 :     case CODE_LABEL:
    5261            0 :     case BARRIER:
    5262            0 :     case NOTE:
    5263            0 :       insn = as_a <rtx_insn *> (x);
    5264            0 :       while (insn)
    5265              :         {
    5266            0 :           rtx_insn *next = NEXT_INSN (insn);
    5267            0 :           add_insn (insn);
    5268            0 :           last = insn;
    5269            0 :           insn = next;
    5270              :         }
    5271              :       break;
    5272              : 
    5273              : #ifdef ENABLE_RTL_CHECKING
    5274              :     case JUMP_TABLE_DATA:
    5275              :     case SEQUENCE:
    5276              :       gcc_unreachable ();
    5277              :       break;
    5278              : #endif
    5279              : 
    5280     48758714 :     default:
    5281     48758714 :       last = make_debug_insn_raw (x);
    5282     48758714 :       add_insn (last);
    5283     48758714 :       break;
    5284              :     }
    5285              : 
    5286              :   return last;
    5287              : }
    5288              : 
    5289              : /* Make an insn of code JUMP_INSN with pattern X
    5290              :    and add it to the end of the doubly-linked list.  */
    5291              : 
    5292              : rtx_insn *
    5293     29445476 : emit_jump_insn (rtx x)
    5294              : {
    5295     29445476 :   rtx_insn *last = NULL;
    5296     29445476 :   rtx_insn *insn;
    5297              : 
    5298     29445476 :   switch (GET_CODE (x))
    5299              :     {
    5300     11826910 :     case DEBUG_INSN:
    5301     11826910 :     case INSN:
    5302     11826910 :     case JUMP_INSN:
    5303     11826910 :     case CALL_INSN:
    5304     11826910 :     case CODE_LABEL:
    5305     11826910 :     case BARRIER:
    5306     11826910 :     case NOTE:
    5307     11826910 :       insn = as_a <rtx_insn *> (x);
    5308     45009421 :       while (insn)
    5309              :         {
    5310     21355601 :           rtx_insn *next = NEXT_INSN (insn);
    5311     21355601 :           add_insn (insn);
    5312     21355601 :           last = insn;
    5313     21355601 :           insn = next;
    5314              :         }
    5315              :       break;
    5316              : 
    5317              : #ifdef ENABLE_RTL_CHECKING
    5318              :     case JUMP_TABLE_DATA:
    5319              :     case SEQUENCE:
    5320              :       gcc_unreachable ();
    5321              :       break;
    5322              : #endif
    5323              : 
    5324     17618566 :     default:
    5325     17618566 :       last = make_jump_insn_raw (x);
    5326     17618566 :       add_insn (last);
    5327     17618566 :       break;
    5328              :     }
    5329              : 
    5330     29445476 :   return last;
    5331              : }
    5332              : 
    5333              : /* Make an insn of code JUMP_INSN with pattern X,
    5334              :    add a REG_BR_PROB note that indicates very likely probability,
    5335              :    and add it to the end of the doubly-linked list.  */
    5336              : 
    5337              : rtx_insn *
    5338            0 : emit_likely_jump_insn (rtx x)
    5339              : {
    5340            0 :   rtx_insn *jump = emit_jump_insn (x);
    5341            0 :   add_reg_br_prob_note (jump, profile_probability::very_likely ());
    5342            0 :   return jump;
    5343              : }
    5344              : 
    5345              : /* Make an insn of code JUMP_INSN with pattern X,
    5346              :    add a REG_BR_PROB note that indicates very unlikely probability,
    5347              :    and add it to the end of the doubly-linked list.  */
    5348              : 
    5349              : rtx_insn *
    5350            0 : emit_unlikely_jump_insn (rtx x)
    5351              : {
    5352            0 :   rtx_insn *jump = emit_jump_insn (x);
    5353            0 :   add_reg_br_prob_note (jump, profile_probability::very_unlikely ());
    5354            0 :   return jump;
    5355              : }
    5356              : 
    5357              : /* Make an insn of code CALL_INSN with pattern X
    5358              :    and add it to the end of the doubly-linked list.  */
    5359              : 
    5360              : rtx_insn *
    5361      6230476 : emit_call_insn (rtx x)
    5362              : {
    5363      6230476 :   rtx_insn *insn;
    5364              : 
    5365      6230476 :   switch (GET_CODE (x))
    5366              :     {
    5367         4400 :     case DEBUG_INSN:
    5368         4400 :     case INSN:
    5369         4400 :     case JUMP_INSN:
    5370         4400 :     case CALL_INSN:
    5371         4400 :     case CODE_LABEL:
    5372         4400 :     case BARRIER:
    5373         4400 :     case NOTE:
    5374         4400 :       insn = emit_insn (x);
    5375         4400 :       break;
    5376              : 
    5377              : #ifdef ENABLE_RTL_CHECKING
    5378              :     case SEQUENCE:
    5379              :     case JUMP_TABLE_DATA:
    5380              :       gcc_unreachable ();
    5381              :       break;
    5382              : #endif
    5383              : 
    5384      6226076 :     default:
    5385      6226076 :       insn = make_call_insn_raw (x);
    5386      6226076 :       add_insn (insn);
    5387      6226076 :       break;
    5388              :     }
    5389              : 
    5390      6230476 :   return insn;
    5391              : }
    5392              : 
    5393              : /* Add the label LABEL to the end of the doubly-linked list.  */
    5394              : 
    5395              : rtx_code_label *
    5396      8703698 : emit_label (rtx uncast_label)
    5397              : {
    5398      8703698 :   rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
    5399              : 
    5400      8703698 :   gcc_checking_assert (INSN_UID (label) == 0);
    5401      8703698 :   INSN_UID (label) = cur_insn_uid++;
    5402      8703698 :   add_insn (label);
    5403      8703698 :   return label;
    5404              : }
    5405              : 
    5406              : /* Make an insn of code JUMP_TABLE_DATA
    5407              :    and add it to the end of the doubly-linked list.  */
    5408              : 
    5409              : rtx_jump_table_data *
    5410         8028 : emit_jump_table_data (rtx table)
    5411              : {
    5412         8028 :   rtx_jump_table_data *jump_table_data =
    5413         8028 :     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
    5414         8028 :   INSN_UID (jump_table_data) = cur_insn_uid++;
    5415         8028 :   PATTERN (jump_table_data) = table;
    5416         8028 :   BLOCK_FOR_INSN (jump_table_data) = NULL;
    5417         8028 :   add_insn (jump_table_data);
    5418         8028 :   return jump_table_data;
    5419              : }
    5420              : 
    5421              : /* Make an insn of code BARRIER
    5422              :    and add it to the end of the doubly-linked list.  */
    5423              : 
    5424              : rtx_barrier *
    5425      3531607 : emit_barrier (void)
    5426              : {
    5427      3531607 :   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
    5428      3531607 :   INSN_UID (barrier) = cur_insn_uid++;
    5429      3531607 :   add_insn (barrier);
    5430      3531607 :   return barrier;
    5431              : }
    5432              : 
    5433              : /* Emit a copy of note ORIG.  */
    5434              : 
    5435              : rtx_note *
    5436       176484 : emit_note_copy (rtx_note *orig)
    5437              : {
    5438       176484 :   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
    5439       176484 :   rtx_note *note = make_note_raw (kind);
    5440       176484 :   NOTE_DATA (note) = NOTE_DATA (orig);
    5441       176484 :   add_insn (note);
    5442       176484 :   return note;
    5443              : }
    5444              : 
    5445              : /* Make an insn of code NOTE or type NOTE_NO
    5446              :    and add it to the end of the doubly-linked list.  */
    5447              : 
    5448              : rtx_note *
    5449     16440143 : emit_note (enum insn_note kind)
    5450              : {
    5451     16440143 :   rtx_note *note = make_note_raw (kind);
    5452     16440143 :   add_insn (note);
    5453     16440143 :   return note;
    5454              : }
    5455              : 
    5456              : /* Emit a clobber of lvalue X.  */
    5457              : 
    5458              : rtx_insn *
    5459       538116 : emit_clobber (rtx x)
    5460              : {
    5461              :   /* CONCATs should not appear in the insn stream.  */
    5462       538116 :   if (GET_CODE (x) == CONCAT)
    5463              :     {
    5464            0 :       emit_clobber (XEXP (x, 0));
    5465            0 :       return emit_clobber (XEXP (x, 1));
    5466              :     }
    5467       538116 :   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    5468              : }
    5469              : 
    5470              : /* Return a sequence of insns to clobber lvalue X.  */
    5471              : 
    5472              : rtx_insn *
    5473            0 : gen_clobber (rtx x)
    5474              : {
    5475            0 :   rtx_insn *seq;
    5476              : 
    5477            0 :   start_sequence ();
    5478            0 :   emit_clobber (x);
    5479            0 :   seq = end_sequence ();
    5480            0 :   return seq;
    5481              : }
    5482              : 
    5483              : /* Emit a use of rvalue X.  */
    5484              : 
    5485              : rtx_insn *
    5486       789355 : emit_use (rtx x)
    5487              : {
    5488              :   /* CONCATs should not appear in the insn stream.  */
    5489       789355 :   if (GET_CODE (x) == CONCAT)
    5490              :     {
    5491            0 :       emit_use (XEXP (x, 0));
    5492            0 :       return emit_use (XEXP (x, 1));
    5493              :     }
    5494       789355 :   return emit_insn (gen_rtx_USE (VOIDmode, x));
    5495              : }
    5496              : 
    5497              : /* Return a sequence of insns to use rvalue X.  */
    5498              : 
    5499              : rtx_insn *
    5500            0 : gen_use (rtx x)
    5501              : {
    5502            0 :   rtx_insn *seq;
    5503              : 
    5504            0 :   start_sequence ();
    5505            0 :   emit_use (x);
    5506            0 :   seq = end_sequence ();
    5507            0 :   return seq;
    5508              : }
    5509              : 
    5510              : /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
    5511              :    Return the set in INSN that such notes describe, or NULL if the notes
    5512              :    have no meaning for INSN.  */
    5513              : 
    5514              : rtx
    5515    228586454 : set_for_reg_notes (rtx insn)
    5516              : {
    5517    228586454 :   rtx pat, reg;
    5518              : 
    5519    228586454 :   if (!INSN_P (insn))
    5520              :     return NULL_RTX;
    5521              : 
    5522    228581957 :   pat = PATTERN (insn);
    5523    228581957 :   if (GET_CODE (pat) == PARALLEL)
    5524              :     {
    5525              :       /* We do not use single_set because that ignores SETs of unused
    5526              :          registers.  REG_EQUAL and REG_EQUIV notes really do require the
    5527              :          PARALLEL to have a single SET.  */
    5528     18432186 :       if (multiple_sets (insn))
    5529              :         return NULL_RTX;
    5530     17754318 :       pat = XVECEXP (pat, 0, 0);
    5531              :     }
    5532              : 
    5533    227904089 :   if (GET_CODE (pat) != SET)
    5534              :     return NULL_RTX;
    5535              : 
    5536    128465052 :   reg = SET_DEST (pat);
    5537              : 
    5538              :   /* Notes apply to the contents of a STRICT_LOW_PART.  */
    5539    128465052 :   if (GET_CODE (reg) == STRICT_LOW_PART
    5540    128458962 :       || GET_CODE (reg) == ZERO_EXTRACT)
    5541         8117 :     reg = XEXP (reg, 0);
    5542              : 
    5543              :   /* Check that we have a register.  */
    5544    128465052 :   if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    5545              :     return NULL_RTX;
    5546              : 
    5547              :   return pat;
    5548              : }
    5549              : 
    5550              : /* Place a note of KIND on insn INSN with DATUM as the datum. If a
    5551              :    note of this type already exists, remove it first.  */
    5552              : 
    5553              : rtx
    5554     22694697 : set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
    5555              : {
    5556     22694697 :   rtx note = find_reg_note (insn, kind, NULL_RTX);
    5557              : 
    5558     22694697 :   switch (kind)
    5559              :     {
    5560     22694697 :     case REG_EQUAL:
    5561     22694697 :     case REG_EQUIV:
    5562              :       /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
    5563     22694697 :       if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
    5564              :         return NULL_RTX;
    5565              : 
    5566              :       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
    5567              :          It serves no useful purpose and breaks eliminate_regs.  */
    5568     22659974 :       if (GET_CODE (datum) == ASM_OPERANDS)
    5569              :         return NULL_RTX;
    5570              : 
    5571              :       /* Notes with side effects are dangerous.  Even if the side-effect
    5572              :          initially mirrors one in PATTERN (INSN), later optimizations
    5573              :          might alter the way that the final register value is calculated
    5574              :          and so move or alter the side-effect in some way.  The note would
    5575              :          then no longer be a valid substitution for SET_SRC.  */
    5576     22650529 :       if (side_effects_p (datum))
    5577              :         return NULL_RTX;
    5578              :       break;
    5579              : 
    5580              :     default:
    5581              :       break;
    5582              :     }
    5583              : 
    5584     22650063 :   if (note)
    5585      7701839 :     XEXP (note, 0) = datum;
    5586              :   else
    5587              :     {
    5588     14948224 :       add_reg_note (insn, kind, datum);
    5589     14948224 :       note = REG_NOTES (insn);
    5590              :     }
    5591              : 
    5592     22650063 :   switch (kind)
    5593              :     {
    5594     22650063 :     case REG_EQUAL:
    5595     22650063 :     case REG_EQUIV:
    5596     22650063 :       df_notes_rescan (as_a <rtx_insn *> (insn));
    5597     22650063 :       break;
    5598              :     default:
    5599              :       break;
    5600              :     }
    5601              : 
    5602              :   return note;
    5603              : }
    5604              : 
    5605              : /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
    5606              : rtx
    5607      1392759 : set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
    5608              : {
    5609      1392759 :   rtx set = set_for_reg_notes (insn);
    5610              : 
    5611      1392759 :   if (set && SET_DEST (set) == dst)
    5612      1377923 :     return set_unique_reg_note (insn, kind, datum);
    5613              :   return NULL_RTX;
    5614              : }
    5615              : 
    5616              : /* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
    5617              :    following barrier if the instruction needs one and if ALLOW_BARRIER_P
    5618              :    is true.
    5619              : 
    5620              :    If X is a label, it is simply added into the insn chain.  */
    5621              : 
    5622              : rtx_insn *
    5623     18673411 : emit (rtx x, bool allow_barrier_p)
    5624              : {
    5625     18673411 :   enum rtx_code code = classify_insn (x);
    5626              : 
    5627     18673411 :   switch (code)
    5628              :     {
    5629            0 :     case CODE_LABEL:
    5630            0 :       return emit_label (x);
    5631      9257848 :     case INSN:
    5632      9257848 :       return emit_insn (x);
    5633      9409286 :     case  JUMP_INSN:
    5634      9409286 :       {
    5635      9409286 :         rtx_insn *insn = emit_jump_insn (x);
    5636      9409286 :         if (allow_barrier_p
    5637      9409286 :             && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
    5638            0 :           return emit_barrier ();
    5639              :         return insn;
    5640              :       }
    5641         6277 :     case CALL_INSN:
    5642         6277 :       return emit_call_insn (x);
    5643            0 :     case DEBUG_INSN:
    5644            0 :       return emit_debug_insn (x);
    5645            0 :     default:
    5646            0 :       gcc_unreachable ();
    5647              :     }
    5648              : }
    5649              : 
    5650              : /* Space for free sequence stack entries.  */
    5651              : static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
    5652              : 
    5653              : /* Begin emitting insns to a sequence.  If this sequence will contain
    5654              :    something that might cause the compiler to pop arguments to function
    5655              :    calls (because those pops have previously been deferred; see
    5656              :    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
    5657              :    before calling this function.  That will ensure that the deferred
    5658              :    pops are not accidentally emitted in the middle of this sequence.  */
    5659              : 
    5660              : void
    5661    184942399 : start_sequence (void)
    5662              : {
    5663    184942399 :   struct sequence_stack *tem;
    5664              : 
    5665    184942399 :   if (free_sequence_stack != NULL)
    5666              :     {
    5667    184298768 :       tem = free_sequence_stack;
    5668    184298768 :       free_sequence_stack = tem->next;
    5669              :     }
    5670              :   else
    5671       643631 :     tem = ggc_alloc<sequence_stack> ();
    5672              : 
    5673    184942399 :   tem->next = get_current_sequence ()->next;
    5674    184942399 :   tem->first = get_insns ();
    5675    184942399 :   tem->last = get_last_insn ();
    5676    184942399 :   get_current_sequence ()->next = tem;
    5677              : 
    5678    184942399 :   set_first_insn (0);
    5679    184942399 :   set_last_insn (0);
    5680    184942399 : }
    5681              : 
    5682              : /* Set up the insn chain starting with FIRST as the current sequence,
    5683              :    saving the previously current one.  See the documentation for
    5684              :    start_sequence for more information about how to use this function.  */
    5685              : 
    5686              : void
    5687      5456139 : push_to_sequence (rtx_insn *first)
    5688              : {
    5689      5456139 :   rtx_insn *last;
    5690              : 
    5691      5456139 :   start_sequence ();
    5692              : 
    5693     14428896 :   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    5694              :     ;
    5695              : 
    5696      5456139 :   set_first_insn (first);
    5697      5456139 :   set_last_insn (last);
    5698      5456139 : }
    5699              : 
    5700              : /* Like push_to_sequence, but take the last insn as an argument to avoid
    5701              :    looping through the list.  */
    5702              : 
    5703              : void
    5704        76380 : push_to_sequence2 (rtx_insn *first, rtx_insn *last)
    5705              : {
    5706        76380 :   start_sequence ();
    5707              : 
    5708        76380 :   set_first_insn (first);
    5709        76380 :   set_last_insn (last);
    5710        76380 : }
    5711              : 
    5712              : /* Set up the outer-level insn chain
    5713              :    as the current sequence, saving the previously current one.  */
    5714              : 
    5715              : void
    5716          476 : push_topmost_sequence (void)
    5717              : {
    5718          476 :   struct sequence_stack *top;
    5719              : 
    5720          476 :   start_sequence ();
    5721              : 
    5722          476 :   top = get_topmost_sequence ();
    5723          476 :   set_first_insn (top->first);
    5724          476 :   set_last_insn (top->last);
    5725          476 : }
    5726              : 
    5727              : /* After emitting to the outer-level insn chain, update the outer-level
    5728              :    insn chain, and restore the previous saved state.  */
    5729              : 
    5730              : void
    5731          476 : pop_topmost_sequence (void)
    5732              : {
    5733          476 :   struct sequence_stack *top;
    5734              : 
    5735          476 :   top = get_topmost_sequence ();
    5736          476 :   top->first = get_insns ();
    5737          476 :   top->last = get_last_insn ();
    5738              : 
    5739          476 :   end_sequence ();
    5740          476 : }
    5741              : 
    5742              : /* After emitting to a sequence, restore the previous saved state and return
    5743              :    the start of the completed sequence.
    5744              : 
    5745              :    If the compiler might have deferred popping arguments while
    5746              :    generating this sequence, and this sequence will not be immediately
    5747              :    inserted into the instruction stream, use do_pending_stack_adjust
    5748              :    before calling this function.  That will ensure that the deferred
    5749              :    pops are inserted into this sequence, and not into some random
    5750              :    location in the instruction stream.  See INHIBIT_DEFER_POP for more
    5751              :    information about deferred popping of arguments.  */
    5752              : 
    5753              : rtx_insn *
    5754    184942398 : end_sequence (void)
    5755              : {
    5756    184942398 :   rtx_insn *insns = get_insns ();
    5757              : 
    5758    184942398 :   struct sequence_stack *tem = get_current_sequence ()->next;
    5759              : 
    5760    184942398 :   set_first_insn (tem->first);
    5761    184942398 :   set_last_insn (tem->last);
    5762    184942398 :   get_current_sequence ()->next = tem->next;
    5763              : 
    5764    184942398 :   memset (tem, 0, sizeof (*tem));
    5765    184942398 :   tem->next = free_sequence_stack;
    5766    184942398 :   free_sequence_stack = tem;
    5767              : 
    5768    184942398 :   return insns;
    5769              : }
    5770              : 
    5771              : /* Return true if currently emitting into a sequence.  */
    5772              : 
    5773              : bool
    5774      4900841 : in_sequence_p (void)
    5775              : {
    5776      4900841 :   return get_current_sequence ()->next != 0;
    5777              : }
    5778              : 
    5779              : /* Put the various virtual registers into REGNO_REG_RTX.  */
    5780              : 
    5781              : static void
    5782      1691207 : init_virtual_regs (void)
    5783              : {
    5784      1691207 :   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
    5785      1691207 :   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
    5786      1691207 :   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
    5787      1691207 :   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
    5788      1691207 :   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
    5789      1691207 :   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    5790      1691207 :     = virtual_preferred_stack_boundary_rtx;
    5791      1691207 : }
    5792              : 
    5793              : 
    5794              : /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
    5795              : static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
    5796              : static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
    5797              : static int copy_insn_n_scratches;
    5798              : 
    5799              : /* When an insn is being copied by copy_insn_1, this is nonzero if we have
    5800              :    copied an ASM_OPERANDS.
    5801              :    In that case, it is the original input-operand vector.  */
    5802              : static rtvec orig_asm_operands_vector;
    5803              : 
    5804              : /* When an insn is being copied by copy_insn_1, this is nonzero if we have
    5805              :    copied an ASM_OPERANDS.
    5806              :    In that case, it is the copied input-operand vector.  */
    5807              : static rtvec copy_asm_operands_vector;
    5808              : 
    5809              : /* Likewise for the constraints vector.  */
    5810              : static rtvec orig_asm_constraints_vector;
    5811              : static rtvec copy_asm_constraints_vector;
    5812              : 
    5813              : /* Recursively create a new copy of an rtx for copy_insn.
    5814              :    This function differs from copy_rtx in that it handles SCRATCHes and
    5815              :    ASM_OPERANDs properly.
    5816              :    Normally, this function is not used directly; use copy_insn as front end.
    5817              :    However, you could first copy an insn pattern with copy_insn and then use
    5818              :    this function afterwards to properly copy any REG_NOTEs containing
    5819              :    SCRATCHes.  */
    5820              : 
    5821              : rtx
    5822     22366962 : copy_insn_1 (rtx orig)
    5823              : {
    5824     22366962 :   rtx copy;
    5825     22366962 :   int i, j;
    5826     22366962 :   RTX_CODE code;
    5827     22366962 :   const char *format_ptr;
    5828              : 
    5829     22366962 :   if (orig == NULL)
    5830              :     return NULL;
    5831              : 
    5832     22365676 :   code = GET_CODE (orig);
    5833              : 
    5834     22365676 :   switch (code)
    5835              :     {
    5836              :     case REG:
    5837              :     case DEBUG_EXPR:
    5838              :     CASE_CONST_ANY:
    5839              :     case SYMBOL_REF:
    5840              :     case CODE_LABEL:
    5841              :     case PC:
    5842              :     case RETURN:
    5843              :     case SIMPLE_RETURN:
    5844              :       return orig;
    5845      1094799 :     case CLOBBER:
    5846              :       /* Share clobbers of hard registers, but do not share pseudo reg
    5847              :          clobbers or clobbers of hard registers that originated as pseudos.
    5848              :          This is needed to allow safe register renaming.  */
    5849      1094799 :       if (REG_P (XEXP (orig, 0))
    5850       443245 :           && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
    5851      1538038 :           && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
    5852              :         return orig;
    5853              :       break;
    5854              : 
    5855              :     case SCRATCH:
    5856        72462 :       for (i = 0; i < copy_insn_n_scratches; i++)
    5857         1603 :         if (copy_insn_scratch_in[i] == orig)
    5858         1547 :           return copy_insn_scratch_out[i];
    5859              :       break;
    5860              : 
    5861       106462 :     case CONST:
    5862       106462 :       if (shared_const_p (orig))
    5863              :         return orig;
    5864              :       break;
    5865              : 
    5866              :       /* A MEM with a constant address is not sharable.  The problem is that
    5867              :          the constant address may need to be reloaded.  If the mem is shared,
    5868              :          then reloading one copy of this mem will cause all copies to appear
    5869              :          to have been reloaded.  */
    5870              : 
    5871              :     default:
    5872              :       break;
    5873              :     }
    5874              : 
    5875              :   /* Copy the various flags, fields, and other information.  We assume
    5876              :      that all fields need copying, and then clear the fields that should
    5877              :      not be copied.  That is the sensible default behavior, and forces
    5878              :      us to explicitly document why we are *not* copying a flag.  */
    5879      9996301 :   copy = shallow_copy_rtx (orig);
    5880              : 
    5881              :   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
    5882      9996301 :   if (INSN_P (orig))
    5883              :     {
    5884            0 :       RTX_FLAG (copy, jump) = 0;
    5885            0 :       RTX_FLAG (copy, call) = 0;
    5886            0 :       RTX_FLAG (copy, frame_related) = 0;
    5887              :     }
    5888              : 
    5889      9996301 :   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
    5890              : 
    5891     27441443 :   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    5892     17445142 :     switch (*format_ptr++)
    5893              :       {
    5894     14315652 :       case 'e':
    5895     14315652 :         if (XEXP (orig, i) != NULL)
    5896     14293311 :           XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
    5897              :         break;
    5898              : 
    5899       628929 :       case 'E':
    5900       628929 :       case 'V':
    5901       628929 :         if (XVEC (orig, i) == orig_asm_constraints_vector)
    5902           12 :           XVEC (copy, i) = copy_asm_constraints_vector;
    5903       628917 :         else if (XVEC (orig, i) == orig_asm_operands_vector)
    5904           12 :           XVEC (copy, i) = copy_asm_operands_vector;
    5905       628905 :         else if (XVEC (orig, i) != NULL)
    5906              :           {
    5907       628905 :             XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
    5908      1866070 :             for (j = 0; j < XVECLEN (copy, i); j++)
    5909      1237165 :               XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
    5910              :           }
    5911              :         break;
    5912              : 
    5913              :       case 't':
    5914              :       case 'w':
    5915              :       case 'i':
    5916              :       case 'L':
    5917              :       case 'p':
    5918              :       case 's':
    5919              :       case 'S':
    5920              :       case 'u':
    5921              :       case '0':
    5922              :         /* These are left unchanged.  */
    5923              :         break;
    5924              : 
    5925            0 :       default:
    5926            0 :         gcc_unreachable ();
    5927              :       }
    5928              : 
    5929      9996301 :   if (code == SCRATCH)
    5930              :     {
    5931        70859 :       i = copy_insn_n_scratches++;
    5932        70859 :       gcc_assert (i < MAX_RECOG_OPERANDS);
    5933        70859 :       copy_insn_scratch_in[i] = orig;
    5934        70859 :       copy_insn_scratch_out[i] = copy;
    5935              :     }
    5936      9925442 :   else if (code == ASM_OPERANDS)
    5937              :     {
    5938          214 :       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
    5939          214 :       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
    5940          214 :       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
    5941          214 :       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    5942              :     }
    5943              : 
    5944              :   return copy;
    5945              : }
    5946              : 
    5947              : /* Create a new copy of an rtx.
    5948              :    This function differs from copy_rtx in that it handles SCRATCHes and
    5949              :    ASM_OPERANDs properly.
    5950              :    INSN doesn't really have to be a full INSN; it could be just the
    5951              :    pattern.  */
    5952              : rtx
    5953      3876293 : copy_insn (rtx insn)
    5954              : {
    5955      3876293 :   copy_insn_n_scratches = 0;
    5956      3876293 :   orig_asm_operands_vector = 0;
    5957      3876293 :   orig_asm_constraints_vector = 0;
    5958      3876293 :   copy_asm_operands_vector = 0;
    5959      3876293 :   copy_asm_constraints_vector = 0;
    5960      3876293 :   return copy_insn_1 (insn);
    5961              : }
    5962              : 
    5963              : /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
    5964              :    on that assumption that INSN itself remains in its original place.  */
    5965              : 
    5966              : rtx_insn *
    5967            0 : copy_delay_slot_insn (rtx_insn *insn)
    5968              : {
    5969              :   /* Copy INSN with its rtx_code, all its notes, location etc.  */
    5970            0 :   insn = as_a <rtx_insn *> (copy_rtx (insn));
    5971            0 :   INSN_UID (insn) = cur_insn_uid++;
    5972            0 :   return insn;
    5973              : }
    5974              : 
    5975              : /* Initialize data structures and variables in this file
    5976              :    before generating rtl for each function.  */
    5977              : 
    5978              : void
    5979      1691207 : init_emit (void)
    5980              : {
    5981      1691207 :   set_first_insn (NULL);
    5982      1691207 :   set_last_insn (NULL);
    5983      1691207 :   if (param_min_nondebug_insn_uid)
    5984            0 :     cur_insn_uid = param_min_nondebug_insn_uid;
    5985              :   else
    5986      1691207 :     cur_insn_uid = 1;
    5987      1691207 :   cur_debug_insn_uid = 1;
    5988      1691207 :   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
    5989      1691207 :   first_label_num = label_num;
    5990      1691207 :   get_current_sequence ()->next = NULL;
    5991              : 
    5992              :   /* Init the tables that describe all the pseudo regs.  */
    5993              : 
    5994      1691207 :   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
    5995              : 
    5996      1691207 :   crtl->emit.regno_pointer_align
    5997      1691207 :     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
    5998              : 
    5999      1691207 :   regno_reg_rtx
    6000      1691207 :     = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
    6001              : 
    6002              :   /* Put copies of all the hard registers into regno_reg_rtx.  */
    6003      1691207 :   memcpy (regno_reg_rtx,
    6004      1691207 :           initial_regno_reg_rtx,
    6005              :           FIRST_PSEUDO_REGISTER * sizeof (rtx));
    6006              : 
    6007              :   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
    6008      1691207 :   init_virtual_regs ();
    6009              : 
    6010              :   /* Indicate that the virtual registers and stack locations are
    6011              :      all pointers.  */
    6012      1691207 :   REG_POINTER (stack_pointer_rtx) = 1;
    6013      1691207 :   REG_POINTER (frame_pointer_rtx) = 1;
    6014      1691207 :   REG_POINTER (hard_frame_pointer_rtx) = 1;
    6015      1691207 :   REG_POINTER (arg_pointer_rtx) = 1;
    6016              : 
    6017      1691207 :   REG_POINTER (virtual_incoming_args_rtx) = 1;
    6018      1691207 :   REG_POINTER (virtual_stack_vars_rtx) = 1;
    6019      1691207 :   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
    6020      1691207 :   REG_POINTER (virtual_outgoing_args_rtx) = 1;
    6021      1691207 :   REG_POINTER (virtual_cfa_rtx) = 1;
    6022              : 
    6023              : #ifdef STACK_BOUNDARY
    6024      1691207 :   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
    6025      1691207 :   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
    6026      1691207 :   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
    6027      1691207 :   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
    6028              : 
    6029      1691207 :   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
    6030      1691207 :   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
    6031      1691207 :   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
    6032      1691207 :   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
    6033              : 
    6034      1691207 :   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
    6035              : #endif
    6036              : 
    6037              : #ifdef INIT_EXPANDERS
    6038              :   INIT_EXPANDERS;
    6039              : #endif
    6040      1691207 : }
    6041              : 
    6042              : /* Return the value of element I of CONST_VECTOR X as a wide_int.  */
    6043              : 
    6044              : wide_int
    6045         1068 : const_vector_int_elt (const_rtx x, unsigned int i)
    6046              : {
    6047              :   /* First handle elements that are directly encoded.  */
    6048         1068 :   machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
    6049         1068 :   if (i < (unsigned int) XVECLEN (x, 0))
    6050            0 :     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
    6051              : 
    6052              :   /* Identify the pattern that contains element I and work out the index of
    6053              :      the last encoded element for that pattern.  */
    6054         1068 :   unsigned int encoded_nelts = const_vector_encoded_nelts (x);
    6055         1068 :   unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
    6056         1068 :   unsigned int count = i / npatterns;
    6057         1068 :   unsigned int pattern = i % npatterns;
    6058         1068 :   unsigned int final_i = encoded_nelts - npatterns + pattern;
    6059              : 
    6060              :   /* If there are no steps, the final encoded value is the right one.  */
    6061         1068 :   if (!CONST_VECTOR_STEPPED_P (x))
    6062            0 :     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
    6063              : 
    6064              :   /* Otherwise work out the value from the last two encoded elements.  */
    6065         1068 :   rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
    6066         1068 :   rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
    6067         1068 :   wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
    6068         1068 :                            rtx_mode_t (v1, elt_mode));
    6069         1068 :   return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
    6070         1068 : }
    6071              : 
    6072              : /* Return the value of element I of CONST_VECTOR X.  */
    6073              : 
    6074              : rtx
    6075      4623663 : const_vector_elt (const_rtx x, unsigned int i)
    6076              : {
    6077              :   /* First handle elements that are directly encoded.  */
    6078      4623663 :   if (i < (unsigned int) XVECLEN (x, 0))
    6079      4621527 :     return CONST_VECTOR_ENCODED_ELT (x, i);
    6080              : 
    6081              :   /* If there are no steps, the final encoded value is the right one.  */
    6082         2136 :   if (!CONST_VECTOR_STEPPED_P (x))
    6083              :     {
    6084              :       /* Identify the pattern that contains element I and work out the index of
    6085              :          the last encoded element for that pattern.  */
    6086         1068 :       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
    6087         1068 :       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
    6088         1068 :       unsigned int pattern = i % npatterns;
    6089         1068 :       unsigned int final_i = encoded_nelts - npatterns + pattern;
    6090         1068 :       return CONST_VECTOR_ENCODED_ELT (x, final_i);
    6091              :     }
    6092              : 
    6093              :   /* Otherwise work out the value from the last two encoded elements.  */
    6094         1068 :   return immed_wide_int_const (const_vector_int_elt (x, i),
    6095         2136 :                                GET_MODE_INNER (GET_MODE (x)));
    6096              : }
    6097              : 
    6098              : /* Return true if X is a valid element for a CONST_VECTOR of the given
    6099              :   mode.  */
    6100              : 
    6101              : bool
    6102       550775 : valid_for_const_vector_p (machine_mode, rtx x)
    6103              : {
    6104       550775 :   return (CONST_SCALAR_INT_P (x)
    6105              :           || CONST_POLY_INT_P (x)
    6106       159606 :           || CONST_DOUBLE_AS_FLOAT_P (x)
    6107       691279 :           || CONST_FIXED_P (x));
    6108              : }
    6109              : 
    6110              : /* Generate a vector constant of mode MODE in which every element has
    6111              :    value ELT.  */
    6112              : 
    6113              : rtx
    6114     41191384 : gen_const_vec_duplicate (machine_mode mode, rtx elt)
    6115              : {
    6116     41191384 :   rtx_vector_builder builder (mode, 1, 1);
    6117     41191384 :   builder.quick_push (elt);
    6118     41191384 :   return builder.build ();
    6119     41191384 : }
    6120              : 
    6121              : /* Return a vector rtx of mode MODE in which every element has value X.
    6122              :    The result will be a constant if X is constant.  */
    6123              : 
    6124              : rtx
    6125       239019 : gen_vec_duplicate (machine_mode mode, rtx x)
    6126              : {
    6127       239019 :   if (valid_for_const_vector_p (mode, x))
    6128       102298 :     return gen_const_vec_duplicate (mode, x);
    6129       136721 :   return gen_rtx_VEC_DUPLICATE (mode, x);
    6130              : }
    6131              : 
    6132              : /* A subroutine of const_vec_series_p that handles the case in which:
    6133              : 
    6134              :      (GET_CODE (X) == CONST_VECTOR
    6135              :       && CONST_VECTOR_NPATTERNS (X) == 1
    6136              :       && !CONST_VECTOR_DUPLICATE_P (X))
    6137              : 
    6138              :    is known to hold.  */
    6139              : 
    6140              : bool
    6141         3051 : const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
    6142              : {
    6143              :   /* Stepped sequences are only defined for integers, to avoid specifying
    6144              :      rounding behavior.  */
    6145         3051 :   if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    6146              :     return false;
    6147              : 
    6148              :   /* A non-duplicated vector with two elements can always be seen as a
    6149              :      series with a nonzero step.  Longer vectors must have a stepped
    6150              :      encoding.  */
    6151         3051 :   if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
    6152         3051 :       && !CONST_VECTOR_STEPPED_P (x))
    6153              :     return false;
    6154              : 
    6155              :   /* Calculate the step between the first and second elements.  */
    6156         3047 :   scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
    6157         3047 :   rtx base = CONST_VECTOR_ELT (x, 0);
    6158         6094 :   rtx step = simplify_binary_operation (MINUS, inner,
    6159         3047 :                                         CONST_VECTOR_ENCODED_ELT (x, 1), base);
    6160         3047 :   if (rtx_equal_p (step, CONST0_RTX (inner)))
    6161              :     return false;
    6162              : 
    6163              :   /* If we have a stepped encoding, check that the step between the
    6164              :      second and third elements is the same as STEP.  */
    6165         3047 :   if (CONST_VECTOR_STEPPED_P (x))
    6166              :     {
    6167         4556 :       rtx diff = simplify_binary_operation (MINUS, inner,
    6168              :                                             CONST_VECTOR_ENCODED_ELT (x, 2),
    6169         2278 :                                             CONST_VECTOR_ENCODED_ELT (x, 1));
    6170         2278 :       if (!rtx_equal_p (step, diff))
    6171              :         return false;
    6172              :     }
    6173              : 
    6174         3047 :   *base_out = base;
    6175         3047 :   *step_out = step;
    6176         3047 :   return true;
    6177              : }
    6178              : 
    6179              : /* Generate a vector constant of mode MODE in which element I has
    6180              :    the value BASE + I * STEP.  */
    6181              : 
    6182              : rtx
    6183          645 : gen_const_vec_series (machine_mode mode, rtx base, rtx step)
    6184              : {
    6185          645 :   gcc_assert (valid_for_const_vector_p (mode, base)
    6186              :               && valid_for_const_vector_p (mode, step));
    6187              : 
    6188          645 :   rtx_vector_builder builder (mode, 1, 3);
    6189          645 :   builder.quick_push (base);
    6190         1935 :   for (int i = 1; i < 3; ++i)
    6191         1290 :     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
    6192         1290 :                                              builder[i - 1], step));
    6193          645 :   return builder.build ();
    6194          645 : }
    6195              : 
    6196              : /* Generate a vector of mode MODE in which element I has the value
    6197              :    BASE + I * STEP.  The result will be a constant if BASE and STEP
    6198              :    are both constants.  */
    6199              : 
    6200              : rtx
    6201         3055 : gen_vec_series (machine_mode mode, rtx base, rtx step)
    6202              : {
    6203         3055 :   if (step == const0_rtx)
    6204          194 :     return gen_vec_duplicate (mode, base);
    6205         2861 :   if (valid_for_const_vector_p (mode, base)
    6206         2861 :       && valid_for_const_vector_p (mode, step))
    6207            0 :     return gen_const_vec_series (mode, base, step);
    6208         2861 :   return gen_rtx_VEC_SERIES (mode, base, step);
    6209              : }
    6210              : 
    6211              : /* Generate a new vector constant for mode MODE and constant value
    6212              :    CONSTANT.  */
    6213              : 
    6214              : static rtx
    6215     39845663 : gen_const_vector (machine_mode mode, int constant)
    6216              : {
    6217     39845663 :   machine_mode inner = GET_MODE_INNER (mode);
    6218              : 
    6219     39845663 :   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
    6220              : 
    6221     39845663 :   rtx el = const_tiny_rtx[constant][(int) inner];
    6222     39845663 :   gcc_assert (el);
    6223              : 
    6224     39845663 :   return gen_const_vec_duplicate (mode, el);
    6225              : }
    6226              : 
    6227              : /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
    6228              :    all elements are zero, and the one vector when all elements are one.  */
    6229              : rtx
    6230       166105 : gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
    6231              : {
    6232       332210 :   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
    6233              : 
    6234              :   /* If the values are all the same, check to see if we can use one of the
    6235              :      standard constant vectors.  */
    6236       166105 :   if (rtvec_all_equal_p (v))
    6237        53190 :     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
    6238              : 
    6239       112915 :   unsigned int nunits = GET_NUM_ELEM (v);
    6240       112915 :   rtx_vector_builder builder (mode, nunits, 1);
    6241       719749 :   for (unsigned int i = 0; i < nunits; ++i)
    6242       606834 :     builder.quick_push (RTVEC_ELT (v, i));
    6243       112915 :   return builder.build (v);
    6244       112915 : }
    6245              : 
    6246              : /* Initialise global register information required by all functions.  */
    6247              : 
    6248              : void
    6249       774348 : init_emit_regs (void)
    6250              : {
    6251       774348 :   int i;
    6252       774348 :   machine_mode mode;
    6253       774348 :   mem_attrs *attrs;
    6254              : 
    6255              :   /* Reset register attributes */
    6256       774348 :   reg_attrs_htab->empty ();
    6257              : 
    6258              :   /* We need reg_raw_mode, so initialize the modes now.  */
    6259       774348 :   init_reg_modes_target ();
    6260              : 
    6261              :   /* Assign register numbers to the globally defined register rtx.  */
    6262       789371 :   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
    6263       789371 :   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
    6264       789371 :   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
    6265       789371 :   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
    6266      2323044 :   virtual_incoming_args_rtx =
    6267       789371 :     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
    6268      2323044 :   virtual_stack_vars_rtx =
    6269       789371 :     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
    6270      2323044 :   virtual_stack_dynamic_rtx =
    6271       789371 :     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
    6272      2323044 :   virtual_outgoing_args_rtx =
    6273       789371 :     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
    6274       789371 :   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
    6275      2323044 :   virtual_preferred_stack_boundary_rtx =
    6276       789371 :     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
    6277              : 
    6278              :   /* Initialize RTL for commonly used hard registers.  These are
    6279              :      copied into regno_reg_rtx as we begin to compile each function.  */
    6280     72014364 :   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    6281     71240016 :     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
    6282              : 
    6283              : #ifdef RETURN_ADDRESS_POINTER_REGNUM
    6284              :   return_address_pointer_rtx
    6285              :     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
    6286              : #endif
    6287              : 
    6288       774348 :   pic_offset_table_rtx = NULL_RTX;
    6289       774348 :   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    6290        20371 :     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
    6291              : 
    6292              :   /* Process stack-limiting command-line options.  */
    6293       774348 :   if (opt_fstack_limit_symbol_arg != NULL)
    6294            0 :     stack_limit_rtx
    6295            0 :       = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
    6296       774348 :   if (opt_fstack_limit_register_no >= 0)
    6297            0 :     stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
    6298              : 
    6299     96793500 :   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    6300              :     {
    6301     96019152 :       mode = (machine_mode) i;
    6302     96019152 :       attrs = ggc_cleared_alloc<mem_attrs> ();
    6303     96019152 :       attrs->align = BITS_PER_UNIT;
    6304     96019152 :       attrs->addrspace = ADDR_SPACE_GENERIC;
    6305     96019152 :       if (mode != BLKmode && mode != VOIDmode)
    6306              :         {
    6307     94470456 :           attrs->size_known_p = true;
    6308    188940912 :           attrs->size = GET_MODE_SIZE (mode);
    6309     94470456 :           if (STRICT_ALIGNMENT)
    6310              :             attrs->align = GET_MODE_ALIGNMENT (mode);
    6311              :         }
    6312     96019152 :       mode_mem_attrs[i] = attrs;
    6313              :     }
    6314              : 
    6315       774348 :   split_branch_probability = profile_probability::uninitialized ();
    6316       774348 : }
    6317              : 
    6318              : /* Initialize global machine_mode variables.  */
    6319              : 
    6320              : void
    6321       285422 : init_derived_machine_modes (void)
    6322              : {
    6323       285422 :   opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
    6324      2283376 :   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    6325              :     {
    6326      1997954 :       scalar_int_mode mode = mode_iter.require ();
    6327              : 
    6328      2283376 :       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
    6329      1997954 :           && !opt_byte_mode.exists ())
    6330       285422 :         opt_byte_mode = mode;
    6331              : 
    6332      3995908 :       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
    6333      1997954 :           && !opt_word_mode.exists ())
    6334       285422 :         opt_word_mode = mode;
    6335              :     }
    6336              : 
    6337       285422 :   byte_mode = opt_byte_mode.require ();
    6338       285422 :   word_mode = opt_word_mode.require ();
    6339       285422 :   ptr_mode = as_a <scalar_int_mode>
    6340       299902 :     (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
    6341       285422 : }
    6342              : 
    6343              : /* Create some permanent unique rtl objects shared between all functions.  */
    6344              : 
    6345              : void
    6346       278641 : init_emit_once (void)
    6347              : {
    6348       278641 :   int i;
    6349       278641 :   machine_mode mode;
    6350       278641 :   scalar_float_mode double_mode;
    6351       278641 :   opt_scalar_mode smode_iter;
    6352              : 
    6353              :   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
    6354              :      CONST_FIXED, and memory attribute hash tables.  */
    6355       278641 :   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
    6356              : 
    6357              : #if TARGET_SUPPORTS_WIDE_INT
    6358       278641 :   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
    6359              : #endif
    6360       278641 :   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
    6361              : 
    6362       278641 :   if (NUM_POLY_INT_COEFFS > 1)
    6363              :     const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
    6364              : 
    6365       278641 :   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
    6366              : 
    6367       278641 :   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
    6368              : 
    6369              : #ifdef INIT_EXPANDERS
    6370              :   /* This is to initialize {init|mark|free}_machine_status before the first
    6371              :      call to push_function_context_to.  This is needed by the Chill front
    6372              :      end which calls push_function_context_to before the first call to
    6373              :      init_function_start.  */
    6374              :   INIT_EXPANDERS;
    6375              : #endif
    6376              : 
    6377              :   /* Create the unique rtx's for certain rtx codes and operand values.  */
    6378              : 
    6379              :   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
    6380              :      tries to use these variables.  */
    6381     36223330 :   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    6382     71889378 :     const_int_rtx[i + MAX_SAVED_CONST_INT] =
    6383     35944689 :       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
    6384              : 
    6385       278641 :   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
    6386              :       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    6387       278641 :     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
    6388              :   else
    6389              :     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
    6390              : 
    6391       278641 :   mode = targetm.c.mode_for_floating_type (TI_DOUBLE_TYPE);
    6392       278641 :   double_mode = as_a<scalar_float_mode> (mode);
    6393              : 
    6394       278641 :   real_from_integer (&dconst0, double_mode, 0, SIGNED);
    6395       278641 :   real_from_integer (&dconst1, double_mode, 1, SIGNED);
    6396       278641 :   real_from_integer (&dconst2, double_mode, 2, SIGNED);
    6397              : 
    6398       278641 :   dconstm0 = dconst0;
    6399       278641 :   dconstm0.sign = 1;
    6400              : 
    6401       278641 :   dconstm1 = dconst1;
    6402       278641 :   dconstm1.sign = 1;
    6403              : 
    6404       278641 :   dconsthalf = dconst1;
    6405       278641 :   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
    6406              : 
    6407       278641 :   real_inf (&dconstinf);
    6408       278641 :   real_inf (&dconstninf, true);
    6409              : 
    6410      1114564 :   for (i = 0; i < 3; i++)
    6411              :     {
    6412       835923 :       const REAL_VALUE_TYPE *const r =
    6413              :         (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
    6414              : 
    6415      5851461 :       FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
    6416      5015538 :         const_tiny_rtx[i][(int) mode] =
    6417      5015538 :           const_double_from_real_value (*r, mode);
    6418              : 
    6419      3343692 :       FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
    6420      2507769 :         const_tiny_rtx[i][(int) mode] =
    6421      2507769 :           const_double_from_real_value (*r, mode);
    6422              : 
    6423       835923 :       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
    6424              : 
    6425      6687384 :       FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    6426      5851461 :         const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    6427              : 
    6428      2507769 :       for (mode = MIN_MODE_PARTIAL_INT;
    6429      3343692 :            mode <= MAX_MODE_PARTIAL_INT;
    6430      2507769 :            mode = (machine_mode)((int)(mode) + 1))
    6431      2507769 :         const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    6432              :     }
    6433              : 
    6434       278641 :   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
    6435              : 
    6436      2229128 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    6437      1950487 :     const_tiny_rtx[3][(int) mode] = constm1_rtx;
    6438              : 
    6439              :   /* For BImode, 1 and -1 are unsigned and signed interpretations
    6440              :      of the same value.  */
    6441       278641 :   for (mode = MIN_MODE_BOOL;
    6442       557282 :        mode <= MAX_MODE_BOOL;
    6443       278641 :        mode = (machine_mode)((int)(mode) + 1))
    6444              :     {
    6445       278641 :       const_tiny_rtx[0][(int) mode] = const0_rtx;
    6446       278641 :       if (mode == BImode)
    6447              :         {
    6448       278641 :           const_tiny_rtx[1][(int) mode] = const_true_rtx;
    6449       278641 :           const_tiny_rtx[3][(int) mode] = const_true_rtx;
    6450              :         }
    6451              :       else
    6452              :         {
    6453              :           const_tiny_rtx[1][(int) mode] = const1_rtx;
    6454              :           const_tiny_rtx[3][(int) mode] = constm1_rtx;
    6455              :         }
    6456              :     }
    6457              : 
    6458       835923 :   for (mode = MIN_MODE_PARTIAL_INT;
    6459      1114564 :        mode <= MAX_MODE_PARTIAL_INT;
    6460       835923 :        mode = (machine_mode)((int)(mode) + 1))
    6461       835923 :     const_tiny_rtx[3][(int) mode] = constm1_rtx;
    6462              : 
    6463      3065051 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    6464              :     {
    6465      2786410 :       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
    6466      2786410 :       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    6467              :     }
    6468              : 
    6469      1950487 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    6470              :     {
    6471      1671846 :       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
    6472      1671846 :       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    6473              :     }
    6474              : 
    6475       278641 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    6476              :     {
    6477            0 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6478            0 :       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    6479            0 :       if (GET_MODE_INNER (mode) == BImode)
    6480              :         /* As for BImode, "all 1" and "all -1" are unsigned and signed
    6481              :            interpretations of the same value.  */
    6482            0 :         const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    6483              :       else
    6484            0 :         const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    6485              :     }
    6486              : 
    6487      8916512 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    6488              :     {
    6489      8637871 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6490      8637871 :       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    6491      8637871 :       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    6492              :     }
    6493              : 
    6494      7244666 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    6495              :     {
    6496      6966025 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6497      6966025 :       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    6498              :     }
    6499              : 
    6500      1671846 :   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    6501              :     {
    6502      1393205 :       scalar_mode smode = smode_iter.require ();
    6503      1393205 :       FCONST0 (smode).data.high = 0;
    6504      1393205 :       FCONST0 (smode).data.low = 0;
    6505      1393205 :       FCONST0 (smode).mode = smode;
    6506      2786410 :       const_tiny_rtx[0][(int) smode]
    6507      1393205 :         = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    6508              :     }
    6509              : 
    6510      1671846 :   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    6511              :     {
    6512      1393205 :       scalar_mode smode = smode_iter.require ();
    6513      1393205 :       FCONST0 (smode).data.high = 0;
    6514      1393205 :       FCONST0 (smode).data.low = 0;
    6515      1393205 :       FCONST0 (smode).mode = smode;
    6516      2786410 :       const_tiny_rtx[0][(int) smode]
    6517      1393205 :         = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    6518              :     }
    6519              : 
    6520      1393205 :   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    6521              :     {
    6522      1114564 :       scalar_mode smode = smode_iter.require ();
    6523      1114564 :       FCONST0 (smode).data.high = 0;
    6524      1114564 :       FCONST0 (smode).data.low = 0;
    6525      1114564 :       FCONST0 (smode).mode = smode;
    6526      2229128 :       const_tiny_rtx[0][(int) smode]
    6527      1114564 :         = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    6528              : 
    6529              :       /* We store the value 1.  */
    6530      1114564 :       FCONST1 (smode).data.high = 0;
    6531      1114564 :       FCONST1 (smode).data.low = 0;
    6532      1114564 :       FCONST1 (smode).mode = smode;
    6533      2229128 :       FCONST1 (smode).data
    6534      1114564 :         = double_int_one.lshift (GET_MODE_FBIT (smode),
    6535              :                                  HOST_BITS_PER_DOUBLE_INT,
    6536      1114564 :                                  SIGNED_FIXED_POINT_MODE_P (smode));
    6537      2229128 :       const_tiny_rtx[1][(int) smode]
    6538      1114564 :         = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    6539              :     }
    6540              : 
    6541      1393205 :   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    6542              :     {
    6543      1114564 :       scalar_mode smode = smode_iter.require ();
    6544      1114564 :       FCONST0 (smode).data.high = 0;
    6545      1114564 :       FCONST0 (smode).data.low = 0;
    6546      1114564 :       FCONST0 (smode).mode = smode;
    6547      2229128 :       const_tiny_rtx[0][(int) smode]
    6548      1114564 :         = CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    6549              : 
    6550              :       /* We store the value 1.  */
    6551      1114564 :       FCONST1 (smode).data.high = 0;
    6552      1114564 :       FCONST1 (smode).data.low = 0;
    6553      1114564 :       FCONST1 (smode).mode = smode;
    6554      2229128 :       FCONST1 (smode).data
    6555      1114564 :         = double_int_one.lshift (GET_MODE_FBIT (smode),
    6556              :                                  HOST_BITS_PER_DOUBLE_INT,
    6557      1114564 :                                  SIGNED_FIXED_POINT_MODE_P (smode));
    6558      2229128 :       const_tiny_rtx[1][(int) smode]
    6559      1114564 :         = CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    6560              :     }
    6561              : 
    6562       278641 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    6563              :     {
    6564            0 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6565              :     }
    6566              : 
    6567       278641 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    6568              :     {
    6569            0 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6570              :     }
    6571              : 
    6572       278641 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    6573              :     {
    6574            0 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6575            0 :       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    6576              :     }
    6577              : 
    6578       278641 :   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    6579              :     {
    6580            0 :       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    6581            0 :       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    6582              :     }
    6583              : 
    6584     34272843 :   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    6585     33994202 :     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
    6586      3343692 :       const_tiny_rtx[0][i] = const0_rtx;
    6587              : 
    6588       278641 :   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
    6589       278641 :   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
    6590       278641 :   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
    6591       278641 :   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
    6592              :                                    /*prev_insn=*/NULL,
    6593              :                                    /*next_insn=*/NULL,
    6594              :                                    /*bb=*/NULL,
    6595              :                                    /*pattern=*/NULL_RTX,
    6596              :                                    /*location=*/-1,
    6597              :                                    CODE_FOR_nothing,
    6598              :                                    /*reg_notes=*/NULL_RTX);
    6599       278641 : }
    6600              : 
    6601              : /* Produce exact duplicate of insn INSN after AFTER.
    6602              :    Care updating of libcall regions if present.  */
    6603              : 
    6604              : rtx_insn *
    6605      3658769 : emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
    6606              : {
    6607      3658769 :   rtx_insn *new_rtx;
    6608      3658769 :   rtx link;
    6609              : 
    6610      3658769 :   switch (GET_CODE (insn))
    6611              :     {
    6612      1708006 :     case INSN:
    6613      1708006 :       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
    6614      1708006 :       break;
    6615              : 
    6616       484869 :     case JUMP_INSN:
    6617       484869 :       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
    6618       484869 :       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
    6619       484869 :       break;
    6620              : 
    6621      1440445 :     case DEBUG_INSN:
    6622      1440445 :       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
    6623      1440445 :       break;
    6624              : 
    6625        25449 :     case CALL_INSN:
    6626        25449 :       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
    6627        25449 :       if (CALL_INSN_FUNCTION_USAGE (insn))
    6628        22341 :         CALL_INSN_FUNCTION_USAGE (new_rtx)
    6629        22341 :           = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
    6630        25449 :       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
    6631        25449 :       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
    6632        25449 :       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
    6633        25449 :       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
    6634        25449 :         = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
    6635        25449 :       break;
    6636              : 
    6637            0 :     default:
    6638            0 :       gcc_unreachable ();
    6639              :     }
    6640              : 
    6641              :   /* Update LABEL_NUSES.  */
    6642      3658769 :   if (NONDEBUG_INSN_P (insn))
    6643      2218324 :     mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
    6644              : 
    6645      3658769 :   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
    6646              : 
    6647              :   /* If the old insn is frame related, then so is the new one.  This is
    6648              :      primarily needed for IA-64 unwind info which marks epilogue insns,
    6649              :      which may be duplicated by the basic block reordering code.  */
    6650      3658769 :   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
    6651              : 
    6652              :   /* Locate the end of existing REG_NOTES in NEW_RTX.  */
    6653      3658769 :   rtx *ptail = &REG_NOTES (new_rtx);
    6654      3658775 :   while (*ptail != NULL_RTX)
    6655            6 :     ptail = &XEXP (*ptail, 1);
    6656              : 
    6657              :   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
    6658              :      will make them.  REG_LABEL_TARGETs are created there too, but are
    6659              :      supposed to be sticky, so we copy them.  */
    6660      5952000 :   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    6661      2293231 :     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
    6662              :       {
    6663      2293225 :         *ptail = duplicate_reg_note (link);
    6664      2293225 :         ptail = &XEXP (*ptail, 1);
    6665              :       }
    6666              : 
    6667      3658769 :   INSN_CODE (new_rtx) = INSN_CODE (insn);
    6668      3658769 :   return new_rtx;
    6669              : }
    6670              : 
    6671              : static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
    6672              : rtx
    6673      4496861 : gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
    6674              : {
    6675      4496861 :   if (hard_reg_clobbers[mode][regno])
    6676              :     return hard_reg_clobbers[mode][regno];
    6677              :   else
    6678       186846 :     return (hard_reg_clobbers[mode][regno] =
    6679       373692 :             gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
    6680              : }
    6681              : 
    6682              : location_t prologue_location;
    6683              : location_t epilogue_location;
    6684              : 
    6685              : /* Hold current location information and last location information, so the
    6686              :    datastructures are built lazily only when some instructions in given
    6687              :    place are needed.  */
    6688              : static location_t curr_location;
    6689              : 
    6690              : /* Allocate insn location datastructure.  */
    6691              : void
    6692      1704035 : insn_locations_init (void)
    6693              : {
    6694      1704035 :   prologue_location = epilogue_location = 0;
    6695      1704035 :   curr_location = UNKNOWN_LOCATION;
    6696      1704035 : }
    6697              : 
    6698              : /* At the end of emit stage, clear current location.  */
    6699              : void
    6700      1473901 : insn_locations_finalize (void)
    6701              : {
    6702      1473901 :   epilogue_location = curr_location;
    6703      1473901 :   curr_location = UNKNOWN_LOCATION;
    6704      1473901 : }
    6705              : 
    6706              : /* Set current location.  */
    6707              : void
    6708    149388304 : set_curr_insn_location (location_t location)
    6709              : {
    6710    149388304 :   curr_location = location;
    6711    149388304 : }
    6712              : 
    6713              : /* Get current location.  */
    6714              : location_t
    6715    261660409 : curr_insn_location (void)
    6716              : {
    6717    261660409 :   return curr_location;
    6718              : }
    6719              : 
    6720              : /* Set the location of the insn chain starting at INSN to LOC.  */
    6721              : void
    6722      3590761 : set_insn_locations (rtx_insn *insn, location_t loc)
    6723              : {
    6724     16135676 :   while (insn)
    6725              :     {
    6726     12544915 :       if (INSN_P (insn))
    6727     10610967 :         INSN_LOCATION (insn) = loc;
    6728     12544915 :       insn = NEXT_INSN (insn);
    6729              :     }
    6730      3590761 : }
    6731              : 
    6732              : /* Return lexical scope block insn belongs to.  */
    6733              : tree
    6734     48806799 : insn_scope (const rtx_insn *insn)
    6735              : {
    6736     48806799 :   return LOCATION_BLOCK (INSN_LOCATION (insn));
    6737              : }
    6738              : 
    6739              : /* Return line number of the statement that produced this insn.  */
    6740              : int
    6741            0 : insn_line (const rtx_insn *insn)
    6742              : {
    6743            0 :   return LOCATION_LINE (INSN_LOCATION (insn));
    6744              : }
    6745              : 
    6746              : /* Return source file of the statement that produced this insn.  */
    6747              : const char *
    6748            0 : insn_file (const rtx_insn *insn)
    6749              : {
    6750            0 :   return LOCATION_FILE (INSN_LOCATION (insn));
    6751              : }
    6752              : 
    6753              : /* Return expanded location of the statement that produced this insn.  */
    6754              : expanded_location
    6755     83603819 : insn_location (const rtx_insn *insn)
    6756              : {
    6757     83603819 :   return expand_location (INSN_LOCATION (insn));
    6758              : }
    6759              : 
    6760              : /* Return true if memory model MODEL requires a pre-operation (release-style)
    6761              :    barrier or a post-operation (acquire-style) barrier.  While not universal,
    6762              :    this function matches behavior of several targets.  */
    6763              : 
    6764              : bool
    6765            0 : need_atomic_barrier_p (enum memmodel model, bool pre)
    6766              : {
    6767            0 :   switch (model & MEMMODEL_BASE_MASK)
    6768              :     {
    6769              :     case MEMMODEL_RELAXED:
    6770              :     case MEMMODEL_CONSUME:
    6771              :       return false;
    6772            0 :     case MEMMODEL_RELEASE:
    6773            0 :       return pre;
    6774            0 :     case MEMMODEL_ACQUIRE:
    6775            0 :       return !pre;
    6776            0 :     case MEMMODEL_ACQ_REL:
    6777            0 :     case MEMMODEL_SEQ_CST:
    6778            0 :       return true;
    6779            0 :     default:
    6780            0 :       gcc_unreachable ();
    6781              :     }
    6782              : }
    6783              : 
    6784              : /* Return a constant shift amount for shifting a value of mode MODE
    6785              :    by VALUE bits.  */
    6786              : 
    6787              : rtx
    6788    207235605 : gen_int_shift_amount (machine_mode, poly_int64 value)
    6789              : {
    6790              :   /* Use a 64-bit mode, to avoid any truncation.
    6791              : 
    6792              :      ??? Perhaps this should be automatically derived from the .md files
    6793              :      instead, or perhaps have a target hook.  */
    6794    207235605 :   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
    6795              :                                 ? DImode
    6796              :                                 : int_mode_for_size (64, 0).require ());
    6797    207235605 :   return gen_int_mode (value, shift_mode);
    6798              : }
    6799              : 
    6800              : namespace {
    6801              : /* Helper class for expanding an rtx using the encoding generated by
    6802              :    genemit.cc.  The code needs to be kept in sync with there.  */
    6803              : 
    6804              : class rtx_expander
    6805              : {
    6806              : public:
    6807              :   rtx_expander (const uint8_t *, rtx *);
    6808              : 
    6809              :   rtx get_rtx ();
    6810              :   rtvec get_rtvec ();
    6811              :   void expand_seq ();
    6812              : 
    6813              : protected:
    6814              :   uint64_t get_uint ();
    6815     83630036 :   machine_mode get_mode () { return machine_mode (get_uint ()); }
    6816              :   char *get_string ();
    6817              :   rtx get_shared_operand ();
    6818              :   rtx get_unshared_operand ();
    6819              : 
    6820              :   rtx get_rtx (expand_opcode);
    6821              :   rtx get_rtx (rtx_code, machine_mode);
    6822              : 
    6823              :   /* Points to the first unread byte.  */
    6824              :   const uint8_t *m_seq;
    6825              : 
    6826              :   /* The operands passed to the gen_* function.  */
    6827              :   rtx *m_operands;
    6828              : 
    6829              :   /* A bitmap of operands that have already been used to replace a
    6830              :      MATCH_OPERAND or MATCH_DUP.  In order to ensure correct sharing,
    6831              :      further replacements need to use a copy of the operand, rather than
    6832              :      the original rtx.  */
    6833              :   bbitmap<MAX_RECOG_OPERANDS> m_used;
    6834              : };
    6835              : }
    6836              : 
    6837     24987608 : rtx_expander::rtx_expander (const uint8_t *seq, rtx *operands)
    6838     24987608 :   : m_seq (seq), m_operands (operands), m_used ()
    6839            0 : {}
    6840              : 
    6841              : /* Read and return the next encoded "BEB128" integer.  */
    6842              : 
    6843              : inline uint64_t
    6844    242875123 : rtx_expander::get_uint ()
    6845              : {
    6846    242875123 :   const uint8_t *seq = m_seq;
    6847    242875123 :   uint64_t res = 0;
    6848    244901117 :   do
    6849    244901117 :     res = (res << 7) | (*seq & 127);
    6850    244901117 :   while (*seq++ >= 128);
    6851    242875123 :   m_seq = seq;
    6852    242875123 :   return res;
    6853              : }
    6854              : 
    6855              : /* Read an operand number and return the associated operand rtx,
    6856              :    without copying it.  */
    6857              : 
    6858              : rtx
    6859       979722 : rtx_expander::get_shared_operand ()
    6860              : {
    6861       979722 :   return m_operands[get_uint ()];
    6862              : }
    6863              : 
    6864              : /* Read an operand number and return a correctly-shared instance of
    6865              :    the associated operand rtx.  This can be either the original rtx
    6866              :    or a copy.  */
    6867              : 
    6868              : rtx
    6869     44455567 : rtx_expander::get_unshared_operand ()
    6870              : {
    6871     44455567 :   auto opno = get_uint ();
    6872     44455567 :   auto mask = m_used.from_index (opno);
    6873     44455567 :   if (m_used & mask)
    6874      2848778 :     return copy_rtx (m_operands[opno]);
    6875              : 
    6876     41606789 :   m_used |= mask;
    6877     41606789 :   return m_operands[opno];
    6878              : }
    6879              : 
    6880              : /* Read an encoded rtx.  */
    6881              : 
    6882              : rtx
    6883    126944838 : rtx_expander::get_rtx ()
    6884              : {
    6885    126944838 :   auto FIRST_CODE = (unsigned) expand_opcode::FIRST_CODE;
    6886    126944838 :   auto opcode = get_uint ();
    6887    126944838 :   if (opcode < FIRST_CODE)
    6888     49932150 :     return get_rtx (expand_opcode (opcode));
    6889     77012688 :   return get_rtx (rtx_code (opcode - FIRST_CODE), NUM_MACHINE_MODES);
    6890              : }
    6891              : 
    6892              : /* Read an encoded rtx that starts with the given opcode.  */
    6893              : 
    6894              : rtx
    6895     49932150 : rtx_expander::get_rtx (expand_opcode opcode)
    6896              : {
    6897     49932150 :   switch (opcode)
    6898              :     {
    6899              :     case expand_opcode::NO_RTX:
    6900              :       return NULL_RTX;
    6901              : 
    6902     44455567 :     case expand_opcode::MATCH_OPERAND:
    6903     44455567 :       return get_unshared_operand ();
    6904              : 
    6905        41725 :     case expand_opcode::MATCH_OPERATOR_WITH_MODE:
    6906        41725 :       {
    6907        41725 :         auto mode = get_mode ();
    6908        41725 :         auto op = get_shared_operand ();
    6909        41725 :         return get_rtx (GET_CODE (op), mode);
    6910              :       }
    6911              : 
    6912       936169 :     case expand_opcode::MATCH_OPERATOR:
    6913       936169 :       {
    6914       936169 :         auto op = get_shared_operand ();
    6915       936169 :         return get_rtx (GET_CODE (op), GET_MODE (op));
    6916              :       }
    6917              : 
    6918         1828 :     case expand_opcode::MATCH_PARALLEL:
    6919         1828 :       return get_shared_operand ();
    6920              : 
    6921      4496861 :     case expand_opcode::CLOBBER_REG:
    6922      4496861 :       {
    6923      4496861 :         auto mode = get_mode ();
    6924      4496861 :         auto regno = get_uint ();
    6925      4496861 :         return gen_hard_reg_clobber (mode, regno);
    6926              :       }
    6927              : 
    6928              :     case expand_opcode::FIRST_CODE:
    6929              :       break;
    6930              :     }
    6931            0 :   gcc_unreachable ();
    6932              : }
    6933              : 
    6934              : /* Read the rest of an rtx of code CODE.  If such rtxes are not always
    6935              :    VOIDmode, MODE is the mode that the rtx should have, or NUM_MACHINE_MODES
    6936              :    if the mode is encoded at the current iterator position.  */
    6937              : 
    6938              : rtx
    6939     77990582 : rtx_expander::get_rtx (rtx_code code, machine_mode mode)
    6940              : {
    6941     77990582 :   switch (code)
    6942              :     {
    6943              :       /* Please keep the cases below in sync with gengenrtl.cc:special_rtx.  */
    6944              : 
    6945            0 :     case EXPR_LIST:
    6946            0 :     case INSN_LIST:
    6947            0 :     case INSN:
    6948            0 :       gcc_unreachable ();
    6949              : 
    6950      4875584 :     case CONST_INT:
    6951      9751168 :       return GEN_INT (get_uint ());
    6952              : 
    6953      1869852 :     case REG:
    6954      1869852 :       if (mode == NUM_MACHINE_MODES)
    6955      3739704 :         mode = get_mode ();
    6956      3739704 :       return gen_rtx_REG (mode, get_uint ());
    6957              : 
    6958        54344 :     case SUBREG:
    6959        54344 :       {
    6960        54344 :         if (mode == NUM_MACHINE_MODES)
    6961       108688 :           mode = get_mode ();
    6962        54344 :         auto reg = get_rtx ();
    6963        54344 :         auto byte = get_uint ();
    6964        54344 :         return gen_rtx_SUBREG (mode, reg, byte);
    6965              :       }
    6966              : 
    6967      2455769 :     case MEM:
    6968      2455769 :       if (mode == NUM_MACHINE_MODES)
    6969      4911538 :         mode = get_mode ();
    6970      2455769 :       return gen_rtx_MEM (mode, get_rtx ());
    6971              : 
    6972      9474466 :     case PC:
    6973      9474466 :       return pc_rtx;
    6974              : 
    6975            0 :     case RETURN:
    6976            0 :       return ret_rtx;
    6977              : 
    6978      1571714 :     case SIMPLE_RETURN:
    6979      1571714 :       return simple_return_rtx;
    6980              : 
    6981         1864 :     case CONST_VECTOR:
    6982         1864 :       if (mode == NUM_MACHINE_MODES)
    6983         3728 :         mode = get_mode ();
    6984         1864 :       return gen_rtx_CONST_VECTOR (mode, get_rtvec ());
    6985              : 
    6986              :       /* Please keep the cases below in sync with
    6987              :          gengenrtl.cc:excluded_rtx.  */
    6988              : 
    6989            0 :     case VAR_LOCATION:
    6990            0 :       gcc_unreachable ();
    6991              : 
    6992            0 :     case CONST_DOUBLE:
    6993              :       /* genemit.cc only accepts zero const_doubles.  */
    6994            0 :       if (mode == NUM_MACHINE_MODES)
    6995            0 :         mode = get_mode ();
    6996            0 :       return CONST0_RTX (mode);
    6997              : 
    6998            0 :     case CONST_WIDE_INT:
    6999            0 :     case CONST_POLY_INT:
    7000            0 :     case CONST_FIXED:
    7001            0 :       gcc_unreachable ();
    7002              : 
    7003     57686989 :     default:
    7004     57686989 :       break;
    7005              :     }
    7006              : 
    7007     57686989 :   rtx x = rtx_alloc (code);
    7008     57686989 :   if (!always_void_p (code))
    7009              :     {
    7010     33872497 :       if (mode == NUM_MACHINE_MODES)
    7011     65789206 :         mode = get_mode ();
    7012     33872497 :       PUT_MODE_RAW (x, mode);
    7013              :     }
    7014              : 
    7015     57686989 :   const char *fmt = GET_RTX_FORMAT (code);
    7016    148992255 :   for (unsigned int i = 0; fmt[i]; ++i)
    7017     91305266 :     switch (fmt[i])
    7018              :       {
    7019              :         /* Please keep these cases in sync with
    7020              :            gengenrtl.cc:type_from_format.  */
    7021              : 
    7022      1805794 :       case 'i':
    7023      1805794 :         XINT (x, i) = get_uint ();
    7024      1805794 :         break;
    7025              : 
    7026            0 :       case 'L':
    7027            0 :       case 'w':
    7028            0 :       case 'p':
    7029            0 :       case 's':
    7030            0 :         gcc_unreachable ();
    7031              : 
    7032     82213675 :       case 'e':  case 'u':
    7033     82213675 :         XEXP (x, i) = get_rtx ();
    7034     82213675 :         break;
    7035              : 
    7036      7285797 :       case 'E':
    7037      7285797 :         XVEC (x, i) = get_rtvec ();
    7038      7285797 :         break;
    7039              : 
    7040            0 :       case 't':
    7041            0 :       case 'B':
    7042            0 :       default:
    7043            0 :         gcc_unreachable ();
    7044              :       }
    7045              : 
    7046              :   return x;
    7047              : }
    7048              : 
    7049              : /* Read an encoded rtvec.  */
    7050              : 
    7051              : rtvec
    7052      7287661 : rtx_expander::get_rtvec ()
    7053              : {
    7054      7287661 :   unsigned int len = get_uint ();
    7055      7287661 :   rtvec v = rtvec_alloc (len);
    7056     23463560 :   for (unsigned int i = 0; i < len; ++i)
    7057     16175899 :     RTVEC_ELT (v, i) = get_rtx ();
    7058      7287661 :   return v;
    7059              : }
    7060              : 
    7061              : /* Read and emit an encoded sequence of instructions.  */
    7062              : 
    7063              : void
    7064      8289882 : rtx_expander::expand_seq ()
    7065              : {
    7066      8289882 :   unsigned int len = get_uint ();
    7067     17637307 :   for (unsigned int i = 0; i < len; ++i)
    7068      9347425 :     emit (get_rtx (), i < len - 1);
    7069      8289882 : }
    7070              : 
    7071              : /* Read an rtx from the bytecode in SEQ, which was generated by genemit.cc.
    7072              :    Replace operand placeholders with the values given in OPERANDS.  */
    7073              : 
    7074              : rtx
    7075     16697726 : expand_rtx (const uint8_t *seq, rtx *operands)
    7076              : {
    7077     16697726 :   return rtx_expander (seq, operands).get_rtx ();
    7078              : }
    7079              : 
    7080              : /* Read and emit a sequence of instructions from the bytecode in SEQ,
    7081              :    which was generated by genemit.cc.  Replace operand placeholders with
    7082              :    the values given in OPERANDS.  */
    7083              : 
    7084              : rtx_insn *
    7085      8289882 : complete_seq (const uint8_t *seq, rtx *operands)
    7086              : {
    7087      8289882 :   rtx_expander (seq, operands).expand_seq ();
    7088      8289882 :   return end_sequence ();
    7089              : }
    7090              : 
    7091              : /* Initialize fields of rtl_data related to stack alignment.  */
    7092              : 
    7093              : void
    7094      1472240 : rtl_data::init_stack_alignment ()
    7095              : {
    7096      1472240 :   stack_alignment_needed = STACK_BOUNDARY;
    7097      1472240 :   max_used_stack_slot_alignment = STACK_BOUNDARY;
    7098      1472240 :   stack_alignment_estimated = 0;
    7099      1472240 :   preferred_stack_boundary = STACK_BOUNDARY;
    7100      1472240 : }
    7101              : 
    7102              : 
    7103              : #include "gt-emit-rtl.h"
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.