LCOV - code coverage report
Current view: top level - gcc - simplify-rtx.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 88.8 % 4785 4250
Test Date: 2026-02-28 14:20:25 Functions: 100.0 % 80 80
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* RTL simplification functions for GNU compiler.
       2              :    Copyright (C) 1987-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "target.h"
      26              : #include "rtl.h"
      27              : #include "tree.h"
      28              : #include "predict.h"
      29              : #include "memmodel.h"
      30              : #include "optabs.h"
      31              : #include "emit-rtl.h"
      32              : #include "recog.h"
      33              : #include "diagnostic-core.h"
      34              : #include "varasm.h"
      35              : #include "flags.h"
      36              : #include "selftest.h"
      37              : #include "selftest-rtl.h"
      38              : #include "rtx-vector-builder.h"
      39              : #include "rtlanal.h"
      40              : 
      41              : /* Simplification and canonicalization of RTL.  */
      42              : 
      43              : /* Much code operates on (low, high) pairs; the low value is an
      44              :    unsigned wide int, the high value a signed wide int.  We
      45              :    occasionally need to sign extend from low to high as if low were a
      46              :    signed wide int.  */
      47              : #define HWI_SIGN_EXTEND(low) \
      48              :   ((((HOST_WIDE_INT) low) < 0) ? HOST_WIDE_INT_M1 : HOST_WIDE_INT_0)
      49              : 
      50              : static bool plus_minus_operand_p (const_rtx);
      51              : 
      52              : /* Negate I, which satisfies poly_int_rtx_p.  MODE is the mode of I.  */
      53              : 
      54              : static rtx
      55      8991548 : neg_poly_int_rtx (machine_mode mode, const_rtx i)
      56              : {
      57      8991548 :   return immed_wide_int_const (-wi::to_poly_wide (i, mode), mode);
      58              : }
      59              : 
      60              : /* Test whether expression, X, is an immediate constant that represents
      61              :    the most significant bit of machine mode MODE.  */
      62              : 
      63              : bool
      64      6234933 : mode_signbit_p (machine_mode mode, const_rtx x)
      65              : {
      66      6234933 :   unsigned HOST_WIDE_INT val;
      67      6234933 :   unsigned int width;
      68      6234933 :   scalar_int_mode int_mode;
      69              : 
      70      6234933 :   if (!is_int_mode (mode, &int_mode))
      71              :     return false;
      72              : 
      73      6234925 :   width = GET_MODE_PRECISION (int_mode);
      74      6234925 :   if (width == 0)
      75              :     return false;
      76              : 
      77      6234925 :   if (width <= HOST_BITS_PER_WIDE_INT
      78      6233395 :       && CONST_INT_P (x))
      79      6085876 :     val = INTVAL (x);
      80              : #if TARGET_SUPPORTS_WIDE_INT
      81       149049 :   else if (CONST_WIDE_INT_P (x))
      82              :     {
      83          474 :       unsigned int i;
      84          474 :       unsigned int elts = CONST_WIDE_INT_NUNITS (x);
      85          474 :       if (elts != (width + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT)
      86              :         return false;
      87          888 :       for (i = 0; i < elts - 1; i++)
      88          474 :         if (CONST_WIDE_INT_ELT (x, i) != 0)
      89              :           return false;
      90          414 :       val = CONST_WIDE_INT_ELT (x, elts - 1);
      91          414 :       width %= HOST_BITS_PER_WIDE_INT;
      92          414 :       if (width == 0)
      93              :         width = HOST_BITS_PER_WIDE_INT;
      94              :     }
      95              : #else
      96              :   else if (width <= HOST_BITS_PER_DOUBLE_INT
      97              :            && CONST_DOUBLE_AS_INT_P (x)
      98              :            && CONST_DOUBLE_LOW (x) == 0)
      99              :     {
     100              :       val = CONST_DOUBLE_HIGH (x);
     101              :       width -= HOST_BITS_PER_WIDE_INT;
     102              :     }
     103              : #endif
     104              :   else
     105              :     /* X is not an integer constant.  */
     106              :     return false;
     107              : 
     108      6085876 :   if (width < HOST_BITS_PER_WIDE_INT)
     109      5506915 :     val &= (HOST_WIDE_INT_1U << width) - 1;
     110      6086290 :   return val == (HOST_WIDE_INT_1U << (width - 1));
     111              : }
     112              : 
     113              : /* Test whether VAL is equal to the most significant bit of mode MODE
     114              :    (after masking with the mode mask of MODE).  Returns false if the
     115              :    precision of MODE is too large to handle.  */
     116              : 
     117              : bool
     118      3593127 : val_signbit_p (machine_mode mode, unsigned HOST_WIDE_INT val)
     119              : {
     120      3593127 :   unsigned int width;
     121      3593127 :   scalar_int_mode int_mode;
     122              : 
     123      3593127 :   if (!is_int_mode (mode, &int_mode))
     124              :     return false;
     125              : 
     126      3593091 :   width = GET_MODE_PRECISION (int_mode);
     127      3593091 :   if (width == 0 || width > HOST_BITS_PER_WIDE_INT)
     128              :     return false;
     129              : 
     130      3588463 :   val &= GET_MODE_MASK (int_mode);
     131      3588463 :   return val == (HOST_WIDE_INT_1U << (width - 1));
     132              : }
     133              : 
     134              : /* Test whether the most significant bit of mode MODE is set in VAL.
     135              :    Returns false if the precision of MODE is too large to handle.  */
     136              : bool
     137      2732652 : val_signbit_known_set_p (machine_mode mode, unsigned HOST_WIDE_INT val)
     138              : {
     139      2732652 :   unsigned int width;
     140              : 
     141      2732652 :   scalar_int_mode int_mode;
     142      2732652 :   if (!is_int_mode (mode, &int_mode))
     143              :     return false;
     144              : 
     145      2699138 :   width = GET_MODE_PRECISION (int_mode);
     146      2699138 :   if (width == 0 || width > HOST_BITS_PER_WIDE_INT)
     147              :     return false;
     148              : 
     149      2699138 :   val &= HOST_WIDE_INT_1U << (width - 1);
     150      2699138 :   return val != 0;
     151              : }
     152              : 
     153              : /* Test whether the most significant bit of mode MODE is clear in VAL.
     154              :    Returns false if the precision of MODE is too large to handle.  */
     155              : bool
     156      7700898 : val_signbit_known_clear_p (machine_mode mode, unsigned HOST_WIDE_INT val)
     157              : {
     158      7700898 :   unsigned int width;
     159              : 
     160      7700898 :   scalar_int_mode int_mode;
     161      7700898 :   if (!is_int_mode (mode, &int_mode))
     162              :     return false;
     163              : 
     164      7381936 :   width = GET_MODE_PRECISION (int_mode);
     165      7381936 :   if (width == 0 || width > HOST_BITS_PER_WIDE_INT)
     166              :     return false;
     167              : 
     168      7270772 :   val &= HOST_WIDE_INT_1U << (width - 1);
     169      7270772 :   return val == 0;
     170              : }
     171              : 
     172              : /* Make a binary operation by properly ordering the operands and
     173              :    seeing if the expression folds.  */
     174              : 
     175              : rtx
     176    114684260 : simplify_context::simplify_gen_binary (rtx_code code, machine_mode mode,
     177              :                                        rtx op0, rtx op1)
     178              : {
     179    114684260 :   rtx tem;
     180              : 
     181              :   /* If this simplifies, do it.  */
     182    114684260 :   tem = simplify_binary_operation (code, mode, op0, op1);
     183    114684260 :   if (tem)
     184              :     return tem;
     185              : 
     186              :   /* Put complex operands first and constants second if commutative.  */
     187     72470281 :   if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
     188     72470281 :       && swap_commutative_operands_p (op0, op1))
     189              :     std::swap (op0, op1);
     190              : 
     191     72470281 :   return gen_rtx_fmt_ee (code, mode, op0, op1);
     192              : }
     193              : 
     194              : /* If X is a MEM referencing the constant pool, return the real value.
     195              :    Otherwise return X.  */
     196              : rtx
     197   2767611659 : avoid_constant_pool_reference (rtx x)
     198              : {
     199   2767611659 :   rtx c, tmp, addr;
     200   2767611659 :   machine_mode cmode;
     201   2767611659 :   poly_int64 offset = 0;
     202              : 
     203   2767611659 :   switch (GET_CODE (x))
     204              :     {
     205    261236004 :     case MEM:
     206    261236004 :       break;
     207              : 
     208       914871 :     case FLOAT_EXTEND:
     209              :       /* Handle float extensions of constant pool references.  */
     210       914871 :       tmp = XEXP (x, 0);
     211       914871 :       c = avoid_constant_pool_reference (tmp);
     212       914871 :       if (c != tmp && CONST_DOUBLE_AS_FLOAT_P (c))
     213       122417 :         return const_double_from_real_value (*CONST_DOUBLE_REAL_VALUE (c),
     214       122417 :                                              GET_MODE (x));
     215              :       return x;
     216              : 
     217              :     default:
     218              :       return x;
     219              :     }
     220              : 
     221    261236004 :   if (GET_MODE (x) == BLKmode)
     222              :     return x;
     223              : 
     224    257210575 :   addr = XEXP (x, 0);
     225              : 
     226              :   /* Call target hook to avoid the effects of -fpic etc....  */
     227    257210575 :   addr = targetm.delegitimize_address (addr);
     228              : 
     229              :   /* Split the address into a base and integer offset.  */
     230    257210575 :   addr = strip_offset (addr, &offset);
     231              : 
     232    257210575 :   if (GET_CODE (addr) == LO_SUM)
     233            0 :     addr = XEXP (addr, 1);
     234              : 
     235              :   /* If this is a constant pool reference, we can turn it into its
     236              :      constant and hope that simplifications happen.  */
     237    257210575 :   if (GET_CODE (addr) == SYMBOL_REF
     238    257210575 :       && CONSTANT_POOL_ADDRESS_P (addr))
     239              :     {
     240      5332505 :       c = get_pool_constant (addr);
     241      5332505 :       cmode = get_pool_mode (addr);
     242              : 
     243              :       /* If we're accessing the constant in a different mode than it was
     244              :          originally stored, attempt to fix that up via subreg simplifications.
     245              :          If that fails we have no choice but to return the original memory.  */
     246      5332505 :       if (known_eq (offset, 0) && cmode == GET_MODE (x))
     247              :         return c;
     248        20466 :       else if (known_in_range_p (offset, 0, GET_MODE_SIZE (cmode)))
     249              :         {
     250        10233 :           rtx tem = simplify_subreg (GET_MODE (x), c, cmode, offset);
     251        10233 :           if (tem && CONSTANT_P (tem))
     252              :             return tem;
     253              :         }
     254              :     }
     255              : 
     256              :   return x;
     257              : }
     258              : 
     259              : /* Simplify a MEM based on its attributes.  This is the default
     260              :    delegitimize_address target hook, and it's recommended that every
     261              :    overrider call it.  */
     262              : 
     263              : rtx
     264   3524079321 : delegitimize_mem_from_attrs (rtx x)
     265              : {
     266              :   /* MEMs without MEM_OFFSETs may have been offset, so we can't just
     267              :      use their base addresses as equivalent.  */
     268   3524079321 :   if (MEM_P (x)
     269     62060910 :       && MEM_EXPR (x)
     270   3562182486 :       && MEM_OFFSET_KNOWN_P (x))
     271              :     {
     272     35225680 :       tree decl = MEM_EXPR (x);
     273     35225680 :       machine_mode mode = GET_MODE (x);
     274     35225680 :       poly_int64 offset = 0;
     275              : 
     276     35225680 :       switch (TREE_CODE (decl))
     277              :         {
     278              :         default:
     279              :           decl = NULL;
     280              :           break;
     281              : 
     282              :         case VAR_DECL:
     283              :           break;
     284              : 
     285     10128128 :         case ARRAY_REF:
     286     10128128 :         case ARRAY_RANGE_REF:
     287     10128128 :         case COMPONENT_REF:
     288     10128128 :         case BIT_FIELD_REF:
     289     10128128 :         case REALPART_EXPR:
     290     10128128 :         case IMAGPART_EXPR:
     291     10128128 :         case VIEW_CONVERT_EXPR:
     292     10128128 :           {
     293     10128128 :             poly_int64 bitsize, bitpos, bytepos, toffset_val = 0;
     294     10128128 :             tree toffset;
     295     10128128 :             int unsignedp, reversep, volatilep = 0;
     296              : 
     297     10128128 :             decl
     298     10128128 :               = get_inner_reference (decl, &bitsize, &bitpos, &toffset, &mode,
     299              :                                      &unsignedp, &reversep, &volatilep);
     300     20256256 :             if (maybe_ne (bitsize, GET_MODE_BITSIZE (mode))
     301     10408420 :                 || !multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
     302     19818360 :                 || (toffset && !poly_int_tree_p (toffset, &toffset_val)))
     303              :               decl = NULL;
     304              :             else
     305      9409940 :               offset += bytepos + toffset_val;
     306     10128128 :             break;
     307              :           }
     308              :         }
     309              : 
     310       718188 :       if (decl
     311     20788267 :           && mode == GET_MODE (x)
     312     20518886 :           && VAR_P (decl)
     313     13308507 :           && (TREE_STATIC (decl)
     314     12085195 :               || DECL_THREAD_LOCAL_P (decl))
     315      1259427 :           && DECL_RTL_SET_P (decl)
     316     10668867 :           && MEM_P (DECL_RTL (decl)))
     317              :         {
     318      1258927 :           rtx newx;
     319              : 
     320      1258927 :           offset += MEM_OFFSET (x);
     321              : 
     322      1258927 :           newx = DECL_RTL (decl);
     323              : 
     324      1258927 :           if (MEM_P (newx))
     325              :             {
     326      1258927 :               rtx n = XEXP (newx, 0), o = XEXP (x, 0);
     327      1258927 :               poly_int64 n_offset, o_offset;
     328              : 
     329              :               /* Avoid creating a new MEM needlessly if we already had
     330              :                  the same address.  We do if there's no OFFSET and the
     331              :                  old address X is identical to NEWX, or if X is of the
     332              :                  form (plus NEWX OFFSET), or the NEWX is of the form
     333              :                  (plus Y (const_int Z)) and X is that with the offset
     334              :                  added: (plus Y (const_int Z+OFFSET)).  */
     335      1258927 :               n = strip_offset (n, &n_offset);
     336      1258927 :               o = strip_offset (o, &o_offset);
     337      2490196 :               if (!(known_eq (o_offset, n_offset + offset)
     338      1231269 :                     && rtx_equal_p (o, n)))
     339       211512 :                 x = adjust_address_nv (newx, mode, offset);
     340              :             }
     341            0 :           else if (GET_MODE (x) == GET_MODE (newx)
     342            0 :                    && known_eq (offset, 0))
     343              :             x = newx;
     344              :         }
     345              :     }
     346              : 
     347   3524079321 :   return x;
     348              : }
     349              : 
     350              : /* Make a unary operation by first seeing if it folds and otherwise making
     351              :    the specified operation.  */
     352              : 
     353              : rtx
     354      5196402 : simplify_context::simplify_gen_unary (rtx_code code, machine_mode mode, rtx op,
     355              :                                       machine_mode op_mode)
     356              : {
     357      5196402 :   rtx tem;
     358              : 
     359              :   /* If this simplifies, use it.  */
     360      5196402 :   if ((tem = simplify_unary_operation (code, mode, op, op_mode)) != 0)
     361              :     return tem;
     362              : 
     363      1995264 :   return gen_rtx_fmt_e (code, mode, op);
     364              : }
     365              : 
     366              : /* Likewise for ternary operations.  */
     367              : 
     368              : rtx
     369      2239409 : simplify_context::simplify_gen_ternary (rtx_code code, machine_mode mode,
     370              :                                         machine_mode op0_mode,
     371              :                                         rtx op0, rtx op1, rtx op2)
     372              : {
     373      2239409 :   rtx tem;
     374              : 
     375              :   /* If this simplifies, use it.  */
     376      2239409 :   if ((tem = simplify_ternary_operation (code, mode, op0_mode,
     377              :                                          op0, op1, op2)) != 0)
     378              :     return tem;
     379              : 
     380      1993895 :   return gen_rtx_fmt_eee (code, mode, op0, op1, op2);
     381              : }
     382              : 
     383              : /* Likewise, for relational operations.
     384              :    CMP_MODE specifies mode comparison is done in.  */
     385              : 
     386              : rtx
     387     21780874 : simplify_context::simplify_gen_relational (rtx_code code, machine_mode mode,
     388              :                                            machine_mode cmp_mode,
     389              :                                            rtx op0, rtx op1)
     390              : {
     391     21780874 :   rtx tem;
     392              : 
     393     21780874 :   if ((tem = simplify_relational_operation (code, mode, cmp_mode,
     394              :                                             op0, op1)) != 0)
     395              :     return tem;
     396              : 
     397     19379257 :   return gen_rtx_fmt_ee (code, mode, op0, op1);
     398              : }
     399              : 
     400              : /* If FN is NULL, replace all occurrences of OLD_RTX in X with copy_rtx (DATA)
     401              :    and simplify the result.  If FN is non-NULL, call this callback on each
     402              :    X, if it returns non-NULL, replace X with its return value and simplify the
     403              :    result.  */
     404              : 
     405              : rtx
     406    484831485 : simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
     407              :                          rtx (*fn) (rtx, const_rtx, void *), void *data)
     408              : {
     409    484831485 :   enum rtx_code code = GET_CODE (x);
     410    484831485 :   machine_mode mode = GET_MODE (x);
     411    484831485 :   machine_mode op_mode;
     412    484831485 :   const char *fmt;
     413    484831485 :   rtx op0, op1, op2, newx, op;
     414    484831485 :   rtvec vec, newvec;
     415    484831485 :   int i, j;
     416              : 
     417    484831485 :   if (UNLIKELY (fn != NULL))
     418              :     {
     419    421142891 :       newx = fn (x, old_rtx, data);
     420    421142891 :       if (newx)
     421              :         return newx;
     422              :     }
     423     63688594 :   else if (rtx_equal_p (x, old_rtx))
     424      5225961 :     return copy_rtx ((rtx) data);
     425              : 
     426    380842034 :   switch (GET_RTX_CLASS (code))
     427              :     {
     428      2033353 :     case RTX_UNARY:
     429      2033353 :       op0 = XEXP (x, 0);
     430      2033353 :       op_mode = GET_MODE (op0);
     431      2033353 :       op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
     432      2033353 :       if (op0 == XEXP (x, 0))
     433              :         return x;
     434       660648 :       return simplify_gen_unary (code, mode, op0, op_mode);
     435              : 
     436     74940692 :     case RTX_BIN_ARITH:
     437     74940692 :     case RTX_COMM_ARITH:
     438     74940692 :       op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
     439     74940692 :       op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
     440     74940692 :       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
     441              :         return x;
     442     22955359 :       return simplify_gen_binary (code, mode, op0, op1);
     443              : 
     444      9901623 :     case RTX_COMPARE:
     445      9901623 :     case RTX_COMM_COMPARE:
     446      9901623 :       op0 = XEXP (x, 0);
     447      9901623 :       op1 = XEXP (x, 1);
     448      9901623 :       op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
     449      9901623 :       op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
     450      9901623 :       op1 = simplify_replace_fn_rtx (op1, old_rtx, fn, data);
     451      9901623 :       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
     452              :         return x;
     453      2358206 :       return simplify_gen_relational (code, mode, op_mode, op0, op1);
     454              : 
     455      5779648 :     case RTX_TERNARY:
     456      5779648 :     case RTX_BITFIELD_OPS:
     457      5779648 :       op0 = XEXP (x, 0);
     458      5779648 :       op_mode = GET_MODE (op0);
     459      5779648 :       op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
     460      5779648 :       op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
     461      5779648 :       op2 = simplify_replace_fn_rtx (XEXP (x, 2), old_rtx, fn, data);
     462      5779648 :       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
     463              :         return x;
     464      1661537 :       if (op_mode == VOIDmode)
     465      1640883 :         op_mode = GET_MODE (op0);
     466      1661537 :       return simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);
     467              : 
     468     83613367 :     case RTX_EXTRA:
     469     83613367 :       if (code == SUBREG)
     470              :         {
     471       671530 :           op0 = simplify_replace_fn_rtx (SUBREG_REG (x), old_rtx, fn, data);
     472       671530 :           if (op0 == SUBREG_REG (x))
     473              :             return x;
     474       137052 :           op0 = simplify_gen_subreg (GET_MODE (x), op0,
     475        68526 :                                      GET_MODE (SUBREG_REG (x)),
     476        68526 :                                      SUBREG_BYTE (x));
     477        68526 :           return op0 ? op0 : x;
     478              :         }
     479              :       break;
     480              : 
     481     61430279 :     case RTX_OBJ:
     482     61430279 :       if (code == MEM)
     483              :         {
     484     10841555 :           op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
     485     10841555 :           if (op0 == XEXP (x, 0))
     486              :             return x;
     487       152973 :           return replace_equiv_address_nv (x, op0);
     488              :         }
     489     50588724 :       else if (code == LO_SUM)
     490              :         {
     491            0 :           op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
     492            0 :           op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
     493              : 
     494              :           /* (lo_sum (high x) y) -> y where x and y have the same base.  */
     495            0 :           if (GET_CODE (op0) == HIGH)
     496              :             {
     497            0 :               rtx base0, base1, offset0, offset1;
     498            0 :               split_const (XEXP (op0, 0), &base0, &offset0);
     499            0 :               split_const (op1, &base1, &offset1);
     500            0 :               if (rtx_equal_p (base0, base1))
     501            0 :                 return op1;
     502              :             }
     503              : 
     504            0 :           if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
     505              :             return x;
     506            0 :           return gen_rtx_LO_SUM (mode, op0, op1);
     507              :         }
     508              :       break;
     509              : 
     510              :     default:
     511              :       break;
     512              :     }
     513              : 
     514    276673633 :   newx = x;
     515    276673633 :   fmt = GET_RTX_FORMAT (code);
     516    597198462 :   for (i = 0; fmt[i]; i++)
     517    320524829 :     switch (fmt[i])
     518              :       {
     519      3030893 :       case 'E':
     520      3030893 :         vec = XVEC (x, i);
     521      3030893 :         newvec = XVEC (newx, i);
     522     12513348 :         for (j = 0; j < GET_NUM_ELEM (vec); j++)
     523              :           {
     524      9482455 :             op = simplify_replace_fn_rtx (RTVEC_ELT (vec, j),
     525              :                                           old_rtx, fn, data);
     526      9482455 :             if (op != RTVEC_ELT (vec, j))
     527              :               {
     528       341935 :                 if (newvec == vec)
     529              :                   {
     530       332938 :                     newvec = shallow_copy_rtvec (vec);
     531       332938 :                     if (x == newx)
     532       332938 :                       newx = shallow_copy_rtx (x);
     533       332938 :                     XVEC (newx, i) = newvec;
     534              :                   }
     535       341935 :                 RTVEC_ELT (newvec, j) = op;
     536              :               }
     537              :           }
     538              :         break;
     539              : 
     540     75705163 :       case 'e':
     541     75705163 :         if (XEXP (x, i))
     542              :           {
     543     75705163 :             op = simplify_replace_fn_rtx (XEXP (x, i), old_rtx, fn, data);
     544     75705163 :             if (op != XEXP (x, i))
     545              :               {
     546      4242508 :                 if (x == newx)
     547      4239229 :                   newx = shallow_copy_rtx (x);
     548      4242508 :                 XEXP (newx, i) = op;
     549              :               }
     550              :           }
     551              :         break;
     552              :       }
     553              :   return newx;
     554              : }
     555              : 
     556              : /* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
     557              :    resulting RTX.  Return a new RTX which is as simplified as possible.  */
     558              : 
     559              : rtx
     560     13097220 : simplify_replace_rtx (rtx x, const_rtx old_rtx, rtx new_rtx)
     561              : {
     562     13097220 :   return simplify_replace_fn_rtx (x, old_rtx, 0, new_rtx);
     563              : }
     564              : 
     565              : /* Try to simplify a MODE truncation of OP, which has OP_MODE.
     566              :    Only handle cases where the truncated value is inherently an rvalue.
     567              : 
     568              :    RTL provides two ways of truncating a value:
     569              : 
     570              :    1. a lowpart subreg.  This form is only a truncation when both
     571              :       the outer and inner modes (here MODE and OP_MODE respectively)
     572              :       are scalar integers, and only then when the subreg is used as
     573              :       an rvalue.
     574              : 
     575              :       It is only valid to form such truncating subregs if the
     576              :       truncation requires no action by the target.  The onus for
     577              :       proving this is on the creator of the subreg -- e.g. the
     578              :       caller to simplify_subreg or simplify_gen_subreg -- and typically
     579              :       involves either TRULY_NOOP_TRUNCATION_MODES_P or truncated_to_mode.
     580              : 
     581              :    2. a TRUNCATE.  This form handles both scalar and compound integers.
     582              : 
     583              :    The first form is preferred where valid.  However, the TRUNCATE
     584              :    handling in simplify_unary_operation turns the second form into the
     585              :    first form when TRULY_NOOP_TRUNCATION_MODES_P or truncated_to_mode allow,
     586              :    so it is generally safe to form rvalue truncations using:
     587              : 
     588              :       simplify_gen_unary (TRUNCATE, ...)
     589              : 
     590              :    and leave simplify_unary_operation to work out which representation
     591              :    should be used.
     592              : 
     593              :    Because of the proof requirements on (1), simplify_truncation must
     594              :    also use simplify_gen_unary (TRUNCATE, ...) to truncate parts of OP,
     595              :    regardless of whether the outer truncation came from a SUBREG or a
     596              :    TRUNCATE.  For example, if the caller has proven that an SImode
     597              :    truncation of:
     598              : 
     599              :       (and:DI X Y)
     600              : 
     601              :    is a no-op and can be represented as a subreg, it does not follow
     602              :    that SImode truncations of X and Y are also no-ops.  On a target
     603              :    like 64-bit MIPS that requires SImode values to be stored in
     604              :    sign-extended form, an SImode truncation of:
     605              : 
     606              :       (and:DI (reg:DI X) (const_int 63))
     607              : 
     608              :    is trivially a no-op because only the lower 6 bits can be set.
     609              :    However, X is still an arbitrary 64-bit number and so we cannot
     610              :    assume that truncating it too is a no-op.  */
     611              : 
rtx
simplify_context::simplify_truncation (machine_mode mode, rtx op,
                                       machine_mode op_mode)
{
  /* Per-element precisions of the narrow (result) and wide (source)
     modes; for scalar modes these are just the mode precisions.  */
  unsigned int precision = GET_MODE_UNIT_PRECISION (mode);
  unsigned int op_precision = GET_MODE_UNIT_PRECISION (op_mode);
  scalar_int_mode int_mode, int_op_mode, subreg_mode;

  /* A truncation must narrow (or keep) the precision.  */
  gcc_assert (precision <= op_precision);

  /* Optimize truncations of zero and sign extended values.  */
  if (GET_CODE (op) == ZERO_EXTEND
      || GET_CODE (op) == SIGN_EXTEND)
    {
      /* There are three possibilities.  If MODE is the same as the
         origmode, we can omit both the extension and the subreg.
         If MODE is not larger than the origmode, we can apply the
         truncation without the extension.  Finally, if the outermode
         is larger than the origmode, we can just extend to the appropriate
         mode.  */
      machine_mode origmode = GET_MODE (XEXP (op, 0));
      if (mode == origmode)
        return XEXP (op, 0);
      else if (precision <= GET_MODE_UNIT_PRECISION (origmode))
        return simplify_gen_unary (TRUNCATE, mode,
                                   XEXP (op, 0), origmode);
      else
        return simplify_gen_unary (GET_CODE (op), mode,
                                   XEXP (op, 0), origmode);
    }

  /* If the machine can perform operations in the truncated mode, distribute
     the truncation, i.e. simplify (truncate:QI (op:SI (x:SI) (y:SI))) into
     (op:QI (truncate:QI (x:SI)) (truncate:QI (y:SI))).
     NOTE(review): on WORD_REGISTER_OPERATIONS targets this is only done
     for at-least-word-sized results, presumably because sub-word
     operations are actually performed in word mode there — confirm
     against the target macro documentation.  */
  if (1
      && (!WORD_REGISTER_OPERATIONS || precision >= BITS_PER_WORD)
      && (GET_CODE (op) == PLUS
          || GET_CODE (op) == MINUS
          || GET_CODE (op) == MULT))
    {
      rtx op0 = simplify_gen_unary (TRUNCATE, mode, XEXP (op, 0), op_mode);
      if (op0)
        {
          rtx op1 = simplify_gen_unary (TRUNCATE, mode, XEXP (op, 1), op_mode);
          if (op1)
            return simplify_gen_binary (GET_CODE (op), mode, op0, op1);
        }
    }

  /* Simplify (truncate:QI (lshiftrt:SI (sign_extend:SI (x:QI)) C)) into
     to (ashiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      /* Ensure that OP_MODE is at least twice as wide as MODE
         to avoid the possibility that an outer LSHIFTRT shifts by more
         than the sign extension's sign_bit_copies and introduces zeros
         into the high bits of the result.  */
      && 2 * precision <= op_precision
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode
      && UINTVAL (XEXP (op, 1)) < precision)
    return simplify_gen_binary (ASHIFTRT, mode,
                                XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (truncate:QI (lshiftrt:SI (zero_extend:SI (x:QI)) C)) into
     to (lshiftrt:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.
     No width restriction is needed here: zero_extend fills the high
     bits with zeros, matching what the narrow LSHIFTRT produces.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && CONST_INT_P (XEXP (op, 1))
      && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode
      && UINTVAL (XEXP (op, 1)) < precision)
    return simplify_gen_binary (LSHIFTRT, mode,
                                XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (truncate:QI (ashift:SI (zero_extend:SI (x:QI)) C)) into
     to (ashift:QI (x:QI) C), where C is a suitable small constant and
     the outer subreg is effectively a truncation to the original mode.  */
  if (GET_CODE (op) == ASHIFT
      && CONST_INT_P (XEXP (op, 1))
      && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
          || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode
      && UINTVAL (XEXP (op, 1)) < precision)
    return simplify_gen_binary (ASHIFT, mode,
                                XEXP (XEXP (op, 0), 0), XEXP (op, 1));

  /* Likewise (truncate:QI (and:SI (lshiftrt:SI (x:SI) C) C2)) into
     (and:QI (lshiftrt:QI (truncate:QI (x:SI)) C) C2) for suitable C
     and C2.  */
  if (GET_CODE (op) == AND
      && (GET_CODE (XEXP (op, 0)) == LSHIFTRT
          || GET_CODE (XEXP (op, 0)) == ASHIFTRT)
      && CONST_INT_P (XEXP (XEXP (op, 0), 1))
      && CONST_INT_P (XEXP (op, 1)))
    {
      rtx op0 = (XEXP (XEXP (op, 0), 0));
      rtx shift_op = XEXP (XEXP (op, 0), 1);
      rtx mask_op = XEXP (op, 1);
      unsigned HOST_WIDE_INT shift = UINTVAL (shift_op);
      unsigned HOST_WIDE_INT mask = UINTVAL (mask_op);

      if (shift < precision
          /* If doing this transform works for an X with all bits set,
             it works for any X.  The comparison checks that every mask
             bit that could select a shifted-in high bit of X is the
             same whether the shift happens in MODE or in OP_MODE.  */
          && ((GET_MODE_MASK (mode) >> shift) & mask)
             == ((GET_MODE_MASK (op_mode) >> shift) & mask)
          && (op0 = simplify_gen_unary (TRUNCATE, mode, op0, op_mode))
          && (op0 = simplify_gen_binary (LSHIFTRT, mode, op0, shift_op)))
        {
          /* Re-canonicalize the mask constant for the narrow mode.  */
          mask_op = GEN_INT (trunc_int_for_mode (mask, mode));
          return simplify_gen_binary (AND, mode, op0, mask_op);
        }
    }

  /* Turn (truncate:M1 (*_extract:M2 (reg:M2) (len) (pos))) into
     (*_extract:M1 (truncate:M1 (reg:M2)) (len) (pos')) if possible without
     changing len.  */
  if ((GET_CODE (op) == ZERO_EXTRACT || GET_CODE (op) == SIGN_EXTRACT)
      && REG_P (XEXP (op, 0))
      && GET_MODE (XEXP (op, 0)) == GET_MODE (op)
      && CONST_INT_P (XEXP (op, 1))
      && CONST_INT_P (XEXP (op, 2)))
    {
      rtx op0 = XEXP (op, 0);
      unsigned HOST_WIDE_INT len = UINTVAL (XEXP (op, 1));
      unsigned HOST_WIDE_INT pos = UINTVAL (XEXP (op, 2));
      /* With big-endian bit numbering, bit positions are counted from
         the most significant end, so POS must be rebased by the number
         of bits dropped by the truncation; the extracted field is only
         still addressable if it lies within the narrow mode.  */
      if (BITS_BIG_ENDIAN && pos >= op_precision - precision)
        {
          op0 = simplify_gen_unary (TRUNCATE, mode, op0, GET_MODE (op0));
          if (op0)
            {
              pos -= op_precision - precision;
              return simplify_gen_ternary (GET_CODE (op), mode, mode, op0,
                                           XEXP (op, 1), GEN_INT (pos));
            }
        }
      /* Little-endian bit numbering: the field survives unchanged as
         long as it fits entirely in the low PRECISION bits.  */
      else if (!BITS_BIG_ENDIAN && precision >= len + pos)
        {
          op0 = simplify_gen_unary (TRUNCATE, mode, op0, GET_MODE (op0));
          if (op0)
            return simplify_gen_ternary (GET_CODE (op), mode, mode, op0,
                                         XEXP (op, 1), XEXP (op, 2));
        }
    }

  /* Recognize a word extraction from a multi-word subreg.  The shift
     amount must be a multiple of PRECISION (checked via the power-of-two
     mask below) so the result corresponds to a whole stored chunk.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && SCALAR_INT_MODE_P (mode)
      && SCALAR_INT_MODE_P (op_mode)
      && precision >= BITS_PER_WORD
      && 2 * precision <= op_precision
      && CONST_INT_P (XEXP (op, 1))
      && (INTVAL (XEXP (op, 1)) & (precision - 1)) == 0
      && UINTVAL (XEXP (op, 1)) < op_precision)
    {
      poly_int64 byte = subreg_lowpart_offset (mode, op_mode);
      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
      return simplify_gen_subreg (mode, XEXP (op, 0), op_mode,
                                  (WORDS_BIG_ENDIAN
                                   ? byte - shifted_bytes
                                   : byte + shifted_bytes));
    }

  /* If we have a TRUNCATE of a right shift of MEM, make a new MEM
     and try replacing the TRUNCATE and shift with it.  Don't do this
     if the MEM has a mode-dependent address.  */
  if ((GET_CODE (op) == LSHIFTRT
       || GET_CODE (op) == ASHIFTRT)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (op_mode, &int_op_mode)
      && MEM_P (XEXP (op, 0))
      && CONST_INT_P (XEXP (op, 1))
      && INTVAL (XEXP (op, 1)) % GET_MODE_BITSIZE (int_mode) == 0
      && INTVAL (XEXP (op, 1)) > 0
      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (int_op_mode)
      && ! mode_dependent_address_p (XEXP (XEXP (op, 0), 0),
                                     MEM_ADDR_SPACE (XEXP (op, 0)))
      && ! MEM_VOLATILE_P (XEXP (op, 0))
      && (GET_MODE_SIZE (int_mode) >= UNITS_PER_WORD
          || WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN))
    {
      poly_int64 byte = subreg_lowpart_offset (int_mode, int_op_mode);
      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
      return adjust_address_nv (XEXP (op, 0), int_mode,
                                (WORDS_BIG_ENDIAN
                                 ? byte - shifted_bytes
                                 : byte + shifted_bytes));
    }

  /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
     (OP:SI foo:SI) if OP is NEG or ABS.  */
  if ((GET_CODE (op) == ABS
       || GET_CODE (op) == NEG)
      && (GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
          || GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
      && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
    return simplify_gen_unary (GET_CODE (op), mode,
                               XEXP (XEXP (op, 0), 0), mode);

  /* Simplifications of (truncate:A (subreg:B X 0)).
     Only lowpart subregs of scalar integers are handled.  */
  if (GET_CODE (op) == SUBREG
      && is_a <scalar_int_mode> (mode, &int_mode)
      && SCALAR_INT_MODE_P (op_mode)
      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op)), &subreg_mode)
      && subreg_lowpart_p (op))
    {
      /* (truncate:A (subreg:B (truncate:C X) 0)) is (truncate:A X).  */
      if (GET_CODE (SUBREG_REG (op)) == TRUNCATE)
        {
          rtx inner = XEXP (SUBREG_REG (op), 0);
          if (GET_MODE_PRECISION (int_mode)
              <= GET_MODE_PRECISION (subreg_mode))
            return simplify_gen_unary (TRUNCATE, int_mode, inner,
                                       GET_MODE (inner));
          else
            /* If subreg above is paradoxical and C is narrower
               than A, return (subreg:A (truncate:C X) 0).  */
            return simplify_gen_subreg (int_mode, SUBREG_REG (op),
                                        subreg_mode, 0);
        }

      /* Simplifications of (truncate:A (subreg:B X:C 0)) with
         paradoxical subregs (B is wider than C).  */
      if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
        {
          unsigned int int_op_prec = GET_MODE_PRECISION (int_op_mode);
          unsigned int subreg_prec = GET_MODE_PRECISION (subreg_mode);
          if (int_op_prec > subreg_prec)
            {
              /* A == C: the truncation exactly undoes the paradoxical
                 widening, so the inner value can be used directly.  */
              if (int_mode == subreg_mode)
                return SUBREG_REG (op);
              if (GET_MODE_PRECISION (int_mode) < subreg_prec)
                return simplify_gen_unary (TRUNCATE, int_mode,
                                           SUBREG_REG (op), subreg_mode);
            }
          /* Simplification of (truncate:A (subreg:B X:C 0)) where
             A is narrower than B and B is narrower than C.  */
          else if (int_op_prec < subreg_prec
                   && GET_MODE_PRECISION (int_mode) < int_op_prec)
            return simplify_gen_unary (TRUNCATE, int_mode,
                                       SUBREG_REG (op), subreg_mode);
        }
    }

  /* (truncate:A (truncate:B X)) is (truncate:A X).  */
  if (GET_CODE (op) == TRUNCATE)
    return simplify_gen_unary (TRUNCATE, mode, XEXP (op, 0),
                               GET_MODE (XEXP (op, 0)));

  /* (truncate:A (ior X C)) is (const_int -1) if C is equal to that already,
     in mode A.  */
  if (GET_CODE (op) == IOR
      && SCALAR_INT_MODE_P (mode)
      && SCALAR_INT_MODE_P (op_mode)
      && CONST_INT_P (XEXP (op, 1))
      && trunc_int_for_mode (INTVAL (XEXP (op, 1)), mode) == -1)
    return constm1_rtx;

  /* No simplification applied; the caller keeps the original form.  */
  return NULL_RTX;
}
     877              : 
     878              : /* Try to simplify a unary operation CODE whose output mode is to be
     879              :    MODE with input operand OP whose mode was originally OP_MODE.
     880              :    Return zero if no simplification can be made.  */
     881              : rtx
     882     27133457 : simplify_context::simplify_unary_operation (rtx_code code, machine_mode mode,
     883              :                                             rtx op, machine_mode op_mode)
     884              : {
     885     27133457 :   rtx trueop, tem;
     886              : 
     887     27133457 :   trueop = avoid_constant_pool_reference (op);
     888              : 
     889     27133457 :   tem = simplify_const_unary_operation (code, mode, trueop, op_mode);
     890     27133457 :   if (tem)
     891              :     return tem;
     892              : 
     893     22123949 :   return simplify_unary_operation_1 (code, mode, op);
     894              : }
     895              : 
     896              : /* Return true if FLOAT or UNSIGNED_FLOAT operation OP is known
     897              :    to be exact.  */
     898              : 
     899              : static bool
     900         2700 : exact_int_to_float_conversion_p (const_rtx op)
     901              : {
     902         2700 :   machine_mode op0_mode = GET_MODE (XEXP (op, 0));
     903              :   /* Constants can reach here with -frounding-math, if they do then
     904              :      the conversion isn't exact.  */
     905         2700 :   if (op0_mode == VOIDmode)
     906              :     return false;
     907         5398 :   int out_bits = significand_size (GET_MODE_INNER (GET_MODE (op)));
     908         2699 :   int in_prec = GET_MODE_UNIT_PRECISION (op0_mode);
     909         2699 :   int in_bits = in_prec;
     910         2699 :   if (HWI_COMPUTABLE_MODE_P (op0_mode))
     911              :     {
     912         2609 :       unsigned HOST_WIDE_INT nonzero = nonzero_bits (XEXP (op, 0), op0_mode);
     913         2609 :       if (GET_CODE (op) == FLOAT)
     914         2485 :         in_bits -= num_sign_bit_copies (XEXP (op, 0), op0_mode);
     915          124 :       else if (GET_CODE (op) == UNSIGNED_FLOAT)
     916          124 :         in_bits = wi::min_precision (wi::uhwi (nonzero, in_prec), UNSIGNED);
     917              :       else
     918            0 :         gcc_unreachable ();
     919         2609 :       in_bits -= wi::ctz (wi::uhwi (nonzero, in_prec));
     920              :     }
     921         2699 :   return in_bits <= out_bits;
     922              : }
     923              : 
     924              : /* Perform some simplifications we can do even if the operands
     925              :    aren't constant.  */
     926              : rtx
     927     22123949 : simplify_context::simplify_unary_operation_1 (rtx_code code, machine_mode mode,
     928              :                                               rtx op)
     929              : {
     930     22123949 :   enum rtx_code reversed;
     931     22123949 :   rtx temp, elt, base, step;
     932     22123949 :   scalar_int_mode inner, int_mode, op_mode, op0_mode;
     933              : 
     934     22123949 :   switch (code)
     935              :     {
     936      1772656 :     case NOT:
     937              :       /* (not (not X)) == X.  */
     938      1772656 :       if (GET_CODE (op) == NOT)
     939         3328 :         return XEXP (op, 0);
     940              : 
     941              :       /* (not (eq X Y)) == (ne X Y), etc. if BImode or the result of the
     942              :          comparison is all ones.   */
     943      1769328 :       if (COMPARISON_P (op)
     944        11126 :           && (mode == BImode || STORE_FLAG_VALUE == -1)
     945      1769328 :           && ((reversed = reversed_comparison_code (op, NULL)) != UNKNOWN))
     946            0 :         return simplify_gen_relational (reversed, mode, VOIDmode,
     947            0 :                                         XEXP (op, 0), XEXP (op, 1));
     948              : 
     949              :       /* (not (plus X -1)) can become (neg X).  */
     950      1769328 :       if (GET_CODE (op) == PLUS
     951       290881 :           && XEXP (op, 1) == constm1_rtx)
     952         7123 :         return simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
     953              : 
     954              :       /* Similarly, (not (neg X)) is (plus X -1).  Only do this for
     955              :          modes that have CONSTM1_RTX, i.e. MODE_INT, MODE_PARTIAL_INT
     956              :          and MODE_VECTOR_INT.  */
     957      1762205 :       if (GET_CODE (op) == NEG && CONSTM1_RTX (mode))
     958        71043 :         return simplify_gen_binary (PLUS, mode, XEXP (op, 0),
     959        71043 :                                     CONSTM1_RTX (mode));
     960              : 
     961              :       /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
     962      1691162 :       if (GET_CODE (op) == XOR
     963        13898 :           && CONST_INT_P (XEXP (op, 1))
     964      1694806 :           && (temp = simplify_unary_operation (NOT, mode,
     965              :                                                XEXP (op, 1), mode)) != 0)
     966         3644 :         return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
     967              : 
     968              :       /* (not (plus X C)) for signbit C is (xor X D) with D = ~C.  */
     969      1687518 :       if (GET_CODE (op) == PLUS
     970       283758 :           && CONST_INT_P (XEXP (op, 1))
     971       166655 :           && mode_signbit_p (mode, XEXP (op, 1))
     972      1691433 :           && (temp = simplify_unary_operation (NOT, mode,
     973              :                                                XEXP (op, 1), mode)) != 0)
     974         3915 :         return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
     975              : 
     976              : 
     977              :       /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for
     978              :          operands other than 1, but that is not valid.  We could do a
     979              :          similar simplification for (not (lshiftrt C X)) where C is
     980              :          just the sign bit, but this doesn't seem common enough to
     981              :          bother with.  */
     982      1683603 :       if (GET_CODE (op) == ASHIFT
     983        41501 :           && XEXP (op, 0) == const1_rtx)
     984              :         {
     985         1043 :           temp = simplify_gen_unary (NOT, mode, const1_rtx, mode);
     986         1043 :           return simplify_gen_binary (ROTATE, mode, temp, XEXP (op, 1));
     987              :         }
     988              : 
     989              :       /* (not (ashiftrt foo C)) where C is the number of bits in FOO
     990              :          minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1,
     991              :          so we can perform the above simplification.  */
     992      1682560 :       if (STORE_FLAG_VALUE == -1
     993              :           && is_a <scalar_int_mode> (mode, &int_mode)
     994              :           && GET_CODE (op) == ASHIFTRT
     995              :           && CONST_INT_P (XEXP (op, 1))
     996              :           && INTVAL (XEXP (op, 1)) == GET_MODE_PRECISION (int_mode) - 1)
     997              :         return simplify_gen_relational (GE, int_mode, VOIDmode,
     998              :                                         XEXP (op, 0), const0_rtx);
     999              : 
    1000              : 
    1001      1682560 :       if (partial_subreg_p (op)
    1002        69023 :           && subreg_lowpart_p (op)
    1003        68713 :           && GET_CODE (SUBREG_REG (op)) == ASHIFT
    1004        90691 :           && XEXP (SUBREG_REG (op), 0) == const1_rtx)
    1005              :         {
    1006          163 :           machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
    1007          163 :           rtx x;
    1008              : 
    1009          163 :           x = gen_rtx_ROTATE (inner_mode,
    1010              :                               simplify_gen_unary (NOT, inner_mode, const1_rtx,
    1011              :                                                   inner_mode),
    1012              :                               XEXP (SUBREG_REG (op), 1));
    1013          163 :           temp = rtl_hooks.gen_lowpart_no_emit (mode, x);
    1014          163 :           if (temp)
    1015              :             return temp;
    1016              :         }
    1017              : 
    1018              :       /* Apply De Morgan's laws to reduce number of patterns for machines
    1019              :          with negating logical insns (and-not, nand, etc.).  If result has
    1020              :          only one NOT, put it first, since that is how the patterns are
    1021              :          coded.  */
    1022      1682397 :       if (GET_CODE (op) == IOR || GET_CODE (op) == AND)
    1023              :         {
    1024        11243 :           rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1);
    1025        11243 :           machine_mode op_mode;
    1026              : 
    1027        11243 :           op_mode = GET_MODE (in1);
    1028        11243 :           in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
    1029              : 
    1030        11243 :           op_mode = GET_MODE (in2);
    1031        11243 :           if (op_mode == VOIDmode)
    1032         5383 :             op_mode = mode;
    1033        11243 :           in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
    1034              : 
    1035        11243 :           if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
    1036              :             std::swap (in1, in2);
    1037              : 
    1038        22486 :           return gen_rtx_fmt_ee (GET_CODE (op) == IOR ? AND : IOR,
    1039              :                                  mode, in1, in2);
    1040              :         }
    1041              : 
    1042              :       /* (not (bswap x)) -> (bswap (not x)).  */
    1043      1671154 :       if (GET_CODE (op) == BSWAP || GET_CODE (op) == BITREVERSE)
    1044              :         {
    1045            0 :           rtx x = simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);
    1046            0 :           return simplify_gen_unary (GET_CODE (op), mode, x, mode);
    1047              :         }
    1048              :       break;
    1049              : 
    1050      1684566 :     case NEG:
    1051              :       /* (neg (neg X)) == X.  */
    1052      1684566 :       if (GET_CODE (op) == NEG)
    1053         6356 :         return XEXP (op, 0);
    1054              : 
    1055              :       /* (neg (x ? (neg y) : y)) == !x ? (neg y) : y.
    1056              :          If comparison is not reversible use
    1057              :          x ? y : (neg y).  */
    1058      1678210 :       if (GET_CODE (op) == IF_THEN_ELSE)
    1059              :         {
    1060         3173 :           rtx cond = XEXP (op, 0);
    1061         3173 :           rtx true_rtx = XEXP (op, 1);
    1062         3173 :           rtx false_rtx = XEXP (op, 2);
    1063              : 
    1064         3173 :           if ((GET_CODE (true_rtx) == NEG
    1065            0 :                && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
    1066         3173 :                || (GET_CODE (false_rtx) == NEG
    1067            0 :                    && rtx_equal_p (XEXP (false_rtx, 0), true_rtx)))
    1068              :             {
    1069            0 :               if (reversed_comparison_code (cond, NULL) != UNKNOWN)
    1070            0 :                 temp = reversed_comparison (cond, mode);
    1071              :               else
    1072              :                 {
    1073              :                   temp = cond;
    1074              :                   std::swap (true_rtx, false_rtx);
    1075              :                 }
    1076            0 :               return simplify_gen_ternary (IF_THEN_ELSE, mode,
    1077            0 :                                             mode, temp, true_rtx, false_rtx);
    1078              :             }
    1079              :         }
    1080              : 
    1081              :       /* (neg (plus X 1)) can become (not X).  */
    1082      1678210 :       if (GET_CODE (op) == PLUS
    1083       138669 :           && XEXP (op, 1) == const1_rtx)
    1084        54058 :         return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);
    1085              : 
    1086              :       /* Similarly, (neg (not X)) is (plus X 1).  */
    1087      1624152 :       if (GET_CODE (op) == NOT)
    1088          522 :         return simplify_gen_binary (PLUS, mode, XEXP (op, 0),
    1089          522 :                                     CONST1_RTX (mode));
    1090              : 
    1091              :       /* (neg (minus X Y)) can become (minus Y X).  This transformation
    1092              :          isn't safe for modes with signed zeros, since if X and Y are
    1093              :          both +0, (minus Y X) is the same as (minus X Y).  If the
    1094              :          rounding mode is towards +infinity (or -infinity) then the two
    1095              :          expressions will be rounded differently.  */
    1096      1623630 :       if (GET_CODE (op) == MINUS
    1097        23794 :           && !HONOR_SIGNED_ZEROS (mode)
    1098      1646040 :           && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
    1099        22410 :         return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0));
    1100              : 
    1101      1601220 :       if (GET_CODE (op) == PLUS
    1102        84611 :           && !HONOR_SIGNED_ZEROS (mode)
    1103      1685407 :           && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
    1104              :         {
    1105              :           /* (neg (plus A C)) is simplified to (minus -C A).  */
    1106        84187 :           if (CONST_SCALAR_INT_P (XEXP (op, 1))
    1107         4886 :               || CONST_DOUBLE_AS_FLOAT_P (XEXP (op, 1)))
    1108              :             {
    1109        79301 :               temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode);
    1110        79301 :               if (temp)
    1111        79301 :                 return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 0));
    1112              :             }
    1113              : 
    1114              :           /* (neg (plus A B)) is canonicalized to (minus (neg A) B).  */
    1115         4886 :           temp = simplify_gen_unary (NEG, mode, XEXP (op, 0), mode);
    1116         4886 :           return simplify_gen_binary (MINUS, mode, temp, XEXP (op, 1));
    1117              :         }
    1118              : 
    1119              :       /* (neg (mult A B)) becomes (mult A (neg B)).
    1120              :          This works even for floating-point values.  */
    1121      1517033 :       if (GET_CODE (op) == MULT
    1122      1517033 :           && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
    1123              :         {
    1124        20204 :           temp = simplify_gen_unary (NEG, mode, XEXP (op, 1), mode);
    1125        20204 :           return simplify_gen_binary (MULT, mode, XEXP (op, 0), temp);
    1126              :         }
    1127              : 
    1128              :       /* NEG commutes with ASHIFT since it is multiplication.  Only do
    1129              :          this if we can then eliminate the NEG (e.g., if the operand
    1130              :          is a constant).  */
    1131      1496829 :       if (GET_CODE (op) == ASHIFT)
    1132              :         {
    1133        53380 :           temp = simplify_unary_operation (NEG, mode, XEXP (op, 0), mode);
    1134        53380 :           if (temp)
    1135        12852 :             return simplify_gen_binary (ASHIFT, mode, temp, XEXP (op, 1));
    1136              :         }
    1137              : 
    1138              :       /* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when
    1139              :          C is equal to the width of MODE minus 1.  */
    1140      1483977 :       if (GET_CODE (op) == ASHIFTRT
    1141        27569 :           && CONST_INT_P (XEXP (op, 1))
    1142      1539019 :           && INTVAL (XEXP (op, 1)) == GET_MODE_UNIT_PRECISION (mode) - 1)
    1143          420 :         return simplify_gen_binary (LSHIFTRT, mode,
    1144          420 :                                     XEXP (op, 0), XEXP (op, 1));
    1145              : 
    1146              :       /* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when
    1147              :          C is equal to the width of MODE minus 1.  */
    1148      1483557 :       if (GET_CODE (op) == LSHIFTRT
    1149         7627 :           && CONST_INT_P (XEXP (op, 1))
    1150      1498643 :           && INTVAL (XEXP (op, 1)) == GET_MODE_UNIT_PRECISION (mode) - 1)
    1151         2282 :         return simplify_gen_binary (ASHIFTRT, mode,
    1152         2282 :                                     XEXP (op, 0), XEXP (op, 1));
    1153              : 
    1154              :       /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
    1155      1481275 :       if (GET_CODE (op) == XOR
    1156        16664 :           && XEXP (op, 1) == const1_rtx
    1157      1481387 :           && nonzero_bits (XEXP (op, 0), mode) == 1)
    1158           33 :         return plus_constant (mode, XEXP (op, 0), -1);
    1159              : 
    1160              :       /* (neg (lt x 0)) is (ashiftrt X C) if STORE_FLAG_VALUE is 1.  */
    1161              :       /* (neg (lt x 0)) is (lshiftrt X C) if STORE_FLAG_VALUE is -1.  */
    1162      1481242 :       if (GET_CODE (op) == LT
    1163         2366 :           && XEXP (op, 1) == const0_rtx
    1164      1482862 :           && is_a <scalar_int_mode> (GET_MODE (XEXP (op, 0)), &inner))
    1165              :         {
    1166          339 :           int_mode = as_a <scalar_int_mode> (mode);
    1167          339 :           int isize = GET_MODE_PRECISION (inner);
    1168          339 :           if (STORE_FLAG_VALUE == 1)
    1169              :             {
    1170          339 :               temp = simplify_gen_binary (ASHIFTRT, inner, XEXP (op, 0),
    1171              :                                           gen_int_shift_amount (inner,
    1172          339 :                                                                 isize - 1));
    1173          339 :               if (int_mode == inner)
    1174              :                 return temp;
    1175          174 :               if (GET_MODE_PRECISION (int_mode) > isize)
    1176          109 :                 return simplify_gen_unary (SIGN_EXTEND, int_mode, temp, inner);
    1177           65 :               return simplify_gen_unary (TRUNCATE, int_mode, temp, inner);
    1178              :             }
    1179              :           else if (STORE_FLAG_VALUE == -1)
    1180              :             {
    1181              :               temp = simplify_gen_binary (LSHIFTRT, inner, XEXP (op, 0),
    1182              :                                           gen_int_shift_amount (inner,
    1183              :                                                                 isize - 1));
    1184              :               if (int_mode == inner)
    1185              :                 return temp;
    1186              :               if (GET_MODE_PRECISION (int_mode) > isize)
    1187              :                 return simplify_gen_unary (ZERO_EXTEND, int_mode, temp, inner);
    1188              :               return simplify_gen_unary (TRUNCATE, int_mode, temp, inner);
    1189              :             }
    1190              :         }
    1191              : 
    1192      1480903 :       if (vec_series_p (op, &base, &step))
    1193              :         {
    1194              :           /* Only create a new series if we can simplify both parts.  In other
    1195              :              cases this isn't really a simplification, and it's not necessarily
    1196              :              a win to replace a vector operation with a scalar operation.  */
    1197          276 :           scalar_mode inner_mode = GET_MODE_INNER (mode);
    1198          276 :           base = simplify_unary_operation (NEG, inner_mode, base, inner_mode);
    1199          276 :           if (base)
    1200              :             {
    1201          276 :               step = simplify_unary_operation (NEG, inner_mode,
    1202              :                                                step, inner_mode);
    1203          276 :               if (step)
    1204          276 :                 return gen_vec_series (mode, base, step);
    1205              :             }
    1206              :         }
    1207              :       break;
    1208              : 
    1209      1230641 :     case TRUNCATE:
    1210              :       /* Don't optimize (lshiftrt (mult ...)) as it would interfere
    1211              :          with the umulXi3_highpart patterns.  */
    1212      1230641 :       if (GET_CODE (op) == LSHIFTRT
    1213        18110 :           && GET_CODE (XEXP (op, 0)) == MULT)
    1214              :         break;
    1215              : 
    1216      1223392 :       if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
    1217              :         {
    1218           12 :           if (TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (op)))
    1219              :             {
    1220           12 :               temp = rtl_hooks.gen_lowpart_no_emit (mode, op);
    1221           12 :               if (temp)
    1222              :                 return temp;
    1223              :             }
    1224              :           /* We can't handle truncation to a partial integer mode here
    1225              :              because we don't know the real bitsize of the partial
    1226              :              integer mode.  */
    1227              :           break;
    1228              :         }
    1229              : 
    1230      1223380 :       if (GET_MODE (op) != VOIDmode)
    1231              :         {
    1232      1223380 :           temp = simplify_truncation (mode, op, GET_MODE (op));
    1233      1223380 :           if (temp)
    1234              :             return temp;
    1235              :         }
    1236              : 
    1237              :       /* If we know that the value is already truncated, we can
    1238              :          replace the TRUNCATE with a SUBREG.  */
    1239      1103593 :       if (known_eq (GET_MODE_NUNITS (mode), 1)
    1240      1103593 :           && (TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (op))
    1241            0 :               || truncated_to_mode (mode, op)))
    1242              :         {
    1243      1092160 :           temp = rtl_hooks.gen_lowpart_no_emit (mode, op);
    1244      1092160 :           if (temp)
    1245              :             return temp;
    1246              :         }
    1247              : 
    1248              :       /* A truncate of a comparison can be replaced with a subreg if
    1249              :          STORE_FLAG_VALUE permits.  This is like the previous test,
    1250              :          but it works even if the comparison is done in a mode larger
    1251              :          than HOST_BITS_PER_WIDE_INT.  */
    1252        11631 :       if (HWI_COMPUTABLE_MODE_P (mode)
    1253          198 :           && COMPARISON_P (op)
    1254            0 :           && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
    1255        11631 :           && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (op)))
    1256              :         {
    1257            0 :           temp = rtl_hooks.gen_lowpart_no_emit (mode, op);
    1258            0 :           if (temp)
    1259              :             return temp;
    1260              :         }
    1261              : 
    1262              :       /* A truncate of a memory is just loading the low part of the memory
    1263              :          if we are not changing the meaning of the address. */
    1264        11631 :       if (GET_CODE (op) == MEM
    1265          321 :           && !VECTOR_MODE_P (mode)
    1266          196 :           && !MEM_VOLATILE_P (op)
    1267        11821 :           && !mode_dependent_address_p (XEXP (op, 0), MEM_ADDR_SPACE (op)))
    1268              :         {
    1269          190 :           temp = rtl_hooks.gen_lowpart_no_emit (mode, op);
    1270          190 :           if (temp)
    1271              :             return temp;
    1272              :         }
    1273              : 
    1274              :       /* Check for useless truncation.  */
    1275        11631 :       if (GET_MODE (op) == mode)
    1276              :         return op;
    1277              :       break;
    1278              : 
    1279       172535 :     case FLOAT_TRUNCATE:
    1280              :       /* Check for useless truncation.  */
    1281       172535 :       if (GET_MODE (op) == mode)
    1282              :         return op;
    1283              : 
    1284       172535 :       if (DECIMAL_FLOAT_MODE_P (mode))
    1285              :         break;
    1286              : 
    1287              :       /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
    1288       172381 :       if (GET_CODE (op) == FLOAT_EXTEND
    1289            5 :           && GET_MODE (XEXP (op, 0)) == mode)
    1290              :         return XEXP (op, 0);
    1291              : 
    1292              :       /* (float_truncate:SF (float_truncate:DF foo:XF))
    1293              :          = (float_truncate:SF foo:XF).
    1294              :          This may eliminate double rounding, so it is unsafe.
    1295              : 
    1296              :          (float_truncate:SF (float_extend:XF foo:DF))
    1297              :          = (float_truncate:SF foo:DF).
    1298              : 
    1299              :          (float_truncate:DF (float_extend:XF foo:SF))
    1300              :          = (float_extend:DF foo:SF).  */
    1301       172379 :       if ((GET_CODE (op) == FLOAT_TRUNCATE
    1302          145 :            && flag_unsafe_math_optimizations)
    1303       172375 :           || GET_CODE (op) == FLOAT_EXTEND)
    1304           14 :         return simplify_gen_unary (GET_MODE_UNIT_SIZE (GET_MODE (XEXP (op, 0)))
    1305            7 :                                    > GET_MODE_UNIT_SIZE (mode)
    1306              :                                    ? FLOAT_TRUNCATE : FLOAT_EXTEND,
    1307              :                                    mode,
    1308           14 :                                    XEXP (op, 0), GET_MODE (XEXP (op, 0)));
    1309              : 
    1310              :       /*  (float_truncate (float x)) is (float x)  */
    1311       172372 :       if ((GET_CODE (op) == FLOAT || GET_CODE (op) == UNSIGNED_FLOAT)
    1312       172372 :           && (flag_unsafe_math_optimizations
    1313         1419 :               || exact_int_to_float_conversion_p (op)))
    1314         1418 :         return simplify_gen_unary (GET_CODE (op), mode,
    1315              :                                    XEXP (op, 0),
    1316         1418 :                                    GET_MODE (XEXP (op, 0)));
    1317              : 
    1318              :       /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
    1319              :          (OP:SF foo:SF) if OP is NEG or ABS.  */
    1320       170954 :       if ((GET_CODE (op) == ABS
    1321       170954 :            || GET_CODE (op) == NEG)
    1322          209 :           && GET_CODE (XEXP (op, 0)) == FLOAT_EXTEND
    1323           28 :           && GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
    1324           28 :         return simplify_gen_unary (GET_CODE (op), mode,
    1325           28 :                                    XEXP (XEXP (op, 0), 0), mode);
    1326              : 
    1327              :       /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
    1328              :          is (float_truncate:SF x).  */
    1329       170926 :       if (GET_CODE (op) == SUBREG
    1330          318 :           && subreg_lowpart_p (op)
    1331       171241 :           && GET_CODE (SUBREG_REG (op)) == FLOAT_TRUNCATE)
    1332              :         return SUBREG_REG (op);
    1333              :       break;
    1334              : 
    1335       592419 :     case FLOAT_EXTEND:
    1336              :       /* Check for useless extension.  */
    1337       592419 :       if (GET_MODE (op) == mode)
    1338              :         return op;
    1339              : 
    1340       592419 :       if (DECIMAL_FLOAT_MODE_P (mode))
    1341              :         break;
    1342              : 
    1343              :       /*  (float_extend (float_extend x)) is (float_extend x)
    1344              : 
    1345              :           (float_extend (float x)) is (float x) assuming that double
    1346              :           rounding can't happen.
    1347              :           */
    1348       592316 :       if (GET_CODE (op) == FLOAT_EXTEND
    1349       592316 :           || ((GET_CODE (op) == FLOAT || GET_CODE (op) == UNSIGNED_FLOAT)
    1350         1281 :               && exact_int_to_float_conversion_p (op)))
    1351          563 :         return simplify_gen_unary (GET_CODE (op), mode,
    1352              :                                    XEXP (op, 0),
    1353          563 :                                    GET_MODE (XEXP (op, 0)));
    1354              : 
    1355              :       break;
    1356              : 
    1357       274650 :     case ABS:
    1358              :       /* (abs (neg <foo>)) -> (abs <foo>) */
    1359       274650 :       if (GET_CODE (op) == NEG)
    1360           30 :         return simplify_gen_unary (ABS, mode, XEXP (op, 0),
    1361           30 :                                    GET_MODE (XEXP (op, 0)));
    1362              : 
    1363              :       /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
    1364              :          do nothing.  */
    1365       274620 :       if (GET_MODE (op) == VOIDmode)
    1366              :         break;
    1367              : 
    1368              :       /* If operand is something known to be positive, ignore the ABS.  */
    1369       274620 :       if (val_signbit_known_clear_p (GET_MODE (op),
    1370              :                                      nonzero_bits (op, GET_MODE (op))))
    1371              :         return op;
    1372              : 
    1373              :       /* Using nonzero_bits doesn't (currently) work for modes wider than
    1374              :          HOST_WIDE_INT, so the following transformations help simplify
    1375              :          ABS for TImode and wider.  */
    1376       274376 :       switch (GET_CODE (op))
    1377              :         {
    1378              :         case ABS:
    1379              :         case CLRSB:
    1380              :         case FFS:
    1381              :         case PARITY:
    1382              :         case POPCOUNT:
    1383              :         case SS_ABS:
    1384              :           return op;
    1385              : 
    1386            0 :         case LSHIFTRT:
    1387            0 :           if (CONST_INT_P (XEXP (op, 1))
    1388            0 :               && INTVAL (XEXP (op, 1)) > 0
    1389       274376 :               && is_a <scalar_int_mode> (mode, &int_mode)
    1390            0 :               && INTVAL (XEXP (op, 1)) < GET_MODE_PRECISION (int_mode))
    1391              :             return op;
    1392              :           break;
    1393              : 
    1394              :         default:
    1395              :           break;
    1396              :         }
    1397              : 
    1398              :       /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
    1399       274376 :       if (is_a <scalar_int_mode> (mode, &int_mode)
    1400        36544 :           && (num_sign_bit_copies (op, int_mode)
    1401        36544 :               == GET_MODE_PRECISION (int_mode)))
    1402           74 :         return gen_rtx_NEG (int_mode, op);
    1403              : 
    1404              :       break;
    1405              : 
    1406            0 :     case FFS:
    1407              :       /* (ffs (*_extend <X>)) = (*_extend (ffs <X>)).  */
    1408            0 :       if (GET_CODE (op) == SIGN_EXTEND
    1409            0 :           || GET_CODE (op) == ZERO_EXTEND)
    1410              :         {
    1411            0 :           temp = simplify_gen_unary (FFS, GET_MODE (XEXP (op, 0)),
    1412            0 :                                      XEXP (op, 0), GET_MODE (XEXP (op, 0)));
    1413            0 :           return simplify_gen_unary (GET_CODE (op), mode, temp,
    1414            0 :                                      GET_MODE (temp));
    1415              :         }
    1416              :       break;
    1417              : 
    1418         3443 :     case POPCOUNT:
    1419         3443 :       switch (GET_CODE (op))
    1420              :         {
    1421            0 :         case BSWAP:
    1422            0 :         case BITREVERSE:
    1423              :           /* (popcount (bswap <X>)) = (popcount <X>).  */
    1424            0 :           return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0),
    1425            0 :                                      GET_MODE (XEXP (op, 0)));
    1426              : 
    1427           44 :         case ZERO_EXTEND:
    1428              :           /* (popcount (zero_extend <X>)) = (zero_extend (popcount <X>)).  */
    1429           88 :           temp = simplify_gen_unary (POPCOUNT, GET_MODE (XEXP (op, 0)),
    1430           44 :                                      XEXP (op, 0), GET_MODE (XEXP (op, 0)));
    1431           44 :           return simplify_gen_unary (ZERO_EXTEND, mode, temp,
    1432           44 :                                      GET_MODE (temp));
    1433              : 
    1434            0 :         case ROTATE:
    1435            0 :         case ROTATERT:
    1436              :           /* Rotations don't affect popcount.  */
    1437            0 :           if (!side_effects_p (XEXP (op, 1)))
    1438            0 :             return simplify_gen_unary (POPCOUNT, mode, XEXP (op, 0),
    1439            0 :                                        GET_MODE (XEXP (op, 0)));
    1440              :           break;
    1441              : 
    1442              :         default:
    1443              :           break;
    1444              :         }
    1445              :       break;
    1446              : 
    1447            0 :     case PARITY:
    1448            0 :       switch (GET_CODE (op))
    1449              :         {
    1450            0 :         case NOT:
    1451            0 :         case BSWAP:
    1452            0 :         case BITREVERSE:
    1453            0 :           return simplify_gen_unary (PARITY, mode, XEXP (op, 0),
    1454            0 :                                      GET_MODE (XEXP (op, 0)));
    1455              : 
    1456            0 :         case ZERO_EXTEND:
    1457            0 :         case SIGN_EXTEND:
    1458            0 :           temp = simplify_gen_unary (PARITY, GET_MODE (XEXP (op, 0)),
    1459            0 :                                      XEXP (op, 0), GET_MODE (XEXP (op, 0)));
    1460            0 :           return simplify_gen_unary (GET_CODE (op), mode, temp,
    1461            0 :                                      GET_MODE (temp));
    1462              : 
    1463            0 :         case ROTATE:
    1464            0 :         case ROTATERT:
    1465              :           /* Rotations don't affect parity.  */
    1466            0 :           if (!side_effects_p (XEXP (op, 1)))
    1467            0 :             return simplify_gen_unary (PARITY, mode, XEXP (op, 0),
    1468            0 :                                        GET_MODE (XEXP (op, 0)));
    1469              :           break;
    1470              : 
    1471              :         case PARITY:
    1472              :           /* (parity (parity x)) -> parity (x).  */
    1473              :           return op;
    1474              : 
    1475              :         default:
    1476              :           break;
    1477              :         }
    1478              :       break;
    1479              : 
    1480        31174 :     case BSWAP:
    1481              :       /* (bswap (bswap x)) -> x.  */
    1482        31174 :       if (GET_CODE (op) == BSWAP)
    1483          184 :         return XEXP (op, 0);
    1484              :       break;
    1485              : 
    1486            0 :     case BITREVERSE:
    1487              :       /* (bitreverse (bitreverse x)) -> x.  */
    1488            0 :       if (GET_CODE (op) == BITREVERSE)
    1489            0 :         return XEXP (op, 0);
    1490              :       break;
    1491              : 
    1492       895956 :     case FLOAT:
    1493              :       /* (float (sign_extend <X>)) = (float <X>).  */
    1494       895956 :       if (GET_CODE (op) == SIGN_EXTEND)
    1495         9037 :         return simplify_gen_unary (FLOAT, mode, XEXP (op, 0),
    1496         9037 :                                    GET_MODE (XEXP (op, 0)));
    1497              :       break;
    1498              : 
    1499      3206197 :     case SIGN_EXTEND:
    1500              :       /* Check for useless extension.  */
    1501      3206197 :       if (GET_MODE (op) == mode)
    1502              :         return op;
    1503              : 
    1504              :       /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
    1505              :          becomes just the MINUS if its mode is MODE.  This allows
    1506              :          folding switch statements on machines using casesi (such as
    1507              :          the VAX).  */
    1508      3206157 :       if (GET_CODE (op) == TRUNCATE
    1509           62 :           && GET_MODE (XEXP (op, 0)) == mode
    1510           62 :           && GET_CODE (XEXP (op, 0)) == MINUS
    1511            0 :           && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
    1512            0 :           && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
    1513              :         return XEXP (op, 0);
    1514              : 
    1515              :       /* Extending a widening multiplication should be canonicalized to
    1516              :          a wider widening multiplication.  */
    1517      3206157 :       if (GET_CODE (op) == MULT)
    1518              :         {
    1519        66851 :           rtx lhs = XEXP (op, 0);
    1520        66851 :           rtx rhs = XEXP (op, 1);
    1521        66851 :           enum rtx_code lcode = GET_CODE (lhs);
    1522        66851 :           enum rtx_code rcode = GET_CODE (rhs);
    1523              : 
    1524              :           /* Widening multiplies usually extend both operands, but sometimes
    1525              :              they use a shift to extract a portion of a register.  */
    1526        66851 :           if ((lcode == SIGN_EXTEND
    1527        66702 :                || (lcode == ASHIFTRT && CONST_INT_P (XEXP (lhs, 1))))
    1528          881 :               && (rcode == SIGN_EXTEND
    1529          837 :                   || (rcode == ASHIFTRT && CONST_INT_P (XEXP (rhs, 1)))))
    1530              :             {
    1531          165 :               machine_mode lmode = GET_MODE (lhs);
    1532          165 :               machine_mode rmode = GET_MODE (rhs);
    1533          165 :               int bits;
    1534              : 
    1535          165 :               if (lcode == ASHIFTRT)
    1536              :                 /* Number of bits not shifted off the end.  */
    1537          125 :                 bits = (GET_MODE_UNIT_PRECISION (lmode)
    1538          125 :                         - INTVAL (XEXP (lhs, 1)));
    1539              :               else /* lcode == SIGN_EXTEND */
    1540              :                 /* Size of inner mode.  */
    1541           80 :                 bits = GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (lhs, 0)));
    1542              : 
    1543          165 :               if (rcode == ASHIFTRT)
    1544          121 :                 bits += (GET_MODE_UNIT_PRECISION (rmode)
    1545          121 :                          - INTVAL (XEXP (rhs, 1)));
    1546              :               else /* rcode == SIGN_EXTEND */
    1547           88 :                 bits += GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (rhs, 0)));
    1548              : 
     1549              :               /* We can only widen multiplies if the result is mathematically
    1550              :                  equivalent.  I.e. if overflow was impossible.  */
    1551          330 :               if (bits <= GET_MODE_UNIT_PRECISION (GET_MODE (op)))
    1552          108 :                 return simplify_gen_binary
    1553          108 :                          (MULT, mode,
    1554              :                           simplify_gen_unary (SIGN_EXTEND, mode, lhs, lmode),
    1555          108 :                           simplify_gen_unary (SIGN_EXTEND, mode, rhs, rmode));
    1556              :             }
    1557              :         }
    1558              : 
    1559              :       /* Check for a sign extension of a subreg of a promoted
    1560              :          variable, where the promotion is sign-extended, and the
    1561              :          target mode is the same as the variable's promotion.  */
    1562      3206049 :       if (GET_CODE (op) == SUBREG
    1563       159120 :           && SUBREG_PROMOTED_VAR_P (op)
    1564      3211177 :           && SUBREG_PROMOTED_SIGNED_P (op))
    1565              :         {
    1566            0 :           rtx subreg = SUBREG_REG (op);
    1567            0 :           machine_mode subreg_mode = GET_MODE (subreg);
    1568            0 :           if (!paradoxical_subreg_p (mode, subreg_mode))
    1569              :             {
    1570            0 :               temp = rtl_hooks.gen_lowpart_no_emit (mode, subreg);
    1571            0 :               if (temp)
    1572              :                 {
    1573              :                   /* Preserve SUBREG_PROMOTED_VAR_P.  */
    1574            0 :                   if (partial_subreg_p (temp))
    1575              :                     {
    1576            0 :                       SUBREG_PROMOTED_VAR_P (temp) = 1;
    1577            0 :                       SUBREG_PROMOTED_SET (temp, SRP_SIGNED);
    1578              :                     }
    1579            0 :                   return temp;
    1580              :                 }
    1581              :             }
    1582              :           else
    1583              :             /* Sign-extending a sign-extended subreg.  */
    1584            0 :             return simplify_gen_unary (SIGN_EXTEND, mode,
    1585            0 :                                        subreg, subreg_mode);
    1586              :         }
    1587              : 
    1588              :       /* (sign_extend:M (sign_extend:N <X>)) is (sign_extend:M <X>).
    1589              :          (sign_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>).  */
    1590      3206049 :       if (GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND)
    1591              :         {
    1592        19329 :           gcc_assert (GET_MODE_UNIT_PRECISION (mode)
    1593              :                       > GET_MODE_UNIT_PRECISION (GET_MODE (op)));
    1594         6443 :           return simplify_gen_unary (GET_CODE (op), mode, XEXP (op, 0),
    1595         6443 :                                      GET_MODE (XEXP (op, 0)));
    1596              :         }
    1597              : 
    1598              :       /* (sign_extend:M (ashiftrt:N (ashift <X> (const_int I)) (const_int I)))
     1599              :          is (sign_extend:M (subreg:O <X>)) if there is a mode with
    1600              :          GET_MODE_BITSIZE (N) - I bits.
    1601              :          (sign_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
    1602              :          is similarly (zero_extend:M (subreg:O <X>)).  */
    1603      3199606 :       if ((GET_CODE (op) == ASHIFTRT || GET_CODE (op) == LSHIFTRT)
    1604        88792 :           && GET_CODE (XEXP (op, 0)) == ASHIFT
    1605      3202100 :           && is_a <scalar_int_mode> (mode, &int_mode)
    1606         5029 :           && CONST_INT_P (XEXP (op, 1))
    1607         5029 :           && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
    1608      3204511 :           && (op_mode = as_a <scalar_int_mode> (GET_MODE (op)),
    1609         4905 :               GET_MODE_PRECISION (op_mode) > INTVAL (XEXP (op, 1))))
    1610              :         {
    1611         4905 :           scalar_int_mode tmode;
    1612         4905 :           gcc_assert (GET_MODE_PRECISION (int_mode)
    1613              :                       > GET_MODE_PRECISION (op_mode));
    1614         4905 :           if (int_mode_for_size (GET_MODE_PRECISION (op_mode)
    1615         7275 :                                  - INTVAL (XEXP (op, 1)), 1).exists (&tmode))
    1616              :             {
    1617         2535 :               rtx inner =
    1618         2535 :                 rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
    1619         2535 :               if (inner)
    1620         2535 :                 return simplify_gen_unary (GET_CODE (op) == ASHIFTRT
    1621              :                                            ? SIGN_EXTEND : ZERO_EXTEND,
    1622         2535 :                                            int_mode, inner, tmode);
    1623              :             }
    1624              :         }
    1625              : 
    1626              :       /* (sign_extend:M (lshiftrt:N <X> (const_int I))) is better as
    1627              :          (zero_extend:M (lshiftrt:N <X> (const_int I))) if I is not 0.  */
    1628      3197071 :       if (GET_CODE (op) == LSHIFTRT
    1629          201 :           && CONST_INT_P (XEXP (op, 1))
    1630          201 :           && XEXP (op, 1) != const0_rtx)
    1631          201 :         return simplify_gen_unary (ZERO_EXTEND, mode, op, GET_MODE (op));
    1632              : 
    1633              :       /* (sign_extend:M (truncate:N (lshiftrt:O <X> (const_int I)))) where
    1634              :          I is GET_MODE_PRECISION(O) - GET_MODE_PRECISION(N), simplifies to
    1635              :          (ashiftrt:M <X> (const_int I)) if modes M and O are the same, and
    1636              :          (truncate:M (ashiftrt:O <X> (const_int I))) if M is narrower than
    1637              :          O, and (sign_extend:M (ashiftrt:O <X> (const_int I))) if M is
    1638              :          wider than O.  */
    1639      3196870 :       if (GET_CODE (op) == TRUNCATE
    1640           62 :           && GET_CODE (XEXP (op, 0)) == LSHIFTRT
    1641            0 :           && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    1642              :         {
    1643            0 :           scalar_int_mode m_mode, n_mode, o_mode;
    1644            0 :           rtx old_shift = XEXP (op, 0);
    1645            0 :           if (is_a <scalar_int_mode> (mode, &m_mode)
    1646            0 :               && is_a <scalar_int_mode> (GET_MODE (op), &n_mode)
    1647            0 :               && is_a <scalar_int_mode> (GET_MODE (old_shift), &o_mode)
    1648            0 :               && GET_MODE_PRECISION (o_mode) - GET_MODE_PRECISION (n_mode)
    1649            0 :                  == INTVAL (XEXP (old_shift, 1)))
    1650              :             {
    1651            0 :               rtx new_shift = simplify_gen_binary (ASHIFTRT,
    1652              :                                                    GET_MODE (old_shift),
    1653              :                                                    XEXP (old_shift, 0),
    1654              :                                                    XEXP (old_shift, 1));
    1655            0 :               if (GET_MODE_PRECISION (m_mode) > GET_MODE_PRECISION (o_mode))
    1656            0 :                 return simplify_gen_unary (SIGN_EXTEND, mode, new_shift,
    1657            0 :                                            GET_MODE (new_shift));
    1658            0 :               if (mode != GET_MODE (new_shift))
    1659            0 :                 return simplify_gen_unary (TRUNCATE, mode, new_shift,
    1660            0 :                                            GET_MODE (new_shift));
    1661              :               return new_shift;
    1662              :             }
    1663              :         }
    1664              : 
    1665              :       /* We can canonicalize SIGN_EXTEND (op) as ZERO_EXTEND (op) when
    1666              :          we know the sign bit of OP must be clear.  */
    1667      3196870 :       if (val_signbit_known_clear_p (GET_MODE (op),
    1668      3196870 :                                      nonzero_bits (op, GET_MODE (op))))
    1669        46828 :         return simplify_gen_unary (ZERO_EXTEND, mode, op, GET_MODE (op));
    1670              : 
    1671              :       /* (sign_extend:DI (subreg:SI (ctz:DI ...))) is (ctz:DI ...).  */
    1672      3150042 :       if (GET_CODE (op) == SUBREG
    1673       158517 :           && subreg_lowpart_p (op)
    1674       158386 :           && GET_MODE (SUBREG_REG (op)) == mode
    1675      3283556 :           && is_a <scalar_int_mode> (mode, &int_mode)
    1676       140292 :           && is_a <scalar_int_mode> (GET_MODE (op), &op_mode)
    1677       140292 :           && GET_MODE_PRECISION (int_mode) <= HOST_BITS_PER_WIDE_INT
    1678       138485 :           && GET_MODE_PRECISION (op_mode) < GET_MODE_PRECISION (int_mode)
    1679      3288527 :           && (nonzero_bits (SUBREG_REG (op), mode)
    1680       138485 :               & ~(GET_MODE_MASK (op_mode) >> 1)) == 0)
    1681         6778 :         return SUBREG_REG (op);
    1682              : 
    1683              : #if defined(POINTERS_EXTEND_UNSIGNED)
    1684              :       /* As we do not know which address space the pointer is referring to,
    1685              :          we can do this only if the target does not support different pointer
    1686              :          or address modes depending on the address space.  */
    1687      3143264 :       if (target_default_pointer_address_modes_p ()
    1688              :           && ! POINTERS_EXTEND_UNSIGNED
    1689              :           && mode == Pmode && GET_MODE (op) == ptr_mode
    1690              :           && (CONSTANT_P (op)
    1691              :               || (GET_CODE (op) == SUBREG
    1692              :                   && REG_P (SUBREG_REG (op))
    1693              :                   && REG_POINTER (SUBREG_REG (op))
    1694              :                   && GET_MODE (SUBREG_REG (op)) == Pmode))
    1695              :           && !targetm.have_ptr_extend ())
    1696              :         {
    1697              :           temp
    1698              :             = convert_memory_address_addr_space_1 (Pmode, op,
    1699              :                                                    ADDR_SPACE_GENERIC, false,
    1700              :                                                    true);
    1701              :           if (temp)
    1702              :             return temp;
    1703              :         }
    1704              : #endif
    1705              :       break;
    1706              : 
    1707     11381639 :     case ZERO_EXTEND:
    1708              :       /* Check for useless extension.  */
    1709     11381639 :       if (GET_MODE (op) == mode)
    1710              :         return op;
    1711              : 
    1712              :       /* (zero_extend:SI (and:QI X (const))) -> (and:SI (lowpart:SI X) const)
     1713              :          where the const does not have its sign bit set.  */
    1714     11381599 :       if (GET_CODE (op) == AND
    1715       116256 :           && CONST_INT_P (XEXP (op, 1))
    1716        93896 :           && INTVAL (XEXP (op, 1)) > 0)
    1717              :         {
    1718        89447 :           rtx tem = rtl_hooks.gen_lowpart_no_emit (mode, XEXP (op, 0));
    1719        89447 :           if (tem)
    1720        73951 :             return simplify_gen_binary (AND, mode, tem, XEXP (op, 1));
    1721              :         }
    1722              : 
    1723              :       /* Check for a zero extension of a subreg of a promoted
    1724              :          variable, where the promotion is zero-extended, and the
    1725              :          target mode is the same as the variable's promotion.  */
    1726     11307648 :       if (GET_CODE (op) == SUBREG
    1727      1511888 :           && SUBREG_PROMOTED_VAR_P (op)
    1728     11308105 :           && SUBREG_PROMOTED_UNSIGNED_P (op))
    1729              :         {
    1730          457 :           rtx subreg = SUBREG_REG (op);
    1731          457 :           machine_mode subreg_mode = GET_MODE (subreg);
    1732          457 :           if (!paradoxical_subreg_p (mode, subreg_mode))
    1733              :             {
    1734          299 :               temp = rtl_hooks.gen_lowpart_no_emit (mode, subreg);
    1735          299 :               if (temp)
    1736              :                 {
    1737              :                   /* Preserve SUBREG_PROMOTED_VAR_P.  */
    1738          299 :                   if (partial_subreg_p (temp))
    1739              :                     {
    1740          129 :                       SUBREG_PROMOTED_VAR_P (temp) = 1;
    1741          129 :                       SUBREG_PROMOTED_SET (temp, SRP_UNSIGNED);
    1742              :                     }
    1743          299 :                   return temp;
    1744              :                 }
    1745              :             }
    1746              :           else
    1747              :             /* Zero-extending a zero-extended subreg.  */
    1748          158 :             return simplify_gen_unary (ZERO_EXTEND, mode,
    1749          158 :                                        subreg, subreg_mode);
    1750              :         }
    1751              : 
    1752              :       /* Extending a widening multiplication should be canonicalized to
    1753              :          a wider widening multiplication.  */
    1754     11307191 :       if (GET_CODE (op) == MULT)
    1755              :         {
    1756       168562 :           rtx lhs = XEXP (op, 0);
    1757       168562 :           rtx rhs = XEXP (op, 1);
    1758       168562 :           enum rtx_code lcode = GET_CODE (lhs);
    1759       168562 :           enum rtx_code rcode = GET_CODE (rhs);
    1760              : 
    1761              :           /* Widening multiplies usually extend both operands, but sometimes
    1762              :              they use a shift to extract a portion of a register.  */
    1763       168562 :           if ((lcode == ZERO_EXTEND
    1764       167790 :                || (lcode == LSHIFTRT && CONST_INT_P (XEXP (lhs, 1))))
    1765          862 :               && (rcode == ZERO_EXTEND
    1766          750 :                   || (rcode == LSHIFTRT && CONST_INT_P (XEXP (rhs, 1)))))
    1767              :             {
    1768          112 :               machine_mode lmode = GET_MODE (lhs);
    1769          112 :               machine_mode rmode = GET_MODE (rhs);
    1770          112 :               int bits;
    1771              : 
    1772          112 :               if (lcode == LSHIFTRT)
    1773              :                 /* Number of bits not shifted off the end.  */
    1774            0 :                 bits = (GET_MODE_UNIT_PRECISION (lmode)
    1775            0 :                         - INTVAL (XEXP (lhs, 1)));
    1776              :               else /* lcode == ZERO_EXTEND */
    1777              :                 /* Size of inner mode.  */
    1778          224 :                 bits = GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (lhs, 0)));
     1779              :               /* We can only widen multiplies if the result is mathematically
    1780          112 :               if (rcode == LSHIFTRT)
    1781            0 :                 bits += (GET_MODE_UNIT_PRECISION (rmode)
    1782            0 :                          - INTVAL (XEXP (rhs, 1)));
    1783              :               else /* rcode == ZERO_EXTEND */
    1784          224 :                 bits += GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (rhs, 0)));
    1785              : 
    1786              :               /* We can only widen multiplies if the result is mathematiclly
    1787              :                  equivalent.  I.e. if overflow was impossible.  */
    1788          224 :               if (bits <= GET_MODE_UNIT_PRECISION (GET_MODE (op)))
    1789          112 :                 return simplify_gen_binary
    1790          112 :                          (MULT, mode,
    1791              :                           simplify_gen_unary (ZERO_EXTEND, mode, lhs, lmode),
    1792          112 :                           simplify_gen_unary (ZERO_EXTEND, mode, rhs, rmode));
    1793              :             }
    1794              :         }
    1795              : 
    1796              :       /* (zero_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>).  */
    1797     11307079 :       if (GET_CODE (op) == ZERO_EXTEND)
    1798        22222 :         return simplify_gen_unary (ZERO_EXTEND, mode, XEXP (op, 0),
    1799        22222 :                                    GET_MODE (XEXP (op, 0)));
    1800              : 
    1801              :       /* (zero_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
     1802              :          is (zero_extend:M (subreg:O <X>)) if there is a mode with
    1803              :          GET_MODE_PRECISION (N) - I bits.  */
    1804     11284857 :       if (GET_CODE (op) == LSHIFTRT
    1805        75178 :           && GET_CODE (XEXP (op, 0)) == ASHIFT
    1806     11284880 :           && is_a <scalar_int_mode> (mode, &int_mode)
    1807           23 :           && CONST_INT_P (XEXP (op, 1))
    1808           18 :           && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
    1809     11284857 :           && (op_mode = as_a <scalar_int_mode> (GET_MODE (op)),
    1810            0 :               GET_MODE_PRECISION (op_mode) > INTVAL (XEXP (op, 1))))
    1811              :         {
    1812            0 :           scalar_int_mode tmode;
    1813            0 :           if (int_mode_for_size (GET_MODE_PRECISION (op_mode)
    1814            0 :                                  - INTVAL (XEXP (op, 1)), 1).exists (&tmode))
    1815              :             {
    1816            0 :               rtx inner =
    1817            0 :                 rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
    1818            0 :               if (inner)
    1819            0 :                 return simplify_gen_unary (ZERO_EXTEND, int_mode,
    1820            0 :                                            inner, tmode);
    1821              :             }
    1822              :         }
    1823              : 
    1824              :       /* (zero_extend:M (subreg:N <X:O>)) is <X:O> (for M == O) or
    1825              :          (zero_extend:M <X:O>), if X doesn't have any non-zero bits outside
    1826              :          of mode N.  E.g.
    1827              :          (zero_extend:SI (subreg:QI (and:SI (reg:SI) (const_int 63)) 0)) is
    1828              :          (and:SI (reg:SI) (const_int 63)).  */
    1829     11284857 :       if (partial_subreg_p (op)
    1830     12757386 :           && is_a <scalar_int_mode> (mode, &int_mode)
    1831      1492801 :           && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op)), &op0_mode)
    1832      1492247 :           && GET_MODE_PRECISION (op0_mode) <= HOST_BITS_PER_WIDE_INT
    1833      1115273 :           && GET_MODE_PRECISION (int_mode) >= GET_MODE_PRECISION (op0_mode)
    1834      1094316 :           && subreg_lowpart_p (op)
    1835      2275973 :           && (nonzero_bits (SUBREG_REG (op), op0_mode)
    1836       764542 :               & ~GET_MODE_MASK (GET_MODE (op))) == 0)
    1837              :         {
    1838        20272 :           if (GET_MODE_PRECISION (int_mode) == GET_MODE_PRECISION (op0_mode))
    1839        13529 :             return SUBREG_REG (op);
    1840         6743 :           return simplify_gen_unary (ZERO_EXTEND, int_mode, SUBREG_REG (op),
    1841         6743 :                                      op0_mode);
    1842              :         }
    1843              : 
    1844              :       /* (zero_extend:DI (subreg:SI (ctz:DI ...))) is (ctz:DI ...).  */
    1845     11264585 :       if (GET_CODE (op) == SUBREG
    1846      1491159 :           && subreg_lowpart_p (op)
    1847       848445 :           && GET_MODE (SUBREG_REG (op)) == mode
    1848     12034659 :           && is_a <scalar_int_mode> (mode, &int_mode)
    1849       770074 :           && is_a <scalar_int_mode> (GET_MODE (op), &op_mode)
    1850       770074 :           && GET_MODE_PRECISION (int_mode) <= HOST_BITS_PER_WIDE_INT
    1851       706856 :           && GET_MODE_PRECISION (op_mode) < GET_MODE_PRECISION (int_mode)
    1852     11971441 :           && (nonzero_bits (SUBREG_REG (op), mode)
    1853       706856 :               & ~GET_MODE_MASK (op_mode)) == 0)
    1854            0 :         return SUBREG_REG (op);
    1855              : 
    1856              :       /* Trying to optimize:
    1857              :          (zero_extend:M (subreg:N (not:M (X:M)))) ->
    1858              :          (xor:M (zero_extend:M (subreg:N (X:M)), mask))
    1859              :          where the mask is GET_MODE_MASK (N).
    1860              :          For the cases when X:M doesn't have any non-zero bits
    1861              :          outside of mode N, (zero_extend:M (subreg:N (X:M))
    1862              :          will be simplified to just (X:M)
    1863              :          and whole optimization will be -> (xor:M (X:M, mask)).  */
    1864     11264585 :       if (partial_subreg_p (op)
    1865      1472529 :           && GET_CODE (XEXP (op, 0)) == NOT
    1866         1447 :           && GET_MODE (XEXP (op, 0)) == mode
    1867         1426 :           && subreg_lowpart_p (op)
    1868     11264989 :           && HWI_COMPUTABLE_MODE_P (mode)
    1869          411 :           && is_a <scalar_int_mode> (GET_MODE (op), &op_mode)
    1870      1491570 :           && (nonzero_bits (XEXP (XEXP (op, 0), 0), mode)
    1871          411 :               & ~GET_MODE_MASK (op_mode)) == 0)
    1872              :       {
    1873            7 :         unsigned HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
    1874           14 :         return simplify_gen_binary (XOR, mode,
    1875            7 :                                     XEXP (XEXP (op, 0), 0),
    1876            7 :                                     gen_int_mode (mask, mode));
    1877              :       }
    1878              : 
    1879              : #if defined(POINTERS_EXTEND_UNSIGNED)
    1880              :       /* As we do not know which address space the pointer is referring to,
    1881              :          we can do this only if the target does not support different pointer
    1882              :          or address modes depending on the address space.  */
    1883     11264578 :       if (target_default_pointer_address_modes_p ()
    1884              :           && POINTERS_EXTEND_UNSIGNED > 0
    1885     12599605 :           && mode == Pmode && GET_MODE (op) == ptr_mode
    1886          687 :           && (CONSTANT_P (op)
    1887          666 :               || (GET_CODE (op) == SUBREG
    1888            0 :                   && REG_P (SUBREG_REG (op))
    1889            0 :                   && REG_POINTER (SUBREG_REG (op))
    1890            0 :                   && GET_MODE (SUBREG_REG (op)) == Pmode))
    1891     11264599 :           && !targetm.have_ptr_extend ())
    1892              :         {
    1893           21 :           temp
    1894           21 :             = convert_memory_address_addr_space_1 (Pmode, op,
    1895              :                                                    ADDR_SPACE_GENERIC, false,
    1896              :                                                    true);
    1897           21 :           if (temp)
    1898              :             return temp;
    1899              :         }
    1900              : #endif
    1901              :       break;
    1902              : 
    1903              :     default:
    1904              :       break;
    1905              :     }
    1906              : 
    1907     18930560 :   if (VECTOR_MODE_P (mode)
    1908      1484541 :       && vec_duplicate_p (op, &elt)
    1909     20420867 :       && code != VEC_DUPLICATE)
    1910              :     {
    1911         5762 :       if (code == SIGN_EXTEND || code == ZERO_EXTEND)
    1912              :         /* Enforce a canonical order of VEC_DUPLICATE wrt other unary
    1913              :            operations by promoting VEC_DUPLICATE to the root of the expression
    1914              :            (as far as possible).  */
    1915         4684 :         temp = simplify_gen_unary (code, GET_MODE_INNER (mode),
    1916         9368 :                                    elt, GET_MODE_INNER (GET_MODE (op)));
    1917              :       else
    1918              :         /* Try applying the operator to ELT and see if that simplifies.
    1919              :            We can duplicate the result if so.
    1920              : 
    1921              :            The reason we traditionally haven't used simplify_gen_unary
    1922              :            for these codes is that it didn't necessarily seem to be a
    1923              :            win to convert things like:
    1924              : 
    1925              :              (neg:V (vec_duplicate:V (reg:S R)))
    1926              : 
    1927              :            to:
    1928              : 
    1929              :              (vec_duplicate:V (neg:S (reg:S R)))
    1930              : 
    1931              :            The first might be done entirely in vector registers while the
    1932              :            second might need a move between register files.
    1933              : 
    1934              :            However, there also cases where promoting the vec_duplicate is
    1935              :            more efficient, and there is definite value in having a canonical
    1936              :            form when matching instruction patterns.  We should consider
    1937              :            extending the simplify_gen_unary code above to more cases.  */
    1938         1078 :         temp = simplify_unary_operation (code, GET_MODE_INNER (mode),
    1939         2156 :                                          elt, GET_MODE_INNER (GET_MODE (op)));
    1940         5762 :       if (temp)
    1941         5270 :         return gen_vec_duplicate (mode, temp);
    1942              :     }
    1943              : 
    1944              :   return 0;
    1945              : }
    1946              : 
    1947              : /* Try to compute the value of a unary operation CODE whose output mode is to
    1948              :    be MODE with input operand OP whose mode was originally OP_MODE.
    1949              :    Return zero if the value cannot be computed.  */
    1950              : rtx
    1951     27134344 : simplify_const_unary_operation (enum rtx_code code, machine_mode mode,
    1952              :                                 rtx op, machine_mode op_mode)
    1953              : {
    1954     27134344 :   scalar_int_mode result_mode;
    1955              : 
    1956     27134344 :   if (code == VEC_DUPLICATE)
    1957              :     {
    1958      1607919 :       gcc_assert (VECTOR_MODE_P (mode));
    1959      1607919 :       if (GET_MODE (op) != VOIDmode)
    1960              :       {
    1961       531212 :         if (!VECTOR_MODE_P (GET_MODE (op)))
    1962      1049248 :           gcc_assert (GET_MODE_INNER (mode) == GET_MODE (op));
    1963              :         else
    1964        19764 :           gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
    1965              :                                                 (GET_MODE (op)));
    1966              :       }
    1967      1607919 :       if (CONST_SCALAR_INT_P (op) || CONST_DOUBLE_AS_FLOAT_P (op))
    1968      1114019 :         return gen_const_vec_duplicate (mode, op);
    1969       493900 :       if (GET_CODE (op) == CONST_VECTOR
    1970       493900 :           && (CONST_VECTOR_DUPLICATE_P (op)
    1971              :               || CONST_VECTOR_NUNITS (op).is_constant ()))
    1972              :         {
    1973          755 :           unsigned int npatterns = (CONST_VECTOR_DUPLICATE_P (op)
    1974          755 :                                     ? CONST_VECTOR_NPATTERNS (op)
    1975         1509 :                                     : CONST_VECTOR_NUNITS (op).to_constant ());
    1976         2265 :           gcc_assert (multiple_p (GET_MODE_NUNITS (mode), npatterns));
    1977          755 :           rtx_vector_builder builder (mode, npatterns, 1);
    1978         3130 :           for (unsigned i = 0; i < npatterns; i++)
    1979         2375 :             builder.quick_push (CONST_VECTOR_ELT (op, i));
    1980          755 :           return builder.build ();
    1981          755 :         }
    1982              :     }
    1983              : 
    1984     24812684 :   if (VECTOR_MODE_P (mode)
    1985      1523231 :       && GET_CODE (op) == CONST_VECTOR
    1986     26117814 :       && known_eq (GET_MODE_NUNITS (mode), CONST_VECTOR_NUNITS (op)))
    1987              :     {
    1988        32748 :       gcc_assert (GET_MODE (op) == op_mode);
    1989              : 
    1990        32748 :       rtx_vector_builder builder;
    1991        32748 :       if (!builder.new_unary_operation (mode, op, false))
    1992              :         return 0;
    1993              : 
    1994        32748 :       unsigned int count = builder.encoded_nelts ();
    1995       144127 :       for (unsigned int i = 0; i < count; i++)
    1996              :         {
    1997       223796 :           rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode),
    1998              :                                             CONST_VECTOR_ELT (op, i),
    1999       223796 :                                             GET_MODE_INNER (op_mode));
    2000       111898 :           if (!x || !valid_for_const_vector_p (mode, x))
    2001          519 :             return 0;
    2002       111379 :           builder.quick_push (x);
    2003              :         }
    2004        32229 :       return builder.build ();
    2005        32748 :     }
    2006              : 
    2007              :   /* The order of these tests is critical so that, for example, we don't
    2008              :      check the wrong mode (input vs. output) for a conversion operation,
    2009              :      such as FIX.  At some point, this should be simplified.  */
    2010              : 
    2011     25986822 :   if (code == FLOAT && CONST_SCALAR_INT_P (op))
    2012              :     {
    2013         6965 :       REAL_VALUE_TYPE d;
    2014              : 
    2015         6965 :       if (op_mode == VOIDmode)
    2016              :         {
    2017              :           /* CONST_INT have VOIDmode as the mode.  We assume that all
    2018              :              the bits of the constant are significant, though, this is
    2019              :              a dangerous assumption as many times CONST_INTs are
    2020              :              created and used with garbage in the bits outside of the
    2021              :              precision of the implied mode of the const_int.  */
    2022           64 :           op_mode = MAX_MODE_INT;
    2023              :         }
    2024              : 
    2025         6965 :       real_from_integer (&d, mode, rtx_mode_t (op, op_mode), SIGNED);
    2026              : 
    2027              :       /* Avoid the folding if flag_signaling_nans is on and
    2028              :          operand is a signaling NaN.  */
    2029         6965 :       if (HONOR_SNANS (mode) && REAL_VALUE_ISSIGNALING_NAN (d))
    2030              :         return 0;
    2031              : 
    2032         6965 :       d = real_value_truncate (mode, d);
    2033              : 
    2034              :       /* Avoid the folding if flag_rounding_math is on and the
    2035              :          conversion is not exact.  */
    2036         6965 :       if (HONOR_SIGN_DEPENDENT_ROUNDING (mode))
    2037              :         {
    2038         1011 :           bool fail = false;
    2039         1011 :           wide_int w = real_to_integer (&d, &fail,
    2040              :                                         GET_MODE_PRECISION
    2041         1011 :                                           (as_a <scalar_int_mode> (op_mode)));
    2042         2022 :           if (fail || wi::ne_p (w, wide_int (rtx_mode_t (op, op_mode))))
    2043          905 :             return 0;
    2044         1011 :         }
    2045              : 
    2046         6060 :       return const_double_from_real_value (d, mode);
    2047              :     }
    2048     25979857 :   else if (code == UNSIGNED_FLOAT && CONST_SCALAR_INT_P (op))
    2049              :     {
    2050         2138 :       REAL_VALUE_TYPE d;
    2051              : 
    2052         2138 :       if (op_mode == VOIDmode)
    2053              :         {
    2054              :           /* CONST_INT have VOIDmode as the mode.  We assume that all
    2055              :              the bits of the constant are significant, though, this is
    2056              :              a dangerous assumption as many times CONST_INTs are
    2057              :              created and used with garbage in the bits outside of the
    2058              :              precision of the implied mode of the const_int.  */
    2059            8 :           op_mode = MAX_MODE_INT;
    2060              :         }
    2061              : 
    2062         2138 :       real_from_integer (&d, mode, rtx_mode_t (op, op_mode), UNSIGNED);
    2063              : 
    2064              :       /* Avoid the folding if flag_signaling_nans is on and
    2065              :          operand is a signaling NaN.  */
    2066         2138 :       if (HONOR_SNANS (mode) && REAL_VALUE_ISSIGNALING_NAN (d))
    2067              :         return 0;
    2068              : 
    2069         2138 :       d = real_value_truncate (mode, d);
    2070              : 
    2071              :       /* Avoid the folding if flag_rounding_math is on and the
    2072              :          conversion is not exact.  */
    2073         2138 :       if (HONOR_SIGN_DEPENDENT_ROUNDING (mode))
    2074              :         {
    2075           16 :           bool fail = false;
    2076           16 :           wide_int w = real_to_integer (&d, &fail,
    2077              :                                         GET_MODE_PRECISION
    2078           16 :                                           (as_a <scalar_int_mode> (op_mode)));
    2079           28 :           if (fail || wi::ne_p (w, wide_int (rtx_mode_t (op, op_mode))))
    2080           16 :             return 0;
    2081           16 :         }
    2082              : 
    2083         2122 :       return const_double_from_real_value (d, mode);
    2084              :     }
    2085              : 
    2086     25977719 :   if (CONST_SCALAR_INT_P (op) && is_a <scalar_int_mode> (mode, &result_mode))
    2087              :     {
    2088      3437495 :       unsigned int width = GET_MODE_PRECISION (result_mode);
    2089      3437495 :       if (width > MAX_BITSIZE_MODE_ANY_INT)
    2090              :         return 0;
    2091              : 
    2092      3437495 :       wide_int result;
    2093      3437495 :       scalar_int_mode imode = (op_mode == VOIDmode
    2094      3437495 :                                ? result_mode
    2095      3437276 :                                : as_a <scalar_int_mode> (op_mode));
    2096      3437495 :       rtx_mode_t op0 = rtx_mode_t (op, imode);
    2097      3437495 :       int int_value;
    2098              : 
    2099              : #if TARGET_SUPPORTS_WIDE_INT == 0
    2100              :       /* This assert keeps the simplification from producing a result
    2101              :          that cannot be represented in a CONST_DOUBLE but a lot of
    2102              :          upstream callers expect that this function never fails to
    2103              :          simplify something and so you if you added this to the test
    2104              :          above the code would die later anyway.  If this assert
    2105              :          happens, you just need to make the port support wide int.  */
    2106              :       gcc_assert (width <= HOST_BITS_PER_DOUBLE_INT);
    2107              : #endif
    2108              : 
    2109      3437495 :       switch (code)
    2110              :         {
    2111       177761 :         case NOT:
    2112       177761 :           result = wi::bit_not (op0);
    2113       177761 :           break;
    2114              : 
    2115      1946215 :         case NEG:
    2116      1946215 :           result = wi::neg (op0);
    2117      1946215 :           break;
    2118              : 
    2119         7324 :         case ABS:
    2120         7324 :           result = wi::abs (op0);
    2121         7324 :           break;
    2122              : 
    2123            0 :         case FFS:
    2124            0 :           result = wi::shwi (wi::ffs (op0), result_mode);
    2125            0 :           break;
    2126              : 
    2127          140 :         case CLZ:
    2128          140 :           if (wi::ne_p (op0, 0))
    2129           10 :             int_value = wi::clz (op0);
    2130          260 :           else if (! CLZ_DEFINED_VALUE_AT_ZERO (imode, int_value))
    2131              :             return NULL_RTX;
    2132           10 :           result = wi::shwi (int_value, result_mode);
    2133           10 :           break;
    2134              : 
    2135            0 :         case CLRSB:
    2136            0 :           result = wi::shwi (wi::clrsb (op0), result_mode);
    2137            0 :           break;
    2138              : 
    2139            0 :         case CTZ:
    2140            0 :           if (wi::ne_p (op0, 0))
    2141            0 :             int_value = wi::ctz (op0);
    2142            0 :           else if (! CTZ_DEFINED_VALUE_AT_ZERO (imode, int_value))
    2143              :             return NULL_RTX;
    2144            0 :           result = wi::shwi (int_value, result_mode);
    2145            0 :           break;
    2146              : 
    2147          160 :         case POPCOUNT:
    2148          160 :           result = wi::shwi (wi::popcount (op0), result_mode);
    2149          160 :           break;
    2150              : 
    2151            0 :         case PARITY:
    2152            0 :           result = wi::shwi (wi::parity (op0), result_mode);
    2153            0 :           break;
    2154              : 
    2155         2017 :         case BSWAP:
    2156         2017 :           result = wi::bswap (op0);
    2157         2017 :           break;
    2158              : 
    2159            0 :         case BITREVERSE:
    2160            0 :           result = wi::bitreverse (op0);
    2161            0 :           break;
    2162              : 
    2163      1126485 :         case TRUNCATE:
    2164      1126485 :         case ZERO_EXTEND:
    2165      1126485 :           result = wide_int::from (op0, width, UNSIGNED);
    2166      1126485 :           break;
    2167              : 
    2168        14342 :         case US_TRUNCATE:
    2169        14342 :         case SS_TRUNCATE:
    2170        14342 :           {
    2171        14342 :             signop sgn = code == US_TRUNCATE ? UNSIGNED : SIGNED;
    2172        14342 :             wide_int nmax
    2173        14342 :               = wide_int::from (wi::max_value (width, sgn),
    2174        28684 :                                 GET_MODE_PRECISION (imode), sgn);
    2175        14342 :             wide_int nmin
    2176        14342 :               = wide_int::from (wi::min_value (width, sgn),
    2177        28684 :                                 GET_MODE_PRECISION (imode), sgn);
    2178        14342 :             result = wi::min (wi::max (op0, nmin, sgn), nmax, sgn);
    2179        14342 :             result = wide_int::from (result, width, sgn);
    2180        14342 :             break;
    2181        14342 :           }
    2182       163051 :         case SIGN_EXTEND:
    2183       163051 :           result = wide_int::from (op0, width, SIGNED);
    2184       163051 :           break;
    2185              : 
    2186            0 :         case SS_NEG:
    2187            0 :           if (wi::only_sign_bit_p (op0))
    2188            0 :             result = wi::max_value (GET_MODE_PRECISION (imode), SIGNED);
    2189              :           else
    2190            0 :             result = wi::neg (op0);
    2191              :           break;
    2192              : 
    2193            0 :         case SS_ABS:
    2194            0 :           if (wi::only_sign_bit_p (op0))
    2195            0 :             result = wi::max_value (GET_MODE_PRECISION (imode), SIGNED);
    2196              :           else
    2197            0 :             result = wi::abs (op0);
    2198              :           break;
    2199              : 
    2200              :         case SQRT:
    2201              :         default:
    2202              :           return 0;
    2203              :         }
    2204              : 
    2205      3437365 :       return immed_wide_int_const (result, result_mode);
    2206      3437495 :     }
    2207              : 
    2208     22540224 :   else if (CONST_DOUBLE_AS_FLOAT_P (op)
    2209       420608 :            && SCALAR_FLOAT_MODE_P (mode)
    2210       418582 :            && SCALAR_FLOAT_MODE_P (GET_MODE (op)))
    2211              :     {
    2212       418582 :       REAL_VALUE_TYPE d = *CONST_DOUBLE_REAL_VALUE (op);
    2213       418582 :       switch (code)
    2214              :         {
    2215              :         case SQRT:
    2216              :           return 0;
    2217          350 :         case ABS:
    2218          350 :           d = real_value_abs (&d);
    2219          350 :           break;
    2220        15687 :         case NEG:
    2221        15687 :           d = real_value_negate (&d);
    2222        15687 :           break;
    2223         2286 :         case FLOAT_TRUNCATE:
    2224              :           /* Don't perform the operation if flag_signaling_nans is on
    2225              :              and the operand is a signaling NaN.  */
    2226         2286 :           if (HONOR_SNANS (mode) && REAL_VALUE_ISSIGNALING_NAN (d))
    2227              :             return NULL_RTX;
    2228              :           /* Or if flag_rounding_math is on and the truncation is not
    2229              :              exact.  */
    2230         2286 :           if (HONOR_SIGN_DEPENDENT_ROUNDING (mode)
    2231         2286 :               && !exact_real_truncate (mode, &d))
    2232          231 :             return NULL_RTX;
    2233         2055 :           d = real_value_truncate (mode, d);
    2234         2055 :           break;
    2235       393726 :         case FLOAT_EXTEND:
    2236              :           /* Don't perform the operation if flag_signaling_nans is on
    2237              :              and the operand is a signaling NaN.  */
    2238       393726 :           if (HONOR_SNANS (mode) && REAL_VALUE_ISSIGNALING_NAN (d))
    2239              :             return NULL_RTX;
    2240              :           /* All this does is change the mode, unless changing
    2241              :              mode class.  */
    2242       393724 :           if (GET_MODE_CLASS (mode) != GET_MODE_CLASS (GET_MODE (op)))
    2243            0 :             real_convert (&d, mode, &d);
    2244              :           break;
    2245            0 :         case FIX:
    2246              :           /* Don't perform the operation if flag_signaling_nans is on
    2247              :              and the operand is a signaling NaN.  */
    2248            0 :           if (HONOR_SNANS (mode) && REAL_VALUE_ISSIGNALING_NAN (d))
    2249              :             return NULL_RTX;
    2250            0 :           real_arithmetic (&d, FIX_TRUNC_EXPR, &d, NULL);
    2251            0 :           break;
    2252         5928 :         case NOT:
    2253         5928 :           {
    2254         5928 :             long tmp[4];
    2255         5928 :             int i;
    2256              : 
    2257         5928 :             real_to_target (tmp, &d, GET_MODE (op));
    2258        29640 :             for (i = 0; i < 4; i++)
    2259        23712 :               tmp[i] = ~tmp[i];
    2260         5928 :             real_from_target (&d, tmp, mode);
    2261         5928 :             break;
    2262              :           }
    2263            0 :         default:
    2264            0 :           gcc_unreachable ();
    2265              :         }
    2266       417744 :       return const_double_from_real_value (d, mode);
    2267              :     }
    2268         2026 :   else if (CONST_DOUBLE_AS_FLOAT_P (op)
    2269         2026 :            && SCALAR_FLOAT_MODE_P (GET_MODE (op))
    2270     22123668 :            && is_int_mode (mode, &result_mode))
    2271              :     {
    2272         2026 :       unsigned int width = GET_MODE_PRECISION (result_mode);
    2273         2026 :       if (width > MAX_BITSIZE_MODE_ANY_INT)
    2274              :         return 0;
    2275              : 
    2276              :       /* Although the overflow semantics of RTL's FIX and UNSIGNED_FIX
    2277              :          operators are intentionally left unspecified (to ease implementation
    2278              :          by target backends), for consistency, this routine implements the
    2279              :          same semantics for constant folding as used by the middle-end.  */
    2280              : 
    2281              :       /* This was formerly used only for non-IEEE float.
    2282              :          eggert@twinsun.com says it is safe for IEEE also.  */
    2283         2026 :       REAL_VALUE_TYPE t;
    2284         2026 :       const REAL_VALUE_TYPE *x = CONST_DOUBLE_REAL_VALUE (op);
    2285         2026 :       wide_int wmax, wmin;
    2286              :       /* This is part of the abi to real_to_integer, but we check
    2287              :          things before making this call.  */
    2288         2026 :       bool fail;
    2289              : 
    2290         2026 :       switch (code)
    2291              :         {
    2292         2018 :         case FIX:
    2293              :           /* According to IEEE standard, for conversions from floating point to
    2294              :              integer. When a NaN or infinite operand cannot be represented in
    2295              :              the destination format and this cannot otherwise be indicated, the
    2296              :              invalid operation exception shall be signaled. When a numeric
    2297              :              operand would convert to an integer outside the range of the
    2298              :              destination format, the invalid operation exception shall be
    2299              :              signaled if this situation cannot otherwise be indicated.  */
    2300         2018 :           if (REAL_VALUE_ISNAN (*x))
    2301          955 :             return flag_trapping_math ? NULL_RTX : const0_rtx;
    2302              : 
    2303         1063 :           if (REAL_VALUE_ISINF (*x) && flag_trapping_math)
    2304              :             return NULL_RTX;
    2305              : 
    2306              :           /* Test against the signed upper bound.  */
    2307          103 :           wmax = wi::max_value (width, SIGNED);
    2308          103 :           real_from_integer (&t, VOIDmode, wmax, SIGNED);
    2309          103 :           if (real_less (&t, x))
    2310            3 :             return (flag_trapping_math
    2311            3 :                     ? NULL_RTX : immed_wide_int_const (wmax, mode));
    2312              : 
    2313              :           /* Test against the signed lower bound.  */
    2314          100 :           wmin = wi::min_value (width, SIGNED);
    2315          100 :           real_from_integer (&t, VOIDmode, wmin, SIGNED);
    2316          100 :           if (real_less (x, &t))
    2317            8 :             return immed_wide_int_const (wmin, mode);
    2318              : 
    2319           92 :           return immed_wide_int_const (real_to_integer (x, &fail, width),
    2320              :                                        mode);
    2321              : 
    2322            8 :         case UNSIGNED_FIX:
    2323            8 :           if (REAL_VALUE_ISNAN (*x) || REAL_VALUE_NEGATIVE (*x))
    2324            6 :             return flag_trapping_math ? NULL_RTX : const0_rtx;
    2325              : 
    2326            2 :           if (REAL_VALUE_ISINF (*x) && flag_trapping_math)
    2327              :             return NULL_RTX;
    2328              : 
    2329              :           /* Test against the unsigned upper bound.  */
    2330            0 :           wmax = wi::max_value (width, UNSIGNED);
    2331            0 :           real_from_integer (&t, VOIDmode, wmax, UNSIGNED);
    2332            0 :           if (real_less (&t, x))
    2333            0 :             return (flag_trapping_math
    2334            0 :                     ? NULL_RTX : immed_wide_int_const (wmax, mode));
    2335              : 
    2336            0 :           return immed_wide_int_const (real_to_integer (x, &fail, width),
    2337              :                                        mode);
    2338              : 
    2339            0 :         default:
    2340            0 :           gcc_unreachable ();
    2341              :         }
    2342         2026 :     }
    2343              : 
    2344              :   /* Handle polynomial integers.  */
    2345              :   else if (CONST_POLY_INT_P (op))
    2346              :     {
    2347              :       poly_wide_int result;
    2348              :       switch (code)
    2349              :         {
    2350              :         case NEG:
    2351              :           result = -const_poly_int_value (op);
    2352              :           break;
    2353              : 
    2354              :         case NOT:
    2355              :           result = ~const_poly_int_value (op);
    2356              :           break;
    2357              : 
    2358              :         default:
    2359              :           return NULL_RTX;
    2360              :         }
    2361              :       return immed_wide_int_const (result, mode);
    2362              :     }
    2363              : 
    2364              :   return NULL_RTX;
    2365              : }
    2366              : 
    2367              : /* Subroutine of simplify_binary_operation to simplify a binary operation
    2368              :    CODE that can commute with byte swapping, with result mode MODE and
    2369              :    operating on OP0 and OP1.  CODE is currently one of AND, IOR or XOR.
    2370              :    Return zero if no simplification or canonicalization is possible.  */
    2371              : 
    2372              : rtx
    2373     37809983 : simplify_context::simplify_byte_swapping_operation (rtx_code code,
    2374              :                                                     machine_mode mode,
    2375              :                                                     rtx op0, rtx op1)
    2376              : {
    2377     37809983 :   rtx tem;
    2378              : 
    2379              :   /* (op (bswap x) C1)) -> (bswap (op x C2)) with C2 swapped.  */
    2380     37809983 :   if (GET_CODE (op0) == BSWAP && CONST_SCALAR_INT_P (op1))
    2381              :     {
    2382          506 :       tem = simplify_gen_binary (code, mode, XEXP (op0, 0),
    2383              :                                  simplify_gen_unary (BSWAP, mode, op1, mode));
    2384          506 :       return simplify_gen_unary (BSWAP, mode, tem, mode);
    2385              :     }
    2386              : 
    2387              :   /* (op (bswap x) (bswap y)) -> (bswap (op x y)).  */
    2388     37809477 :   if (GET_CODE (op0) == BSWAP && GET_CODE (op1) == BSWAP)
    2389              :     {
    2390            0 :       tem = simplify_gen_binary (code, mode, XEXP (op0, 0), XEXP (op1, 0));
    2391            0 :       return simplify_gen_unary (BSWAP, mode, tem, mode);
    2392              :     }
    2393              : 
    2394              :   return NULL_RTX;
    2395              : }
    2396              : 
    2397              : /* Subroutine of simplify_binary_operation to simplify a commutative,
    2398              :    associative binary operation CODE with result mode MODE, operating
    2399              :    on OP0 and OP1.  CODE is currently one of PLUS, MULT, AND, IOR, XOR,
    2400              :    SMIN, SMAX, UMIN or UMAX.  Return zero if no simplification or
    2401              :    canonicalization is possible.  */
    2402              : 
    2403              : rtx
    2404     47711197 : simplify_context::simplify_associative_operation (rtx_code code,
    2405              :                                                   machine_mode mode,
    2406              :                                                   rtx op0, rtx op1)
    2407              : {
    2408     47711197 :   rtx tem;
    2409              : 
    2410              :   /* Normally expressions simplified by simplify-rtx.cc are combined
    2411              :      at most from a few machine instructions and therefore the
    2412              :      expressions should be fairly small.  During var-tracking
    2413              :      we can see arbitrarily large expressions though and reassociating
    2414              :      those can be quadratic, so punt after encountering max_assoc_count
    2415              :      simplify_associative_operation calls during outermost simplify_*
    2416              :      call.  */
    2417     47711197 :   if (++assoc_count >= max_assoc_count)
    2418              :     return NULL_RTX;
    2419              : 
    2420              :   /* Linearize the operator to the left.  */
    2421     47706717 :   if (GET_CODE (op1) == code)
    2422              :     {
    2423              :       /* "(a op b) op (c op d)" becomes "((a op b) op c) op d)".  */
    2424        21610 :       if (GET_CODE (op0) == code)
    2425              :         {
    2426         5002 :           tem = simplify_gen_binary (code, mode, op0, XEXP (op1, 0));
    2427         5002 :           return simplify_gen_binary (code, mode, tem, XEXP (op1, 1));
    2428              :         }
    2429              : 
    2430              :       /* "a op (b op c)" becomes "(b op c) op a".  */
    2431        16608 :       if (! swap_commutative_operands_p (op1, op0))
    2432        16608 :         return simplify_gen_binary (code, mode, op1, op0);
    2433              : 
    2434              :       std::swap (op0, op1);
    2435              :     }
    2436              : 
    2437     47685107 :   if (GET_CODE (op0) == code)
    2438              :     {
    2439              :       /* Canonicalize "(x op c) op y" as "(x op y) op c".  */
    2440      1424176 :       if (swap_commutative_operands_p (XEXP (op0, 1), op1))
    2441              :         {
    2442       293478 :           tem = simplify_gen_binary (code, mode, XEXP (op0, 0), op1);
    2443       293478 :           return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
    2444              :         }
    2445              : 
    2446              :       /* Attempt to simplify "(a op b) op c" as "a op (b op c)".  */
    2447      1130698 :       tem = simplify_binary_operation (code, mode, XEXP (op0, 1), op1);
    2448      1130698 :       if (tem != 0)
    2449        82473 :         return simplify_gen_binary (code, mode, XEXP (op0, 0), tem);
    2450              : 
    2451              :       /* Attempt to simplify "(a op b) op c" as "(a op c) op b".  */
    2452      1048225 :       tem = simplify_binary_operation (code, mode, XEXP (op0, 0), op1);
    2453      1048225 :       if (tem != 0)
    2454        35483 :         return simplify_gen_binary (code, mode, tem, XEXP (op0, 1));
    2455              :     }
    2456              : 
    2457              :   return 0;
    2458              : }
    2459              : 
    2460              : /* If COMPARISON can be treated as an unsigned comparison, return a mask
    2461              :    that represents it (8 if it includes <, 4 if it includes > and 2
    2462              :    if it includes ==).  Return 0 otherwise.  */
    2463              : static int
    2464        18866 : unsigned_comparison_to_mask (rtx_code comparison)
    2465              : {
    2466            0 :   switch (comparison)
    2467              :     {
    2468              :     case LTU:
    2469              :       return 8;
    2470              :     case GTU:
    2471              :       return 4;
    2472              :     case EQ:
    2473              :       return 2;
    2474              : 
    2475              :     case LEU:
    2476              :       return 10;
    2477              :     case GEU:
    2478              :       return 6;
    2479              : 
    2480              :     case NE:
    2481              :       return 12;
    2482              : 
    2483              :     default:
    2484              :       return 0;
    2485              :     }
    2486              : }
    2487              : 
    2488              : /* Reverse the mapping in unsigned_comparison_to_mask, going from masks
    2489              :    to comparisons.  */
    2490              : static rtx_code
    2491         6596 : mask_to_unsigned_comparison (int mask)
    2492              : {
    2493         6596 :   switch (mask)
    2494              :     {
    2495              :     case 8:
    2496              :       return LTU;
    2497          160 :     case 4:
    2498          160 :       return GTU;
    2499         2590 :     case 2:
    2500         2590 :       return EQ;
    2501              : 
    2502          160 :     case 10:
    2503          160 :       return LEU;
    2504          160 :     case 6:
    2505          160 :       return GEU;
    2506              : 
    2507         3366 :     case 12:
    2508         3366 :       return NE;
    2509              : 
    2510            0 :     default:
    2511            0 :       gcc_unreachable ();
    2512              :     }
    2513              : }
    2514              : 
    2515              : /* Return a mask describing the COMPARISON.  */
    2516              : static int
    2517         2666 : comparison_to_mask (enum rtx_code comparison)
    2518              : {
    2519         2666 :   switch (comparison)
    2520              :     {
    2521              :     case LT:
    2522              :       return 8;
    2523          472 :     case GT:
    2524          472 :       return 4;
    2525          419 :     case EQ:
    2526          419 :       return 2;
    2527           19 :     case UNORDERED:
    2528           19 :       return 1;
    2529              : 
    2530            0 :     case LTGT:
    2531            0 :       return 12;
    2532          441 :     case LE:
    2533          441 :       return 10;
    2534          441 :     case GE:
    2535          441 :       return 6;
    2536            0 :     case UNLT:
    2537            0 :       return 9;
    2538            0 :     case UNGT:
    2539            0 :       return 5;
    2540            0 :     case UNEQ:
    2541            0 :       return 3;
    2542              : 
    2543            0 :     case ORDERED:
    2544            0 :       return 14;
    2545          400 :     case NE:
    2546          400 :       return 13;
    2547            0 :     case UNLE:
    2548            0 :       return 11;
    2549            0 :     case UNGE:
    2550            0 :       return 7;
    2551              : 
    2552            0 :     default:
    2553            0 :       gcc_unreachable ();
    2554              :     }
    2555              : }
    2556              : 
    2557              : /* Return a comparison corresponding to the MASK.  */
    2558              : static enum rtx_code
    2559         1014 : mask_to_comparison (int mask)
    2560              : {
    2561         1014 :   switch (mask)
    2562              :     {
    2563              :     case 8:
    2564              :       return LT;
    2565              :     case 4:
    2566              :       return GT;
    2567              :     case 2:
    2568              :       return EQ;
    2569              :     case 1:
    2570              :       return UNORDERED;
    2571              : 
    2572              :     case 12:
    2573              :       return LTGT;
    2574              :     case 10:
    2575              :       return LE;
    2576              :     case 6:
    2577              :       return GE;
    2578              :     case 9:
    2579              :       return UNLT;
    2580              :     case 5:
    2581              :       return UNGT;
    2582              :     case 3:
    2583              :       return UNEQ;
    2584              : 
    2585              :     case 14:
    2586              :       return ORDERED;
    2587              :     case 13:
    2588              :       return NE;
    2589              :     case 11:
    2590              :       return UNLE;
    2591              :     case 7:
    2592              :       return UNGE;
    2593              : 
    2594            0 :     default:
    2595            0 :       gcc_unreachable ();
    2596              :     }
    2597              : }
    2598              : 
    2599              : /* Canonicalize RES, a scalar const0_rtx/const_true_rtx to the right
    2600              :    false/true value of comparison with MODE where comparison operands
    2601              :    have CMP_MODE.  */
    2602              : 
static rtx
relational_result (machine_mode mode, machine_mode cmp_mode, rtx res)
{
  /* RES is always const0_rtx or const_true_rtx here; the job is to map
     those scalar-int values onto the representation MODE uses for
     comparison results.  Returning NULL_RTX means "no safe folding".  */
  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* False is always floating-point zero.  */
      if (res == const0_rtx)
        return CONST0_RTX (mode);
#ifdef FLOAT_STORE_FLAG_VALUE
      /* The target defines which FP value represents "true".  */
      REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
      return const_double_from_real_value (val, mode);
#else
      /* Without a target definition there is no known "true" value.  */
      return NULL_RTX;
#endif
    }
  if (VECTOR_MODE_P (mode))
    {
      /* False is a vector of zeros.  */
      if (res == const0_rtx)
        return CONST0_RTX (mode);
#ifdef VECTOR_STORE_FLAG_VALUE
      /* "True" is the target's per-element flag value, broadcast to a
	 whole vector.  */
      rtx val = VECTOR_STORE_FLAG_VALUE (mode);
      if (val == NULL_RTX)
        return NULL_RTX;
      if (val == const1_rtx)
        return CONST1_RTX (mode);

      return gen_const_vec_duplicate (mode, val);
#else
      return NULL_RTX;
#endif
    }
  /* For vector comparison with scalar int result, it is unknown
     if the target means here a comparison into an integral bitmask,
     or comparison where all comparisons true mean const_true_rtx
     whole result, or where any comparisons true mean const_true_rtx
     whole result.  For const0_rtx all the cases are the same.  */
  if (VECTOR_MODE_P (cmp_mode)
      && SCALAR_INT_MODE_P (mode)
      && res == const_true_rtx)
    return NULL_RTX;

  return res;
}
    2645              : 
/* Simplify a logical operation CODE with result mode MODE, operating on OP0
   and OP1, in the case where both are relational operations.  Assume that
   OP0 is inverted if INVERT0_P is true.

   Return 0 if no such simplification is possible.  */
rtx
simplify_context::simplify_logical_relational_operation (rtx_code code,
							 machine_mode mode,
							 rtx op0, rtx op1,
							 bool invert0_p)
{
  /* Both operands must be comparisons of the same two quantities, in the
     same order, for the combination to collapse to a single comparison.  */
  if (!(COMPARISON_P (op0) && COMPARISON_P (op1)))
    return 0;

  if (!(rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	&& rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1))))
    return 0;

  /* The result references the shared operands only once, so dropping the
     duplicates must not drop a side effect.  */
  if (side_effects_p (op0))
    return 0;

  enum rtx_code code0 = GET_CODE (op0);
  enum rtx_code code1 = GET_CODE (op1);
  machine_mode cmp_mode = GET_MODE (XEXP (op0, 0));
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (XEXP (op0, 1));

  /* Assume at first that the comparisons are on integers, and that the
     operands are therefore ordered.  Each comparison is represented as a
     bitmask of the outcomes for which it is true, so that AND/IOR/XOR of
     the comparisons become the same bit operations on the masks; ALL is
     the mask covering every possible outcome.  */
  int all = 14;
  int mask0 = unsigned_comparison_to_mask (code0);
  int mask1 = unsigned_comparison_to_mask (code1);
  /* NOTE(review): this range test looks like it detects masks coming from
     unsigned comparison codes — confirm against the bit encoding used by
     unsigned_comparison_to_mask.  */
  bool unsigned_p = (IN_RANGE (mask0 & 12, 4, 8)
		     || IN_RANGE (mask1 & 12, 4, 8));
  if (unsigned_p)
    {
      /* We only reach here when comparing integers.  Reject mixtures of signed
	 and unsigned comparisons.  */
      if (mask0 == 0 || mask1 == 0)
	return 0;
    }
  else
    {
      /* See whether the operands might be unordered.  Assume that all
	 results are possible for CC modes, and punt later if we don't get an
	 always-true or always-false answer.  */
      if (GET_MODE_CLASS (cmp_mode) == MODE_CC || HONOR_NANS (cmp_mode))
	all = 15;
      mask0 = comparison_to_mask (code0) & all;
      mask1 = comparison_to_mask (code1) & all;
    }

  /* Inverting a comparison selects the complementary set of outcomes.  */
  if (invert0_p)
    mask0 = mask0 ^ all;

  int mask;
  if (code == AND)
    mask = mask0 & mask1;
  else if (code == IOR)
    mask = mask0 | mask1;
  else if (code == XOR)
    mask = mask0 ^ mask1;
  else
    return 0;

  /* Every outcome selected: the combination is always true.  */
  if (mask == all)
    return relational_result (mode, GET_MODE (op0), const_true_rtx);

  /* No outcome selected: the combination is always false.  */
  if (mask == 0)
    return relational_result (mode, GET_MODE (op0), const0_rtx);

  if (unsigned_p)
    code = mask_to_unsigned_comparison (mask);
  else
    {
      /* For CC modes we cannot tell which outcomes can really occur, so
	 only the always-true/always-false answers above were safe.  */
      if (GET_MODE_CLASS (cmp_mode) == MODE_CC)
	return 0;

      code = mask_to_comparison (mask);
      /* LTGT and NE are arithmetically equivalent for ordered operands,
	 with NE being the canonical choice.  */
      if (code == LTGT && all == 14)
	code = NE;
    }

  op0 = XEXP (op1, 0);
  op1 = XEXP (op1, 1);

  return simplify_gen_relational (code, mode, VOIDmode, op0, op1);
}
    2736              : 
    2737              : /* Simplify a binary operation CODE with result mode MODE, operating on OP0
    2738              :    and OP1.  Return 0 if no simplification is possible.
    2739              : 
    2740              :    Don't use this for relational operations such as EQ or LT.
    2741              :    Use simplify_relational_operation instead.  */
    2742              : rtx
    2743    473239276 : simplify_context::simplify_binary_operation (rtx_code code, machine_mode mode,
    2744              :                                              rtx op0, rtx op1)
    2745              : {
    2746    473239276 :   rtx trueop0, trueop1;
    2747    473239276 :   rtx tem;
    2748              : 
    2749              :   /* Relational operations don't work here.  We must know the mode
    2750              :      of the operands in order to do the comparison correctly.
    2751              :      Assuming a full word can give incorrect results.
    2752              :      Consider comparing 128 with -128 in QImode.  */
    2753    473239276 :   gcc_assert (GET_RTX_CLASS (code) != RTX_COMPARE);
    2754    473239276 :   gcc_assert (GET_RTX_CLASS (code) != RTX_COMM_COMPARE);
    2755              : 
    2756              :   /* Make sure the constant is second.  */
    2757    473239276 :   if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
    2758    473239276 :       && swap_commutative_operands_p (op0, op1))
    2759              :     std::swap (op0, op1);
    2760              : 
    2761    473239276 :   trueop0 = avoid_constant_pool_reference (op0);
    2762    473239276 :   trueop1 = avoid_constant_pool_reference (op1);
    2763              : 
    2764    473239276 :   tem = simplify_const_binary_operation (code, mode, trueop0, trueop1);
    2765    473239276 :   if (tem)
    2766              :     return tem;
    2767    443500346 :   tem = simplify_binary_operation_1 (code, mode, op0, op1, trueop0, trueop1);
    2768              : 
    2769    443500346 :   if (tem)
    2770              :     return tem;
    2771              : 
    2772              :   /* If the above steps did not result in a simplification and op0 or op1
    2773              :      were constant pool references, use the referenced constants directly.  */
    2774    381277397 :   if (trueop0 != op0 || trueop1 != op1)
    2775       580090 :     return simplify_gen_binary (code, mode, trueop0, trueop1);
    2776              : 
    2777              :   return NULL_RTX;
    2778              : }
    2779              : 
    2780              : /* Subroutine of simplify_binary_operation_1 that looks for cases in
    2781              :    which OP0 and OP1 are both vector series or vector duplicates
    2782              :    (which are really just series with a step of 0).  If so, try to
    2783              :    form a new series by applying CODE to the bases and to the steps.
    2784              :    Return null if no simplification is possible.
    2785              : 
    2786              :    MODE is the mode of the operation and is known to be a vector
    2787              :    integer mode.  */
    2788              : 
    2789              : rtx
    2790      2357633 : simplify_context::simplify_binary_operation_series (rtx_code code,
    2791              :                                                     machine_mode mode,
    2792              :                                                     rtx op0, rtx op1)
    2793              : {
    2794      2357633 :   rtx base0, step0;
    2795      2357633 :   if (vec_duplicate_p (op0, &base0))
    2796        64962 :     step0 = const0_rtx;
    2797      2292671 :   else if (!vec_series_p (op0, &base0, &step0))
    2798              :     return NULL_RTX;
    2799              : 
    2800        65519 :   rtx base1, step1;
    2801        65519 :   if (vec_duplicate_p (op1, &base1))
    2802          407 :     step1 = const0_rtx;
    2803        65112 :   else if (!vec_series_p (op1, &base1, &step1))
    2804              :     return NULL_RTX;
    2805              : 
    2806              :   /* Only create a new series if we can simplify both parts.  In other
    2807              :      cases this isn't really a simplification, and it's not necessarily
    2808              :      a win to replace a vector operation with a scalar operation.  */
    2809         3081 :   scalar_mode inner_mode = GET_MODE_INNER (mode);
    2810         3081 :   rtx new_base = simplify_binary_operation (code, inner_mode, base0, base1);
    2811         3081 :   if (!new_base)
    2812              :     return NULL_RTX;
    2813              : 
    2814         2779 :   rtx new_step = simplify_binary_operation (code, inner_mode, step0, step1);
    2815         2779 :   if (!new_step)
    2816              :     return NULL_RTX;
    2817              : 
    2818         2779 :   return gen_vec_series (mode, new_base, new_step);
    2819              : }
    2820              : 
/* Subroutine of simplify_binary_operation_1.  Un-distribute a binary
   operation CODE with result mode MODE, operating on OP0 and OP1.
   e.g. simplify (xor (and A C) (and B C)) to (and (xor A B) C).
   Returns NULL_RTX if no simplification is possible.  */

rtx
simplify_context::simplify_distributive_operation (rtx_code code,
						   machine_mode mode,
						   rtx op0, rtx op1)
{
  enum rtx_code op = GET_CODE (op0);
  /* The caller guarantees that both operands use the same inner code.  */
  gcc_assert (GET_CODE (op1) == op);

  /* (OP (op A C) (op B C)) -> (op (OP A B) C).  Dropping one copy of the
     shared operand C is only valid if it has no side effects.  */
  if (rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1))
      && ! side_effects_p (XEXP (op0, 1)))
    return simplify_gen_binary (op, mode,
				simplify_gen_binary (code, mode,
						     XEXP (op0, 0),
						     XEXP (op1, 0)),
				XEXP (op0, 1));

  /* If the inner operation is commutative, the shared operand may appear
     in any position, so try the remaining three pairings.  */
  if (GET_RTX_CLASS (op) == RTX_COMM_ARITH)
    {
      /* Shared operand first in both: (OP (op C A) (op C B)).  */
      if (rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	  && ! side_effects_p (XEXP (op0, 0)))
	return simplify_gen_binary (op, mode,
				    simplify_gen_binary (code, mode,
							 XEXP (op0, 1),
							 XEXP (op1, 1)),
				    XEXP (op0, 0));
      /* Shared operand first in OP0, second in OP1: (OP (op C A) (op B C)).  */
      if (rtx_equal_p (XEXP (op0, 0), XEXP (op1, 1))
	  && ! side_effects_p (XEXP (op0, 0)))
	return simplify_gen_binary (op, mode,
				    simplify_gen_binary (code, mode,
							 XEXP (op0, 1),
							 XEXP (op1, 0)),
				    XEXP (op0, 0));
      /* Shared operand second in OP0, first in OP1: (OP (op A C) (op C B)).  */
      if (rtx_equal_p (XEXP (op0, 1), XEXP (op1, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return simplify_gen_binary (op, mode,
				    simplify_gen_binary (code, mode,
							 XEXP (op0, 0),
							 XEXP (op1, 1)),
				    XEXP (op0, 1));
    }

  return NULL_RTX;
}
    2869              : 
    2870              : /* Return TRUE if a rotate in mode MODE with a constant count in OP1
    2871              :    should be reversed.
    2872              : 
    2873              :    If the rotate should not be reversed, return FALSE.
    2874              : 
    2875              :    LEFT indicates if this is a rotate left or a rotate right.  */
    2876              : 
    2877              : bool
    2878       145021 : reverse_rotate_by_imm_p (machine_mode mode, unsigned int left, rtx op1)
    2879              : {
    2880       145021 :   if (!CONST_INT_P (op1))
    2881              :     return false;
    2882              : 
    2883              :   /* Some targets may only be able to rotate by a constant
    2884              :      in one direction.  So we need to query the optab interface
    2885              :      to see what is possible.  */
    2886       113128 :   optab binoptab = left ? rotl_optab : rotr_optab;
    2887        47233 :   optab re_binoptab = left ? rotr_optab : rotl_optab;
    2888       113128 :   enum insn_code icode = optab_handler (binoptab, mode);
    2889       113128 :   enum insn_code re_icode = optab_handler (re_binoptab, mode);
    2890              : 
    2891              :   /* If the target can not support the reversed optab, then there
    2892              :      is nothing to do.  */
    2893       113128 :   if (re_icode == CODE_FOR_nothing)
    2894              :     return false;
    2895              : 
    2896              :   /* If the target does not support the requested rotate-by-immediate,
    2897              :      then we want to try reversing the rotate.  We also want to try
    2898              :      reversing to minimize the count.  */
    2899       110670 :   if ((icode == CODE_FOR_nothing)
    2900       110670 :       || (!insn_operand_matches (icode, 2, op1))
    2901       553350 :       || (IN_RANGE (INTVAL (op1),
    2902              :                     GET_MODE_UNIT_PRECISION (mode) / 2 + left,
    2903              :                     GET_MODE_UNIT_PRECISION (mode) - 1)))
    2904        14682 :     return (insn_operand_matches (re_icode, 2, op1));
    2905              :   return false;
    2906              : }
    2907              : 
    2908              : /* Analyse argument X to see if it represents an (ASHIFT X Y) operation
    2909              :    and return the expression to be shifted in SHIFT_OPND and the shift amount
    2910              :    in SHIFT_AMNT.  This is primarily used to group handling of ASHIFT (X, CST)
    2911              :    and (PLUS (X, X)) in one place.  If the expression is not equivalent to an
    2912              :    ASHIFT then return FALSE and set SHIFT_OPND and SHIFT_AMNT to NULL.  */
    2913              : 
    2914              : static bool
    2915    523445144 : extract_ashift_operands_p (rtx x, rtx *shift_opnd, rtx *shift_amnt)
    2916              : {
    2917    523445144 :   if (GET_CODE (x) == ASHIFT)
    2918              :     {
    2919     13718235 :       *shift_opnd = XEXP (x, 0);
    2920     13718235 :       *shift_amnt = XEXP (x, 1);
    2921     13718235 :       return true;
    2922              :     }
    2923    509726909 :   if (GET_CODE (x) == PLUS && rtx_equal_p (XEXP (x, 0), XEXP (x, 1)))
    2924              :     {
    2925        13282 :       *shift_opnd = XEXP (x, 0);
    2926        13282 :       *shift_amnt = CONST1_RTX (GET_MODE (x));
    2927        13282 :       return true;
    2928              :     }
    2929    509713627 :   *shift_opnd = NULL_RTX;
    2930    509713627 :   *shift_amnt = NULL_RTX;
    2931    509713627 :   return false;
    2932              : }
    2933              : 
/* OP0 and OP1 are combined under an operation of mode MODE that can
   potentially result in a ROTATE expression.  Analyze the OP0 and OP1
   and return the resulting ROTATE expression if so.  Return NULL otherwise.
   This is used in detecting the patterns (X << C1) [+,|,^] (X >> C2) where
   C1 + C2 == GET_MODE_UNIT_PRECISION (mode).
   (X << C1) and (C >> C2) would be OP0 and OP1.  */

static rtx
simplify_rotate_op (rtx op0, rtx op1, machine_mode mode)
{
  /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
     mode size to (rotate A CX).  */

  rtx opleft = op0;
  rtx opright = op1;
  rtx ashift_opnd, ashift_amnt;
  /* In some cases the ASHIFT is not a direct ASHIFT.  Look deeper and extract
     the relevant operands here.  */
  bool ashift_op_p
    = extract_ashift_operands_p (op1, &ashift_opnd, &ashift_amnt);

  /* Arrange for the (possibly disguised) ASHIFT to be OPLEFT and the other
     operand OPRIGHT.  A SUBREG on OP1 also goes to OPLEFT, for the benefit
     of the widened-shift case at the bottom.  */
  if (ashift_op_p
     || GET_CODE (op1) == SUBREG)
    {
      opleft = op1;
      opright = op0;
    }
  else
    {
      opright = op1;
      opleft = op0;
      ashift_op_p
	= extract_ashift_operands_p (opleft, &ashift_opnd, &ashift_amnt);
    }

  if (ashift_op_p && GET_CODE (opright) == LSHIFTRT
      && rtx_equal_p (ashift_opnd, XEXP (opright, 0)))
    {
      /* For vector modes the shift amounts may be constant duplicates;
	 reduce them to their scalar element before testing.  */
      rtx leftcst = unwrap_const_vec_duplicate (ashift_amnt);
      rtx rightcst = unwrap_const_vec_duplicate (XEXP (opright, 1));

      /* The two counts must add up to exactly the element precision for
	 the combination to be a rotate.  */
      if (CONST_INT_P (leftcst) && CONST_INT_P (rightcst)
	  && (INTVAL (leftcst) + INTVAL (rightcst)
	      == GET_MODE_UNIT_PRECISION (mode)))
	return gen_rtx_ROTATE (mode, XEXP (opright, 0), ashift_amnt);
    }

  /* Same, but for ashift that has been "simplified" to a wider mode
     by simplify_shift_const.  */
  scalar_int_mode int_mode, inner_mode;

  if (GET_CODE (opleft) == SUBREG
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (opleft)),
				 &inner_mode)
      && GET_CODE (SUBREG_REG (opleft)) == ASHIFT
      && GET_CODE (opright) == LSHIFTRT
      && GET_CODE (XEXP (opright, 0)) == SUBREG
      /* Both SUBREGs must select the same part of the wider value.  */
      && known_eq (SUBREG_BYTE (opleft), SUBREG_BYTE (XEXP (opright, 0)))
      /* The SUBREG must truly narrow, i.e. not be paradoxical.  */
      && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
      && rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
		      SUBREG_REG (XEXP (opright, 0)))
      && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
      && CONST_INT_P (XEXP (opright, 1))
      /* The counts must sum to the precision of the NARROW mode, since
	 that is the width being rotated.  */
      && (INTVAL (XEXP (SUBREG_REG (opleft), 1))
	    + INTVAL (XEXP (opright, 1))
	 == GET_MODE_PRECISION (int_mode)))
	return gen_rtx_ROTATE (int_mode, XEXP (opright, 0),
			       XEXP (SUBREG_REG (opleft), 1));
  return NULL_RTX;
}
    3005              : 
    3006              : /* Returns true if OP0 and OP1 match the pattern (OP (plus (A - 1)) (neg A)),
    3007              :    and the pattern can be simplified (there are no side effects).  */
    3008              : 
    3009              : static bool
    3010     39905151 : match_plus_neg_pattern (rtx op0, rtx op1, machine_mode mode)
    3011              : {
    3012              :   /* Remove SUBREG from OP0 and OP1, if needed.  */
    3013     39905151 :   if (GET_CODE (op0) == SUBREG
    3014      6961286 :       && GET_CODE (op1) == SUBREG
    3015       308457 :       && subreg_lowpart_p (op0)
    3016     40212208 :       && subreg_lowpart_p (op1))
    3017              :     {
    3018       307048 :       op0 = XEXP (op0, 0);
    3019       307048 :       op1 = XEXP (op1, 0);
    3020              :     }
    3021              : 
    3022              :   /* Check for the pattern (OP (plus (A - 1)) (neg A)).  */
    3023     39905151 :   if (((GET_CODE (op1) == NEG
    3024         3650 :         && GET_CODE (op0) == PLUS
    3025         2209 :         && XEXP (op0, 1) == CONSTM1_RTX (mode))
    3026     39904489 :        || (GET_CODE (op0) == NEG
    3027        78731 :            && GET_CODE (op1) == PLUS
    3028            0 :            && XEXP (op1, 1) == CONSTM1_RTX (mode)))
    3029          662 :       && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
    3030     39905153 :       && !side_effects_p (XEXP (op0, 0)))
    3031              :     return true;
    3032              :   return false;
    3033              : }
    3034              : 
    3035              : /* Check if OP matches the pattern of (subreg (not X)) and the subreg is
    3036              :    non-paradoxical.  */
    3037              : 
    3038              : static bool
    3039     75622205 : non_paradoxical_subreg_not_p (rtx op)
    3040              : {
    3041     75622205 :   return GET_CODE (op) == SUBREG
    3042      8488721 :          && !paradoxical_subreg_p (op)
    3043     78536616 :          && GET_CODE (SUBREG_REG (op)) == NOT;
    3044              : }
    3045              : 
    3046              : /* Convert (binop (subreg (not X)) Y) into (binop (not (subreg X)) Y), or
    3047              :    (binop X (subreg (not Y))) into (binop X (not (subreg Y))) to expose
    3048              :    opportunities to combine another binary logical operation with NOT.  */
    3049              : 
    3050              : static rtx
    3051     37812274 : simplify_with_subreg_not (rtx_code binop, machine_mode mode, rtx op0, rtx op1)
    3052              : {
    3053     37812274 :   rtx opn = NULL_RTX;
    3054     37812274 :   if (non_paradoxical_subreg_not_p (op0))
    3055              :     opn = op0;
    3056     37809931 :   else if (non_paradoxical_subreg_not_p (op1))
    3057              :     opn = op1;
    3058              : 
    3059         2346 :   if (opn == NULL_RTX)
    3060              :     return NULL_RTX;
    3061              : 
    3062         4692 :   rtx new_subreg = simplify_gen_subreg (mode,
    3063              :                                         XEXP (SUBREG_REG (opn), 0),
    3064         2346 :                                         GET_MODE (SUBREG_REG (opn)),
    3065         2346 :                                         SUBREG_BYTE (opn));
    3066              : 
    3067         2346 :   if (!new_subreg)
    3068              :     return NULL_RTX;
    3069              : 
    3070         2291 :   rtx new_not = simplify_gen_unary (NOT, mode, new_subreg, mode);
    3071         2291 :   if (opn == op0)
    3072         2288 :     return simplify_gen_binary (binop, mode, new_not, op1);
    3073              :   else
    3074            3 :     return simplify_gen_binary (binop, mode, op0, new_not);
    3075              : }
    3076              : 
    3077              : /* Return TRUE iff NOP is a negated form of OP, or vice-versa.  */
    3078              : static bool
    3079      6977103 : negated_ops_p (rtx nop, rtx op)
    3080              : {
    3081              :   /* Explicit negation.  */
    3082      6977103 :   if (GET_CODE (nop) == NOT
    3083      6977103 :       && rtx_equal_p (XEXP (nop, 0), op))
    3084              :     return true;
    3085      6973667 :   if (GET_CODE (op) == NOT
    3086      6973667 :       && rtx_equal_p (XEXP (op, 0), nop))
    3087              :     return true;
    3088              : 
    3089              :   /* (~C <r A) is a negated form of (C << A) if C == 1.  */
    3090      6973012 :   if (GET_CODE (op) == ASHIFT
    3091      1400558 :       && GET_CODE (nop) == ROTATE
    3092            0 :       && XEXP (op, 0) == CONST1_RTX (GET_MODE (op))
    3093            0 :       && CONST_INT_P (XEXP (nop, 0))
    3094            0 :       && INTVAL (XEXP (nop, 0)) == -2
    3095      6973012 :       && rtx_equal_p (XEXP (op, 1), XEXP (nop, 1)))
    3096              :     return true;
    3097      6973012 :   if (GET_CODE (nop) == ASHIFT
    3098       203129 :       && GET_CODE (op) == ROTATE
    3099            0 :       && XEXP (nop, 0) == CONST1_RTX (GET_MODE (op))
    3100            0 :       && CONST_INT_P (XEXP (nop, 0))
    3101            0 :       && INTVAL (XEXP (nop, 0)) == -2
    3102      6973012 :       && rtx_equal_p (XEXP (op, 1), XEXP (nop, 1)))
    3103              :     return true;
    3104              : 
    3105              :   /* ??? Should we consider rotations of C and ~C by the same amount?  */
    3106              : 
    3107              :   return false;
    3108              : }
    3109              : 
    3110              : /* Subroutine of simplify_binary_operation.  Simplify a binary operation
    3111              :    CODE with result mode MODE, operating on OP0 and OP1.  If OP0 and/or
    3112              :    OP1 are constant pool references, TRUEOP0 and TRUEOP1 represent the
    3113              :    actual constants.  */
    3114              : 
    3115              : rtx
    3116    443500346 : simplify_context::simplify_binary_operation_1 (rtx_code code,
    3117              :                                                machine_mode mode,
    3118              :                                                rtx op0, rtx op1,
    3119              :                                                rtx trueop0, rtx trueop1)
    3120              : {
    3121    443500346 :   rtx tem, reversed, elt0, elt1;
    3122    443500346 :   HOST_WIDE_INT val;
    3123    443500346 :   scalar_int_mode int_mode, inner_mode;
    3124    443500346 :   poly_int64 offset;
    3125              : 
    3126              :   /* Even if we can't compute a constant result,
    3127              :      there are some cases worth simplifying.  */
    3128              : 
    3129    443500346 :   switch (code)
    3130              :     {
    3131    252560549 :     case PLUS:
    3132              :       /* Maybe simplify x + 0 to x.  The two expressions are equivalent
    3133              :          when x is NaN, infinite, or finite and nonzero.  They aren't
    3134              :          when x is -0 and the rounding mode is not towards -infinity,
    3135              :          since (-0) + 0 is then 0.  */
    3136    501215345 :       if (!HONOR_SIGNED_ZEROS (mode) && !HONOR_SNANS (mode)
    3137    501215333 :           && trueop1 == CONST0_RTX (mode))
    3138              :         return op0;
    3139              : 
    3140              :       /* ((-a) + b) -> (b - a) and similarly for (a + (-b)).  These
    3141              :          transformations are safe even for IEEE.  */
    3142    251257336 :       if (GET_CODE (op0) == NEG)
    3143        26023 :         return simplify_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
    3144    251231313 :       else if (GET_CODE (op1) == NEG)
    3145         7391 :         return simplify_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
    3146              : 
    3147              :       /* (~a) + 1 -> -a */
    3148    251223922 :       if (INTEGRAL_MODE_P (mode)
    3149    246413519 :           && GET_CODE (op0) == NOT
    3150       632015 :           && trueop1 == const1_rtx)
    3151         3564 :         return simplify_gen_unary (NEG, mode, XEXP (op0, 0), mode);
    3152              : 
    3153              :       /* Handle both-operands-constant cases.  We can only add
    3154              :          CONST_INTs to constants since the sum of relocatable symbols
    3155              :          can't be handled by most assemblers.  Don't add CONST_INT
    3156              :          to CONST_INT since overflow won't be computed properly if wider
    3157              :          than HOST_BITS_PER_WIDE_INT.  */
    3158              : 
    3159    251220358 :       if ((GET_CODE (op0) == CONST
    3160    251220358 :            || GET_CODE (op0) == SYMBOL_REF
    3161    248716185 :            || GET_CODE (op0) == LABEL_REF)
    3162    251220358 :           && poly_int_rtx_p (op1, &offset))
    3163      2503192 :         return plus_constant (mode, op0, offset);
    3164    248717166 :       else if ((GET_CODE (op1) == CONST
    3165    248717166 :                 || GET_CODE (op1) == SYMBOL_REF
    3166    245028533 :                 || GET_CODE (op1) == LABEL_REF)
    3167    248717166 :                && poly_int_rtx_p (op0, &offset))
    3168            0 :         return plus_constant (mode, op1, offset);
    3169              : 
    3170              :       /* See if this is something like X * C - X or vice versa or
    3171              :          if the multiplication is written as a shift.  If so, we can
    3172              :          distribute and make a new multiply, shift, or maybe just
    3173              :          have X (if C is 2 in the example above).  But don't make
    3174              :          something more expensive than we had before.  */
    3175              : 
    3176    248717166 :       if (is_a <scalar_int_mode> (mode, &int_mode))
    3177              :         {
    3178    241796620 :           rtx lhs = op0, rhs = op1;
    3179              : 
    3180    241796620 :           wide_int coeff0 = wi::one (GET_MODE_PRECISION (int_mode));
    3181    241796620 :           wide_int coeff1 = wi::one (GET_MODE_PRECISION (int_mode));
    3182              : 
    3183    241796620 :           if (GET_CODE (lhs) == NEG)
    3184              :             {
    3185            0 :               coeff0 = wi::minus_one (GET_MODE_PRECISION (int_mode));
    3186            0 :               lhs = XEXP (lhs, 0);
    3187              :             }
    3188    241796620 :           else if (GET_CODE (lhs) == MULT
    3189      6722071 :                    && CONST_SCALAR_INT_P (XEXP (lhs, 1)))
    3190              :             {
    3191      5625566 :               coeff0 = rtx_mode_t (XEXP (lhs, 1), int_mode);
    3192      5625566 :               lhs = XEXP (lhs, 0);
    3193              :             }
    3194    236171054 :           else if (GET_CODE (lhs) == ASHIFT
    3195     10947893 :                    && CONST_INT_P (XEXP (lhs, 1))
    3196     10876251 :                    && INTVAL (XEXP (lhs, 1)) >= 0
    3197    247047293 :                    && INTVAL (XEXP (lhs, 1)) < GET_MODE_PRECISION (int_mode))
    3198              :             {
    3199     10876239 :               coeff0 = wi::set_bit_in_zero (INTVAL (XEXP (lhs, 1)),
    3200     21752478 :                                             GET_MODE_PRECISION (int_mode));
    3201     10876239 :               lhs = XEXP (lhs, 0);
    3202              :             }
    3203              : 
    3204    241796620 :           if (GET_CODE (rhs) == NEG)
    3205              :             {
    3206            0 :               coeff1 = wi::minus_one (GET_MODE_PRECISION (int_mode));
    3207            0 :               rhs = XEXP (rhs, 0);
    3208              :             }
    3209    241796620 :           else if (GET_CODE (rhs) == MULT
    3210       290227 :                    && CONST_INT_P (XEXP (rhs, 1)))
    3211              :             {
    3212       180144 :               coeff1 = rtx_mode_t (XEXP (rhs, 1), int_mode);
    3213       180144 :               rhs = XEXP (rhs, 0);
    3214              :             }
    3215    241616476 :           else if (GET_CODE (rhs) == ASHIFT
    3216       534500 :                    && CONST_INT_P (XEXP (rhs, 1))
    3217       524721 :                    && INTVAL (XEXP (rhs, 1)) >= 0
    3218    242141197 :                    && INTVAL (XEXP (rhs, 1)) < GET_MODE_PRECISION (int_mode))
    3219              :             {
    3220       524721 :               coeff1 = wi::set_bit_in_zero (INTVAL (XEXP (rhs, 1)),
    3221      1049442 :                                             GET_MODE_PRECISION (int_mode));
    3222       524721 :               rhs = XEXP (rhs, 0);
    3223              :             }
    3224              : 
    3225              :           /* Keep PLUS of 2 volatile memory references.  */
    3226    241796620 :           if (rtx_equal_p (lhs, rhs)
    3227    241796620 :               && (!MEM_P (lhs) || !MEM_VOLATILE_P (lhs)))
    3228              :             {
    3229       774448 :               rtx orig = gen_rtx_PLUS (int_mode, op0, op1);
    3230       774448 :               rtx coeff;
    3231       774448 :               bool speed = optimize_function_for_speed_p (cfun);
    3232              : 
    3233       774448 :               coeff = immed_wide_int_const (coeff0 + coeff1, int_mode);
    3234              : 
    3235       774448 :               tem = simplify_gen_binary (MULT, int_mode, lhs, coeff);
    3236       774448 :               return (set_src_cost (tem, int_mode, speed)
    3237       774448 :                       <= set_src_cost (orig, int_mode, speed) ? tem : 0);
    3238              :             }
    3239              : 
    3240              :           /* Optimize (X - 1) * Y + Y to X * Y.  */
    3241    241022172 :           lhs = op0;
    3242    241022172 :           rhs = op1;
    3243    241022172 :           if (GET_CODE (op0) == MULT)
    3244              :             {
    3245      6701933 :               if (((GET_CODE (XEXP (op0, 0)) == PLUS
    3246       278442 :                     && XEXP (XEXP (op0, 0), 1) == constm1_rtx)
    3247      6660639 :                    || (GET_CODE (XEXP (op0, 0)) == MINUS
    3248        36177 :                        && XEXP (XEXP (op0, 0), 1) == const1_rtx))
    3249      6743227 :                   && rtx_equal_p (XEXP (op0, 1), op1))
    3250           78 :                 lhs = XEXP (XEXP (op0, 0), 0);
    3251      6701855 :               else if (((GET_CODE (XEXP (op0, 1)) == PLUS
    3252         1288 :                          && XEXP (XEXP (op0, 1), 1) == constm1_rtx)
    3253      6701821 :                         || (GET_CODE (XEXP (op0, 1)) == MINUS
    3254          339 :                             && XEXP (XEXP (op0, 1), 1) == const1_rtx))
    3255      6701889 :                        && rtx_equal_p (XEXP (op0, 0), op1))
    3256            0 :                 lhs = XEXP (XEXP (op0, 1), 0);
    3257              :             }
    3258    234320239 :           else if (GET_CODE (op1) == MULT)
    3259              :             {
    3260       136675 :               if (((GET_CODE (XEXP (op1, 0)) == PLUS
    3261           42 :                     && XEXP (XEXP (op1, 0), 1) == constm1_rtx)
    3262       136670 :                    || (GET_CODE (XEXP (op1, 0)) == MINUS
    3263           27 :                        && XEXP (XEXP (op1, 0), 1) == const1_rtx))
    3264       136680 :                   && rtx_equal_p (XEXP (op1, 1), op0))
    3265            0 :                 rhs = XEXP (XEXP (op1, 0), 0);
    3266       136675 :               else if (((GET_CODE (XEXP (op1, 1)) == PLUS
    3267            0 :                          && XEXP (XEXP (op1, 1), 1) == constm1_rtx)
    3268       136675 :                         || (GET_CODE (XEXP (op1, 1)) == MINUS
    3269            0 :                             && XEXP (XEXP (op1, 1), 1) == const1_rtx))
    3270       136675 :                        && rtx_equal_p (XEXP (op1, 0), op0))
    3271            0 :                 rhs = XEXP (XEXP (op1, 1), 0);
    3272              :             }
    3273    241022172 :           if (lhs != op0 || rhs != op1)
    3274           78 :             return simplify_gen_binary (MULT, int_mode, lhs, rhs);
    3275    241796620 :         }
    3276              : 
    3277              :       /* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit.  */
    3278    247942640 :       if (CONST_SCALAR_INT_P (op1)
    3279    191108549 :           && GET_CODE (op0) == XOR
    3280        20581 :           && CONST_SCALAR_INT_P (XEXP (op0, 1))
    3281    247954727 :           && mode_signbit_p (mode, op1))
    3282          121 :         return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
    3283              :                                     simplify_gen_binary (XOR, mode, op1,
    3284          121 :                                                          XEXP (op0, 1)));
    3285              : 
    3286              :       /* (plus (xor X C1) C2) is (xor X (C1^C2)) if X is either 0 or 1 and
    3287              :          2 * ((X ^ C1) & C2) == 0; based on A + B == A ^ B + 2 * (A & B). */
    3288    247942519 :       if (CONST_SCALAR_INT_P (op1)
    3289    191108428 :           && GET_CODE (op0) == XOR
    3290        20460 :           && CONST_SCALAR_INT_P (XEXP (op0, 1))
    3291        11966 :           && nonzero_bits (XEXP (op0, 0), mode) == 1
    3292          191 :           && 2 * (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) == 0
    3293    247942519 :           && 2 * ((1 ^ INTVAL (XEXP (op0, 1))) & INTVAL (op1)) == 0)
    3294            0 :         return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
    3295              :                                     simplify_gen_binary (XOR, mode, op1,
    3296            0 :                                                          XEXP (op0, 1)));
    3297              : 
    3298              :       /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
    3299    247942519 :       if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
    3300    247940039 :           && GET_CODE (op0) == MULT
    3301    255009229 :           && GET_CODE (XEXP (op0, 0)) == NEG)
    3302              :         {
    3303         5489 :           rtx in1, in2;
    3304              : 
    3305         5489 :           in1 = XEXP (XEXP (op0, 0), 0);
    3306         5489 :           in2 = XEXP (op0, 1);
    3307         5489 :           return simplify_gen_binary (MINUS, mode, op1,
    3308              :                                       simplify_gen_binary (MULT, mode,
    3309         5489 :                                                            in1, in2));
    3310              :         }
    3311              : 
    3312              :       /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
    3313              :          C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
    3314              :          is 1.  */
    3315    247937030 :       if (COMPARISON_P (op0)
    3316      1460982 :           && ((STORE_FLAG_VALUE == -1 && trueop1 == const1_rtx)
    3317      1460982 :               || (STORE_FLAG_VALUE == 1 && trueop1 == constm1_rtx))
    3318    247990222 :           && (reversed = reversed_comparison (op0, mode)))
    3319        52843 :         return
    3320        52843 :           simplify_gen_unary (NEG, mode, reversed, mode);
    3321              : 
    3322              :       /* Convert (plus (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
    3323              :          mode size to (rotate A CX).  */
    3324    247884187 :       if ((tem = simplify_rotate_op (op0, op1, mode)))
    3325              :         return tem;
    3326              : 
    3327              :       /* If one of the operands is a PLUS or a MINUS, see if we can
    3328              :          simplify this by the associative law.
    3329              :          Don't use the associative law for floating point.
    3330              :          The inaccuracy makes it nonassociative,
    3331              :          and subtle programs can break if operations are associated.  */
    3332              : 
    3333    247882717 :       if (INTEGRAL_MODE_P (mode)
    3334    243072363 :           && (plus_minus_operand_p (op0)
    3335    209845164 :               || plus_minus_operand_p (op1))
    3336     34230365 :           && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
    3337              :         return tem;
    3338              : 
    3339              :       /* Reassociate floating point addition only when the user
    3340              :          specifies associative math operations.  */
    3341    214366035 :       if (FLOAT_MODE_P (mode)
    3342      4810354 :           && flag_associative_math)
    3343              :         {
    3344       904581 :           tem = simplify_associative_operation (code, mode, op0, op1);
    3345       904581 :           if (tem)
    3346              :             return tem;
    3347              :         }
    3348              : 
    3349              :       /* Handle vector series.  */
    3350    214352461 :       if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT)
    3351              :         {
    3352      1943737 :           tem = simplify_binary_operation_series (code, mode, op0, op1);
    3353      1943737 :           if (tem)
    3354              :             return tem;
    3355              :         }
    3356              :       break;
    3357              : 
    3358              :     case COMPARE:
    3359              :       break;
    3360              : 
    3361     42279782 :     case MINUS:
    3362              :       /* We can't assume x-x is 0 even with non-IEEE floating point,
    3363              :          but since it is zero except in very strange circumstances, we
    3364              :          will treat it as zero with -ffinite-math-only.  */
    3365     42279782 :       if (rtx_equal_p (trueop0, trueop1)
    3366       218637 :           && ! side_effects_p (op0)
    3367     42497158 :           && (!FLOAT_MODE_P (mode) || !HONOR_NANS (mode)))
    3368       214983 :         return CONST0_RTX (mode);
    3369              : 
    3370              :       /* Change subtraction from zero into negation.  (0 - x) is the
    3371              :          same as -x when x is NaN, infinite, or finite and nonzero.
    3372              :          But if the mode has signed zeros, and does not round towards
    3373              :          -infinity, then 0 - 0 is 0, not -0.  */
    3374     42064799 :       if (!HONOR_SIGNED_ZEROS (mode) && trueop0 == CONST0_RTX (mode))
    3375       303841 :         return simplify_gen_unary (NEG, mode, op1, mode);
    3376              : 
    3377              :       /* (-1 - a) is ~a, unless the expression contains symbolic
    3378              :          constants, in which case not retaining additions and
    3379              :          subtractions could cause invalid assembly to be produced.  */
    3380     41760958 :       if (trueop0 == CONSTM1_RTX (mode)
    3381     41760958 :           && !contains_symbolic_reference_p (op1))
    3382       357690 :         return simplify_gen_unary (NOT, mode, op1, mode);
    3383              : 
    3384              :       /* Subtracting 0 has no effect unless the mode has signalling NaNs,
    3385              :          or has signed zeros and supports rounding towards -infinity.
    3386              :          In such a case, 0 - 0 is -0.  */
    3387     42098058 :       if (!(HONOR_SIGNED_ZEROS (mode)
    3388       694790 :             && HONOR_SIGN_DEPENDENT_ROUNDING (mode))
    3389     41402062 :           && !HONOR_SNANS (mode)
    3390     82805294 :           && trueop1 == CONST0_RTX (mode))
    3391              :         return op0;
    3392              : 
    3393              :       /* See if this is something like X * C - X or vice versa or
    3394              :          if the multiplication is written as a shift.  If so, we can
    3395              :          distribute and make a new multiply, shift, or maybe just
    3396              :          have X (if C is 2 in the example above).  But don't make
    3397              :          something more expensive than we had before.  */
    3398              : 
    3399     40453415 :       if (is_a <scalar_int_mode> (mode, &int_mode))
    3400              :         {
    3401     39204061 :           rtx lhs = op0, rhs = op1;
    3402              : 
    3403     39204061 :           wide_int coeff0 = wi::one (GET_MODE_PRECISION (int_mode));
    3404     39204061 :           wide_int negcoeff1 = wi::minus_one (GET_MODE_PRECISION (int_mode));
    3405              : 
    3406     39204061 :           if (GET_CODE (lhs) == NEG)
    3407              :             {
    3408       113437 :               coeff0 = wi::minus_one (GET_MODE_PRECISION (int_mode));
    3409       113437 :               lhs = XEXP (lhs, 0);
    3410              :             }
    3411     39090624 :           else if (GET_CODE (lhs) == MULT
    3412       230864 :                    && CONST_SCALAR_INT_P (XEXP (lhs, 1)))
    3413              :             {
    3414        82559 :               coeff0 = rtx_mode_t (XEXP (lhs, 1), int_mode);
    3415        82559 :               lhs = XEXP (lhs, 0);
    3416              :             }
    3417     39008065 :           else if (GET_CODE (lhs) == ASHIFT
    3418       325870 :                    && CONST_INT_P (XEXP (lhs, 1))
    3419       322603 :                    && INTVAL (XEXP (lhs, 1)) >= 0
    3420     39330647 :                    && INTVAL (XEXP (lhs, 1)) < GET_MODE_PRECISION (int_mode))
    3421              :             {
    3422       322582 :               coeff0 = wi::set_bit_in_zero (INTVAL (XEXP (lhs, 1)),
    3423       645164 :                                             GET_MODE_PRECISION (int_mode));
    3424       322582 :               lhs = XEXP (lhs, 0);
    3425              :             }
    3426              : 
    3427     39204061 :           if (GET_CODE (rhs) == NEG)
    3428              :             {
    3429         8510 :               negcoeff1 = wi::one (GET_MODE_PRECISION (int_mode));
    3430         8510 :               rhs = XEXP (rhs, 0);
    3431              :             }
    3432     39195551 :           else if (GET_CODE (rhs) == MULT
    3433       147768 :                    && CONST_INT_P (XEXP (rhs, 1)))
    3434              :             {
    3435       115043 :               negcoeff1 = wi::neg (rtx_mode_t (XEXP (rhs, 1), int_mode));
    3436       115043 :               rhs = XEXP (rhs, 0);
    3437              :             }
    3438     39080508 :           else if (GET_CODE (rhs) == ASHIFT
    3439       382089 :                    && CONST_INT_P (XEXP (rhs, 1))
    3440       381580 :                    && INTVAL (XEXP (rhs, 1)) >= 0
    3441     39462088 :                    && INTVAL (XEXP (rhs, 1)) < GET_MODE_PRECISION (int_mode))
    3442              :             {
    3443       381580 :               negcoeff1 = wi::set_bit_in_zero (INTVAL (XEXP (rhs, 1)),
    3444       763160 :                                                GET_MODE_PRECISION (int_mode));
    3445       381580 :               negcoeff1 = -negcoeff1;
    3446       381580 :               rhs = XEXP (rhs, 0);
    3447              :             }
    3448              : 
    3449     39204061 :           if (rtx_equal_p (lhs, rhs))
    3450              :             {
    3451        98752 :               rtx orig = gen_rtx_MINUS (int_mode, op0, op1);
    3452        98752 :               rtx coeff;
    3453        98752 :               bool speed = optimize_function_for_speed_p (cfun);
    3454              : 
    3455        98752 :               coeff = immed_wide_int_const (coeff0 + negcoeff1, int_mode);
    3456              : 
    3457        98752 :               tem = simplify_gen_binary (MULT, int_mode, lhs, coeff);
    3458        98752 :               return (set_src_cost (tem, int_mode, speed)
    3459        98752 :                       <= set_src_cost (orig, int_mode, speed) ? tem : 0);
    3460              :             }
    3461              : 
    3462              :           /* Optimize (X + 1) * Y - Y to X * Y.  */
    3463     39105309 :           lhs = op0;
    3464     39105309 :           if (GET_CODE (op0) == MULT)
    3465              :             {
    3466       230744 :               if (((GET_CODE (XEXP (op0, 0)) == PLUS
    3467         4943 :                     && XEXP (XEXP (op0, 0), 1) == const1_rtx)
    3468       229132 :                    || (GET_CODE (XEXP (op0, 0)) == MINUS
    3469         1834 :                        && XEXP (XEXP (op0, 0), 1) == constm1_rtx))
    3470       232356 :                   && rtx_equal_p (XEXP (op0, 1), op1))
    3471            2 :                 lhs = XEXP (XEXP (op0, 0), 0);
    3472       230742 :               else if (((GET_CODE (XEXP (op0, 1)) == PLUS
    3473           30 :                          && XEXP (XEXP (op0, 1), 1) == const1_rtx)
    3474       230736 :                         || (GET_CODE (XEXP (op0, 1)) == MINUS
    3475           84 :                             && XEXP (XEXP (op0, 1), 1) == constm1_rtx))
    3476       230748 :                        && rtx_equal_p (XEXP (op0, 0), op1))
    3477            0 :                 lhs = XEXP (XEXP (op0, 1), 0);
    3478              :             }
    3479     39105309 :           if (lhs != op0)
    3480            2 :             return simplify_gen_binary (MULT, int_mode, lhs, op1);
    3481     39204061 :         }
    3482              : 
    3483              :       /* (a - (-b)) -> (a + b).  True even for IEEE.  */
    3484     40354661 :       if (GET_CODE (op1) == NEG)
    3485         8472 :         return simplify_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
    3486              : 
    3487              :       /* (-x - c) may be simplified as (-c - x).  */
    3488     40346189 :       if (GET_CODE (op0) == NEG
    3489       117601 :           && (CONST_SCALAR_INT_P (op1) || CONST_DOUBLE_AS_FLOAT_P (op1)))
    3490              :         {
    3491          622 :           tem = simplify_unary_operation (NEG, mode, op1, mode);
    3492          622 :           if (tem)
    3493          622 :             return simplify_gen_binary (MINUS, mode, tem, XEXP (op0, 0));
    3494              :         }
    3495              : 
    3496     40345567 :       if ((GET_CODE (op0) == CONST
    3497     40345567 :            || GET_CODE (op0) == SYMBOL_REF
    3498     35289990 :            || GET_CODE (op0) == LABEL_REF)
    3499     40345567 :           && poly_int_rtx_p (op1, &offset))
    3500        50676 :         return plus_constant (mode, op0, trunc_int_for_mode (-offset, mode));
    3501              : 
    3502              :       /* Don't let a relocatable value get a negative coeff.  */
    3503     40294891 :       if (is_a <scalar_int_mode> (mode)
    3504     39045568 :           && poly_int_rtx_p (op1)
    3505     47430752 :           && GET_MODE (op0) != VOIDmode)
    3506      7135861 :         return simplify_gen_binary (PLUS, mode,
    3507              :                                     op0,
    3508      7135861 :                                     neg_poly_int_rtx (mode, op1));
    3509              : 
    3510              :       /* (x - (x & y)) -> (x & ~y) */
    3511     33159030 :       if (INTEGRAL_MODE_P (mode) && GET_CODE (op1) == AND)
    3512              :         {
    3513       282119 :           if (rtx_equal_p (op0, XEXP (op1, 0)))
    3514              :             {
    3515          502 :               tem = simplify_gen_unary (NOT, mode, XEXP (op1, 1),
    3516          251 :                                         GET_MODE (XEXP (op1, 1)));
    3517          251 :               return simplify_gen_binary (AND, mode, op0, tem);
    3518              :             }
    3519       281868 :           if (rtx_equal_p (op0, XEXP (op1, 1)))
    3520              :             {
    3521         2004 :               tem = simplify_gen_unary (NOT, mode, XEXP (op1, 0),
    3522         1002 :                                         GET_MODE (XEXP (op1, 0)));
    3523         1002 :               return simplify_gen_binary (AND, mode, op0, tem);
    3524              :             }
    3525              :         }
    3526              : 
    3527              :       /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
    3528              :          by reversing the comparison code if valid.  */
    3529     33157777 :       if (STORE_FLAG_VALUE == 1
    3530     33157777 :           && trueop0 == const1_rtx
    3531      1115431 :           && COMPARISON_P (op1)
    3532     33262721 :           && (reversed = reversed_comparison (op1, mode)))
    3533              :         return reversed;
    3534              : 
    3535              :       /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
    3536     33052856 :       if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
    3537     33051449 :           && GET_CODE (op1) == MULT
    3538     33300196 :           && GET_CODE (XEXP (op1, 0)) == NEG)
    3539              :         {
    3540          165 :           rtx in1, in2;
    3541              : 
    3542          165 :           in1 = XEXP (XEXP (op1, 0), 0);
    3543          165 :           in2 = XEXP (op1, 1);
    3544          165 :           return simplify_gen_binary (PLUS, mode,
    3545              :                                       simplify_gen_binary (MULT, mode,
    3546              :                                                            in1, in2),
    3547          165 :                                       op0);
    3548              :         }
    3549              : 
    3550              :       /* Canonicalize (minus (neg A) (mult B C)) to
    3551              :          (minus (mult (neg B) C) A).  */
    3552     33052691 :       if (!HONOR_SIGN_DEPENDENT_ROUNDING (mode)
    3553     33051284 :           && GET_CODE (op1) == MULT
    3554     33299866 :           && GET_CODE (op0) == NEG)
    3555              :         {
    3556          655 :           rtx in1, in2;
    3557              : 
    3558          655 :           in1 = simplify_gen_unary (NEG, mode, XEXP (op1, 0), mode);
    3559          655 :           in2 = XEXP (op1, 1);
    3560          655 :           return simplify_gen_binary (MINUS, mode,
    3561              :                                       simplify_gen_binary (MULT, mode,
    3562              :                                                            in1, in2),
    3563          655 :                                       XEXP (op0, 0));
    3564              :         }
    3565              : 
    3566              :       /* If one of the operands is a PLUS or a MINUS, see if we can
    3567              :          simplify this by the associative law.  This will, for example,
    3568              :          canonicalize (minus A (plus B C)) to (minus (minus A B) C).
    3569              :          Don't use the associative law for floating point.
    3570              :          The inaccuracy makes it nonassociative,
    3571              :          and subtle programs can break if operations are associated.  */
    3572              : 
    3573     33052036 :       if (INTEGRAL_MODE_P (mode)
    3574     32237646 :           && (plus_minus_operand_p (op0)
    3575     29587052 :               || plus_minus_operand_p (op1))
    3576      3861527 :           && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
    3577              :         return tem;
    3578              : 
    3579              :       /* Handle vector series.  */
    3580     29329316 :       if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT)
    3581              :         {
    3582       413896 :           tem = simplify_binary_operation_series (code, mode, op0, op1);
    3583       413896 :           if (tem)
    3584              :             return tem;
    3585              :         }
    3586              :       break;
    3587              : 
    3588     12203991 :     case MULT:
    3589     12203991 :       if (trueop1 == constm1_rtx)
    3590        32327 :         return simplify_gen_unary (NEG, mode, op0, mode);
    3591              : 
    3592     12171664 :       if (GET_CODE (op0) == NEG)
    3593              :         {
    3594        32931 :           rtx temp = simplify_unary_operation (NEG, mode, op1, mode);
    3595              :           /* If op1 is a MULT as well and simplify_unary_operation
    3596              :              just moved the NEG to the second operand, simplify_gen_binary
    3597              :              below could through simplify_associative_operation move
    3598              :              the NEG around again and recurse endlessly.  */
    3599        32931 :           if (temp
    3600         1525 :               && GET_CODE (op1) == MULT
    3601            0 :               && GET_CODE (temp) == MULT
    3602            0 :               && XEXP (op1, 0) == XEXP (temp, 0)
    3603            0 :               && GET_CODE (XEXP (temp, 1)) == NEG
    3604            0 :               && XEXP (op1, 1) == XEXP (XEXP (temp, 1), 0))
    3605              :             temp = NULL_RTX;
    3606              :           if (temp)
    3607         1525 :             return simplify_gen_binary (MULT, mode, XEXP (op0, 0), temp);
    3608              :         }
    3609     12170139 :       if (GET_CODE (op1) == NEG)
    3610              :         {
    3611          988 :           rtx temp = simplify_unary_operation (NEG, mode, op0, mode);
    3612              :           /* If op0 is a MULT as well and simplify_unary_operation
    3613              :              just moved the NEG to the second operand, simplify_gen_binary
    3614              :              below could through simplify_associative_operation move
    3615              :              the NEG around again and recurse endlessly.  */
    3616          988 :           if (temp
    3617          420 :               && GET_CODE (op0) == MULT
    3618          300 :               && GET_CODE (temp) == MULT
    3619          300 :               && XEXP (op0, 0) == XEXP (temp, 0)
    3620            6 :               && GET_CODE (XEXP (temp, 1)) == NEG
    3621            5 :               && XEXP (op0, 1) == XEXP (XEXP (temp, 1), 0))
    3622              :             temp = NULL_RTX;
    3623              :           if (temp)
    3624          415 :             return simplify_gen_binary (MULT, mode, temp, XEXP (op1, 0));
    3625              :         }
    3626              : 
    3627              :       /* Maybe simplify x * 0 to 0.  The reduction is not valid if
    3628              :          x is NaN, since x * 0 is then also NaN.  Nor is it valid
    3629              :          when the mode has signed zeros, since multiplying a negative
    3630              :          number by 0 will give -0, not 0.  */
    3631     12169724 :       if (!HONOR_NANS (mode)
    3632     11208910 :           && !HONOR_SIGNED_ZEROS (mode)
    3633     11208514 :           && trueop1 == CONST0_RTX (mode)
    3634     12212172 :           && ! side_effects_p (op0))
    3635              :         return op1;
    3636              : 
    3637              :       /* In IEEE floating point, x*1 is not equivalent to x for
    3638              :          signalling NaNs.  */
    3639     12128539 :       if (!HONOR_SNANS (mode)
    3640     12128539 :           && trueop1 == CONST1_RTX (mode))
    3641              :         return op0;
    3642              : 
    3643              :       /* Convert multiply by constant power of two into shift.  */
    3644     11609375 :       if (mem_depth == 0 && CONST_SCALAR_INT_P (trueop1))
    3645              :         {
    3646      6299749 :           val = wi::exact_log2 (rtx_mode_t (trueop1, mode));
    3647      6299749 :           if (val >= 0)
    3648      3021003 :             return simplify_gen_binary (ASHIFT, mode, op0,
    3649      3021003 :                                         gen_int_shift_amount (mode, val));
    3650              :         }
    3651              : 
    3652              :       /* x*2 is x+x and x*(-1) is -x */
    3653      8588372 :       if (CONST_DOUBLE_AS_FLOAT_P (trueop1)
    3654       167353 :           && SCALAR_FLOAT_MODE_P (GET_MODE (trueop1))
    3655       167353 :           && !DECIMAL_FLOAT_MODE_P (GET_MODE (trueop1))
    3656       167069 :           && GET_MODE (op0) == mode)
    3657              :         {
    3658       167069 :           const REAL_VALUE_TYPE *d1 = CONST_DOUBLE_REAL_VALUE (trueop1);
    3659              : 
    3660       167069 :           if (real_equal (d1, &dconst2))
    3661          615 :             return simplify_gen_binary (PLUS, mode, op0, copy_rtx (op0));
    3662              : 
    3663       166454 :           if (!HONOR_SNANS (mode)
    3664       166454 :               && real_equal (d1, &dconstm1))
    3665           24 :             return simplify_gen_unary (NEG, mode, op0, mode);
    3666              :         }
    3667              : 
    3668              :       /* Optimize -x * -x as x * x.  */
    3669      8587733 :       if (FLOAT_MODE_P (mode)
    3670      1377941 :           && GET_CODE (op0) == NEG
    3671         7941 :           && GET_CODE (op1) == NEG
    3672            0 :           && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
    3673            0 :           && !side_effects_p (XEXP (op0, 0)))
    3674            0 :         return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));
    3675              : 
    3676              :       /* Likewise, optimize abs(x) * abs(x) as x * x.  */
    3677      8587733 :       if (SCALAR_FLOAT_MODE_P (mode)
    3678      1091057 :           && GET_CODE (op0) == ABS
    3679         1339 :           && GET_CODE (op1) == ABS
    3680            0 :           && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
    3681      8587733 :           && !side_effects_p (XEXP (op0, 0)))
    3682            0 :         return simplify_gen_binary (MULT, mode, XEXP (op0, 0), XEXP (op1, 0));
    3683              : 
    3684              :       /* Reassociate multiplication, but for floating point MULTs
    3685              :          only when the user specifies unsafe math optimizations.  */
    3686      8587733 :       if (! FLOAT_MODE_P (mode)
    3687      1377941 :           || flag_unsafe_math_optimizations)
    3688              :         {
    3689      7628077 :           tem = simplify_associative_operation (code, mode, op0, op1);
    3690      7628077 :           if (tem)
    3691              :             return tem;
    3692              :         }
    3693              :       break;
    3694              : 
    3695     15791252 :     case IOR:
    3696     15791252 :       if (trueop1 == CONST0_RTX (mode))
    3697              :         return op0;
    3698     14964370 :       if (INTEGRAL_MODE_P (mode)
    3699     14690356 :           && trueop1 == CONSTM1_RTX (mode)
    3700         9501 :           && !side_effects_p (op0))
    3701              :         return op1;
    3702     14954869 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
    3703              :         return op0;
    3704              :       /* A | (~A) -> -1 */
    3705        73587 :       if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
    3706     14935284 :            || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
    3707           10 :           && ! side_effects_p (op0)
    3708     14935304 :           && GET_MODE_CLASS (mode) != MODE_CC)
    3709           10 :         return CONSTM1_RTX (mode);
    3710              : 
    3711              :       /* IOR of two single bit bitfields extracted from the same object.
    3712              :          Bitfields are represented as an AND based extraction */
    3713     14935284 :       if (GET_CODE (op0) == AND
    3714      4225102 :           && GET_CODE (op1) == AND
    3715              :           /* Verify both AND operands are logical right shifts. */
    3716       332299 :           && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
    3717         5199 :           && GET_CODE (XEXP (op1, 0)) == LSHIFTRT
    3718              :           /* Verify both bitfields are extracted from the same object. */
    3719           54 :           && XEXP (XEXP (op0, 0), 0) == XEXP (XEXP (op1, 0), 0)
    3720              :           /* Verify both fields are a single bit (could be generalized). */
    3721           54 :           && XEXP (op0, 1) == CONST1_RTX (mode)
    3722            0 :           && XEXP (op1, 1) == CONST1_RTX (mode)
    3723              :           /* Verify bit positions (for cases with variable bit position). */
    3724            0 :           && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
    3725            0 :           && CONST_INT_P (XEXP (XEXP (op1, 0), 1)))
    3726              :         {
    3727            0 :           unsigned HOST_WIDE_INT bitpos1 = INTVAL (XEXP (XEXP (op0, 0), 1));
    3728            0 :           unsigned HOST_WIDE_INT bitpos2 = INTVAL (XEXP (XEXP (op1, 0), 1));
    3729            0 :           unsigned HOST_WIDE_INT mask
    3730            0 :             = (HOST_WIDE_INT_1U << bitpos1) | (HOST_WIDE_INT_1U << bitpos2);
    3731              : 
    3732            0 :           rtx m = GEN_INT (mask);
    3733            0 :           rtx t = gen_rtx_AND (mode, XEXP (XEXP (op0, 0), 0), m);
    3734            0 :           t = gen_rtx_NE (mode, t, CONST0_RTX (mode));
    3735            0 :           return t;
    3736              :         }
    3737              : 
    3738              :       /* IOR of multiple single bit bitfields extracted from the same object
    3739              :          (building on previous case).
    3740              :          First bitfield is represented as an AND based extraction, as done
    3741              :                 above. Second represented as NE based extraction, from
    3742              :                 output above. */
    3743     14935284 :       if (GET_CODE (op0) == AND
    3744      4225102 :           && GET_CODE (op1) == NE
    3745              :           /* Verify AND operand is logical right shift. */
    3746         4590 :           && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
    3747              :           /* Verify NE operand is an AND (based on output above). */
    3748           86 :           && GET_CODE (XEXP (op1, 0)) == AND
    3749              :           /* Verify both bitfields are extracted from the same object. */
    3750            0 :           && XEXP (XEXP (op0, 0), 0) == XEXP (XEXP (op1, 0), 0)
    3751              :           /* Verify masking is with a single bit and that we have a NE 0
    3752              :              comparison for the other operand.  */
    3753            0 :           && XEXP (op0, 1) == CONST1_RTX (mode)
    3754            0 :           && XEXP (op1, 1) == CONST0_RTX (mode)
    3755              :           /* Verify bit position. */
    3756            0 :           && CONST_INT_P (XEXP (XEXP (op0, 0), 1)))
    3757              :         {
    3758            0 :           unsigned HOST_WIDE_INT bitpos1 = INTVAL (XEXP (XEXP (op0, 0), 1));
    3759            0 :           unsigned HOST_WIDE_INT mask
    3760            0 :             = (HOST_WIDE_INT_1U << bitpos1) | INTVAL (XEXP (XEXP (op1, 0), 1));
    3761              : 
    3762            0 :           rtx m = GEN_INT (mask);
    3763            0 :           rtx t = gen_rtx_AND (mode, XEXP (XEXP (op0, 0), 0), m);
    3764            0 :           t = gen_rtx_NE (mode, t, CONST0_RTX (mode));
    3765            0 :           return t;
    3766              :         }
    3767              : 
    3768              :       /* Convert (ior (plus (A - 1)) (neg A)) to -1.  */
    3769     14935284 :       if (match_plus_neg_pattern (op0, op1, mode))
    3770            0 :         return CONSTM1_RTX (mode);
    3771              : 
    3772              :       /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
    3773     14935284 :       if (CONST_INT_P (op1)
    3774      3830080 :           && HWI_COMPUTABLE_MODE_P (mode)
    3775      3776263 :           && (nonzero_bits (op0, mode) & ~UINTVAL (op1)) == 0
    3776     15301997 :           && !side_effects_p (op0))
    3777              :         return op1;
    3778              : 
    3779              :       /* Canonicalize (X & C1) | C2.  */
    3780     14568571 :       if (GET_CODE (op0) == AND
    3781      4216359 :           && CONST_INT_P (trueop1)
    3782       709110 :           && CONST_INT_P (XEXP (op0, 1)))
    3783              :         {
    3784       551129 :           HOST_WIDE_INT mask = GET_MODE_MASK (mode);
    3785       551129 :           HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
    3786       551129 :           HOST_WIDE_INT c2 = INTVAL (trueop1);
    3787              : 
    3788              :           /* If (C1&C2) == C1, then (X&C1)|C2 becomes C2.  */
    3789       551129 :           if ((c1 & c2) == c1
    3790       551129 :               && !side_effects_p (XEXP (op0, 0)))
    3791              :             return trueop1;
    3792              : 
    3793              :           /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
    3794       551109 :           if (((c1|c2) & mask) == mask)
    3795        73957 :             return simplify_gen_binary (IOR, mode, XEXP (op0, 0), op1);
    3796              : 
    3797              :           /* If (C1|C2) has a single bit clear, then adjust C1 so that
    3798              :              when split it'll match a single bit clear style insn.
    3799              : 
    3800              :              This could have been done with a target dependent splitter, but
    3801              :              then every target with single bit manipulation insns would need
    3802              :              to implement such splitters.  */
    3803       477152 :           if (exact_log2 (~(c1 | c2)) >= 0)
    3804              :             {
    3805        65281 :               rtx temp = gen_rtx_AND (mode, XEXP (op0, 0), GEN_INT (c1 | c2));
    3806        65281 :               temp = gen_rtx_IOR (mode, temp, trueop1);
    3807        65281 :               return temp;
    3808              :             }
    3809              :         }
    3810              : 
    3811              :       /* Convert (A & B) | A to A.  */
    3812     14429313 :       if (GET_CODE (op0) == AND
    3813      4077101 :           && (rtx_equal_p (XEXP (op0, 0), op1)
    3814      4076990 :               || rtx_equal_p (XEXP (op0, 1), op1))
    3815         3804 :           && ! side_effects_p (XEXP (op0, 0))
    3816     14433117 :           && ! side_effects_p (XEXP (op0, 1)))
    3817              :         return op1;
    3818              : 
    3819              :       /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
    3820              :          mode size to (rotate A CX).  */
    3821     14425509 :       tem = simplify_rotate_op (op0, op1, mode);
    3822     14425509 :       if (tem)
    3823              :         return tem;
    3824              : 
    3825              :       /* If OP0 is (ashiftrt (plus ...) C), it might actually be
    3826              :          a (sign_extend (plus ...)).  Then check if OP1 is a CONST_INT and
    3827              :          the PLUS does not affect any of the bits in OP1: then we can do
    3828              :          the IOR as a PLUS and we can associate.  This is valid if OP1
    3829              :          can be safely shifted left C bits.  */
    3830     14422980 :       if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
    3831         6378 :           && GET_CODE (XEXP (op0, 0)) == PLUS
    3832          141 :           && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
    3833           87 :           && CONST_INT_P (XEXP (op0, 1))
    3834           87 :           && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
    3835              :         {
    3836           87 :           int count = INTVAL (XEXP (op0, 1));
    3837           87 :           HOST_WIDE_INT mask = UINTVAL (trueop1) << count;
    3838              : 
    3839           87 :           if (mask >> count == INTVAL (trueop1)
    3840           80 :               && trunc_int_for_mode (mask, mode) == mask
    3841          154 :               && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
    3842            0 :             return simplify_gen_binary (ASHIFTRT, mode,
    3843              :                                         plus_constant (mode, XEXP (op0, 0),
    3844            0 :                                                        mask),
    3845              :                                         XEXP (op0, 1));
    3846              :         }
    3847              : 
    3848              :       /* The following happens with bitfield merging.
    3849              :          (X & C) | ((X | Y) & ~C) -> X | (Y & ~C) */
    3850     14422980 :       if (GET_CODE (op0) == AND
    3851      4073297 :           && GET_CODE (op1) == AND
    3852       332299 :           && CONST_INT_P (XEXP (op0, 1))
    3853       164179 :           && CONST_INT_P (XEXP (op1, 1))
    3854       158723 :           && (INTVAL (XEXP (op0, 1))
    3855       158723 :               == ~INTVAL (XEXP (op1, 1))))
    3856              :         {
    3857              :           /* The IOR may be on both sides.  */
    3858        34944 :           rtx top0 = NULL_RTX, top1 = NULL_RTX;
    3859        34944 :           if (GET_CODE (XEXP (op1, 0)) == IOR)
    3860              :             top0 = op0, top1 = op1;
    3861        34888 :           else if (GET_CODE (XEXP (op0, 0)) == IOR)
    3862            3 :             top0 = op1, top1 = op0;
    3863        34944 :           if (top0 && top1)
    3864              :             {
    3865              :               /* X may be on either side of the inner IOR.  */
    3866           59 :               rtx tem = NULL_RTX;
    3867           59 :               if (rtx_equal_p (XEXP (top0, 0),
    3868           59 :                                XEXP (XEXP (top1, 0), 0)))
    3869           42 :                 tem = XEXP (XEXP (top1, 0), 1);
    3870           17 :               else if (rtx_equal_p (XEXP (top0, 0),
    3871           17 :                                     XEXP (XEXP (top1, 0), 1)))
    3872            8 :                 tem = XEXP (XEXP (top1, 0), 0);
    3873           50 :               if (tem)
    3874           50 :                 return simplify_gen_binary (IOR, mode, XEXP (top0, 0),
    3875              :                                             simplify_gen_binary
    3876           50 :                                               (AND, mode, tem, XEXP (top1, 1)));
    3877              :             }
    3878              :         }
    3879              : 
    3880              :       /* Convert (ior (and A C) (and B C)) into (and (ior A B) C).  */
    3881     14422930 :       if (GET_CODE (op0) == GET_CODE (op1)
    3882      3609475 :           && (GET_CODE (op0) == AND
    3883              :               || GET_CODE (op0) == IOR
    3884      3609475 :               || GET_CODE (op0) == LSHIFTRT
    3885      3275995 :               || GET_CODE (op0) == ASHIFTRT
    3886      3275901 :               || GET_CODE (op0) == ASHIFT
    3887      3255093 :               || GET_CODE (op0) == ROTATE
    3888      3255093 :               || GET_CODE (op0) == ROTATERT))
    3889              :         {
    3890       354382 :           tem = simplify_distributive_operation (code, mode, op0, op1);
    3891       354382 :           if (tem)
    3892              :             return tem;
    3893              :         }
    3894              : 
    3895              :       /* Convert (ior (and (not A) B) A) into A | B.  */
    3896     14332182 :       if (GET_CODE (op0) == AND
    3897     14332182 :           && negated_ops_p (XEXP (op0, 0), op1))
    3898         4063 :         return simplify_gen_binary (IOR, mode, XEXP (op0, 1), op1);
    3899              : 
    3900     14328119 :       tem = simplify_with_subreg_not (code, mode, op0, op1);
    3901     14328119 :       if (tem)
    3902              :         return tem;
    3903              : 
    3904     14328114 :       tem = simplify_byte_swapping_operation (code, mode, op0, op1);
    3905     14328114 :       if (tem)
    3906              :         return tem;
    3907              : 
    3908     14328081 :       tem = simplify_associative_operation (code, mode, op0, op1);
    3909     14328081 :       if (tem)
    3910              :         return tem;
    3911              : 
    3912     14012820 :       tem = simplify_logical_relational_operation (code, mode, op0, op1);
    3913     14012820 :       if (tem)
    3914              :         return tem;
    3915              :       break;
    3916              : 
    3917      1782582 :     case XOR:
    3918      1782582 :       if (trueop1 == CONST0_RTX (mode))
    3919              :         return op0;
    3920      1726828 :       if (INTEGRAL_MODE_P (mode) && trueop1 == CONSTM1_RTX (mode))
    3921        22338 :         return simplify_gen_unary (NOT, mode, op0, mode);
    3922      1704490 :       if (rtx_equal_p (trueop0, trueop1)
    3923         2416 :           && ! side_effects_p (op0)
    3924      1706902 :           && GET_MODE_CLASS (mode) != MODE_CC)
    3925         2412 :          return CONST0_RTX (mode);
    3926              : 
    3927              :       /* Canonicalize XOR of the most significant bit to PLUS.  */
    3928      1702078 :       if (CONST_SCALAR_INT_P (op1)
    3929      1702078 :           && mode_signbit_p (mode, op1))
    3930        40535 :         return simplify_gen_binary (PLUS, mode, op0, op1);
    3931              :       /* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit.  */
    3932      1661543 :       if (CONST_SCALAR_INT_P (op1)
    3933       548692 :           && GET_CODE (op0) == PLUS
    3934         2505 :           && CONST_SCALAR_INT_P (XEXP (op0, 1))
    3935      1663138 :           && mode_signbit_p (mode, XEXP (op0, 1)))
    3936          189 :         return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
    3937              :                                     simplify_gen_binary (XOR, mode, op1,
    3938          189 :                                                          XEXP (op0, 1)));
    3939              : 
    3940              :       /* If we are XORing two things that have no bits in common,
    3941              :          convert them into an IOR.  This helps to detect rotation encoded
    3942              :          using those methods and possibly other simplifications.  */
    3943              : 
    3944      1661354 :       if (HWI_COMPUTABLE_MODE_P (mode)
    3945      1377748 :           && (nonzero_bits (op0, mode)
    3946      1377748 :               & nonzero_bits (op1, mode)) == 0)
    3947        10724 :         return (simplify_gen_binary (IOR, mode, op0, op1));
    3948              : 
    3949              :       /* Convert (xor (plus (A - 1)) (neg A)) to -1.  */
    3950      1650630 :       if (match_plus_neg_pattern (op0, op1, mode))
    3951            0 :         return CONSTM1_RTX (mode);
    3952              : 
    3953              :       /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
    3954              :          Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
    3955              :          (NOT y).  */
    3956      1650630 :       {
    3957      1650630 :         int num_negated = 0;
    3958              : 
    3959      1650630 :         if (GET_CODE (op0) == NOT)
    3960          948 :           num_negated++, op0 = XEXP (op0, 0);
    3961      1650630 :         if (GET_CODE (op1) == NOT)
    3962            0 :           num_negated++, op1 = XEXP (op1, 0);
    3963              : 
    3964            0 :         if (num_negated == 2)
    3965            0 :           return simplify_gen_binary (XOR, mode, op0, op1);
    3966      1650630 :         else if (num_negated == 1)
    3967          948 :           return simplify_gen_unary (NOT, mode,
    3968              :                                      simplify_gen_binary (XOR, mode, op0, op1),
    3969          948 :                                      mode);
    3970              :       }
    3971              : 
    3972              :       /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
    3973              :          correspond to a machine insn or result in further simplifications
    3974              :          if B is a constant.  */
    3975              : 
    3976      1649682 :       if (GET_CODE (op0) == AND
    3977       174961 :           && rtx_equal_p (XEXP (op0, 1), op1)
    3978      1677558 :           && ! side_effects_p (op1))
    3979        27876 :         return simplify_gen_binary (AND, mode,
    3980              :                                     simplify_gen_unary (NOT, mode,
    3981              :                                                         XEXP (op0, 0), mode),
    3982        27876 :                                     op1);
    3983              : 
    3984      1621806 :       else if (GET_CODE (op0) == AND
    3985       147085 :                && rtx_equal_p (XEXP (op0, 0), op1)
    3986      1623179 :                && ! side_effects_p (op1))
    3987         1373 :         return simplify_gen_binary (AND, mode,
    3988              :                                     simplify_gen_unary (NOT, mode,
    3989              :                                                         XEXP (op0, 1), mode),
    3990         1373 :                                     op1);
    3991              : 
    3992              :       /* Given (xor (ior (xor A B) C) D), where B, C and D are
    3993              :          constants, simplify to (xor (ior A C) (B&~C)^D), canceling
    3994              :          out bits inverted twice and not set by C.  Similarly, given
    3995              :          (xor (and (xor A B) C) D), simplify without inverting C in
    3996              :          the xor operand: (xor (and A C) (B&C)^D).
    3997              :       */
    3998      1620433 :       else if ((GET_CODE (op0) == IOR || GET_CODE (op0) == AND)
    3999       167244 :                && GET_CODE (XEXP (op0, 0)) == XOR
    4000         7269 :                && CONST_INT_P (op1)
    4001          331 :                && CONST_INT_P (XEXP (op0, 1))
    4002          286 :                && CONST_INT_P (XEXP (XEXP (op0, 0), 1)))
    4003              :         {
    4004           38 :           enum rtx_code op = GET_CODE (op0);
    4005           38 :           rtx a = XEXP (XEXP (op0, 0), 0);
    4006           38 :           rtx b = XEXP (XEXP (op0, 0), 1);
    4007           38 :           rtx c = XEXP (op0, 1);
    4008           38 :           rtx d = op1;
    4009           38 :           HOST_WIDE_INT bval = INTVAL (b);
    4010           38 :           HOST_WIDE_INT cval = INTVAL (c);
    4011           38 :           HOST_WIDE_INT dval = INTVAL (d);
    4012           38 :           HOST_WIDE_INT xcval;
    4013              : 
    4014           38 :           if (op == IOR)
    4015            8 :             xcval = ~cval;
    4016              :           else
    4017              :             xcval = cval;
    4018              : 
    4019           38 :           return simplify_gen_binary (XOR, mode,
    4020              :                                       simplify_gen_binary (op, mode, a, c),
    4021           38 :                                       gen_int_mode ((bval & xcval) ^ dval,
    4022              :                                                     mode));
    4023              :         }
    4024              : 
    4025              :       /* Given (xor (and A B) C), using P^Q == (~P&Q) | (~Q&P),
    4026              :          we can transform like this:
    4027              :             (A&B)^C == ~(A&B)&C | ~C&(A&B)
    4028              :                     == (~A|~B)&C | ~C&(A&B)    * DeMorgan's Law
    4029              :                     == ~A&C | ~B&C | A&(~C&B)  * Distribute and re-order
    4030              :          Attempt a few simplifications when B and C are both constants.  */
    4031      1620395 :       if (GET_CODE (op0) == AND
    4032       145682 :           && CONST_INT_P (op1)
    4033        13272 :           && CONST_INT_P (XEXP (op0, 1)))
    4034              :         {
    4035        11534 :           rtx a = XEXP (op0, 0);
    4036        11534 :           rtx b = XEXP (op0, 1);
    4037        11534 :           rtx c = op1;
    4038        11534 :           HOST_WIDE_INT bval = INTVAL (b);
    4039        11534 :           HOST_WIDE_INT cval = INTVAL (c);
    4040              : 
    4041              :           /* Instead of computing ~A&C, we compute its negated value,
    4042              :              ~(A|~C).  If it yields -1, ~A&C is zero, so we can
    4043              :              optimize for sure.  If it does not simplify, we still try
    4044              :              to compute ~A&C below, but since that always allocates
    4045              :              RTL, we don't try that before committing to returning a
    4046              :              simplified expression.  */
    4047        11534 :           rtx n_na_c = simplify_binary_operation (IOR, mode, a,
    4048              :                                                   GEN_INT (~cval));
    4049              : 
    4050        11534 :           if ((~cval & bval) == 0)
    4051              :             {
    4052          322 :               rtx na_c = NULL_RTX;
    4053          322 :               if (n_na_c)
    4054            0 :                 na_c = simplify_gen_unary (NOT, mode, n_na_c, mode);
    4055              :               else
    4056              :                 {
    4057              :                   /* If ~A does not simplify, don't bother: we don't
    4058              :                      want to simplify 2 operations into 3, and if na_c
    4059              :                      were to simplify with na, n_na_c would have
    4060              :                      simplified as well.  */
    4061          322 :                   rtx na = simplify_unary_operation (NOT, mode, a, mode);
    4062          322 :                   if (na)
    4063            0 :                     na_c = simplify_gen_binary (AND, mode, na, c);
    4064              :                 }
    4065              : 
    4066              :               /* Try to simplify ~A&C | ~B&C.  */
    4067            0 :               if (na_c != NULL_RTX)
    4068            0 :                 return simplify_gen_binary (IOR, mode, na_c,
    4069            0 :                                             gen_int_mode (~bval & cval, mode));
    4070              :             }
    4071              :           else
    4072              :             {
    4073              :               /* If ~A&C is zero, simplify A&(~C&B) | ~B&C.  */
    4074        11212 :               if (n_na_c == CONSTM1_RTX (mode))
    4075              :                 {
    4076            0 :                   rtx a_nc_b = simplify_gen_binary (AND, mode, a,
    4077            0 :                                                     gen_int_mode (~cval & bval,
    4078              :                                                                   mode));
    4079            0 :                   return simplify_gen_binary (IOR, mode, a_nc_b,
    4080            0 :                                               gen_int_mode (~bval & cval,
    4081              :                                                             mode));
    4082              :                 }
    4083              :             }
    4084              :         }
    4085              : 
    4086              :       /* If we have (xor (and (xor A B) C) A) with C a constant we can instead
    4087              :          do (ior (and A ~C) (and B C)) which is a machine instruction on some
    4088              :          machines, and also has shorter instruction path length.  */
    4089      1620395 :       if (GET_CODE (op0) == AND
    4090       145682 :           && GET_CODE (XEXP (op0, 0)) == XOR
    4091         6798 :           && CONST_INT_P (XEXP (op0, 1))
    4092      1623923 :           && rtx_equal_p (XEXP (XEXP (op0, 0), 0), trueop1))
    4093              :         {
    4094            7 :           rtx a = trueop1;
    4095            7 :           rtx b = XEXP (XEXP (op0, 0), 1);
    4096            7 :           rtx c = XEXP (op0, 1);
    4097            7 :           rtx nc = simplify_gen_unary (NOT, mode, c, mode);
    4098            7 :           rtx a_nc = simplify_gen_binary (AND, mode, a, nc);
    4099            7 :           rtx bc = simplify_gen_binary (AND, mode, b, c);
    4100            7 :           return simplify_gen_binary (IOR, mode, a_nc, bc);
    4101              :         }
    4102              :       /* Similarly, (xor (and (xor A B) C) B) as (ior (and A C) (and B ~C))  */
    4103      1620388 :       else if (GET_CODE (op0) == AND
    4104       145675 :           && GET_CODE (XEXP (op0, 0)) == XOR
    4105         6791 :           && CONST_INT_P (XEXP (op0, 1))
    4106      1623909 :           && rtx_equal_p (XEXP (XEXP (op0, 0), 1), trueop1))
    4107              :         {
    4108            8 :           rtx a = XEXP (XEXP (op0, 0), 0);
    4109            8 :           rtx b = trueop1;
    4110            8 :           rtx c = XEXP (op0, 1);
    4111            8 :           rtx nc = simplify_gen_unary (NOT, mode, c, mode);
    4112            8 :           rtx b_nc = simplify_gen_binary (AND, mode, b, nc);
    4113            8 :           rtx ac = simplify_gen_binary (AND, mode, a, c);
    4114            8 :           return simplify_gen_binary (IOR, mode, ac, b_nc);
    4115              :         }
    4116              : 
    4117              :       /* (xor (comparison foo bar) (const_int 1)) can become the reversed
    4118              :          comparison if STORE_FLAG_VALUE is 1.  */
    4119      1620380 :       if (STORE_FLAG_VALUE == 1
    4120      1620380 :           && trueop1 == const1_rtx
    4121       205642 :           && COMPARISON_P (op0)
    4122      1626571 :           && (reversed = reversed_comparison (op0, mode)))
    4123              :         return reversed;
    4124              : 
    4125              :       /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
    4126              :          is (lt foo (const_int 0)), so we can perform the above
    4127              :          simplification if STORE_FLAG_VALUE is 1.  */
    4128              : 
    4129      1614197 :       if (is_a <scalar_int_mode> (mode, &int_mode)
    4130              :           && STORE_FLAG_VALUE == 1
    4131      1334756 :           && trueop1 == const1_rtx
    4132       199459 :           && GET_CODE (op0) == LSHIFTRT
    4133        35116 :           && CONST_INT_P (XEXP (op0, 1))
    4134        35116 :           && INTVAL (XEXP (op0, 1)) == GET_MODE_PRECISION (int_mode) - 1)
    4135        34219 :         return gen_rtx_GE (int_mode, XEXP (op0, 0), const0_rtx);
    4136              : 
    4137              :       /* (xor (comparison foo bar) (const_int sign-bit))
    4138              :          when STORE_FLAG_VALUE is the sign bit.  */
    4139      1579978 :       if (is_a <scalar_int_mode> (mode, &int_mode)
    4140      1300537 :           && val_signbit_p (int_mode, STORE_FLAG_VALUE)
    4141            0 :           && trueop1 == const_true_rtx
    4142            0 :           && COMPARISON_P (op0)
    4143            0 :           && (reversed = reversed_comparison (op0, int_mode)))
    4144              :         return reversed;
    4145              : 
    4146              :       /* Convert (xor (and A C) (and B C)) into (and (xor A B) C).  */
    4147      1579978 :       if (GET_CODE (op0) == GET_CODE (op1)
    4148       525185 :           && (GET_CODE (op0) == AND
    4149       525185 :               || GET_CODE (op0) == LSHIFTRT
    4150       453442 :               || GET_CODE (op0) == ASHIFTRT
    4151       453378 :               || GET_CODE (op0) == ASHIFT
    4152       453262 :               || GET_CODE (op0) == ROTATE
    4153       453154 :               || GET_CODE (op0) == ROTATERT))
    4154              :         {
    4155        72565 :           tem = simplify_distributive_operation (code, mode, op0, op1);
    4156        72565 :           if (tem)
    4157              :             return tem;
    4158              :         }
    4159              : 
    4160              :       /* Convert (xor (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
    4161              :          mode size to (rotate A CX).  */
    4162      1511899 :       tem = simplify_rotate_op (op0, op1, mode);
    4163      1511899 :       if (tem)
    4164              :         return tem;
    4165              : 
    4166              :       /* Convert (xor (and (not A) B) A) into A | B.  */
    4167      1510519 :       if (GET_CODE (op0) == AND
    4168      1510519 :           && negated_ops_p (XEXP (op0, 0), op1))
    4169            1 :         return simplify_gen_binary (IOR, mode, XEXP (op0, 1), op1);
    4170              : 
    4171              :       /* Convert (xor (and (rotate (~1) A) B) (ashift 1 A))
    4172              :          into B | (1 << A).  */
    4173      1510518 :       if (SHIFT_COUNT_TRUNCATED
    4174              :           && GET_CODE (op0) == AND
    4175              :           && GET_CODE (XEXP (op0, 0)) == ROTATE
    4176              :           && CONST_INT_P (XEXP (XEXP (op0, 0), 0))
    4177              :           && INTVAL (XEXP (XEXP (op0, 0), 0)) == -2
    4178              :           && GET_CODE (op1) == ASHIFT
    4179              :           && CONST_INT_P (XEXP (op1, 0))
    4180              :           && INTVAL (XEXP (op1, 0)) == 1
    4181              :           && rtx_equal_p (XEXP (XEXP (op0, 0), 1), XEXP (op1, 1))
    4182              :           && !side_effects_p (XEXP (op1, 1)))
    4183              :         return simplify_gen_binary (IOR, mode, XEXP (op0, 1), op1);
    4184              : 
    4185      1510518 :       tem = simplify_with_subreg_not (code, mode, op0, op1);
    4186      1510518 :       if (tem)
    4187              :         return tem;
    4188              : 
    4189      1510517 :       tem = simplify_byte_swapping_operation (code, mode, op0, op1);
    4190      1510517 :       if (tem)
    4191              :         return tem;
    4192              : 
    4193      1510517 :       tem = simplify_associative_operation (code, mode, op0, op1);
    4194      1510517 :       if (tem)
    4195              :         return tem;
    4196              :       break;
    4197              : 
    4198     24383257 :     case AND:
    4199     24383257 :       if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
    4200              :         return trueop1;
    4201     24129160 :       if (INTEGRAL_MODE_P (mode) && trueop1 == CONSTM1_RTX (mode))
    4202              :         return op0;
    4203     23747208 :       if (HWI_COMPUTABLE_MODE_P (mode))
    4204              :         {
    4205              :           /* When WORD_REGISTER_OPERATIONS is true, we need to know the
    4206              :              nonzero bits in WORD_MODE rather than MODE.  */
    4207     20897785 :           scalar_int_mode tmode = as_a <scalar_int_mode> (mode);
    4208     20897785 :           if (WORD_REGISTER_OPERATIONS
    4209              :               && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    4210              :             tmode = word_mode;
    4211     20897785 :           HOST_WIDE_INT nzop0 = nonzero_bits (trueop0, tmode);
    4212     20897785 :           HOST_WIDE_INT nzop1;
    4213     20897785 :           if (CONST_INT_P (trueop1))
    4214              :             {
    4215     17795353 :               HOST_WIDE_INT val1 = INTVAL (trueop1);
    4216              :               /* If we are turning off bits already known off in OP0, we need
    4217              :                  not do an AND.  */
    4218     17795353 :               if ((nzop0 & ~val1) == 0)
    4219       420586 :                 return op0;
    4220              : 
    4221              :               /* Canonicalize (and (subreg (lshiftrt X shift)) mask) into
    4222              :                  (and (lshiftrt (subreg X) shift) mask).
    4223              : 
    4224              :                  Keeps shift and AND in the same mode, improving recognition.
    4225              :                  Only applied when subreg is a lowpart, shift is valid,
    4226              :                  and no precision is lost.  */
    4227     17457696 :               if (SUBREG_P (op0)
    4228      5969909 :                   && subreg_lowpart_p (op0)
    4229      5953962 :                   && !paradoxical_subreg_p (op0)
    4230       914634 :                   && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
    4231              :                   /* simplify_subreg asserts the object being accessed is not
    4232              :                      VOIDmode or BLKmode.  We may have a REG_EQUAL note which
    4233              :                      is not simplified and the source operand is a constant,
    4234              :                      and thus VOIDmode.  Guard against that.  */
    4235       116978 :                   && GET_MODE (XEXP (XEXP (op0, 0), 0)) != VOIDmode
    4236       116928 :                   && GET_MODE (XEXP (XEXP (op0, 0), 0)) != BLKmode
    4237       116928 :                   && !CONST_INT_P (XEXP (XEXP (op0, 0), 0))
    4238       116928 :                   && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
    4239        95914 :                   && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
    4240        95914 :                   && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT
    4241     17553609 :                   && ((INTVAL (XEXP (XEXP (op0, 0), 1))
    4242        95913 :                       + floor_log2 (val1))
    4243     17457696 :                       < GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))))
    4244              :                 {
    4245        10556 :                   tem = XEXP (XEXP (op0, 0), 0);
    4246        10556 :                   if (SUBREG_P (tem))
    4247              :                     {
    4248          265 :                       if (subreg_lowpart_p (tem))
    4249          265 :                         tem = SUBREG_REG (tem);
    4250              :                       else
    4251              :                         tem = NULL_RTX;
    4252              :                     }
    4253          265 :                   if (tem != NULL_RTX)
    4254              :                     {
    4255        10556 :                       offset = subreg_lowpart_offset (mode, GET_MODE (tem));
    4256        10556 :                       tem = simplify_gen_subreg (mode, tem, GET_MODE (tem),
    4257        10556 :                                                  offset);
    4258        10556 :                       if (tem)
    4259              :                         {
    4260        10556 :                           unsigned shiftamt = INTVAL (XEXP (XEXP (op0, 0), 1));
    4261        10556 :                           rtx shiftamtrtx = gen_int_shift_amount (mode,
    4262        10556 :                                                                   shiftamt);
    4263        10556 :                           op0 = simplify_gen_binary (LSHIFTRT, mode, tem,
    4264              :                                                      shiftamtrtx);
    4265        10556 :                           return simplify_gen_binary (AND, mode, op0, op1);
    4266              :                         }
    4267              :                     }
    4268              :                 }
    4269              :             }
    4270     20549572 :           nzop1 = nonzero_bits (trueop1, mode);
    4271              :           /* If we are clearing all the nonzero bits, the result is zero.  */
    4272     20549572 :           if ((nzop1 & nzop0) == 0
    4273     20549572 :               && !side_effects_p (op0) && !side_effects_p (op1))
    4274        72373 :             return CONST0_RTX (mode);
    4275              :         }
    4276     23330011 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)
    4277     23330007 :           && GET_MODE_CLASS (mode) != MODE_CC)
    4278              :         return op0;
    4279              :       /* A & (~A) -> 0 */
    4280       634709 :       if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
    4281     23319282 :            || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
    4282         4001 :           && ! side_effects_p (op0)
    4283     23327237 :           && GET_MODE_CLASS (mode) != MODE_CC)
    4284         4000 :         return CONST0_RTX (mode);
    4285              : 
    4286              :       /* Convert (and (plus (A - 1)) (neg A)) to 0.  */
    4287     23319237 :       if (match_plus_neg_pattern (op0, op1, mode))
    4288            2 :         return CONST0_RTX (mode);
    4289              : 
    4290              :       /* Transform (and (extend X) C) into (zero_extend (and X C)) if
    4291              :          there are no nonzero bits of C outside of X's mode.  */
    4292     46638470 :       if ((GET_CODE (op0) == SIGN_EXTEND
    4293     23319235 :            || GET_CODE (op0) == ZERO_EXTEND)
    4294        94981 :           && CONST_SCALAR_INT_P (trueop1)
    4295        80901 :           && is_a <scalar_int_mode> (mode, &int_mode)
    4296        80901 :           && is_a <scalar_int_mode> (GET_MODE (XEXP (op0, 0)), &inner_mode)
    4297     23400136 :           && (wi::mask (GET_MODE_PRECISION (inner_mode), true,
    4298        80901 :                         GET_MODE_PRECISION (int_mode))
    4299     23400136 :               & rtx_mode_t (trueop1, mode)) == 0)
    4300              :         {
    4301        78760 :           machine_mode imode = GET_MODE (XEXP (op0, 0));
    4302        78760 :           tem = immed_wide_int_const (rtx_mode_t (trueop1, mode), imode);
    4303        78760 :           tem = simplify_gen_binary (AND, imode, XEXP (op0, 0), tem);
    4304        78760 :           return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode);
    4305              :         }
    4306              : 
    4307              :       /* Transform (and (truncate X) C) into (truncate (and X C)).  This way
    4308              :          we might be able to further simplify the AND with X and potentially
    4309              :          remove the truncation altogether.  */
    4310     23240475 :       if (GET_CODE (op0) == TRUNCATE && CONST_INT_P (trueop1))
    4311              :         {
    4312            6 :           rtx x = XEXP (op0, 0);
    4313            6 :           machine_mode xmode = GET_MODE (x);
    4314            6 :           tem = simplify_gen_binary (AND, xmode, x,
    4315            6 :                                      gen_int_mode (INTVAL (trueop1), xmode));
    4316            6 :           return simplify_gen_unary (TRUNCATE, mode, tem, xmode);
    4317              :         }
    4318              : 
    4319              :       /* Canonicalize (A | C1) & C2 as (A & C2) | (C1 & C2).  */
    4320     23240469 :       if (GET_CODE (op0) == IOR
    4321      1464083 :           && CONST_INT_P (trueop1)
    4322       235059 :           && CONST_INT_P (XEXP (op0, 1)))
    4323              :         {
    4324       138169 :           HOST_WIDE_INT tmp = INTVAL (trueop1) & INTVAL (XEXP (op0, 1));
    4325       138169 :           return simplify_gen_binary (IOR, mode,
    4326              :                                       simplify_gen_binary (AND, mode,
    4327              :                                                            XEXP (op0, 0), op1),
    4328       138169 :                                       gen_int_mode (tmp, mode));
    4329              :         }
    4330              : 
    4331              :       /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
    4332              :          insn (and may simplify more).  */
    4333     23102300 :       if (GET_CODE (op0) == XOR
    4334       137761 :           && rtx_equal_p (XEXP (op0, 0), op1)
    4335     23103741 :           && ! side_effects_p (op1))
    4336         1441 :         return simplify_gen_binary (AND, mode,
    4337              :                                     simplify_gen_unary (NOT, mode,
    4338              :                                                         XEXP (op0, 1), mode),
    4339         1441 :                                     op1);
    4340              : 
    4341     23100859 :       if (GET_CODE (op0) == XOR
    4342       136320 :           && rtx_equal_p (XEXP (op0, 1), op1)
    4343     23104061 :           && ! side_effects_p (op1))
    4344         3202 :         return simplify_gen_binary (AND, mode,
    4345              :                                     simplify_gen_unary (NOT, mode,
    4346              :                                                         XEXP (op0, 0), mode),
    4347         3202 :                                     op1);
    4348              : 
    4349              :       /* Similarly for (~(A ^ B)) & A.  */
    4350     23097657 :       if (GET_CODE (op0) == NOT
    4351       630755 :           && GET_CODE (XEXP (op0, 0)) == XOR
    4352         3209 :           && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
    4353     23097711 :           && ! side_effects_p (op1))
    4354           54 :         return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
    4355              : 
    4356     23097603 :       if (GET_CODE (op0) == NOT
    4357       630701 :           && GET_CODE (XEXP (op0, 0)) == XOR
    4358         3155 :           && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
    4359     23097640 :           && ! side_effects_p (op1))
    4360           37 :         return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
    4361              : 
    4362              :       /* Convert (A | B) & A to A.  */
    4363     23097566 :       if (GET_CODE (op0) == IOR
    4364      1325914 :           && (rtx_equal_p (XEXP (op0, 0), op1)
    4365      1325392 :               || rtx_equal_p (XEXP (op0, 1), op1))
    4366          716 :           && ! side_effects_p (XEXP (op0, 0))
    4367     23098282 :           && ! side_effects_p (XEXP (op0, 1)))
    4368              :         return op1;
    4369              : 
    4370              :       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
    4371              :          ((A & N) + B) & M -> (A + B) & M
    4372              :          Similarly if (N & M) == 0,
    4373              :          ((A | N) + B) & M -> (A + B) & M
    4374              :          and for - instead of + and/or ^ instead of |.
    4375              :          Also, if (N & M) == 0, then
    4376              :          (A +- N) & M -> A & M.  */
    4377     23096850 :       if (CONST_INT_P (trueop1)
    4378     17258195 :           && HWI_COMPUTABLE_MODE_P (mode)
    4379     17230288 :           && ~UINTVAL (trueop1)
    4380     17230288 :           && (UINTVAL (trueop1) & (UINTVAL (trueop1) + 1)) == 0
    4381     34034940 :           && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
    4382              :         {
    4383       986714 :           rtx pmop[2];
    4384       986714 :           int which;
    4385              : 
    4386       986714 :           pmop[0] = XEXP (op0, 0);
    4387       986714 :           pmop[1] = XEXP (op0, 1);
    4388              : 
    4389       986714 :           if (CONST_INT_P (pmop[1])
    4390       527371 :               && (UINTVAL (pmop[1]) & UINTVAL (trueop1)) == 0)
    4391       170751 :             return simplify_gen_binary (AND, mode, pmop[0], op1);
    4392              : 
    4393      2471400 :           for (which = 0; which < 2; which++)
    4394              :             {
    4395      1647600 :               tem = pmop[which];
    4396      1647600 :               switch (GET_CODE (tem))
    4397              :                 {
    4398        12112 :                 case AND:
    4399        12112 :                   if (CONST_INT_P (XEXP (tem, 1))
    4400        10602 :                       && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1))
    4401              :                       == UINTVAL (trueop1))
    4402         7698 :                     pmop[which] = XEXP (tem, 0);
    4403              :                   break;
    4404         1734 :                 case IOR:
    4405         1734 :                 case XOR:
    4406         1734 :                   if (CONST_INT_P (XEXP (tem, 1))
    4407          705 :                       && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1)) == 0)
    4408          139 :                     pmop[which] = XEXP (tem, 0);
    4409              :                   break;
    4410              :                 default:
    4411              :                   break;
    4412              :                 }
    4413              :             }
    4414              : 
    4415       823800 :           if (pmop[0] != XEXP (op0, 0) || pmop[1] != XEXP (op0, 1))
    4416              :             {
    4417         7837 :               tem = simplify_gen_binary (GET_CODE (op0), mode,
    4418              :                                          pmop[0], pmop[1]);
    4419         7837 :               return simplify_gen_binary (code, mode, tem, op1);
    4420              :             }
    4421              :         }
    4422              : 
    4423              :       /* (and X (ior (not X) Y)) -> (and X Y) */
    4424     22926099 :       if (GET_CODE (op1) == IOR
    4425       997207 :           && GET_CODE (XEXP (op1, 0)) == NOT
    4426     22931476 :           && rtx_equal_p (op0, XEXP (XEXP (op1, 0), 0)))
    4427            0 :        return simplify_gen_binary (AND, mode, op0, XEXP (op1, 1));
    4428              : 
    4429              :       /* (and (ior (not X) Y) X) -> (and X Y) */
    4430     22926099 :       if (GET_CODE (op0) == IOR
    4431      1325198 :           && GET_CODE (XEXP (op0, 0)) == NOT
    4432     22976301 :           && rtx_equal_p (op1, XEXP (XEXP (op0, 0), 0)))
    4433            6 :         return simplify_gen_binary (AND, mode, op1, XEXP (op0, 1));
    4434              : 
    4435              :       /* (and X (ior Y (not X)) -> (and X Y) */
    4436     22926093 :       if (GET_CODE (op1) == IOR
    4437       997207 :           && GET_CODE (XEXP (op1, 1)) == NOT
    4438     22926340 :           && rtx_equal_p (op0, XEXP (XEXP (op1, 1), 0)))
    4439            0 :        return simplify_gen_binary (AND, mode, op0, XEXP (op1, 0));
    4440              : 
    4441              :       /* (and (ior Y (not X)) X) -> (and X Y) */
    4442     22926093 :       if (GET_CODE (op0) == IOR
    4443      1325192 :           && GET_CODE (XEXP (op0, 1)) == NOT
    4444     22933919 :           && rtx_equal_p (op1, XEXP (XEXP (op0, 1), 0)))
    4445            6 :         return simplify_gen_binary (AND, mode, op1, XEXP (op0, 0));
    4446              : 
    4447              :       /* (and (ior/xor X Y) (not Y)) -> X & ~Y */
    4448     22926087 :       if ((GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
    4449     22926087 :           && negated_ops_p (op1, XEXP (op0, 1)))
    4450           23 :         return simplify_gen_binary (AND, mode, XEXP (op0, 0),
    4451              :                                     simplify_gen_unary (NOT, mode,
    4452              :                                                         XEXP (op0, 1),
    4453           23 :                                                         mode));
    4454              :       /* (and (ior/xor Y X) (not Y)) -> X & ~Y */
    4455     22926064 :       if ((GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
    4456     22926064 :           && negated_ops_p (op1, XEXP (op0, 0)))
    4457            4 :         return simplify_gen_binary (AND, mode, XEXP (op0, 1),
    4458              :                                     simplify_gen_unary (NOT, mode,
    4459              :                                                         XEXP (op0, 0),
    4460            4 :                                                         mode));
    4461              : 
    4462              :       /* Convert (and (ior A C) (ior B C)) into (ior (and A B) C).  */
    4463     22926060 :       if (GET_CODE (op0) == GET_CODE (op1)
    4464      2195892 :           && (GET_CODE (op0) == AND
    4465              :               || GET_CODE (op0) == IOR
    4466      2195892 :               || GET_CODE (op0) == LSHIFTRT
    4467      1199387 :               || GET_CODE (op0) == ASHIFTRT
    4468      1199273 :               || GET_CODE (op0) == ASHIFT
    4469      1199122 :               || GET_CODE (op0) == ROTATE
    4470      1199122 :               || GET_CODE (op0) == ROTATERT))
    4471              :         {
    4472       996770 :           tem = simplify_distributive_operation (code, mode, op0, op1);
    4473       996770 :           if (tem)
    4474              :             return tem;
    4475              :         }
    4476              : 
    4477              :       /* (and:v4si
    4478              :            (ashiftrt:v4si A 16)
    4479              :            (const_vector: 0xffff x4))
    4480              :          is just (lshiftrt:v4si A 16).  */
    4481     21973679 :       if (VECTOR_MODE_P (mode) && GET_CODE (op0) == ASHIFTRT
    4482         4252 :           && (CONST_INT_P (XEXP (op0, 1))
    4483         1880 :               || (GET_CODE (XEXP (op0, 1)) == CONST_VECTOR
    4484           94 :                   && const_vec_duplicate_p (XEXP (op0, 1))
    4485            0 :                   && CONST_INT_P (XVECEXP (XEXP (op0, 1), 0, 0))))
    4486         2372 :           && GET_CODE (op1) == CONST_VECTOR
    4487     21973695 :           && const_vec_duplicate_p (op1)
    4488     21973737 :           && CONST_INT_P (XVECEXP (op1, 0, 0)))
    4489              :         {
    4490          112 :           unsigned HOST_WIDE_INT shift_count
    4491              :             = (CONST_INT_P (XEXP (op0, 1))
    4492           56 :                ? UINTVAL (XEXP (op0, 1))
    4493            0 :                : UINTVAL (XVECEXP (XEXP (op0, 1), 0, 0)));
    4494           56 :           unsigned HOST_WIDE_INT inner_prec
    4495          112 :             = GET_MODE_PRECISION (GET_MODE_INNER (mode));
    4496              : 
    4497              :           /* Avoid UD shift count.  */
    4498           56 :           if (shift_count < inner_prec
    4499           56 :               && (UINTVAL (XVECEXP (op1, 0, 0))
    4500           56 :                   == (HOST_WIDE_INT_1U << (inner_prec - shift_count)) - 1))
    4501           42 :             return simplify_gen_binary (LSHIFTRT, mode, XEXP (op0, 0), XEXP (op0, 1));
    4502              :         }
    4503              : 
    4504     21973637 :       tem = simplify_with_subreg_not (code, mode, op0, op1);
    4505     21973637 :       if (tem)
    4506              :         return tem;
    4507              : 
    4508     21971352 :       tem = simplify_byte_swapping_operation (code, mode, op0, op1);
    4509     21971352 :       if (tem)
    4510              :         return tem;
    4511              : 
    4512     21970879 :       tem = simplify_associative_operation (code, mode, op0, op1);
    4513     21970879 :       if (tem)
    4514              :         return tem;
    4515              :       break;
    4516              : 
    4517       904896 :     case UDIV:
    4518              :       /* 0/x is 0 (or x&0 if x has side-effects).  */
    4519       904896 :       if (trueop0 == CONST0_RTX (mode)
    4520          265 :           && !cfun->can_throw_non_call_exceptions)
    4521              :         {
    4522          265 :           if (side_effects_p (op1))
    4523            0 :             return simplify_gen_binary (AND, mode, op1, trueop0);
    4524              :           return trueop0;
    4525              :         }
    4526              :       /* x/1 is x.  */
    4527       904631 :       if (trueop1 == CONST1_RTX (mode))
    4528              :         {
    4529       239176 :           tem = rtl_hooks.gen_lowpart_no_emit (mode, op0);
    4530       239176 :           if (tem)
    4531              :             return tem;
    4532              :         }
    4533              :       /* Convert divide by power of two into shift.  */
    4534       665455 :       if (CONST_INT_P (trueop1)
    4535       997760 :           && (val = exact_log2 (UINTVAL (trueop1))) > 0)
    4536       332305 :         return simplify_gen_binary (LSHIFTRT, mode, op0,
    4537       332305 :                                     gen_int_shift_amount (mode, val));
    4538              :       break;
    4539              : 
    4540      1180008 :     case DIV:
    4541              :       /* Handle floating point and integers separately.  */
    4542      1180008 :       if (SCALAR_FLOAT_MODE_P (mode))
    4543              :         {
    4544              :           /* Maybe change 0.0 / x to 0.0.  This transformation isn't
    4545              :              safe for modes with NaNs, since 0.0 / 0.0 will then be
    4546              :              NaN rather than 0.0.  Nor is it safe for modes with signed
    4547              :              zeros, since dividing 0 by a negative number gives -0.0  */
    4548       326993 :           if (trueop0 == CONST0_RTX (mode)
    4549         2886 :               && !HONOR_NANS (mode)
    4550           14 :               && !HONOR_SIGNED_ZEROS (mode)
    4551       327007 :               && ! side_effects_p (op1))
    4552              :             return op0;
    4553              :           /* x/1.0 is x.  */
    4554       326979 :           if (trueop1 == CONST1_RTX (mode)
    4555       326979 :               && !HONOR_SNANS (mode))
    4556              :             return op0;
    4557              : 
    4558       326974 :           if (CONST_DOUBLE_AS_FLOAT_P (trueop1)
    4559        27141 :               && trueop1 != CONST0_RTX (mode))
    4560              :             {
    4561        20858 :               const REAL_VALUE_TYPE *d1 = CONST_DOUBLE_REAL_VALUE (trueop1);
    4562              : 
    4563              :               /* x/-1.0 is -x.  */
    4564        20858 :               if (real_equal (d1, &dconstm1)
    4565        20858 :                   && !HONOR_SNANS (mode))
    4566            0 :                 return simplify_gen_unary (NEG, mode, op0, mode);
    4567              : 
    4568              :               /* Change FP division by a constant into multiplication.
    4569              :                  Only do this with -freciprocal-math.  */
    4570        20858 :               if (flag_reciprocal_math
    4571        20858 :                   && !real_equal (d1, &dconst0))
    4572              :                 {
    4573            7 :                   REAL_VALUE_TYPE d;
    4574            7 :                   real_arithmetic (&d, RDIV_EXPR, &dconst1, d1);
    4575            7 :                   tem = const_double_from_real_value (d, mode);
    4576            7 :                   return simplify_gen_binary (MULT, mode, op0, tem);
    4577              :                 }
    4578              :             }
    4579              :         }
    4580       853015 :       else if (SCALAR_INT_MODE_P (mode) || GET_MODE_CLASS (mode) == MODE_VECTOR_INT)
    4581              :         {
    4582              :           /* 0/x is 0 (or x&0 if x has side-effects).  */
    4583       831565 :           if (trueop0 == CONST0_RTX (mode)
    4584          678 :               && !cfun->can_throw_non_call_exceptions)
    4585              :             {
    4586          601 :               if (side_effects_p (op1))
    4587            8 :                 return simplify_gen_binary (AND, mode, op1, trueop0);
    4588              :               return trueop0;
    4589              :             }
    4590              :           /* x/1 is x.  */
    4591       830964 :           if (trueop1 == CONST1_RTX (mode))
    4592              :             {
    4593          872 :               tem = rtl_hooks.gen_lowpart_no_emit (mode, op0);
    4594          872 :               if (tem)
    4595              :                 return tem;
    4596              :             }
    4597              :           /* x/-1 is -x.  */
    4598       830092 :           if (trueop1 == CONSTM1_RTX (mode))
    4599              :             {
    4600          215 :               rtx x = rtl_hooks.gen_lowpart_no_emit (mode, op0);
    4601          215 :               if (x)
    4602          215 :                 return simplify_gen_unary (NEG, mode, x, mode);
    4603              :             }
    4604              :         }
    4605              :       break;
    4606              : 
    4607       933383 :     case UMOD:
    4608              :       /* 0%x is 0 (or x&0 if x has side-effects).  */
    4609       933383 :       if (trueop0 == CONST0_RTX (mode))
    4610              :         {
    4611          767 :           if (side_effects_p (op1))
    4612            0 :             return simplify_gen_binary (AND, mode, op1, trueop0);
    4613              :           return trueop0;
    4614              :         }
    4615              :       /* x%1 is 0 (or x&0 if x has side-effects).  */
    4616       932616 :       if (trueop1 == CONST1_RTX (mode))
    4617              :         {
    4618       273186 :           if (side_effects_p (op0))
    4619            0 :             return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
    4620       273186 :           return CONST0_RTX (mode);
    4621              :         }
    4622              :       /* Implement modulus by power of two as AND.  */
    4623       659430 :       if (CONST_INT_P (trueop1)
    4624       982250 :           && exact_log2 (UINTVAL (trueop1)) > 0)
    4625       322820 :         return simplify_gen_binary (AND, mode, op0,
    4626       322820 :                                     gen_int_mode (UINTVAL (trueop1) - 1,
    4627              :                                                   mode));
    4628              :       break;
    4629              : 
    4630       358514 :     case MOD:
    4631              :       /* 0%x is 0 (or x&0 if x has side-effects).  */
    4632       358514 :       if (trueop0 == CONST0_RTX (mode))
    4633              :         {
    4634          833 :           if (side_effects_p (op1))
    4635            8 :             return simplify_gen_binary (AND, mode, op1, trueop0);
    4636              :           return trueop0;
    4637              :         }
    4638              :       /* x%1 and x%-1 is 0 (or x&0 if x has side-effects).  */
    4639       357681 :       if (trueop1 == CONST1_RTX (mode) || trueop1 == constm1_rtx)
    4640              :         {
    4641          439 :           if (side_effects_p (op0))
    4642            0 :             return simplify_gen_binary (AND, mode, op0, CONST0_RTX (mode));
    4643          439 :           return CONST0_RTX (mode);
    4644              :         }
    4645              :       break;
    4646              : 
    4647       137160 :     case ROTATERT:
    4648       137160 :     case ROTATE:
    4649       137160 :       if (trueop1 == CONST0_RTX (mode))
    4650              :         return op0;
    4651              :       /* Canonicalize rotates by constant amount.  If the condition of
    4652              :          reversing direction is met, then reverse the direction. */
    4653              : #if defined(HAVE_rotate) && defined(HAVE_rotatert)
    4654       137070 :       if (reverse_rotate_by_imm_p (mode, (code == ROTATE), trueop1))
    4655              :         {
    4656        11681 :           int new_amount = GET_MODE_UNIT_PRECISION (mode) - INTVAL (trueop1);
    4657        11681 :           rtx new_amount_rtx = gen_int_shift_amount (mode, new_amount);
    4658        12225 :           return simplify_gen_binary (code == ROTATE ? ROTATERT : ROTATE,
    4659              :                                       mode, op0, new_amount_rtx);
    4660              :         }
    4661              : #endif
    4662              :       /* ROTATE/ROTATERT:HI (X:HI, 8) is BSWAP:HI (X).  Other combinations
    4663              :          such as SImode with a count of 16 do not correspond to RTL BSWAP
    4664              :          semantics.  */
    4665       125389 :       tem = unwrap_const_vec_duplicate (trueop1);
    4666       125389 :       if (GET_MODE_UNIT_BITSIZE (mode) == (2 * BITS_PER_UNIT)
    4667       125389 :           && CONST_INT_P (tem) && INTVAL (tem) == BITS_PER_UNIT)
    4668          599 :         return simplify_gen_unary (BSWAP, mode, op0, mode);
    4669              : 
    4670              :       /* FALLTHRU */
    4671      5192480 :     case ASHIFTRT:
    4672      5192480 :       if (trueop1 == CONST0_RTX (mode))
    4673              :         return op0;
    4674      5190647 :       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
    4675              :         return op0;
    4676              :       /* Rotating ~0 always results in ~0.  */
    4677      5190475 :       if (CONST_INT_P (trueop0)
    4678        14573 :           && HWI_COMPUTABLE_MODE_P (mode)
    4679        14545 :           && UINTVAL (trueop0) == GET_MODE_MASK (mode)
    4680      5190475 :           && ! side_effects_p (op1))
    4681              :         return op0;
    4682              : 
    4683     31282391 :     canonicalize_shift:
    4684              :       /* Given:
    4685              :          scalar modes M1, M2
    4686              :          scalar constants c1, c2
    4687              :          size (M2) > size (M1)
    4688              :          c1 == size (M2) - size (M1)
    4689              :          optimize:
    4690              :          ([a|l]shiftrt:M1 (subreg:M1 (lshiftrt:M2 (reg:M2) (const_int <c1>))
    4691              :                                  <low_part>)
    4692              :                       (const_int <c2>))
    4693              :          to:
    4694              :          (subreg:M1 ([a|l]shiftrt:M2 (reg:M2) (const_int <c1 + c2>))
    4695              :                     <low_part>).  */
    4696     31282391 :       if ((code == ASHIFTRT || code == LSHIFTRT)
    4697     11837288 :           && is_a <scalar_int_mode> (mode, &int_mode)
    4698     11047020 :           && SUBREG_P (op0)
    4699      1190466 :           && CONST_INT_P (op1)
    4700      1189436 :           && GET_CODE (SUBREG_REG (op0)) == LSHIFTRT
    4701        19434 :           && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
    4702              :                                      &inner_mode)
    4703        19434 :           && CONST_INT_P (XEXP (SUBREG_REG (op0), 1))
    4704        38562 :           && GET_MODE_BITSIZE (inner_mode) > GET_MODE_BITSIZE (int_mode)
    4705        19281 :           && (INTVAL (XEXP (SUBREG_REG (op0), 1))
    4706        38562 :               == GET_MODE_BITSIZE (inner_mode) - GET_MODE_BITSIZE (int_mode))
    4707     31301420 :           && subreg_lowpart_p (op0))
    4708              :         {
    4709        19029 :           rtx tmp = gen_int_shift_amount
    4710        19029 :             (inner_mode, INTVAL (XEXP (SUBREG_REG (op0), 1)) + INTVAL (op1));
    4711              : 
    4712              :          /* Combine would usually zero out the value when combining two
    4713              :             local shifts and the range becomes larger or equal to the mode.
    4714              :             However since we fold away one of the shifts here combine won't
    4715              :             see it so we should immediately zero the result if it's out of
    4716              :             range.  */
    4717        19029 :          if (code == LSHIFTRT
    4718        34558 :              && INTVAL (tmp) >= GET_MODE_BITSIZE (inner_mode))
    4719            0 :           tmp = const0_rtx;
    4720              :          else
    4721        19029 :            tmp = simplify_gen_binary (code,
    4722              :                                       inner_mode,
    4723        19029 :                                       XEXP (SUBREG_REG (op0), 0),
    4724              :                                       tmp);
    4725              : 
    4726        19029 :           return lowpart_subreg (int_mode, tmp, inner_mode);
    4727              :         }
    4728              : 
    4729     31263362 :       if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1))
    4730              :         {
    4731              :           val = INTVAL (op1) & (GET_MODE_UNIT_PRECISION (mode) - 1);
    4732              :           if (val != INTVAL (op1))
    4733              :             return simplify_gen_binary (code, mode, op0,
    4734              :                                         gen_int_shift_amount (mode, val));
    4735              :         }
    4736              : 
    4737              :       /* Simplify:
    4738              : 
    4739              :            (code:M1
    4740              :              (subreg:M1
    4741              :                ([al]shiftrt:M2
    4742              :                  (subreg:M2
    4743              :                    (ashift:M1 X C1))
    4744              :                  C2))
    4745              :              C3)
    4746              : 
    4747              :          to:
    4748              : 
    4749              :            (code:M1
    4750              :              ([al]shiftrt:M1
    4751              :                (ashift:M1 X C1+N)
    4752              :                C2+N)
    4753              :              C3)
    4754              : 
    4755              :          where M1 is N bits wider than M2.  Optimizing the (subreg:M1 ...)
    4756              :          directly would be arithmetically correct, but restricting the
    4757              :          simplification to shifts by constants is more conservative,
    4758              :          since it is more likely to lead to further simplifications.  */
    4759     31263362 :       if (is_a<scalar_int_mode> (mode, &int_mode)
    4760      5591528 :           && paradoxical_subreg_p (op0)
    4761      5152483 :           && is_a<scalar_int_mode> (GET_MODE (SUBREG_REG (op0)), &inner_mode)
    4762      5152397 :           && (GET_CODE (SUBREG_REG (op0)) == ASHIFTRT
    4763      5152397 :               || GET_CODE (SUBREG_REG (op0)) == LSHIFTRT)
    4764       143125 :           && CONST_INT_P (op1))
    4765              :         {
    4766       143125 :           auto xcode = GET_CODE (SUBREG_REG (op0));
    4767       143125 :           rtx xop0 = XEXP (SUBREG_REG (op0), 0);
    4768       143125 :           rtx xop1 = XEXP (SUBREG_REG (op0), 1);
    4769       143125 :           if (SUBREG_P (xop0)
    4770        10150 :               && GET_MODE (SUBREG_REG (xop0)) == mode
    4771        10122 :               && GET_CODE (SUBREG_REG (xop0)) == ASHIFT
    4772          605 :               && CONST_INT_P (xop1)
    4773       143730 :               && UINTVAL (xop1) < GET_MODE_PRECISION (inner_mode))
    4774              :             {
    4775          605 :               rtx yop0 = XEXP (SUBREG_REG (xop0), 0);
    4776          605 :               rtx yop1 = XEXP (SUBREG_REG (xop0), 1);
    4777          605 :               if (CONST_INT_P (yop1)
    4778          605 :                   && UINTVAL (yop1) < GET_MODE_PRECISION (inner_mode))
    4779              :                 {
    4780         1210 :                   auto bias = (GET_MODE_BITSIZE (int_mode)
    4781          605 :                                - GET_MODE_BITSIZE (inner_mode));
    4782          605 :                   tem = simplify_gen_binary (ASHIFT, mode, yop0,
    4783          605 :                                              GEN_INT (INTVAL (yop1) + bias));
    4784          605 :                   tem = simplify_gen_binary (xcode, mode, tem,
    4785          605 :                                              GEN_INT (INTVAL (xop1) + bias));
    4786          605 :                   return simplify_gen_binary (code, mode, tem, op1);
    4787              :                 }
    4788              :             }
    4789              :         }
    4790              :       break;
    4791              : 
    4792            0 :     case SS_ASHIFT:
    4793            0 :       if (CONST_INT_P (trueop0)
    4794            0 :           && HWI_COMPUTABLE_MODE_P (mode)
    4795            0 :           && (UINTVAL (trueop0) == (GET_MODE_MASK (mode) >> 1)
    4796            0 :               || mode_signbit_p (mode, trueop0))
    4797            0 :           && ! side_effects_p (op1))
    4798              :         return op0;
    4799            0 :       goto simplify_ashift;
    4800              : 
    4801            0 :     case US_ASHIFT:
    4802            0 :       if (CONST_INT_P (trueop0)
    4803            0 :           && HWI_COMPUTABLE_MODE_P (mode)
    4804            0 :           && UINTVAL (trueop0) == GET_MODE_MASK (mode)
    4805            0 :           && ! side_effects_p (op1))
    4806              :         return op0;
    4807              :       /* FALLTHRU */
    4808              : 
    4809     19752094 :     case ASHIFT:
    4810     19752094 : simplify_ashift:
    4811     19752094 :       if (trueop1 == CONST0_RTX (mode))
    4812              :         return op0;
    4813     19593566 :       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
    4814              :         return op0;
    4815     19564504 :       if (mem_depth
    4816       244160 :           && code == ASHIFT
    4817       244160 :           && CONST_INT_P (trueop1)
    4818       244152 :           && is_a <scalar_int_mode> (mode, &int_mode)
    4819     19808644 :           && IN_RANGE (UINTVAL (trueop1),
    4820              :                        1, GET_MODE_PRECISION (int_mode) - 1))
    4821              :         {
    4822       244140 :           auto c = (wi::one (GET_MODE_PRECISION (int_mode))
    4823       244140 :                     << UINTVAL (trueop1));
    4824       244140 :           rtx new_op1 = immed_wide_int_const (c, int_mode);
    4825       244140 :           return simplify_gen_binary (MULT, int_mode, op0, new_op1);
    4826       244140 :         }
    4827              : 
    4828              :       /* If we're shifting left a signed bitfield extraction and the
    4829              :          shift count + bitfield size is a natural integral mode and
    4830              :          the field starts at offset 0 (counting from the LSB), then
    4831              :          this can be simplified to a sign extension of a left shift.
    4832              : 
    4833              :          Some ISAs (RISC-V 64-bit) have inherent support for such
    4834              :          instructions and it's better for various optimizations to
    4835              :          express as a SIGN_EXTEND rather than a shifted SIGN_EXTRACT.  */
    4836     19320364 :       if (GET_CODE (op0) == SIGN_EXTRACT
    4837           27 :           && REG_P (XEXP (op0, 0))
    4838              :           /* The size of the bitfield, the location of the bitfield and
    4839              :              shift count must be CONST_INTs.  */
    4840           21 :           && CONST_INT_P (op1)
    4841           21 :           && CONST_INT_P (XEXP (op0, 1))
    4842           21 :           && CONST_INT_P (XEXP (op0, 2)))
    4843              :         {
    4844           21 :           int size = INTVAL (op1) + INTVAL (XEXP (op0, 1));
    4845           21 :           machine_mode smaller_mode;
    4846              :           /* Now we need to verify the size of the bitfield plus the shift
    4847              :              count is an integral mode and smaller than MODE.  This is
    4848              :              requirement for using SIGN_EXTEND.  We also need to verify the
    4849              :              field starts at bit location 0 and that the subreg lowpart also
    4850              :              starts at zero.  */
    4851           21 :           if (int_mode_for_size (size, size).exists (&smaller_mode)
    4852            3 :               && mode > smaller_mode
    4853           21 :               && (subreg_lowpart_offset (smaller_mode, mode).to_constant ()
    4854            3 :                   == UINTVAL (XEXP (op0, 2)))
    4855            1 :               && XEXP (op0, 2) == CONST0_RTX (mode))
    4856              :             {
    4857              :               /* Everything passed.  So we just need to get the subreg of the
    4858              :                  original input, shift it and sign extend the result.  */
    4859            1 :               rtx op = gen_lowpart (smaller_mode, XEXP (op0, 0));
    4860            1 :               rtx x = gen_rtx_ASHIFT (smaller_mode, op, op1);
    4861            1 :               return gen_rtx_SIGN_EXTEND (mode, x);
    4862              :             }
    4863              :         }
    4864     19320363 :       goto canonicalize_shift;
    4865              : 
    4866      8534139 :     case LSHIFTRT:
    4867      8534139 :       if (trueop1 == CONST0_RTX (mode))
    4868              :         return op0;
    4869      6772978 :       if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
    4870              :         return op0;
    4871              :       /* Optimize (lshiftrt (clz X) C) as (eq X 0).  */
    4872      6771553 :       if (GET_CODE (op0) == CLZ
    4873            0 :           && is_a <scalar_int_mode> (GET_MODE (XEXP (op0, 0)), &inner_mode)
    4874            0 :           && CONST_INT_P (trueop1)
    4875              :           && STORE_FLAG_VALUE == 1
    4876      6771553 :           && INTVAL (trueop1) < GET_MODE_UNIT_PRECISION (mode))
    4877              :         {
    4878            0 :           unsigned HOST_WIDE_INT zero_val = 0;
    4879              : 
    4880            0 :           if (CLZ_DEFINED_VALUE_AT_ZERO (inner_mode, zero_val)
    4881            0 :               && zero_val == GET_MODE_PRECISION (inner_mode)
    4882            0 :               && INTVAL (trueop1) == exact_log2 (zero_val))
    4883            0 :             return simplify_gen_relational (EQ, mode, inner_mode,
    4884            0 :                                             XEXP (op0, 0), const0_rtx);
    4885              :         }
    4886      6771553 :       goto canonicalize_shift;
    4887              : 
    4888       234601 :     case SMIN:
    4889       234601 :       if (HWI_COMPUTABLE_MODE_P (mode)
    4890       213800 :           && mode_signbit_p (mode, trueop1)
    4891            0 :           && ! side_effects_p (op0))
    4892              :         return op1;
    4893       234601 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
    4894              :         return op0;
    4895       234445 :       tem = simplify_associative_operation (code, mode, op0, op1);
    4896       234445 :       if (tem)
    4897              :         return tem;
    4898              :       break;
    4899              : 
    4900       482473 :     case SMAX:
    4901       482473 :       if (HWI_COMPUTABLE_MODE_P (mode)
    4902       455529 :           && CONST_INT_P (trueop1)
    4903       423161 :           && (UINTVAL (trueop1) == GET_MODE_MASK (mode) >> 1)
    4904            0 :           && ! side_effects_p (op0))
    4905              :         return op1;
    4906       482473 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
    4907              :         return op0;
    4908       482368 :       tem = simplify_associative_operation (code, mode, op0, op1);
    4909       482368 :       if (tem)
    4910              :         return tem;
    4911              :       break;
    4912              : 
    4913       339208 :     case UMIN:
    4914       339208 :       if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
    4915              :         return op1;
    4916       339195 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
    4917              :         return op0;
    4918       339078 :       tem = simplify_associative_operation (code, mode, op0, op1);
    4919       339078 :       if (tem)
    4920              :         return tem;
    4921              :       break;
    4922              : 
    4923       313261 :     case UMAX:
    4924       313261 :       if (trueop1 == constm1_rtx && ! side_effects_p (op0))
    4925              :         return op1;
    4926       313261 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
    4927              :         return op0;
    4928       313171 :       tem = simplify_associative_operation (code, mode, op0, op1);
    4929       313171 :       if (tem)
    4930              :         return tem;
    4931              :       break;
    4932              : 
    4933        11656 :     case SS_PLUS:
    4934        11656 :     case US_PLUS:
    4935        11656 :     case SS_MINUS:
    4936        11656 :     case US_MINUS:
    4937              :       /* Simplify x +/- 0 to x, if possible.  */
    4938        11656 :       if (trueop1 == CONST0_RTX (mode))
    4939              :         return op0;
    4940              :       return 0;
    4941              : 
    4942            0 :     case SS_MULT:
    4943            0 :     case US_MULT:
    4944              :       /* Simplify x * 0 to 0, if possible.  */
    4945            0 :       if (trueop1 == CONST0_RTX (mode)
    4946            0 :           && !side_effects_p (op0))
    4947              :         return op1;
    4948              : 
    4949              :       /* Simplify x * 1 to x, if possible.  */
    4950            0 :       if (trueop1 == CONST1_RTX (mode))
    4951              :         return op0;
    4952              :       return 0;
    4953              : 
    4954       470799 :     case SMUL_HIGHPART:
    4955       470799 :     case UMUL_HIGHPART:
    4956              :       /* Simplify x * 0 to 0, if possible.  */
    4957       470799 :       if (trueop1 == CONST0_RTX (mode)
    4958       470799 :           && !side_effects_p (op0))
    4959              :         return op1;
    4960              :       return 0;
    4961              : 
    4962            0 :     case SS_DIV:
    4963            0 :     case US_DIV:
    4964              :       /* Simplify x / 1 to x, if possible.  */
    4965            0 :       if (trueop1 == CONST1_RTX (mode))
    4966              :         return op0;
    4967              :       return 0;
    4968              : 
    4969            0 :     case COPYSIGN:
    4970            0 :       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
    4971              :         return op0;
    4972            0 :       if (CONST_DOUBLE_AS_FLOAT_P (trueop1))
    4973              :         {
    4974            0 :           REAL_VALUE_TYPE f1;
    4975            0 :           real_convert (&f1, mode, CONST_DOUBLE_REAL_VALUE (trueop1));
    4976            0 :           rtx tmp = simplify_gen_unary (ABS, mode, op0, mode);
    4977            0 :           if (REAL_VALUE_NEGATIVE (f1))
    4978            0 :             tmp = simplify_unary_operation (NEG, mode, tmp, mode);
    4979            0 :           return tmp;
    4980              :         }
    4981            0 :       if (GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
    4982            0 :         return simplify_gen_binary (COPYSIGN, mode, XEXP (op0, 0), op1);
    4983            0 :       if (GET_CODE (op1) == ABS
    4984            0 :           && ! side_effects_p (op1))
    4985            0 :         return simplify_gen_unary (ABS, mode, op0, mode);
    4986            0 :       if (GET_CODE (op0) == COPYSIGN
    4987            0 :           && ! side_effects_p (XEXP (op0, 1)))
    4988            0 :         return simplify_gen_binary (COPYSIGN, mode, XEXP (op0, 0), op1);
    4989            0 :       if (GET_CODE (op1) == COPYSIGN
    4990            0 :           && ! side_effects_p (XEXP (op1, 0)))
    4991            0 :         return simplify_gen_binary (COPYSIGN, mode, op0, XEXP (op1, 1));
    4992              :       return 0;
    4993              : 
    4994         1107 :     case VEC_SERIES:
    4995         2214 :       if (op1 == CONST0_RTX (GET_MODE_INNER (mode)))
    4996           92 :         return gen_vec_duplicate (mode, op0);
    4997         1015 :       if (valid_for_const_vector_p (mode, op0)
    4998         1015 :           && valid_for_const_vector_p (mode, op1))
    4999           93 :         return gen_const_vec_series (mode, op0, op1);
    5000              :       return 0;
    5001              : 
    5002      3394403 :     case VEC_SELECT:
    5003      3394403 :       if (!VECTOR_MODE_P (mode))
    5004              :         {
    5005       915399 :           gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
    5006      1830798 :           gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0)));
    5007       915399 :           gcc_assert (GET_CODE (trueop1) == PARALLEL);
    5008       915399 :           gcc_assert (XVECLEN (trueop1, 0) == 1);
    5009              : 
    5010              :           /* We can't reason about selections made at runtime.  */
    5011       915399 :           if (!CONST_INT_P (XVECEXP (trueop1, 0, 0)))
    5012    443500346 :             return 0;
    5013              : 
    5014       915399 :           if (vec_duplicate_p (trueop0, &elt0))
    5015         2142 :             return elt0;
    5016              : 
    5017       913257 :           if (GET_CODE (trueop0) == CONST_VECTOR)
    5018         7242 :             return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP
    5019              :                                                       (trueop1, 0, 0)));
    5020              : 
    5021              :           /* Extract a scalar element from a nested VEC_SELECT expression
    5022              :              (with optional nested VEC_CONCAT expression).  Some targets
    5023              :              (i386) extract scalar element from a vector using chain of
    5024              :              nested VEC_SELECT expressions.  When input operand is a memory
    5025              :              operand, this operation can be simplified to a simple scalar
    5026              :              load from an offseted memory address.  */
    5027       906015 :           int n_elts;
    5028       906015 :           if (GET_CODE (trueop0) == VEC_SELECT
    5029       975269 :               && (GET_MODE_NUNITS (GET_MODE (XEXP (trueop0, 0)))
    5030        69254 :                   .is_constant (&n_elts)))
    5031              :             {
    5032        69254 :               rtx op0 = XEXP (trueop0, 0);
    5033        69254 :               rtx op1 = XEXP (trueop0, 1);
    5034              : 
    5035        69254 :               int i = INTVAL (XVECEXP (trueop1, 0, 0));
    5036        69254 :               int elem;
    5037              : 
    5038        69254 :               rtvec vec;
    5039        69254 :               rtx tmp_op, tmp;
    5040              : 
    5041        69254 :               gcc_assert (GET_CODE (op1) == PARALLEL);
    5042        69254 :               gcc_assert (i < XVECLEN (op1, 0));
    5043              : 
    5044              :               /* Select element, pointed by nested selector.  */
    5045        69254 :               elem = INTVAL (XVECEXP (op1, 0, i));
    5046              : 
    5047        69254 :               gcc_assert (elem < n_elts);
    5048              : 
    5049              :               /* Handle the case when nested VEC_SELECT wraps VEC_CONCAT.  */
    5050        69254 :               if (GET_CODE (op0) == VEC_CONCAT)
    5051              :                 {
    5052        27961 :                   rtx op00 = XEXP (op0, 0);
    5053        27961 :                   rtx op01 = XEXP (op0, 1);
    5054              : 
    5055        27961 :                   machine_mode mode00, mode01;
    5056        27961 :                   int n_elts00, n_elts01;
    5057              : 
    5058        27961 :                   mode00 = GET_MODE (op00);
    5059        27961 :                   mode01 = GET_MODE (op01);
    5060              : 
    5061              :                   /* Find out the number of elements of each operand.
    5062              :                      Since the concatenated result has a constant number
    5063              :                      of elements, the operands must too.  */
    5064        27961 :                   n_elts00 = GET_MODE_NUNITS (mode00).to_constant ();
    5065        27961 :                   n_elts01 = GET_MODE_NUNITS (mode01).to_constant ();
    5066              : 
    5067        27961 :                   gcc_assert (n_elts == n_elts00 + n_elts01);
    5068              : 
    5069              :                   /* Select correct operand of VEC_CONCAT
    5070              :                      and adjust selector. */
    5071        27961 :                   if (elem < n_elts01)
    5072              :                     tmp_op = op00;
    5073              :                   else
    5074              :                     {
    5075           43 :                       tmp_op = op01;
    5076           43 :                       elem -= n_elts00;
    5077              :                     }
    5078              :                 }
    5079              :               else
    5080              :                 tmp_op = op0;
    5081              : 
    5082        69254 :               vec = rtvec_alloc (1);
    5083        69254 :               RTVEC_ELT (vec, 0) = GEN_INT (elem);
    5084              : 
    5085        69254 :               tmp = gen_rtx_fmt_ee (code, mode,
    5086              :                                     tmp_op, gen_rtx_PARALLEL (VOIDmode, vec));
    5087        69254 :               return tmp;
    5088              :             }
    5089              :         }
    5090              :       else
    5091              :         {
    5092      2479004 :           gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0)));
    5093      7437012 :           gcc_assert (GET_MODE_INNER (mode)
    5094              :                       == GET_MODE_INNER (GET_MODE (trueop0)));
    5095      2479004 :           gcc_assert (GET_CODE (trueop1) == PARALLEL);
    5096              : 
    5097      2479004 :           if (vec_duplicate_p (trueop0, &elt0))
    5098              :             /* It doesn't matter which elements are selected by trueop1,
    5099              :                because they are all the same.  */
    5100        15466 :             return gen_vec_duplicate (mode, elt0);
    5101              : 
    5102      2463538 :           if (GET_CODE (trueop0) == CONST_VECTOR)
    5103              :             {
    5104        17230 :               unsigned n_elts = XVECLEN (trueop1, 0);
    5105        17230 :               rtvec v = rtvec_alloc (n_elts);
    5106        17230 :               unsigned int i;
    5107              : 
    5108        34460 :               gcc_assert (known_eq (n_elts, GET_MODE_NUNITS (mode)));
    5109        88266 :               for (i = 0; i < n_elts; i++)
    5110              :                 {
    5111        71036 :                   rtx x = XVECEXP (trueop1, 0, i);
    5112              : 
    5113        71036 :                   if (!CONST_INT_P (x))
    5114              :                     return 0;
    5115              : 
    5116        71036 :                   RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0,
    5117              :                                                        INTVAL (x));
    5118              :                 }
    5119              : 
    5120        17230 :               return gen_rtx_CONST_VECTOR (mode, v);
    5121              :             }
    5122              : 
    5123              :           /* Recognize the identity.  */
    5124      2446308 :           if (GET_MODE (trueop0) == mode)
    5125              :             {
    5126       576167 :               bool maybe_ident = true;
    5127       576167 :               for (int i = 0; i < XVECLEN (trueop1, 0); i++)
    5128              :                 {
    5129       575784 :                   rtx j = XVECEXP (trueop1, 0, i);
    5130       575784 :                   if (!CONST_INT_P (j) || INTVAL (j) != i)
    5131              :                     {
    5132              :                       maybe_ident = false;
    5133              :                       break;
    5134              :                     }
    5135              :                 }
    5136       349560 :               if (maybe_ident)
    5137              :                 return trueop0;
    5138              :             }
    5139              : 
    5140              :           /* If we select a low-part subreg, return that.  */
    5141      2445925 :           if (vec_series_lowpart_p (mode, GET_MODE (trueop0), trueop1))
    5142              :             {
    5143            0 :               rtx new_rtx = lowpart_subreg (mode, trueop0,
    5144            0 :                                             GET_MODE (trueop0));
    5145            0 :               if (new_rtx != NULL_RTX)
    5146              :                 return new_rtx;
    5147              :             }
    5148              : 
    5149              :           /* If we build {a,b} then permute it, build the result directly.  */
    5150      2445925 :           if (XVECLEN (trueop1, 0) == 2
    5151       588932 :               && CONST_INT_P (XVECEXP (trueop1, 0, 0))
    5152       588932 :               && CONST_INT_P (XVECEXP (trueop1, 0, 1))
    5153       588932 :               && GET_CODE (trueop0) == VEC_CONCAT
    5154       175871 :               && GET_CODE (XEXP (trueop0, 0)) == VEC_CONCAT
    5155           78 :               && GET_MODE (XEXP (trueop0, 0)) == mode
    5156           78 :               && GET_CODE (XEXP (trueop0, 1)) == VEC_CONCAT
    5157           54 :               && GET_MODE (XEXP (trueop0, 1)) == mode)
    5158              :             {
    5159           54 :               unsigned int i0 = INTVAL (XVECEXP (trueop1, 0, 0));
    5160           54 :               unsigned int i1 = INTVAL (XVECEXP (trueop1, 0, 1));
    5161           54 :               rtx subop0, subop1;
    5162              : 
    5163           54 :               gcc_assert (i0 < 4 && i1 < 4);
    5164           54 :               subop0 = XEXP (XEXP (trueop0, i0 / 2), i0 % 2);
    5165           54 :               subop1 = XEXP (XEXP (trueop0, i1 / 2), i1 % 2);
    5166              : 
    5167           54 :               return simplify_gen_binary (VEC_CONCAT, mode, subop0, subop1);
    5168              :             }
    5169              : 
    5170      2445871 :           if (XVECLEN (trueop1, 0) == 2
    5171       588878 :               && CONST_INT_P (XVECEXP (trueop1, 0, 0))
    5172       588878 :               && CONST_INT_P (XVECEXP (trueop1, 0, 1))
    5173       588878 :               && GET_CODE (trueop0) == VEC_CONCAT
    5174       175817 :               && GET_MODE (trueop0) == mode)
    5175              :             {
    5176            2 :               unsigned int i0 = INTVAL (XVECEXP (trueop1, 0, 0));
    5177            2 :               unsigned int i1 = INTVAL (XVECEXP (trueop1, 0, 1));
    5178            2 :               rtx subop0, subop1;
    5179              : 
    5180            2 :               gcc_assert (i0 < 2 && i1 < 2);
    5181            2 :               subop0 = XEXP (trueop0, i0);
    5182            2 :               subop1 = XEXP (trueop0, i1);
    5183              : 
    5184            2 :               return simplify_gen_binary (VEC_CONCAT, mode, subop0, subop1);
    5185              :             }
    5186              : 
    5187              :           /* If we select one half of a vec_concat, return that.  */
    5188      2445869 :           int l0, l1;
    5189      2445869 :           if (GET_CODE (trueop0) == VEC_CONCAT
    5190      3020970 :               && (GET_MODE_NUNITS (GET_MODE (XEXP (trueop0, 0)))
    5191      1510485 :                   .is_constant (&l0))
    5192      3020970 :               && (GET_MODE_NUNITS (GET_MODE (XEXP (trueop0, 1)))
    5193      1510485 :                   .is_constant (&l1))
    5194      3956354 :               && CONST_INT_P (XVECEXP (trueop1, 0, 0)))
    5195              :             {
    5196      1510485 :               rtx subop0 = XEXP (trueop0, 0);
    5197      1510485 :               rtx subop1 = XEXP (trueop0, 1);
    5198      1510485 :               machine_mode mode0 = GET_MODE (subop0);
    5199      1510485 :               machine_mode mode1 = GET_MODE (subop1);
    5200      1510485 :               int i0 = INTVAL (XVECEXP (trueop1, 0, 0));
    5201      1510485 :               if (i0 == 0 && !side_effects_p (op1) && mode == mode0)
    5202              :                 {
    5203       961760 :                   bool success = true;
    5204       961760 :                   for (int i = 1; i < l0; ++i)
    5205              :                     {
    5206       961431 :                       rtx j = XVECEXP (trueop1, 0, i);
    5207       961431 :                       if (!CONST_INT_P (j) || INTVAL (j) != i)
    5208              :                         {
    5209              :                           success = false;
    5210              :                           break;
    5211              :                         }
    5212              :                     }
    5213       878286 :                   if (success)
    5214              :                     return subop0;
    5215              :                 }
    5216      1510156 :               if (i0 == l0 && !side_effects_p (op0) && mode == mode1)
    5217              :                 {
    5218          590 :                   bool success = true;
    5219          590 :                   for (int i = 1; i < l1; ++i)
    5220              :                     {
    5221          543 :                       rtx j = XVECEXP (trueop1, 0, i);
    5222          543 :                       if (!CONST_INT_P (j) || INTVAL (j) != i0 + i)
    5223              :                         {
    5224              :                           success = false;
    5225              :                           break;
    5226              :                         }
    5227              :                     }
    5228           76 :                   if (success)
    5229              :                     return subop1;
    5230              :                 }
    5231              :             }
    5232              : 
    5233              :           /* Simplify vec_select of a subreg of X to just a vec_select of X
    5234              :              when X has same component mode as vec_select.  */
    5235      2445493 :           unsigned HOST_WIDE_INT subreg_offset = 0;
    5236      2445493 :           if (GET_CODE (trueop0) == SUBREG
    5237       364862 :               && GET_MODE_INNER (mode)
    5238       729724 :                  == GET_MODE_INNER (GET_MODE (SUBREG_REG (trueop0)))
    5239        28688 :               && GET_MODE_NUNITS (mode).is_constant (&l1)
    5240      2810355 :               && constant_multiple_p (subreg_memory_offset (trueop0),
    5241        28688 :                                       GET_MODE_UNIT_BITSIZE (mode),
    5242              :                                       &subreg_offset))
    5243              :             {
    5244        14344 :               poly_uint64 nunits
    5245        28688 :                 = GET_MODE_NUNITS (GET_MODE (SUBREG_REG (trueop0)));
    5246        14344 :               bool success = true;
    5247        81398 :               for (int i = 0; i != l1; i++)
    5248              :                 {
    5249        77403 :                   rtx idx = XVECEXP (trueop1, 0, i);
    5250        77403 :                   if (!CONST_INT_P (idx)
    5251        77403 :                       || maybe_ge (UINTVAL (idx) + subreg_offset, nunits))
    5252              :                     {
    5253              :                       success = false;
    5254              :                       break;
    5255              :                     }
    5256              :                 }
    5257              : 
    5258        14344 :               if (success)
    5259              :                 {
    5260         3995 :                   rtx par = trueop1;
    5261         3995 :                   if (subreg_offset)
    5262              :                     {
    5263            0 :                       rtvec vec = rtvec_alloc (l1);
    5264            0 :                       for (int i = 0; i < l1; i++)
    5265            0 :                         RTVEC_ELT (vec, i)
    5266            0 :                           = GEN_INT (INTVAL (XVECEXP (trueop1, 0, i))
    5267              :                                      + subreg_offset);
    5268            0 :                       par = gen_rtx_PARALLEL (VOIDmode, vec);
    5269              :                     }
    5270         3995 :                   return gen_rtx_VEC_SELECT (mode, SUBREG_REG (trueop0), par);
    5271              :                 }
    5272              :             }
    5273              :         }
    5274              : 
    5275      3278259 :       if (XVECLEN (trueop1, 0) == 1
    5276       836845 :           && CONST_INT_P (XVECEXP (trueop1, 0, 0))
    5277       836845 :           && GET_CODE (trueop0) == VEC_CONCAT)
    5278              :         {
    5279         1372 :           rtx vec = trueop0;
    5280         2744 :           offset = INTVAL (XVECEXP (trueop1, 0, 0)) * GET_MODE_SIZE (mode);
    5281              : 
    5282              :           /* Try to find the element in the VEC_CONCAT.  */
    5283         1372 :           while (GET_MODE (vec) != mode
    5284         2744 :                  && GET_CODE (vec) == VEC_CONCAT)
    5285              :             {
    5286         1372 :               poly_int64 vec_size;
    5287              : 
    5288         1372 :               if (CONST_INT_P (XEXP (vec, 0)))
    5289              :                 {
    5290              :                   /* vec_concat of two const_ints doesn't make sense with
    5291              :                      respect to modes.  */
    5292            3 :                   if (CONST_INT_P (XEXP (vec, 1)))
    5293    380620196 :                     return 0;
    5294              : 
    5295            3 :                   vec_size = GET_MODE_SIZE (GET_MODE (trueop0))
    5296            9 :                              - GET_MODE_SIZE (GET_MODE (XEXP (vec, 1)));
    5297              :                 }
    5298              :               else
    5299         2738 :                 vec_size = GET_MODE_SIZE (GET_MODE (XEXP (vec, 0)));
    5300              : 
    5301         1372 :               if (known_lt (offset, vec_size))
    5302              :                 vec = XEXP (vec, 0);
    5303          316 :               else if (known_ge (offset, vec_size))
    5304              :                 {
    5305          316 :                   offset -= vec_size;
    5306          316 :                   vec = XEXP (vec, 1);
    5307              :                 }
    5308              :               else
    5309              :                 break;
    5310         1372 :               vec = avoid_constant_pool_reference (vec);
    5311              :             }
    5312              : 
    5313         1372 :           if (GET_MODE (vec) == mode)
    5314              :             return vec;
    5315              :         }
    5316              : 
    5317              :       /* If we select elements in a vec_merge that all come from the same
    5318              :          operand, select from that operand directly.  */
    5319      3277067 :       if (GET_CODE (op0) == VEC_MERGE)
    5320              :         {
    5321        10001 :           rtx trueop02 = avoid_constant_pool_reference (XEXP (op0, 2));
    5322        10001 :           if (CONST_INT_P (trueop02))
    5323              :             {
    5324         3278 :               unsigned HOST_WIDE_INT sel = UINTVAL (trueop02);
    5325         3278 :               bool all_operand0 = true;
    5326         3278 :               bool all_operand1 = true;
    5327        10892 :               for (int i = 0; i < XVECLEN (trueop1, 0); i++)
    5328              :                 {
    5329         7614 :                   rtx j = XVECEXP (trueop1, 0, i);
    5330         7614 :                   if (sel & (HOST_WIDE_INT_1U << UINTVAL (j)))
    5331              :                     all_operand1 = false;
    5332              :                   else
    5333         3392 :                     all_operand0 = false;
    5334              :                 }
    5335         3278 :               if (all_operand0 && !side_effects_p (XEXP (op0, 1)))
    5336         1443 :                 return simplify_gen_binary (VEC_SELECT, mode, XEXP (op0, 0), op1);
    5337         1835 :               if (all_operand1 && !side_effects_p (XEXP (op0, 0)))
    5338           47 :                 return simplify_gen_binary (VEC_SELECT, mode, XEXP (op0, 1), op1);
    5339              :             }
    5340              :         }
    5341              : 
    5342              :       /* If we have two nested selects that are inverses of each
    5343              :          other, replace them with the source operand.  */
    5344      3275577 :       if (GET_CODE (trueop0) == VEC_SELECT
    5345        70433 :           && GET_MODE (XEXP (trueop0, 0)) == mode)
    5346              :         {
    5347         1275 :           rtx op0_subop1 = XEXP (trueop0, 1);
    5348         1275 :           gcc_assert (GET_CODE (op0_subop1) == PARALLEL);
    5349         2550 :           gcc_assert (known_eq (XVECLEN (trueop1, 0), GET_MODE_NUNITS (mode)));
    5350              : 
    5351              :           /* Apply the outer ordering vector to the inner one.  (The inner
    5352              :              ordering vector is expressly permitted to be of a different
    5353              :              length than the outer one.)  If the result is { 0, 1, ..., n-1 }
    5354              :              then the two VEC_SELECTs cancel.  */
    5355         1581 :           for (int i = 0; i < XVECLEN (trueop1, 0); ++i)
    5356              :             {
    5357         1581 :               rtx x = XVECEXP (trueop1, 0, i);
    5358         1581 :               if (!CONST_INT_P (x))
    5359              :                 return 0;
    5360         1581 :               rtx y = XVECEXP (op0_subop1, 0, INTVAL (x));
    5361         1581 :               if (!CONST_INT_P (y) || i != INTVAL (y))
    5362              :                 return 0;
    5363              :             }
    5364              :           return XEXP (trueop0, 0);
    5365              :         }
    5366              : 
    5367              :       return 0;
    5368      4091474 :     case VEC_CONCAT:
    5369      4091474 :       {
    5370      4091474 :         machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
    5371      4091474 :                                       ? GET_MODE (trueop0)
    5372      4091474 :                                       : GET_MODE_INNER (mode));
    5373      4091474 :         machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
    5374      4091474 :                                       ? GET_MODE (trueop1)
    5375      4091474 :                                       : GET_MODE_INNER (mode));
    5376              : 
    5377      4091474 :         gcc_assert (VECTOR_MODE_P (mode));
    5378     16365896 :         gcc_assert (known_eq (GET_MODE_SIZE (op0_mode)
    5379              :                               + GET_MODE_SIZE (op1_mode),
    5380              :                               GET_MODE_SIZE (mode)));
    5381              : 
    5382      4091474 :         if (VECTOR_MODE_P (op0_mode))
    5383      5679477 :           gcc_assert (GET_MODE_INNER (mode)
    5384              :                       == GET_MODE_INNER (op0_mode));
    5385              :         else
    5386      4396630 :           gcc_assert (GET_MODE_INNER (mode) == op0_mode);
    5387              : 
    5388      4091474 :         if (VECTOR_MODE_P (op1_mode))
    5389      5679477 :           gcc_assert (GET_MODE_INNER (mode)
    5390              :                       == GET_MODE_INNER (op1_mode));
    5391              :         else
    5392      4396630 :           gcc_assert (GET_MODE_INNER (mode) == op1_mode);
    5393              : 
    5394      4091474 :         unsigned int n_elts, in_n_elts;
    5395      4091474 :         if ((GET_CODE (trueop0) == CONST_VECTOR
    5396      4091474 :              || CONST_SCALAR_INT_P (trueop0)
    5397      3936488 :              || CONST_DOUBLE_AS_FLOAT_P (trueop0))
    5398       156423 :             && (GET_CODE (trueop1) == CONST_VECTOR
    5399       156423 :                 || CONST_SCALAR_INT_P (trueop1)
    5400       156423 :                 || CONST_DOUBLE_AS_FLOAT_P (trueop1))
    5401            0 :             && GET_MODE_NUNITS (mode).is_constant (&n_elts)
    5402      4091474 :             && GET_MODE_NUNITS (op0_mode).is_constant (&in_n_elts))
    5403              :           {
    5404            0 :             rtvec v = rtvec_alloc (n_elts);
    5405            0 :             unsigned int i;
    5406            0 :             for (i = 0; i < n_elts; i++)
    5407              :               {
    5408            0 :                 if (i < in_n_elts)
    5409              :                   {
    5410            0 :                     if (!VECTOR_MODE_P (op0_mode))
    5411            0 :                       RTVEC_ELT (v, i) = trueop0;
    5412              :                     else
    5413            0 :                       RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, i);
    5414              :                   }
    5415              :                 else
    5416              :                   {
    5417            0 :                     if (!VECTOR_MODE_P (op1_mode))
    5418            0 :                       RTVEC_ELT (v, i) = trueop1;
    5419              :                     else
    5420            0 :                       RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop1,
    5421              :                                                            i - in_n_elts);
    5422              :                   }
    5423              :               }
    5424              : 
    5425            0 :             return gen_rtx_CONST_VECTOR (mode, v);
    5426              :           }
    5427              : 
    5428              :         /* Try to merge two VEC_SELECTs from the same vector into a single one.
    5429              :            Restrict the transformation to avoid generating a VEC_SELECT with a
    5430              :            mode unrelated to its operand.  */
    5431      4091474 :         if (GET_CODE (trueop0) == VEC_SELECT
    5432       132226 :             && GET_CODE (trueop1) == VEC_SELECT
    5433        28693 :             && rtx_equal_p (XEXP (trueop0, 0), XEXP (trueop1, 0))
    5434      4107542 :             && GET_MODE_INNER (GET_MODE (XEXP (trueop0, 0)))
    5435        32136 :                == GET_MODE_INNER(mode))
    5436              :           {
    5437        16068 :             rtx par0 = XEXP (trueop0, 1);
    5438        16068 :             rtx par1 = XEXP (trueop1, 1);
    5439        16068 :             int len0 = XVECLEN (par0, 0);
    5440        16068 :             int len1 = XVECLEN (par1, 0);
    5441        16068 :             rtvec vec = rtvec_alloc (len0 + len1);
    5442        99066 :             for (int i = 0; i < len0; i++)
    5443        82998 :               RTVEC_ELT (vec, i) = XVECEXP (par0, 0, i);
    5444        99066 :             for (int i = 0; i < len1; i++)
    5445        82998 :               RTVEC_ELT (vec, len0 + i) = XVECEXP (par1, 0, i);
    5446        16068 :             return simplify_gen_binary (VEC_SELECT, mode, XEXP (trueop0, 0),
    5447        16068 :                                         gen_rtx_PARALLEL (VOIDmode, vec));
    5448              :           }
    5449              :         /* (vec_concat:
    5450              :              (subreg_lowpart:N OP)
    5451              :              (vec_select:N OP P))  -->  OP when P selects the high half
    5452              :             of the OP.  */
    5453      4075406 :         if (GET_CODE (trueop0) == SUBREG
    5454       475900 :             && subreg_lowpart_p (trueop0)
    5455       475693 :             && GET_CODE (trueop1) == VEC_SELECT
    5456           49 :             && SUBREG_REG (trueop0) == XEXP (trueop1, 0)
    5457            0 :             && !side_effects_p (XEXP (trueop1, 0))
    5458      4075406 :             && vec_series_highpart_p (op1_mode, mode, XEXP (trueop1, 1)))
    5459            0 :           return XEXP (trueop1, 0);
    5460              :       }
    5461              :       return 0;
    5462              : 
    5463            0 :     default:
    5464            0 :       gcc_unreachable ();
    5465              :     }
    5466              : 
    5467    372785900 :   if (mode == GET_MODE (op0)
    5468    318879866 :       && mode == GET_MODE (op1)
    5469     97894971 :       && vec_duplicate_p (op0, &elt0)
    5470    372901762 :       && vec_duplicate_p (op1, &elt1))
    5471              :     {
    5472              :       /* Try applying the operator to ELT and see if that simplifies.
    5473              :          We can duplicate the result if so.
    5474              : 
    5475              :          The reason we don't use simplify_gen_binary is that it isn't
    5476              :          necessarily a win to convert things like:
    5477              : 
    5478              :            (plus:V (vec_duplicate:V (reg:S R1))
    5479              :                    (vec_duplicate:V (reg:S R2)))
    5480              : 
    5481              :          to:
    5482              : 
    5483              :            (vec_duplicate:V (plus:S (reg:S R1) (reg:S R2)))
    5484              : 
    5485              :          The first might be done entirely in vector registers while the
    5486              :          second might need a move between register files.  */
    5487          118 :       tem = simplify_binary_operation (code, GET_MODE_INNER (mode),
    5488              :                                        elt0, elt1);
    5489           59 :       if (tem)
    5490            2 :         return gen_vec_duplicate (mode, tem);
    5491              :     }
    5492              : 
    5493              :   return 0;
    5494              : }
    5495              : 
    5496              : /* Return true if binary operation OP distributes over addition in operand
    5497              :    OPNO, with the other operand being held constant.  OPNO counts from 1.  */
    5498              : 
    5499              : static bool
    5500         7168 : distributes_over_addition_p (rtx_code op, int opno)
    5501              : {
    5502            0 :   switch (op)
    5503              :     {
    5504              :     case PLUS:
    5505              :     case MINUS:
    5506              :     case MULT:
    5507              :       return true;
    5508              : 
    5509            0 :     case ASHIFT:
    5510            0 :       return opno == 1;
    5511              : 
    5512            0 :     default:
    5513            0 :       return false;
    5514              :     }
    5515              : }
    5516              : 
    5517              : rtx
    5518    476939605 : simplify_const_binary_operation (enum rtx_code code, machine_mode mode,
    5519              :                                  rtx op0, rtx op1)
    5520              : {
    5521    476939605 :   if (VECTOR_MODE_P (mode)
    5522     14753771 :       && code != VEC_CONCAT
    5523     10651890 :       && GET_CODE (op0) == CONST_VECTOR
    5524       188307 :       && GET_CODE (op1) == CONST_VECTOR)
    5525              :     {
    5526         7885 :       bool step_ok_p;
    5527         7885 :       if (CONST_VECTOR_STEPPED_P (op0)
    5528         7885 :           && CONST_VECTOR_STEPPED_P (op1))
    5529              :         /* We can operate directly on the encoding if:
    5530              : 
    5531              :               a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
    5532              :             implies
    5533              :               (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
    5534              : 
    5535              :            Addition and subtraction are the supported operators
    5536              :            for which this is true.  */
    5537          717 :         step_ok_p = (code == PLUS || code == MINUS);
    5538         7168 :       else if (CONST_VECTOR_STEPPED_P (op0))
    5539              :         /* We can operate directly on stepped encodings if:
    5540              : 
    5541              :              a3 - a2 == a2 - a1
    5542              :            implies:
    5543              :              (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
    5544              : 
    5545              :            which is true if (x -> x op c) distributes over addition.  */
    5546         1057 :         step_ok_p = distributes_over_addition_p (code, 1);
    5547              :       else
    5548              :         /* Similarly in reverse.  */
    5549         6111 :         step_ok_p = distributes_over_addition_p (code, 2);
    5550         7885 :       rtx_vector_builder builder;
    5551         7885 :       if (!builder.new_binary_operation (mode, op0, op1, step_ok_p))
    5552              :         return 0;
    5553              : 
    5554         7885 :       unsigned int count = builder.encoded_nelts ();
    5555        51272 :       for (unsigned int i = 0; i < count; i++)
    5556              :         {
    5557        87074 :           rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode),
    5558              :                                              CONST_VECTOR_ELT (op0, i),
    5559        43537 :                                              CONST_VECTOR_ELT (op1, i));
    5560        43537 :           if (!x || !valid_for_const_vector_p (mode, x))
    5561          150 :             return 0;
    5562        43387 :           builder.quick_push (x);
    5563              :         }
    5564         7735 :       return builder.build ();
    5565         7885 :     }
    5566              : 
    5567    476931720 :   if (VECTOR_MODE_P (mode)
    5568     14745886 :       && code == VEC_CONCAT
    5569      4101881 :       && (CONST_SCALAR_INT_P (op0)
    5570      3957344 :           || CONST_FIXED_P (op0)
    5571      3957344 :           || CONST_DOUBLE_AS_FLOAT_P (op0)
    5572      3954544 :           || CONST_VECTOR_P (op0))
    5573       166830 :       && (CONST_SCALAR_INT_P (op1)
    5574       164961 :           || CONST_DOUBLE_AS_FLOAT_P (op1)
    5575       163598 :           || CONST_FIXED_P (op1)
    5576       163598 :           || CONST_VECTOR_P (op1)))
    5577              :     {
    5578              :       /* Both inputs have a constant number of elements, so the result
    5579              :          must too.  */
    5580        10407 :       unsigned n_elts = GET_MODE_NUNITS (mode).to_constant ();
    5581        10407 :       rtvec v = rtvec_alloc (n_elts);
    5582              : 
    5583        10407 :       gcc_assert (n_elts >= 2);
    5584        10407 :       if (n_elts == 2)
    5585              :         {
    5586         3232 :           gcc_assert (GET_CODE (op0) != CONST_VECTOR);
    5587         3232 :           gcc_assert (GET_CODE (op1) != CONST_VECTOR);
    5588              : 
    5589         3232 :           RTVEC_ELT (v, 0) = op0;
    5590         3232 :           RTVEC_ELT (v, 1) = op1;
    5591              :         }
    5592              :       else
    5593              :         {
    5594         7175 :           unsigned op0_n_elts = GET_MODE_NUNITS (GET_MODE (op0)).to_constant ();
    5595         7175 :           unsigned op1_n_elts = GET_MODE_NUNITS (GET_MODE (op1)).to_constant ();
    5596         7175 :           unsigned i;
    5597              : 
    5598         7175 :           gcc_assert (GET_CODE (op0) == CONST_VECTOR);
    5599         7175 :           gcc_assert (GET_CODE (op1) == CONST_VECTOR);
    5600         7175 :           gcc_assert (op0_n_elts + op1_n_elts == n_elts);
    5601              : 
    5602        61331 :           for (i = 0; i < op0_n_elts; ++i)
    5603        54156 :             RTVEC_ELT (v, i) = CONST_VECTOR_ELT (op0, i);
    5604        61523 :           for (i = 0; i < op1_n_elts; ++i)
    5605        54348 :             RTVEC_ELT (v, op0_n_elts+i) = CONST_VECTOR_ELT (op1, i);
    5606              :         }
    5607              : 
    5608        10407 :       return gen_rtx_CONST_VECTOR (mode, v);
    5609              :     }
    5610              : 
    5611    465436804 :   if (VECTOR_MODE_P (mode)
    5612     14735479 :       && GET_CODE (op0) == CONST_VECTOR
    5613       192740 :       && (CONST_SCALAR_INT_P (op1) || CONST_DOUBLE_AS_FLOAT_P (op1))
    5614    476921313 :       && (CONST_VECTOR_DUPLICATE_P (op0)
    5615              :           || CONST_VECTOR_NUNITS (op0).is_constant ()))
    5616              :     {
    5617       138362 :       switch (code)
    5618              :         {
    5619       138362 :         case PLUS:
    5620       138362 :         case MINUS:
    5621       138362 :         case MULT:
    5622       138362 :         case DIV:
    5623       138362 :         case MOD:
    5624       138362 :         case UDIV:
    5625       138362 :         case UMOD:
    5626       138362 :         case AND:
    5627       138362 :         case IOR:
    5628       138362 :         case XOR:
    5629       138362 :         case SMIN:
    5630       138362 :         case SMAX:
    5631       138362 :         case UMIN:
    5632       138362 :         case UMAX:
    5633       138362 :         case LSHIFTRT:
    5634       138362 :         case ASHIFTRT:
    5635       138362 :         case ASHIFT:
    5636       138362 :         case ROTATE:
    5637       138362 :         case ROTATERT:
    5638       138362 :         case SS_PLUS:
    5639       138362 :         case US_PLUS:
    5640       138362 :         case SS_MINUS:
    5641       138362 :         case US_MINUS:
    5642       138362 :         case SS_ASHIFT:
    5643       138362 :         case US_ASHIFT:
    5644       138362 :         case COPYSIGN:
    5645       138362 :           break;
    5646              :         default:
    5647              :           return NULL_RTX;
    5648              :         }
    5649              : 
    5650       138362 :       unsigned int npatterns = (CONST_VECTOR_DUPLICATE_P (op0)
    5651       138362 :                                 ? CONST_VECTOR_NPATTERNS (op0)
    5652       146091 :                                 : CONST_VECTOR_NUNITS (op0).to_constant ());
    5653       138362 :       rtx_vector_builder builder (mode, npatterns, 1);
    5654       289851 :       for (unsigned i = 0; i < npatterns; i++)
    5655              :         {
    5656       302978 :           rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode),
    5657       151489 :                                              CONST_VECTOR_ELT (op0, i), op1);
    5658       151489 :           if (!x || !valid_for_const_vector_p (mode, x))
    5659            0 :             return 0;
    5660       151489 :           builder.quick_push (x);
    5661              :         }
    5662       138362 :       return builder.build ();
    5663              :     }
    5664              : 
    5665    476782951 :   if (SCALAR_FLOAT_MODE_P (mode)
    5666      6384588 :       && CONST_DOUBLE_AS_FLOAT_P (op0)
    5667        76006 :       && CONST_DOUBLE_AS_FLOAT_P (op1)
    5668        11648 :       && mode == GET_MODE (op0) && mode == GET_MODE (op1))
    5669              :     {
    5670        11648 :       if (code == AND
    5671              :           || code == IOR
    5672        11648 :           || code == XOR)
    5673              :         {
    5674         2516 :           long tmp0[4];
    5675         2516 :           long tmp1[4];
    5676         2516 :           REAL_VALUE_TYPE r;
    5677         2516 :           int i;
    5678              : 
    5679         2516 :           real_to_target (tmp0, CONST_DOUBLE_REAL_VALUE (op0),
    5680         2516 :                           GET_MODE (op0));
    5681         2516 :           real_to_target (tmp1, CONST_DOUBLE_REAL_VALUE (op1),
    5682         2516 :                           GET_MODE (op1));
    5683        12580 :           for (i = 0; i < 4; i++)
    5684              :             {
    5685        10064 :               switch (code)
    5686              :               {
    5687         5272 :               case AND:
    5688         5272 :                 tmp0[i] &= tmp1[i];
    5689         5272 :                 break;
    5690         2512 :               case IOR:
    5691         2512 :                 tmp0[i] |= tmp1[i];
    5692         2512 :                 break;
    5693         2280 :               case XOR:
    5694         2280 :                 tmp0[i] ^= tmp1[i];
    5695         2280 :                 break;
    5696              :               default:
    5697              :                 gcc_unreachable ();
    5698              :               }
    5699              :             }
    5700         2516 :            real_from_target (&r, tmp0, mode);
    5701         2516 :            return const_double_from_real_value (r, mode);
    5702              :         }
    5703         9132 :       else if (code == COPYSIGN)
    5704              :         {
    5705            0 :           REAL_VALUE_TYPE f0, f1;
    5706            0 :           real_convert (&f0, mode, CONST_DOUBLE_REAL_VALUE (op0));
    5707            0 :           real_convert (&f1, mode, CONST_DOUBLE_REAL_VALUE (op1));
    5708            0 :           real_copysign (&f0, &f1);
    5709            0 :           return const_double_from_real_value (f0, mode);
    5710              :         }
    5711              :       else
    5712              :         {
    5713         9132 :           REAL_VALUE_TYPE f0, f1, value, result;
    5714         9132 :           const REAL_VALUE_TYPE *opr0, *opr1;
    5715         9132 :           bool inexact;
    5716              : 
    5717         9132 :           opr0 = CONST_DOUBLE_REAL_VALUE (op0);
    5718         9132 :           opr1 = CONST_DOUBLE_REAL_VALUE (op1);
    5719              : 
    5720         9132 :           if (HONOR_SNANS (mode)
    5721         9132 :               && (REAL_VALUE_ISSIGNALING_NAN (*opr0)
    5722          803 :                   || REAL_VALUE_ISSIGNALING_NAN (*opr1)))
    5723           10 :             return 0;
    5724              : 
    5725         9122 :           real_convert (&f0, mode, opr0);
    5726         9122 :           real_convert (&f1, mode, opr1);
    5727              : 
    5728         9122 :           if (code == DIV
    5729         4232 :               && real_equal (&f1, &dconst0)
    5730        12891 :               && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
    5731         3765 :             return 0;
    5732              : 
    5733        26684 :           if (MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
    5734         5265 :               && flag_trapping_math
    5735         5189 :               && REAL_VALUE_ISINF (f0) && REAL_VALUE_ISINF (f1))
    5736              :             {
    5737            9 :               int s0 = REAL_VALUE_NEGATIVE (f0);
    5738            9 :               int s1 = REAL_VALUE_NEGATIVE (f1);
    5739              : 
    5740            9 :               switch (code)
    5741              :                 {
    5742            0 :                 case PLUS:
    5743              :                   /* Inf + -Inf = NaN plus exception.  */
    5744            0 :                   if (s0 != s1)
    5745              :                     return 0;
    5746              :                   break;
    5747            0 :                 case MINUS:
    5748              :                   /* Inf - Inf = NaN plus exception.  */
    5749            0 :                   if (s0 == s1)
    5750              :                     return 0;
    5751              :                   break;
    5752              :                 case DIV:
    5753              :                   /* Inf / Inf = NaN plus exception.  */
    5754              :                   return 0;
    5755              :                 default:
    5756              :                   break;
    5757              :                 }
    5758              :             }
    5759              : 
    5760         7932 :           if (code == MULT && MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode)
    5761         1949 :               && flag_trapping_math
    5762         7251 :               && ((REAL_VALUE_ISINF (f0) && real_equal (&f1, &dconst0))
    5763         1895 :                   || (REAL_VALUE_ISINF (f1)
    5764           10 :                       && real_equal (&f0, &dconst0))))
    5765              :             /* Inf * 0 = NaN plus exception.  */
    5766           18 :             return 0;
    5767              : 
    5768         5330 :           inexact = real_arithmetic (&value, rtx_to_tree_code (code),
    5769              :                                      &f0, &f1);
    5770         5330 :           real_convert (&result, mode, &value);
    5771              : 
    5772              :           /* Don't constant fold this floating point operation if
    5773              :              the result has overflowed and flag_trapping_math.  */
    5774              : 
    5775         5330 :           if (flag_trapping_math
    5776        20648 :               && MODE_HAS_INFINITIES (mode)
    5777         5162 :               && REAL_VALUE_ISINF (result)
    5778         1104 :               && !REAL_VALUE_ISINF (f0)
    5779         6420 :               && !REAL_VALUE_ISINF (f1))
    5780              :             /* Overflow plus exception.  */
    5781         1090 :             return 0;
    5782              : 
    5783              :           /* Don't constant fold this floating point operation if the
    5784              :              result may dependent upon the run-time rounding mode and
    5785              :              flag_rounding_math is set, or if GCC's software emulation
    5786              :              is unable to accurately represent the result.  */
    5787              : 
    5788         4240 :           if ((flag_rounding_math
    5789        26999 :                || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
    5790         4240 :               && (inexact || !real_identical (&result, &value)))
    5791          378 :             return NULL_RTX;
    5792              : 
    5793         3862 :           return const_double_from_real_value (result, mode);
    5794              :         }
    5795              :     }
    5796              : 
    5797              :   /* We can fold some multi-word operations.  */
    5798    476771303 :   scalar_int_mode int_mode;
    5799    476771303 :   if (is_a <scalar_int_mode> (mode, &int_mode)
    5800    407509165 :       && CONST_SCALAR_INT_P (op0)
    5801     39854780 :       && CONST_SCALAR_INT_P (op1)
    5802     33277150 :       && GET_MODE_PRECISION (int_mode) <= MAX_BITSIZE_MODE_ANY_INT)
    5803              :     {
    5804     33277150 :       wide_int result;
    5805     33277150 :       wi::overflow_type overflow;
    5806     33277150 :       rtx_mode_t pop0 = rtx_mode_t (op0, int_mode);
    5807     33277150 :       rtx_mode_t pop1 = rtx_mode_t (op1, int_mode);
    5808              : 
    5809              : #if TARGET_SUPPORTS_WIDE_INT == 0
    5810              :       /* This assert keeps the simplification from producing a result
    5811              :          that cannot be represented in a CONST_DOUBLE but a lot of
    5812              :          upstream callers expect that this function never fails to
    5813              :          simplify something and so you if you added this to the test
    5814              :          above the code would die later anyway.  If this assert
    5815              :          happens, you just need to make the port support wide int.  */
    5816              :       gcc_assert (GET_MODE_PRECISION (int_mode) <= HOST_BITS_PER_DOUBLE_INT);
    5817              : #endif
    5818     33277150 :       switch (code)
    5819              :         {
    5820      1121233 :         case MINUS:
    5821      1121233 :           result = wi::sub (pop0, pop1);
    5822      1121233 :           break;
    5823              : 
    5824     26049477 :         case PLUS:
    5825     26049477 :           result = wi::add (pop0, pop1);
    5826     26049477 :           break;
    5827              : 
    5828       315869 :         case MULT:
    5829       315869 :           result = wi::mul (pop0, pop1);
    5830       315869 :           break;
    5831              : 
    5832         6196 :         case DIV:
    5833         6196 :           result = wi::div_trunc (pop0, pop1, SIGNED, &overflow);
    5834         6196 :           if (overflow)
    5835              :             return NULL_RTX;
    5836              :           break;
    5837              : 
    5838          251 :         case MOD:
    5839          251 :           result = wi::mod_trunc (pop0, pop1, SIGNED, &overflow);
    5840          251 :           if (overflow)
    5841              :             return NULL_RTX;
    5842              :           break;
    5843              : 
    5844         6437 :         case UDIV:
    5845         6437 :           result = wi::div_trunc (pop0, pop1, UNSIGNED, &overflow);
    5846         6437 :           if (overflow)
    5847              :             return NULL_RTX;
    5848              :           break;
    5849              : 
    5850        16121 :         case UMOD:
    5851        16121 :           result = wi::mod_trunc (pop0, pop1, UNSIGNED, &overflow);
    5852        16121 :           if (overflow)
    5853              :             return NULL_RTX;
    5854              :           break;
    5855              : 
    5856       588974 :         case AND:
    5857       588974 :           result = wi::bit_and (pop0, pop1);
    5858       588974 :           break;
    5859              : 
    5860       278728 :         case IOR:
    5861       278728 :           result = wi::bit_or (pop0, pop1);
    5862       278728 :           break;
    5863              : 
    5864        69957 :         case XOR:
    5865        69957 :           result = wi::bit_xor (pop0, pop1);
    5866        69957 :           break;
    5867              : 
    5868         1761 :         case SMIN:
    5869         1761 :           result = wi::smin (pop0, pop1);
    5870         1761 :           break;
    5871              : 
    5872         1849 :         case SMAX:
    5873         1849 :           result = wi::smax (pop0, pop1);
    5874         1849 :           break;
    5875              : 
    5876         3366 :         case UMIN:
    5877         3366 :           result = wi::umin (pop0, pop1);
    5878         3366 :           break;
    5879              : 
    5880         2948 :         case UMAX:
    5881         2948 :           result = wi::umax (pop0, pop1);
    5882         2948 :           break;
    5883              : 
    5884      4770942 :         case LSHIFTRT:
    5885      4770942 :         case ASHIFTRT:
    5886      4770942 :         case ASHIFT:
    5887      4770942 :         case SS_ASHIFT:
    5888      4770942 :         case US_ASHIFT:
    5889      4770942 :           {
    5890              :             /* The shift count might be in SImode while int_mode might
    5891              :                be narrower.  On IA-64 it is even DImode.  If the shift
    5892              :                count is too large and doesn't fit into int_mode, we'd
    5893              :                ICE.  So, if int_mode is narrower than
    5894              :                HOST_BITS_PER_WIDE_INT, use DImode for the shift count.  */
    5895      4770942 :             if (GET_MODE (op1) == VOIDmode
    5896      4770942 :                 && GET_MODE_PRECISION (int_mode) < HOST_BITS_PER_WIDE_INT)
    5897      1884568 :               pop1 = rtx_mode_t (op1, DImode);
    5898              : 
    5899      4770942 :             wide_int wop1 = pop1;
    5900      4770942 :             if (SHIFT_COUNT_TRUNCATED)
    5901              :               wop1 = wi::umod_trunc (wop1, GET_MODE_PRECISION (int_mode));
    5902      4770942 :             else if (wi::geu_p (wop1, GET_MODE_PRECISION (int_mode)))
    5903          132 :               return NULL_RTX;
    5904              : 
    5905      4770810 :             switch (code)
    5906              :               {
    5907      2731785 :               case LSHIFTRT:
    5908      2731785 :                 result = wi::lrshift (pop0, wop1);
    5909      2731785 :                 break;
    5910              : 
    5911        66226 :               case ASHIFTRT:
    5912        66226 :                 result = wi::arshift (pop0, wop1);
    5913        66226 :                 break;
    5914              : 
    5915      1972799 :               case ASHIFT:
    5916      1972799 :                 result = wi::lshift (pop0, wop1);
    5917      1972799 :                 break;
    5918              : 
    5919            0 :               case SS_ASHIFT:
    5920            0 :                 if (wi::leu_p (wop1, wi::clrsb (pop0)))
    5921            0 :                   result = wi::lshift (pop0, wop1);
    5922            0 :                 else if (wi::neg_p (pop0))
    5923            0 :                   result = wi::min_value (int_mode, SIGNED);
    5924              :                 else
    5925            0 :                   result = wi::max_value (int_mode, SIGNED);
    5926              :                 break;
    5927              : 
    5928            0 :               case US_ASHIFT:
    5929            0 :                 if (wi::eq_p (pop0, 0))
    5930            0 :                   result = pop0;
    5931            0 :                 else if (wi::leu_p (wop1, wi::clz (pop0)))
    5932            0 :                   result = wi::lshift (pop0, wop1);
    5933              :                 else
    5934            0 :                   result = wi::max_value (int_mode, UNSIGNED);
    5935              :                 break;
    5936              : 
    5937            0 :               default:
    5938            0 :                 gcc_unreachable ();
    5939              :               }
    5940      4770810 :             break;
    5941      4770942 :           }
    5942        34477 :         case ROTATE:
    5943        34477 :         case ROTATERT:
    5944        34477 :           {
    5945              :             /* The rotate count might be in SImode while int_mode might
    5946              :                be narrower.  On IA-64 it is even DImode.  If the shift
    5947              :                count is too large and doesn't fit into int_mode, we'd
    5948              :                ICE.  So, if int_mode is narrower than
    5949              :                HOST_BITS_PER_WIDE_INT, use DImode for the shift count.  */
    5950        34477 :             if (GET_MODE (op1) == VOIDmode
    5951        34477 :                 && GET_MODE_PRECISION (int_mode) < HOST_BITS_PER_WIDE_INT)
    5952        27505 :               pop1 = rtx_mode_t (op1, DImode);
    5953              : 
    5954        34477 :             if (wi::neg_p (pop1))
    5955              :               return NULL_RTX;
    5956              : 
    5957        34377 :             switch (code)
    5958              :               {
    5959        11569 :               case ROTATE:
    5960        11569 :                 result = wi::lrotate (pop0, pop1);
    5961        11569 :                 break;
    5962              : 
    5963        22808 :               case ROTATERT:
    5964        22808 :                 result = wi::rrotate (pop0, pop1);
    5965        22808 :                 break;
    5966              : 
    5967            0 :               default:
    5968            0 :                 gcc_unreachable ();
    5969              :               }
    5970              :             break;
    5971              :           }
    5972              : 
    5973         2270 :         case SS_PLUS:
    5974         2270 :           result = wi::add (pop0, pop1, SIGNED, &overflow);
    5975         4484 :  clamp_signed_saturation:
    5976         4484 :           if (overflow == wi::OVF_OVERFLOW)
    5977          314 :             result = wi::max_value (GET_MODE_PRECISION (int_mode), SIGNED);
    5978         4170 :           else if (overflow == wi::OVF_UNDERFLOW)
    5979          278 :             result = wi::min_value (GET_MODE_PRECISION (int_mode), SIGNED);
    5980         3892 :           else if (overflow != wi::OVF_NONE)
    5981              :             return NULL_RTX;
    5982              :           break;
    5983              : 
    5984         2220 :         case US_PLUS:
    5985         2220 :           result = wi::add (pop0, pop1, UNSIGNED, &overflow);
    5986         2220 :  clamp_unsigned_saturation:
    5987         2220 :           if (overflow != wi::OVF_NONE)
    5988          461 :             result = wi::max_value (GET_MODE_PRECISION (int_mode), UNSIGNED);
    5989              :           break;
    5990              : 
    5991         2214 :         case SS_MINUS:
    5992         2214 :           result = wi::sub (pop0, pop1, SIGNED, &overflow);
    5993         2214 :           goto clamp_signed_saturation;
    5994              : 
    5995         1852 :         case US_MINUS:
    5996         1852 :           result = wi::sub (pop0, pop1, UNSIGNED, &overflow);
    5997         1852 :           if (overflow != wi::OVF_NONE)
    5998         1203 :             result = wi::min_value (GET_MODE_PRECISION (int_mode), UNSIGNED);
    5999              :           break;
    6000              : 
    6001            0 :         case SS_MULT:
    6002            0 :           result = wi::mul (pop0, pop1, SIGNED, &overflow);
    6003            0 :           goto clamp_signed_saturation;
    6004              : 
    6005            0 :         case US_MULT:
    6006            0 :           result = wi::mul (pop0, pop1, UNSIGNED, &overflow);
    6007            0 :           goto clamp_unsigned_saturation;
    6008              : 
    6009            8 :         case SMUL_HIGHPART:
    6010            8 :           result = wi::mul_high (pop0, pop1, SIGNED);
    6011            8 :           break;
    6012              : 
    6013            0 :         case UMUL_HIGHPART:
    6014            0 :           result = wi::mul_high (pop0, pop1, UNSIGNED);
    6015            0 :           break;
    6016              : 
    6017              :         default:
    6018              :           return NULL_RTX;
    6019              :         }
    6020     33276377 :       return immed_wide_int_const (result, int_mode);
    6021     33277150 :     }
    6022              : 
    6023              :   /* Handle polynomial integers.  */
    6024              :   if (NUM_POLY_INT_COEFFS > 1
    6025              :       && is_a <scalar_int_mode> (mode, &int_mode)
    6026              :       && poly_int_rtx_p (op0)
    6027              :       && poly_int_rtx_p (op1))
    6028              :     {
    6029              :       poly_wide_int result;
    6030              :       switch (code)
    6031              :         {
    6032              :         case PLUS:
    6033              :           result = wi::to_poly_wide (op0, mode) + wi::to_poly_wide (op1, mode);
    6034              :           break;
    6035              : 
    6036              :         case MINUS:
    6037              :           result = wi::to_poly_wide (op0, mode) - wi::to_poly_wide (op1, mode);
    6038              :           break;
    6039              : 
    6040              :         case MULT:
    6041              :           if (CONST_SCALAR_INT_P (op1))
    6042              :             result = wi::to_poly_wide (op0, mode) * rtx_mode_t (op1, mode);
    6043              :           else
    6044              :             return NULL_RTX;
    6045              :           break;
    6046              : 
    6047              :         case ASHIFT:
    6048              :           if (CONST_SCALAR_INT_P (op1))
    6049              :             {
    6050              :               wide_int shift
    6051              :                 = rtx_mode_t (op1,
    6052              :                               GET_MODE (op1) == VOIDmode
    6053              :                               && (GET_MODE_PRECISION (int_mode)
    6054              :                                   < HOST_BITS_PER_WIDE_INT)
    6055              :                               ? DImode : mode);
    6056              :               if (SHIFT_COUNT_TRUNCATED)
    6057              :                 shift = wi::umod_trunc (shift, GET_MODE_PRECISION (int_mode));
    6058              :               else if (wi::geu_p (shift, GET_MODE_PRECISION (int_mode)))
    6059              :                 return NULL_RTX;
    6060              :               result = wi::to_poly_wide (op0, mode) << shift;
    6061              :             }
    6062              :           else
    6063              :             return NULL_RTX;
    6064              :           break;
    6065              : 
    6066              :         case IOR:
    6067              :           if (!CONST_SCALAR_INT_P (op1)
    6068              :               || !can_ior_p (wi::to_poly_wide (op0, mode),
    6069              :                              rtx_mode_t (op1, mode), &result))
    6070              :             return NULL_RTX;
    6071              :           break;
    6072              : 
    6073              :         default:
    6074              :           return NULL_RTX;
    6075              :         }
    6076              :       return immed_wide_int_const (result, int_mode);
    6077              :     }
    6078              : 
    6079              :   return NULL_RTX;
    6080              : }
    6081              : 
    6082              : 
    6083              : 
    6084              : /* Return a positive integer if X should sort after Y.  The value
    6085              :    returned is 1 if and only if X and Y are both regs.  */
    6086              : 
    6087              : static int
    6088    113342606 : simplify_plus_minus_op_data_cmp (rtx x, rtx y)
    6089              : {
    6090    113342606 :   int result;
    6091              : 
    6092    113342606 :   result = (commutative_operand_precedence (y)
    6093    113342606 :             - commutative_operand_precedence (x));
    6094    113342606 :   if (result)
    6095     79301756 :     return result + result;
    6096              : 
    6097              :   /* Group together equal REGs to do more simplification.  */
    6098     34040850 :   if (REG_P (x) && REG_P (y))
    6099      8640875 :     return REGNO (x) > REGNO (y);
    6100              : 
    6101              :   return 0;
    6102              : }
    6103              : 
    6104              : /* Simplify and canonicalize a PLUS or MINUS, at least one of whose
    6105              :    operands may be another PLUS or MINUS.
    6106              : 
     6107              :    Rather than test for specific cases, we do this by a brute-force method
    6108              :    and do all possible simplifications until no more changes occur.  Then
    6109              :    we rebuild the operation.
    6110              : 
    6111              :    May return NULL_RTX when no changes were made.  */
    6112              : 
    6113              : rtx
    6114     38091892 : simplify_context::simplify_plus_minus (rtx_code code, machine_mode mode,
    6115              :                                        rtx op0, rtx op1)
    6116              : {
    6117     38091892 :   struct simplify_plus_minus_op_data
    6118              :   {
    6119              :     rtx op;
    6120              :     short neg;
    6121              :   } ops[16];
    6122     38091892 :   rtx result, tem;
    6123     38091892 :   int n_ops = 2;
    6124     38091892 :   int changed, n_constants, canonicalized = 0;
    6125     38091892 :   int i, j;
    6126              : 
    6127     38091892 :   memset (ops, 0, sizeof ops);
    6128              : 
    6129              :   /* Set up the two operands and then expand them until nothing has been
    6130              :      changed.  If we run out of room in our array, give up; this should
    6131              :      almost never happen.  */
    6132              : 
    6133     38091892 :   ops[0].op = op0;
    6134     38091892 :   ops[0].neg = 0;
    6135     38091892 :   ops[1].op = op1;
    6136     38091892 :   ops[1].neg = (code == MINUS);
    6137              : 
    6138     77535347 :   do
    6139              :     {
    6140     77535347 :       changed = 0;
    6141     77535347 :       n_constants = 0;
    6142              : 
    6143    314007331 :       for (i = 0; i < n_ops; i++)
    6144              :         {
    6145    236471998 :           rtx this_op = ops[i].op;
    6146    236471998 :           int this_neg = ops[i].neg;
    6147    236471998 :           enum rtx_code this_code = GET_CODE (this_op);
    6148              : 
    6149    236471998 :           switch (this_code)
    6150              :             {
    6151     38585701 :             case PLUS:
    6152     38585701 :             case MINUS:
    6153     38585701 :               if (n_ops == ARRAY_SIZE (ops))
    6154              :                 return NULL_RTX;
    6155              : 
    6156     38585687 :               ops[n_ops].op = XEXP (this_op, 1);
    6157     38585687 :               ops[n_ops].neg = (this_code == MINUS) ^ this_neg;
    6158     38585687 :               n_ops++;
    6159              : 
    6160     38585687 :               ops[i].op = XEXP (this_op, 0);
    6161     38585687 :               changed = 1;
    6162              :               /* If this operand was negated then we will potentially
    6163              :                  canonicalize the expression.  Similarly if we don't
    6164              :                  place the operands adjacent we're re-ordering the
    6165              :                  expression and thus might be performing a
    6166              :                  canonicalization.  Ignore register re-ordering.
    6167              :                  ??? It might be better to shuffle the ops array here,
    6168              :                  but then (plus (plus (A, B), plus (C, D))) wouldn't
    6169              :                  be seen as non-canonical.  */
    6170     38585687 :               if (this_neg
    6171     37873062 :                   || (i != n_ops - 2
    6172     37191569 :                       && !(REG_P (ops[i].op) && REG_P (ops[n_ops - 1].op))))
    6173    236471984 :                 canonicalized = 1;
    6174              :               break;
    6175              : 
    6176         1675 :             case NEG:
    6177         1675 :               ops[i].op = XEXP (this_op, 0);
    6178         1675 :               ops[i].neg = ! this_neg;
    6179         1675 :               changed = 1;
    6180         1675 :               canonicalized = 1;
    6181         1675 :               break;
    6182              : 
    6183      1485505 :             case CONST:
    6184      1485505 :               if (n_ops != ARRAY_SIZE (ops)
    6185      1485505 :                   && GET_CODE (XEXP (this_op, 0)) == PLUS
    6186      1363231 :                   && CONSTANT_P (XEXP (XEXP (this_op, 0), 0))
    6187      1343606 :                   && CONSTANT_P (XEXP (XEXP (this_op, 0), 1)))
    6188              :                 {
    6189      1343606 :                   ops[i].op = XEXP (XEXP (this_op, 0), 0);
    6190      1343606 :                   ops[n_ops].op = XEXP (XEXP (this_op, 0), 1);
    6191      1343606 :                   ops[n_ops].neg = this_neg;
    6192      1343606 :                   n_ops++;
    6193      1343606 :                   changed = 1;
    6194      1343606 :                   canonicalized = 1;
    6195              :                 }
    6196              :               break;
    6197              : 
    6198        40590 :             case NOT:
    6199              :               /* ~a -> (-a - 1) */
    6200        40590 :               if (n_ops != ARRAY_SIZE (ops))
    6201              :                 {
    6202        40590 :                   ops[n_ops].op = CONSTM1_RTX (mode);
    6203        40590 :                   ops[n_ops++].neg = this_neg;
    6204        40590 :                   ops[i].op = XEXP (this_op, 0);
    6205        40590 :                   ops[i].neg = !this_neg;
    6206        40590 :                   changed = 1;
    6207        40590 :                   canonicalized = 1;
    6208              :                 }
    6209              :               break;
    6210              : 
    6211    118818683 :             CASE_CONST_SCALAR_INT:
    6212    118818683 :             case CONST_POLY_INT:
    6213    118818683 :               n_constants++;
    6214    118818683 :               if (this_neg)
    6215              :                 {
    6216      1175468 :                   ops[i].op = neg_poly_int_rtx (mode, this_op);
    6217      1175468 :                   ops[i].neg = 0;
    6218      1175468 :                   changed = 1;
    6219      1175468 :                   canonicalized = 1;
    6220              :                 }
    6221              :               break;
    6222              : 
    6223              :             default:
    6224              :               break;
    6225              :             }
    6226              :         }
    6227              :     }
    6228     77535333 :   while (changed);
    6229              : 
    6230     38091878 :   if (n_constants > 1)
    6231     23606514 :     canonicalized = 1;
    6232              : 
    6233     38091878 :   gcc_assert (n_ops >= 2);
    6234              : 
    6235              :   /* If we only have two operands, we can avoid the loops.  */
    6236     38091878 :   if (n_ops == 2)
    6237              :     {
    6238            0 :       enum rtx_code code = ops[0].neg || ops[1].neg ? MINUS : PLUS;
    6239            0 :       rtx lhs, rhs;
    6240              : 
    6241              :       /* Get the two operands.  Be careful with the order, especially for
    6242              :          the cases where code == MINUS.  */
    6243            0 :       if (ops[0].neg && ops[1].neg)
    6244              :         {
    6245            0 :           lhs = gen_rtx_NEG (mode, ops[0].op);
    6246            0 :           rhs = ops[1].op;
    6247              :         }
    6248            0 :       else if (ops[0].neg)
    6249              :         {
    6250            0 :           lhs = ops[1].op;
    6251            0 :           rhs = ops[0].op;
    6252              :         }
    6253              :       else
    6254              :         {
    6255            0 :           lhs = ops[0].op;
    6256            0 :           rhs = ops[1].op;
    6257              :         }
    6258              : 
    6259            0 :       return simplify_const_binary_operation (code, mode, lhs, rhs);
    6260              :     }
    6261              : 
    6262              :   /* Now simplify each pair of operands until nothing changes.  */
    6263     62482105 :   while (1)
    6264              :     {
    6265              :       /* Insertion sort is good enough for a small array.  */
    6266    165420219 :       for (i = 1; i < n_ops; i++)
    6267              :         {
    6268    102938114 :           struct simplify_plus_minus_op_data save;
    6269    102938114 :           int cmp;
    6270              : 
    6271    102938114 :           j = i - 1;
    6272    102938114 :           cmp = simplify_plus_minus_op_data_cmp (ops[j].op, ops[i].op);
    6273    102938114 :           if (cmp <= 0)
    6274     90703780 :             continue;
    6275              :           /* Just swapping registers doesn't count as canonicalization.  */
    6276     12234334 :           if (cmp != 1)
    6277      9242233 :             canonicalized = 1;
    6278              : 
    6279     12234334 :           save = ops[i];
    6280     14625999 :           do
    6281     14625999 :             ops[j + 1] = ops[j];
    6282     14625999 :           while (j--
    6283     26860333 :                  && simplify_plus_minus_op_data_cmp (ops[j].op, save.op) > 0);
    6284     12234334 :           ops[j + 1] = save;
    6285              :         }
    6286              : 
    6287     62482105 :       changed = 0;
    6288    165420219 :       for (i = n_ops - 1; i > 0; i--)
    6289    247233704 :         for (j = i - 1; j >= 0; j--)
    6290              :           {
    6291    145113001 :             rtx lhs = ops[j].op, rhs = ops[i].op;
    6292    145113001 :             int lneg = ops[j].neg, rneg = ops[i].neg;
    6293              : 
    6294    145113001 :             if (lhs != 0 && rhs != 0)
    6295              :               {
    6296    119879301 :                 enum rtx_code ncode = PLUS;
    6297              : 
    6298    119879301 :                 if (lneg != rneg)
    6299              :                   {
    6300     10634733 :                     ncode = MINUS;
    6301     10634733 :                     if (lneg)
    6302      6821377 :                       std::swap (lhs, rhs);
    6303              :                   }
    6304    109244568 :                 else if (swap_commutative_operands_p (lhs, rhs))
    6305       216471 :                   std::swap (lhs, rhs);
    6306              : 
    6307    119879301 :                 if ((GET_CODE (lhs) == CONST || CONST_INT_P (lhs))
    6308     28175991 :                     && (GET_CODE (rhs) == CONST || CONST_INT_P (rhs)))
    6309              :                   {
    6310     23756374 :                     rtx tem_lhs, tem_rhs;
    6311              : 
    6312     23756374 :                     tem_lhs = GET_CODE (lhs) == CONST ? XEXP (lhs, 0) : lhs;
    6313     23756374 :                     tem_rhs = GET_CODE (rhs) == CONST ? XEXP (rhs, 0) : rhs;
    6314     23756374 :                     tem = simplify_binary_operation (ncode, mode, tem_lhs,
    6315              :                                                      tem_rhs);
    6316              : 
    6317     23756374 :                     if (tem && !CONSTANT_P (tem))
    6318         1752 :                       tem = gen_rtx_CONST (GET_MODE (tem), tem);
    6319              :                   }
    6320              :                 else
    6321     96122927 :                   tem = simplify_binary_operation (ncode, mode, lhs, rhs);
    6322              : 
    6323     96124679 :                 if (tem)
    6324              :                   {
    6325              :                     /* Reject "simplifications" that just wrap the two
    6326              :                        arguments in a CONST.  Failure to do so can result
    6327              :                        in infinite recursion with simplify_binary_operation
    6328              :                        when it calls us to simplify CONST operations.
    6329              :                        Also, if we find such a simplification, don't try
    6330              :                        any more combinations with this rhs:  We must have
    6331              :                        something like symbol+offset, ie. one of the
    6332              :                        trivial CONST expressions we handle later.  */
    6333     25687985 :                     if (GET_CODE (tem) == CONST
    6334       819163 :                         && GET_CODE (XEXP (tem, 0)) == ncode
    6335       818614 :                         && XEXP (XEXP (tem, 0), 0) == lhs
    6336       817411 :                         && XEXP (XEXP (tem, 0), 1) == rhs)
    6337              :                       break;
    6338     24870574 :                     lneg &= rneg;
    6339     24870574 :                     if (GET_CODE (tem) == NEG)
    6340        45764 :                       tem = XEXP (tem, 0), lneg = !lneg;
    6341     24870574 :                     if (poly_int_rtx_p (tem) && lneg)
    6342            0 :                       tem = neg_poly_int_rtx (mode, tem), lneg = 0;
    6343              : 
    6344     24870574 :                     ops[i].op = tem;
    6345     24870574 :                     ops[i].neg = lneg;
    6346     24870574 :                     ops[j].op = NULL_RTX;
    6347     24870574 :                     changed = 1;
    6348     24870574 :                     canonicalized = 1;
    6349              :                   }
    6350              :               }
    6351              :           }
    6352              : 
    6353     62482105 :       if (!changed)
    6354              :         break;
    6355              : 
    6356              :       /* Pack all the operands to the lower-numbered entries.  */
    6357     98527577 :       for (i = 0, j = 0; j < n_ops; j++)
    6358     74137350 :         if (ops[j].op)
    6359              :           {
    6360     49266776 :             ops[i] = ops[j];
    6361     49266776 :             i++;
    6362              :           }
    6363              :       n_ops = i;
    6364              :     }
    6365              : 
    6366              :   /* If nothing changed, check that rematerialization of rtl instructions
    6367              :      is still required.  */
    6368     38091878 :   if (!canonicalized)
    6369              :     {
    6370              :       /* Perform rematerialization if only all operands are registers and
    6371              :          all operations are PLUS.  */
    6372              :       /* ??? Also disallow (non-global, non-frame) fixed registers to work
    6373              :          around rs6000 and how it uses the CA register.  See PR67145.  */
    6374      5121347 :       for (i = 0; i < n_ops; i++)
    6375      4132088 :         if (ops[i].neg
    6376      3853599 :             || !REG_P (ops[i].op)
    6377      7411700 :             || (REGNO (ops[i].op) < FIRST_PSEUDO_REGISTER
    6378       317290 :                 && fixed_regs[REGNO (ops[i].op)]
    6379          193 :                 && !global_regs[REGNO (ops[i].op)]
    6380          193 :                 && ops[i].op != frame_pointer_rtx
    6381          101 :                 && ops[i].op != arg_pointer_rtx
    6382           88 :                 && ops[i].op != stack_pointer_rtx))
    6383              :           return NULL_RTX;
    6384       989259 :       goto gen_result;
    6385              :     }
    6386              : 
    6387              :   /* Create (minus -C X) instead of (neg (const (plus X C))).  */
    6388     36250143 :   if (n_ops == 2
    6389     22679922 :       && CONST_INT_P (ops[1].op)
    6390     22223357 :       && CONSTANT_P (ops[0].op)
    6391          162 :       && ops[0].neg)
    6392           56 :     return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op);
    6393              : 
    6394              :   /* We suppressed creation of trivial CONST expressions in the
    6395              :      combination loop to avoid recursion.  Create one manually now.
    6396              :      The combination loop should have ensured that there is exactly
    6397              :      one CONST_INT, and the sort will have ensured that it is last
    6398              :      in the array and that any other constant will be next-to-last.  */
    6399              : 
    6400     36250087 :   if (n_ops > 1
    6401     35706827 :       && poly_int_rtx_p (ops[n_ops - 1].op)
    6402     69765623 :       && CONSTANT_P (ops[n_ops - 2].op))
    6403              :     {
    6404      1412144 :       rtx value = ops[n_ops - 1].op;
    6405      1412144 :       if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
    6406       680219 :         value = neg_poly_int_rtx (mode, value);
    6407      1412144 :       if (CONST_INT_P (value))
    6408              :         {
    6409      2824288 :           ops[n_ops - 2].op = plus_constant (mode, ops[n_ops - 2].op,
    6410      1412144 :                                              INTVAL (value));
    6411      1412144 :           n_ops--;
    6412              :         }
    6413              :     }
    6414              : 
    6415              :   /* Put a non-negated operand first, if possible.  */
    6416              : 
    6417     37956660 :   for (i = 0; i < n_ops && ops[i].neg; i++)
    6418      1706573 :     continue;
    6419     36250087 :   if (i == n_ops)
    6420         9508 :     ops[0].op = gen_rtx_NEG (mode, ops[0].op);
    6421     36240579 :   else if (i != 0)
    6422              :     {
    6423      1605288 :       tem = ops[0].op;
    6424      1605288 :       ops[0] = ops[i];
    6425      1605288 :       ops[i].op = tem;
    6426      1605288 :       ops[i].neg = 1;
    6427              :     }
    6428              : 
    6429              :   /* Now make the result by performing the requested operations.  */
    6430     34635291 :  gen_result:
    6431     37239346 :   result = ops[0].op;
    6432     87311318 :   for (i = 1; i < n_ops; i++)
    6433    100143944 :     result = gen_rtx_fmt_ee (ops[i].neg ? MINUS : PLUS,
    6434              :                              mode, result, ops[i].op);
    6435              : 
    6436              :   return result;
    6437      1706573 : }
    6438              : 
    6439              : /* Check whether an operand is suitable for calling simplify_plus_minus.  */
    6440              : static bool
    6441    514742225 : plus_minus_operand_p (const_rtx x)
    6442              : {
    6443    514742225 :   return GET_CODE (x) == PLUS
    6444    514742225 :          || GET_CODE (x) == MINUS
    6445    514742225 :          || (GET_CODE (x) == CONST
    6446      1852258 :              && GET_CODE (XEXP (x, 0)) == PLUS
    6447      1245020 :              && CONSTANT_P (XEXP (XEXP (x, 0), 0))
    6448      1172484 :              && CONSTANT_P (XEXP (XEXP (x, 0), 1)));
    6449              : }
    6450              : 
/* Like simplify_binary_operation except used for relational operators.
   MODE is the mode of the result. If MODE is VOIDmode, both operands must
   not also be VOIDmode.

   CMP_MODE specifies in which mode the comparison is done in, so it is
   the mode of the operands.  If CMP_MODE is VOIDmode, it is taken from
   the operands or, if both are VOIDmode, the operands are compared in
   "infinite precision".

   Returns the simplified rtx, or NULL_RTX if no simplification applies.  */
rtx
simplify_context::simplify_relational_operation (rtx_code code,
						 machine_mode mode,
						 machine_mode cmp_mode,
						 rtx op0, rtx op1)
{
  rtx tem, trueop0, trueop1;

  /* Infer the comparison mode from the operands when it was not given.  */
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op0);
  if (cmp_mode == VOIDmode)
    cmp_mode = GET_MODE (op1);

  /* First try to fold the comparison to a compile-time constant.  */
  tem = simplify_const_relational_operation (code, cmp_mode, op0, op1);
  if (tem)
    return relational_result (mode, cmp_mode, tem);

  /* For the following tests, ensure const0_rtx is op1.  */
  if (swap_commutative_operands_p (op0, op1)
      || (op0 == const0_rtx && op1 != const0_rtx))
    std::swap (op0, op1), code = swap_condition (code);

  /* If op0 is a compare, extract the comparison arguments from it.
     This recurses with CMP_MODE == VOIDmode so the mode is re-derived
     from the extracted operands.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    return simplify_gen_relational (code, mode, VOIDmode,
				    XEXP (op0, 0), XEXP (op0, 1));

  /* Condition-code modes carry no value to reason about; stop here.  */
  if (GET_MODE_CLASS (cmp_mode) == MODE_CC)
    return NULL_RTX;

  /* Look through constant-pool references before delegating to the
     worker routine that handles real (non-CC) comparisons.  */
  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);
  return simplify_relational_operation_1 (code, mode, cmp_mode,
					  trueop0, trueop1);
}
    6494              : 
/* This part of simplify_relational_operation is only used when CMP_MODE
   is not in class MODE_CC (i.e. it is a real comparison).

   MODE is the mode of the result, while CMP_MODE specifies in which
   mode the comparison is done in, so it is the mode of the operands.

   Each clause below attempts one canonicalization or simplification;
   most recurse through simplify_gen_relational on the rewritten form.
   Returns the simplified rtx, or NULL_RTX if nothing applies.  */

rtx
simplify_context::simplify_relational_operation_1 (rtx_code code,
						   machine_mode mode,
						   machine_mode cmp_mode,
						   rtx op0, rtx op1)
{
  enum rtx_code op0code = GET_CODE (op0);

  if (op1 == const0_rtx && COMPARISON_P (op0))
    {
      /* If op0 is a comparison, extract the comparison arguments
	 from it.  */
      if (code == NE)
	{
	  /* (ne (cmp a b) 0) is just (cmp a b).  */
	  if (GET_MODE (op0) == mode)
	    return simplify_rtx (op0);
	  else
	    return simplify_gen_relational (GET_CODE (op0), mode, VOIDmode,
					    XEXP (op0, 0), XEXP (op0, 1));
	}
      else if (code == EQ)
	{
	  /* (eq (cmp a b) 0) is the reversed comparison, when the
	     reversal is known to be valid.  */
	  enum rtx_code new_code = reversed_comparison_code (op0, NULL);
	  if (new_code != UNKNOWN)
	    return simplify_gen_relational (new_code, mode, VOIDmode,
					    XEXP (op0, 0), XEXP (op0, 1));
	}
    }

  /* (LTU/GEU (PLUS a C) C), where C is constant, can be simplified to
     (GEU/LTU a -C).  Likewise for (LTU/GEU (PLUS a C) a).  */
  if ((code == LTU || code == GEU)
      && GET_CODE (op0) == PLUS
      && CONST_INT_P (XEXP (op0, 1))
      && (rtx_equal_p (op1, XEXP (op0, 0))
	  || rtx_equal_p (op1, XEXP (op0, 1)))
      /* (LTU/GEU (PLUS a 0) 0) is not the same as (GEU/LTU a 0). */
      && XEXP (op0, 1) != const0_rtx)
    {
      rtx new_cmp
	= simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
      return simplify_gen_relational ((code == LTU ? GEU : LTU), mode,
				      cmp_mode, XEXP (op0, 0), new_cmp);
    }

  /* (GTU (PLUS a C) (C - 1)) where C is a non-zero constant can be
     transformed into (LTU a -C).  */
  if (code == GTU && GET_CODE (op0) == PLUS && CONST_INT_P (op1)
      && CONST_INT_P (XEXP (op0, 1))
      && (UINTVAL (op1) == UINTVAL (XEXP (op0, 1)) - 1)
      && XEXP (op0, 1) != const0_rtx)
    {
      rtx new_cmp
	= simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
      return simplify_gen_relational (LTU, mode, cmp_mode,
				       XEXP (op0, 0), new_cmp);
    }

  /* Canonicalize (LTU/GEU (PLUS a b) b) as (LTU/GEU (PLUS a b) a).  */
  if ((code == LTU || code == GEU)
      && GET_CODE (op0) == PLUS
      && rtx_equal_p (op1, XEXP (op0, 1))
      /* Don't recurse "infinitely" for (LTU/GEU (PLUS b b) b).  */
      && !rtx_equal_p (op1, XEXP (op0, 0)))
    return simplify_gen_relational (code, mode, cmp_mode, op0,
				    copy_rtx (XEXP (op0, 0)));

  if (op1 == const0_rtx)
    {
      /* Canonicalize (GTU x 0) as (NE x 0).  */
      if (code == GTU)
	return simplify_gen_relational (NE, mode, cmp_mode, op0, op1);
      /* Canonicalize (LEU x 0) as (EQ x 0).  */
      if (code == LEU)
	return simplify_gen_relational (EQ, mode, cmp_mode, op0, op1);

      /* (eq/ne (ior (ne a 0) b) 0) folds the inner flag value into the
	 IOR: (eq/ne (ior a b) 0).  */
      if ((code == NE || code == EQ)
	  /* Verify op0 is IOR */
	  && GET_CODE (op0) == IOR
	  /* only enters if op1 is 0 */
	  /* Verify IOR operand is NE */
	  && GET_CODE (XEXP (op0, 0)) == NE
	  && GET_MODE (XEXP (XEXP (op0, 0), 0)) == cmp_mode
	  /* Verify second NE operand is 0 */
	  && XEXP (XEXP (op0, 0), 1) == CONST0_RTX (cmp_mode))
	{
	  rtx t = gen_rtx_IOR (cmp_mode, XEXP (XEXP (op0, 0), 0), XEXP (op0, 1));
	  t = gen_rtx_fmt_ee (code, mode, t, CONST0_RTX (mode));
	  return t;
	}

    }
  else if (op1 == const1_rtx)
    {
      switch (code)
	{
	case GE:
	  /* Canonicalize (GE x 1) as (GT x 0).  */
	  return simplify_gen_relational (GT, mode, cmp_mode,
					  op0, const0_rtx);
	case GEU:
	  /* Canonicalize (GEU x 1) as (NE x 0).  */
	  return simplify_gen_relational (NE, mode, cmp_mode,
					  op0, const0_rtx);
	case LT:
	  /* Canonicalize (LT x 1) as (LE x 0).  */
	  return simplify_gen_relational (LE, mode, cmp_mode,
					  op0, const0_rtx);
	case LTU:
	  /* Canonicalize (LTU x 1) as (EQ x 0).  */
	  return simplify_gen_relational (EQ, mode, cmp_mode,
					  op0, const0_rtx);
	default:
	  break;
	}
    }
  else if (op1 == constm1_rtx)
    {
      /* Canonicalize (LE x -1) as (LT x 0).  */
      if (code == LE)
	return simplify_gen_relational (LT, mode, cmp_mode, op0, const0_rtx);
      /* Canonicalize (GT x -1) as (GE x 0).  */
      if (code == GT)
	return simplify_gen_relational (GE, mode, cmp_mode, op0, const0_rtx);
    }

  /* (eq/ne (plus x cst1) cst2) simplifies to (eq/ne x (cst2 - cst1))  */
  if ((code == EQ || code == NE)
      && (op0code == PLUS || op0code == MINUS)
      && CONSTANT_P (op1)
      && CONSTANT_P (XEXP (op0, 1))
      /* For floating point, only valid under -funsafe-math-optimizations
	 since the addition may round.  */
      && (INTEGRAL_MODE_P (cmp_mode) || flag_unsafe_math_optimizations))
    {
      rtx x = XEXP (op0, 0);
      rtx c = XEXP (op0, 1);
      enum rtx_code invcode = op0code == PLUS ? MINUS : PLUS;
      rtx tem = simplify_gen_binary (invcode, cmp_mode, op1, c);

      /* Detect an infinite recursive condition, where we oscillate at this
	 simplification case between:
	    A + B == C  <--->  C - B == A,
	 where A, B, and C are all constants with non-simplifiable expressions,
	 usually SYMBOL_REFs.  */
      if (GET_CODE (tem) == invcode
	  && CONSTANT_P (x)
	  && rtx_equal_p (c, XEXP (tem, 1)))
	return NULL_RTX;

      return simplify_gen_relational (code, mode, cmp_mode, x, tem);
    }

  /* (ne:SI (zero_extract:SI FOO (const_int 1) BAR) (const_int 0))) is
     the same as (zero_extract:SI FOO (const_int 1) BAR).  */
  scalar_int_mode int_mode, int_cmp_mode;
  if (code == NE
      && op1 == const0_rtx
      && is_int_mode (mode, &int_mode)
      && is_a <scalar_int_mode> (cmp_mode, &int_cmp_mode)
      /* ??? Work-around BImode bugs in the ia64 backend.  */
      && int_mode != BImode
      && int_cmp_mode != BImode
      && nonzero_bits (op0, int_cmp_mode) == 1
      && STORE_FLAG_VALUE == 1)
    /* OP0 is known to be 0 or 1, so (ne op0 0) is op0 itself, merely
       extended or truncated to the result mode.  */
    return GET_MODE_SIZE (int_mode) > GET_MODE_SIZE (int_cmp_mode)
	   ? simplify_gen_unary (ZERO_EXTEND, int_mode, op0, int_cmp_mode)
	   : lowpart_subreg (int_mode, op0, int_cmp_mode);

  /* (eq/ne (xor x y) 0) simplifies to (eq/ne x y).  */
  if ((code == EQ || code == NE)
      && op1 == const0_rtx
      && op0code == XOR)
    return simplify_gen_relational (code, mode, cmp_mode,
				    XEXP (op0, 0), XEXP (op0, 1));

  /* (eq/ne (xor x y) x) simplifies to (eq/ne y 0).  */
  /* NOTE(review): this and the next clause build CONST0_RTX (mode),
     whereas the analogous AND/IOR clauses below use CONST0_RTX
     (cmp_mode) for the operand being compared — confirm intentional.  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && rtx_equal_p (XEXP (op0, 0), op1)
      && !side_effects_p (XEXP (op0, 0)))
    return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 1),
				    CONST0_RTX (mode));

  /* Likewise (eq/ne (xor x y) y) simplifies to (eq/ne x 0).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && rtx_equal_p (XEXP (op0, 1), op1)
      && !side_effects_p (XEXP (op0, 1)))
    return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
				    CONST0_RTX (mode));

  /* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)).  */
  if ((code == EQ || code == NE)
      && op0code == XOR
      && CONST_SCALAR_INT_P (op1)
      && CONST_SCALAR_INT_P (XEXP (op0, 1)))
    return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
				    simplify_gen_binary (XOR, cmp_mode,
							 XEXP (op0, 1), op1));

  /* Simplify eq/ne (and/ior x y) x/y) for targets with a BICS instruction or
     constant folding if x/y is a constant.  */
  if ((code == EQ || code == NE)
      && (op0code == AND || op0code == IOR)
      && !side_effects_p (op1)
      && op1 != CONST0_RTX (cmp_mode))
    {
      /* Both (eq/ne (and x y) x) and (eq/ne (ior x y) y) simplify to
	 (eq/ne (and (not y) x) 0).  */
      if ((op0code == AND && rtx_equal_p (XEXP (op0, 0), op1))
	  || (op0code == IOR && rtx_equal_p (XEXP (op0, 1), op1)))
	{
	  rtx not_y = simplify_gen_unary (NOT, cmp_mode, XEXP (op0, 1),
					  cmp_mode);
	  rtx lhs = simplify_gen_binary (AND, cmp_mode, not_y, XEXP (op0, 0));

	  return simplify_gen_relational (code, mode, cmp_mode, lhs,
					  CONST0_RTX (cmp_mode));
	}

      /* Both (eq/ne (and x y) y) and (eq/ne (ior x y) x) simplify to
	 (eq/ne (and (not x) y) 0).  */
      if ((op0code == AND && rtx_equal_p (XEXP (op0, 1), op1))
	  || (op0code == IOR && rtx_equal_p (XEXP (op0, 0), op1)))
	{
	  rtx not_x = simplify_gen_unary (NOT, cmp_mode, XEXP (op0, 0),
					  cmp_mode);
	  rtx lhs = simplify_gen_binary (AND, cmp_mode, not_x, XEXP (op0, 1));

	  return simplify_gen_relational (code, mode, cmp_mode, lhs,
					  CONST0_RTX (cmp_mode));
	}
    }

  /* (eq/ne (bswap x) C1) simplifies to (eq/ne x C2) with C2 swapped.  */
  if ((code == EQ || code == NE)
      && GET_CODE (op0) == BSWAP
      && CONST_SCALAR_INT_P (op1))
    return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
				    simplify_gen_unary (BSWAP, cmp_mode,
							op1, cmp_mode));

  /* (eq/ne (bswap x) (bswap y)) simplifies to (eq/ne x y).  */
  if ((code == EQ || code == NE)
      && GET_CODE (op0) == BSWAP
      && GET_CODE (op1) == BSWAP)
    return simplify_gen_relational (code, mode, cmp_mode,
				    XEXP (op0, 0), XEXP (op1, 0));

  /* A popcount is zero/nonzero exactly when its argument is.  */
  if (op0code == POPCOUNT && op1 == const0_rtx)
    switch (code)
      {
      case EQ:
      case LE:
      case LEU:
	/* (eq (popcount x) (const_int 0)) -> (eq x (const_int 0)).  */
	return simplify_gen_relational (EQ, mode, GET_MODE (XEXP (op0, 0)),
					XEXP (op0, 0),
					CONST0_RTX (GET_MODE (XEXP (op0, 0))));

      case NE:
      case GT:
      case GTU:
	/* (ne (popcount x) (const_int 0)) -> (ne x (const_int 0)).  */
	return simplify_gen_relational (NE, mode, GET_MODE (XEXP (op0, 0)),
					XEXP (op0, 0),
					CONST0_RTX (GET_MODE (XEXP (op0, 0))));

      default:
	break;
      }

  /* (ne:SI (subreg:QI (ashift:SI x 7) 0) 0) -> (and:SI x 1).  */
  if (code == NE
      && op1 == const0_rtx
      && (op0code == TRUNCATE
	  || (partial_subreg_p (op0)
	      && subreg_lowpart_p (op0)))
      && SCALAR_INT_MODE_P (mode)
      && STORE_FLAG_VALUE == 1)
    {
      rtx tmp = XEXP (op0, 0);
      /* The shift must move the sign bit of the narrowed mode into the
	 low bit that survives the truncation/subreg.  */
      if (GET_CODE (tmp) == ASHIFT
	  && GET_MODE (tmp) == mode
	  && CONST_INT_P (XEXP (tmp, 1))
	  && is_int_mode (GET_MODE (op0), &int_mode)
	  && INTVAL (XEXP (tmp, 1)) == GET_MODE_PRECISION (int_mode) - 1)
	return simplify_gen_binary (AND, mode, XEXP (tmp, 0), const1_rtx);
    }

  /* For two unsigned booleans A and B:

     A >  B == ~B & A
     A >= B == ~B | A
     A <  B == ~A & B
     A <= B == ~A | B
     A == B == ~A ^ B (== ~B ^ A)
     A != B ==  A ^ B

     For signed comparisons, we have to take STORE_FLAG_VALUE into account,
     with the rules above applying for positive STORE_FLAG_VALUE and with
     the relations reversed for negative STORE_FLAG_VALUE.  */
  if (is_a<scalar_int_mode> (cmp_mode)
      && COMPARISON_P (op0)
      && COMPARISON_P (op1))
    {
      rtx t = NULL_RTX;
      if (code == GTU || code == (STORE_FLAG_VALUE > 0 ? GT : LT))
	t = simplify_logical_relational_operation (AND, mode, op1, op0, true);
      else if (code == GEU || code == (STORE_FLAG_VALUE > 0 ? GE : LE))
	t = simplify_logical_relational_operation (IOR, mode, op1, op0, true);
      else if (code == LTU || code == (STORE_FLAG_VALUE > 0 ? LT : GT))
	t = simplify_logical_relational_operation (AND, mode, op0, op1, true);
      else if (code == LEU || code == (STORE_FLAG_VALUE > 0 ? LE : GE))
	t = simplify_logical_relational_operation (IOR, mode, op0, op1, true);
      else if (code == EQ)
	t = simplify_logical_relational_operation (XOR, mode, op0, op1, true);
      else if (code == NE)
	t = simplify_logical_relational_operation (XOR, mode, op0, op1);
      if (t)
	return t;
    }

  return NULL_RTX;
}
    6825              : 
/* Bit flags describing what is known about the relative order of two
   compared values: equality, the signed ordering (CMP_LT/CMP_GT), and
   the unsigned ordering (CMP_LTU/CMP_GTU).  An OR of these flags is
   passed to comparison_result below.  */
enum
{
  CMP_EQ = 1,
  CMP_LT = 2,
  CMP_GT = 4,
  CMP_LTU = 8,
  CMP_GTU = 16
};
    6834              : 
    6835              : 
    6836              : /* Convert the known results for EQ, LT, GT, LTU, GTU contained in
    6837              :    KNOWN_RESULT to a CONST_INT, based on the requested comparison CODE
    6838              :    For KNOWN_RESULT to make sense it should be either CMP_EQ, or the
    6839              :    logical OR of one of (CMP_LT, CMP_GT) and one of (CMP_LTU, CMP_GTU).
    6840              :    For floating-point comparisons, assume that the operands were ordered.  */
    6841              : 
    6842              : static rtx
    6843       711196 : comparison_result (enum rtx_code code, int known_results)
    6844              : {
    6845       711196 :   switch (code)
    6846              :     {
    6847       131019 :     case EQ:
    6848       131019 :     case UNEQ:
    6849       131019 :       return (known_results & CMP_EQ) ? const_true_rtx : const0_rtx;
    6850       442787 :     case NE:
    6851       442787 :     case LTGT:
    6852       442787 :       return (known_results & CMP_EQ) ? const0_rtx : const_true_rtx;
    6853              : 
    6854         9497 :     case LT:
    6855         9497 :     case UNLT:
    6856         9497 :       return (known_results & CMP_LT) ? const_true_rtx : const0_rtx;
    6857         8547 :     case GE:
    6858         8547 :     case UNGE:
    6859         8547 :       return (known_results & CMP_LT) ? const0_rtx : const_true_rtx;
    6860              : 
    6861        12699 :     case GT:
    6862        12699 :     case UNGT:
    6863        12699 :       return (known_results & CMP_GT) ? const_true_rtx : const0_rtx;
    6864        14707 :     case LE:
    6865        14707 :     case UNLE:
    6866        14707 :       return (known_results & CMP_GT) ? const0_rtx : const_true_rtx;
    6867              : 
    6868        24389 :     case LTU:
    6869        24389 :       return (known_results & CMP_LTU) ? const_true_rtx : const0_rtx;
    6870         8828 :     case GEU:
    6871         8828 :       return (known_results & CMP_LTU) ? const0_rtx : const_true_rtx;
    6872              : 
    6873        47881 :     case GTU:
    6874        47881 :       return (known_results & CMP_GTU) ? const_true_rtx : const0_rtx;
    6875        10834 :     case LEU:
    6876        10834 :       return (known_results & CMP_GTU) ? const0_rtx : const_true_rtx;
    6877              : 
    6878            0 :     case ORDERED:
    6879            0 :       return const_true_rtx;
    6880            8 :     case UNORDERED:
    6881            8 :       return const0_rtx;
    6882            0 :     default:
    6883            0 :       gcc_unreachable ();
    6884              :     }
    6885              : }
    6886              : 
/* Check if the given comparison (done in the given MODE) is actually
   a tautology or a contradiction.  If the mode is VOIDmode, the
   comparison is done in "infinite precision".  If no simplification
   is possible, this function returns zero.  Otherwise, it returns
   either const_true_rtx or const0_rtx.  */

rtx
simplify_const_relational_operation (enum rtx_code code,
				     machine_mode mode,
				     rtx op0, rtx op1)
{
  rtx tem;
  rtx trueop0;
  rtx trueop1;

  gcc_assert (mode != VOIDmode
	      || (GET_MODE (op0) == VOIDmode
		  && GET_MODE (op1) == VOIDmode));

  /* We only handle MODE_CC comparisons that are COMPARE against zero.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && (op1 != const0_rtx
	  || GET_CODE (op0) != COMPARE))
    return NULL_RTX;

  /* If op0 is a compare, extract the comparison arguments from it.  */
  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
    {
      op1 = XEXP (op0, 1);
      op0 = XEXP (op0, 0);

      /* Take the comparison mode from whichever extracted operand has
	 one; with two VOIDmode operands there is nothing to work with.  */
      if (GET_MODE (op0) != VOIDmode)
	mode = GET_MODE (op0);
      else if (GET_MODE (op1) != VOIDmode)
	mode = GET_MODE (op1);
      else
	return 0;
    }

  /* We can't simplify MODE_CC values since we don't know what the
     actual comparison is.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Make sure the constant is second.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      std::swap (op0, op1);
      code = swap_condition (code);
    }

  /* Look through constant-pool references so that constant operands
     can be folded below.  */
  trueop0 = avoid_constant_pool_reference (op0);
  trueop1 = avoid_constant_pool_reference (op1);

  /* For integer comparisons of A and B maybe we can simplify A - B and can
     then simplify a comparison of that with zero.  If A and B are both either
     a register or a CONST_INT, this can't help; testing for these cases will
     prevent infinite recursion here and speed things up.

     We can only do this for EQ and NE comparisons as otherwise we may
     lose or introduce overflow which we cannot disregard as undefined as
     we do not know the signedness of the operation on either the left or
     the right hand side of the comparison.  */

  if (INTEGRAL_MODE_P (mode)
      && trueop1 != CONST0_RTX (mode)
      && (code == EQ || code == NE)
      && ! ((REG_P (op0)
	     || CONST_SCALAR_INT_P (trueop0)
	     || CONST_VECTOR_P (trueop0))
	    && (REG_P (op1)
		|| CONST_SCALAR_INT_P (trueop1)
		|| CONST_VECTOR_P (trueop1)))
      && (tem = simplify_binary_operation (MINUS, mode, op0, op1)) != 0
      /* We cannot do this if tem is a nonzero address.  */
      && ! nonzero_address_p (tem))
    return simplify_const_relational_operation (signed_condition (code),
						mode, tem, CONST0_RTX (mode));

  if (! HONOR_NANS (mode) && code == ORDERED)
    return const_true_rtx;

  if (! HONOR_NANS (mode) && code == UNORDERED)
    return const0_rtx;

  /* For modes without NaNs, if the two operands are equal, we know the
     result except if they have side-effects.  Even with NaNs we know
     the result of unordered comparisons and, if signaling NaNs are
     irrelevant, also the result of LT/GT/LTGT.  */
  if ((! HONOR_NANS (trueop0)
       || code == UNEQ || code == UNLE || code == UNGE
       || ((code == LT || code == GT || code == LTGT)
	   && ! HONOR_SNANS (trueop0)))
      && rtx_equal_p (trueop0, trueop1)
      && ! side_effects_p (trueop0))
    return comparison_result (code, CMP_EQ);

  /* If the operands are floating-point constants, see if we can fold
     the result.  */
  if (CONST_DOUBLE_AS_FLOAT_P (trueop0)
      && CONST_DOUBLE_AS_FLOAT_P (trueop1)
      && SCALAR_FLOAT_MODE_P (GET_MODE (trueop0)))
    {
      const REAL_VALUE_TYPE *d0 = CONST_DOUBLE_REAL_VALUE (trueop0);
      const REAL_VALUE_TYPE *d1 = CONST_DOUBLE_REAL_VALUE (trueop1);

      /* Comparisons are unordered iff at least one of the values is NaN.  */
      if (REAL_VALUE_ISNAN (*d0) || REAL_VALUE_ISNAN (*d1))
	switch (code)
	  {
	  case UNEQ:
	  case UNLT:
	  case UNGT:
	  case UNLE:
	  case UNGE:
	  case NE:
	  case UNORDERED:
	    return const_true_rtx;
	  case EQ:
	  case LT:
	  case GT:
	  case LE:
	  case GE:
	  case LTGT:
	  case ORDERED:
	    return const0_rtx;
	  default:
	    return 0;
	  }

      return comparison_result (code,
				(real_equal (d0, d1) ? CMP_EQ :
				 real_less (d0, d1) ? CMP_LT : CMP_GT));
    }

  /* Otherwise, see if the operands are both integers.  */
  if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
      && CONST_SCALAR_INT_P (trueop0) && CONST_SCALAR_INT_P (trueop1))
    {
      /* It would be nice if we really had a mode here.  However, the
	 largest int representable on the target is as good as
	 infinite.  */
      machine_mode cmode = (mode == VOIDmode) ? MAX_MODE_INT : mode;
      rtx_mode_t ptrueop0 = rtx_mode_t (trueop0, cmode);
      rtx_mode_t ptrueop1 = rtx_mode_t (trueop1, cmode);

      if (wi::eq_p (ptrueop0, ptrueop1))
	return comparison_result (code, CMP_EQ);
      else
	{
	  /* Record both the signed and the unsigned ordering;
	     comparison_result picks whichever one CODE needs.  */
	  int cr = wi::lts_p (ptrueop0, ptrueop1) ? CMP_LT : CMP_GT;
	  cr |= wi::ltu_p (ptrueop0, ptrueop1) ? CMP_LTU : CMP_GTU;
	  return comparison_result (code, cr);
	}
    }

  /* Optimize comparisons with upper and lower bounds.  */
  scalar_int_mode int_mode;
  if (CONST_INT_P (trueop1)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && HWI_COMPUTABLE_MODE_P (int_mode)
      && !side_effects_p (trueop0))
    {
      int sign;
      unsigned HOST_WIDE_INT nonzero = nonzero_bits (trueop0, int_mode);
      HOST_WIDE_INT val = INTVAL (trueop1);
      HOST_WIDE_INT mmin, mmax;

      /* Use the unsigned mode bounds for the unsigned comparison codes,
	 the signed bounds otherwise.  */
      if (code == GEU
	  || code == LEU
	  || code == GTU
	  || code == LTU)
	sign = 0;
      else
	sign = 1;

      /* Get a reduced range if the sign bit is zero.  */
      if (nonzero <= (GET_MODE_MASK (int_mode) >> 1))
	{
	  mmin = 0;
	  mmax = nonzero;
	}
      else
	{
	  rtx mmin_rtx, mmax_rtx;
	  get_mode_bounds (int_mode, sign, int_mode, &mmin_rtx, &mmax_rtx);

	  mmin = INTVAL (mmin_rtx);
	  mmax = INTVAL (mmax_rtx);
	  if (sign)
	    {
	      unsigned int sign_copies
		= num_sign_bit_copies (trueop0, int_mode);

	      /* Each known copy of the sign bit beyond the first halves
		 the possible signed range of trueop0.  */
	      mmin >>= (sign_copies - 1);
	      mmax >>= (sign_copies - 1);
	    }
	}

      switch (code)
	{
	/* x >= y is always true for y <= mmin, always false for y > mmax.  */
	case GEU:
	  if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
	    return const_true_rtx;
	  if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
	    return const0_rtx;
	  break;
	case GE:
	  if (val <= mmin)
	    return const_true_rtx;
	  if (val > mmax)
	    return const0_rtx;
	  break;

	/* x <= y is always true for y >= mmax, always false for y < mmin.  */
	case LEU:
	  if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
	    return const_true_rtx;
	  if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
	    return const0_rtx;
	  break;
	case LE:
	  if (val >= mmax)
	    return const_true_rtx;
	  if (val < mmin)
	    return const0_rtx;
	  break;

	case EQ:
	  /* x == y is always false for y out of range.  */
	  if (val < mmin || val > mmax)
	    return const0_rtx;
	  break;

	/* x > y is always false for y >= mmax, always true for y < mmin.  */
	case GTU:
	  if ((unsigned HOST_WIDE_INT) val >= (unsigned HOST_WIDE_INT) mmax)
	    return const0_rtx;
	  if ((unsigned HOST_WIDE_INT) val < (unsigned HOST_WIDE_INT) mmin)
	    return const_true_rtx;
	  break;
	case GT:
	  if (val >= mmax)
	    return const0_rtx;
	  if (val < mmin)
	    return const_true_rtx;
	  break;

	/* x < y is always false for y <= mmin, always true for y > mmax.  */
	case LTU:
	  if ((unsigned HOST_WIDE_INT) val <= (unsigned HOST_WIDE_INT) mmin)
	    return const0_rtx;
	  if ((unsigned HOST_WIDE_INT) val > (unsigned HOST_WIDE_INT) mmax)
	    return const_true_rtx;
	  break;
	case LT:
	  if (val <= mmin)
	    return const0_rtx;
	  if (val > mmax)
	    return const_true_rtx;
	  break;

	case NE:
	  /* x != y is always true for y out of range.  */
	  if (val < mmin || val > mmax)
	    return const_true_rtx;
	  break;

	default:
	  break;
	}
    }

  /* Optimize integer comparisons with zero.  */
  if (is_a <scalar_int_mode> (mode, &int_mode)
      && trueop1 == const0_rtx
      && !side_effects_p (trueop0))
    {
      /* Some addresses are known to be nonzero.  We don't know
	 their sign, but equality comparisons are known.  */
      if (nonzero_address_p (trueop0))
	{
	  if (code == EQ || code == LEU)
	    return const0_rtx;
	  if (code == NE || code == GTU)
	    return const_true_rtx;
	}

      /* See if the first operand is an IOR with a constant.  If so, we
	 may be able to determine the result of this comparison.  */
      if (GET_CODE (op0) == IOR)
	{
	  rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
	  if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
	    {
	      /* HAS_SIGN is whether the IOR constant forces the sign bit
		 to 1, which makes the result known negative.  */
	      int sign_bitnum = GET_MODE_PRECISION (int_mode) - 1;
	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
			      && (UINTVAL (inner_const)
				  & (HOST_WIDE_INT_1U
				     << sign_bitnum)));

	      switch (code)
		{
		case EQ:
		case LEU:
		  return const0_rtx;
		case NE:
		case GTU:
		  return const_true_rtx;
		case LT:
		case LE:
		  if (has_sign)
		    return const_true_rtx;
		  break;
		case GT:
		case GE:
		  if (has_sign)
		    return const0_rtx;
		  break;
		default:
		  break;
		}
	    }
	}
    }

  /* Optimize comparison of ABS with zero.  */
  if (trueop1 == CONST0_RTX (mode) && !side_effects_p (trueop0)
      && (GET_CODE (trueop0) == ABS
	  || (GET_CODE (trueop0) == FLOAT_EXTEND
	      && GET_CODE (XEXP (trueop0, 0)) == ABS)))
    {
      switch (code)
	{
	case LT:
	  /* Optimize abs(x) < 0.0.  */
	  if (!INTEGRAL_MODE_P (mode) && !HONOR_SNANS (mode))
	    return const0_rtx;
	  break;

	case GE:
	  /* Optimize abs(x) >= 0.0.  */
	  if (!INTEGRAL_MODE_P (mode) && !HONOR_NANS (mode))
	    return const_true_rtx;
	  break;

	case UNGE:
	  /* Optimize ! (abs(x) < 0.0).  */
	  return const_true_rtx;

	default:
	  break;
	}
    }

  return 0;
}
    7245              : 
    7246              : /* Recognize expressions of the form (X CMP 0) ? VAL : OP (X)
    7247              :    where OP is CLZ or CTZ and VAL is the value from CLZ_DEFINED_VALUE_AT_ZERO
    7248              :    or CTZ_DEFINED_VALUE_AT_ZERO respectively and return OP (X) if the expression
    7249              :    can be simplified to that or NULL_RTX if not.
    7250              :    Assume X is compared against zero with CMP_CODE and the true
    7251              :    arm is TRUE_VAL and the false arm is FALSE_VAL.  */
    7252              : 
    7253              : rtx
    7254     30909385 : simplify_context::simplify_cond_clz_ctz (rtx x, rtx_code cmp_code,
    7255              :                                          rtx true_val, rtx false_val)
    7256              : {
    7257     30909385 :   if (cmp_code != EQ && cmp_code != NE)
    7258              :     return NULL_RTX;
    7259              : 
    7260              :   /* Result on X == 0 and X !=0 respectively.  */
    7261     22377040 :   rtx on_zero, on_nonzero;
    7262     22377040 :   if (cmp_code == EQ)
    7263              :     {
    7264              :       on_zero = true_val;
    7265              :       on_nonzero = false_val;
    7266              :     }
    7267              :   else
    7268              :     {
    7269     11948412 :       on_zero = false_val;
    7270     11948412 :       on_nonzero = true_val;
    7271              :     }
    7272              : 
    7273     22377040 :   rtx_code op_code = GET_CODE (on_nonzero);
    7274     22377040 :   if ((op_code != CLZ && op_code != CTZ)
    7275         1961 :       || !rtx_equal_p (XEXP (on_nonzero, 0), x)
    7276     22378066 :       || !CONST_INT_P (on_zero))
    7277     22376738 :     return NULL_RTX;
    7278              : 
    7279          302 :   HOST_WIDE_INT op_val;
    7280          302 :   scalar_int_mode mode ATTRIBUTE_UNUSED
    7281          302 :     = as_a <scalar_int_mode> (GET_MODE (XEXP (on_nonzero, 0)));
    7282            0 :   if (((op_code == CLZ && CLZ_DEFINED_VALUE_AT_ZERO (mode, op_val))
    7283          604 :        || (op_code == CTZ && CTZ_DEFINED_VALUE_AT_ZERO (mode, op_val)))
    7284          326 :       && op_val == INTVAL (on_zero))
    7285              :     return on_nonzero;
    7286              : 
    7287              :   return NULL_RTX;
    7288              : }
    7289              : 
/* Try to simplify X given that it appears within operand OP of a
   VEC_MERGE operation whose mask is MASK.  X need not use the same
   vector mode as the VEC_MERGE, but it must have the same number of
   elements.

   Return the simplified X on success, otherwise return NULL_RTX.  */

rtx
simplify_context::simplify_merge_mask (rtx x, rtx mask, int op)
{
  gcc_assert (VECTOR_MODE_P (GET_MODE (x)));
  poly_uint64 nunits = GET_MODE_NUNITS (GET_MODE (x));
  /* A nested VEC_MERGE with the same mask is fully decided by OP:
     only its operand OP can ever be selected, so X reduces to that
     operand (unless dropping the other operand would lose a side
     effect).  */
  if (GET_CODE (x) == VEC_MERGE && rtx_equal_p (XEXP (x, 2), mask))
    {
      if (side_effects_p (XEXP (x, 1 - op)))
	return NULL_RTX;

      return XEXP (x, op);
    }
  /* Recurse through a unary operation whose operand has the same
     number of elements.  */
  if (UNARY_P (x)
      && VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))
      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 0))), nunits))
    {
      rtx top0 = simplify_merge_mask (XEXP (x, 0), mask, op);
      if (top0)
	return simplify_gen_unary (GET_CODE (x), GET_MODE (x), top0,
				   GET_MODE (XEXP (x, 0)));
    }
  /* Recurse through a binary operation; rebuild it if either operand
     simplified, keeping the unsimplified original for the other.  */
  if (BINARY_P (x)
      && VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))
      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 0))), nunits)
      && VECTOR_MODE_P (GET_MODE (XEXP (x, 1)))
      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 1))), nunits))
    {
      rtx top0 = simplify_merge_mask (XEXP (x, 0), mask, op);
      rtx top1 = simplify_merge_mask (XEXP (x, 1), mask, op);
      if (top0 || top1)
	{
	  /* Comparisons need the operand mode as well; take it from
	     whichever original operand has a non-VOID mode.  */
	  if (COMPARISON_P (x))
	    return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
					    GET_MODE (XEXP (x, 0)) != VOIDmode
					    ? GET_MODE (XEXP (x, 0))
					    : GET_MODE (XEXP (x, 1)),
					    top0 ? top0 : XEXP (x, 0),
					    top1 ? top1 : XEXP (x, 1));
	  else
	    return simplify_gen_binary (GET_CODE (x), GET_MODE (x),
					top0 ? top0 : XEXP (x, 0),
					top1 ? top1 : XEXP (x, 1));
	}
    }
  /* Likewise for a ternary operation when all three operands have the
     required number of elements.  */
  if (GET_RTX_CLASS (GET_CODE (x)) == RTX_TERNARY
      && VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))
      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 0))), nunits)
      && VECTOR_MODE_P (GET_MODE (XEXP (x, 1)))
      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 1))), nunits)
      && VECTOR_MODE_P (GET_MODE (XEXP (x, 2)))
      && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (x, 2))), nunits))
    {
      rtx top0 = simplify_merge_mask (XEXP (x, 0), mask, op);
      rtx top1 = simplify_merge_mask (XEXP (x, 1), mask, op);
      rtx top2 = simplify_merge_mask (XEXP (x, 2), mask, op);
      if (top0 || top1 || top2)
	return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
				     GET_MODE (XEXP (x, 0)),
				     top0 ? top0 : XEXP (x, 0),
				     top1 ? top1 : XEXP (x, 1),
				     top2 ? top2 : XEXP (x, 2));
    }
  return NULL_RTX;
}
    7361              : 
    7362              : 
    7363              : /* Simplify CODE, an operation with result mode MODE and three operands,
    7364              :    OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
    7365              :    a constant.  Return 0 if no simplifications is possible.  */
    7366              : 
    7367              : rtx
    7368     42793379 : simplify_context::simplify_ternary_operation (rtx_code code, machine_mode mode,
    7369              :                                               machine_mode op0_mode,
    7370              :                                               rtx op0, rtx op1, rtx op2)
    7371              : {
    7372     42793379 :   bool any_change = false;
    7373     42793379 :   rtx tem, trueop2;
    7374     42793379 :   scalar_int_mode int_mode, int_op0_mode;
    7375     42793379 :   unsigned int n_elts;
    7376              : 
    7377     42793379 :   switch (code)
    7378              :     {
    7379       337125 :     case FMA:
    7380              :       /* Simplify negations around the multiplication.  */
    7381              :       /* -a * -b + c  =>  a * b + c.  */
    7382       337125 :       if (GET_CODE (op0) == NEG)
    7383              :         {
    7384        81164 :           tem = simplify_unary_operation (NEG, mode, op1, mode);
    7385        81164 :           if (tem)
    7386          259 :             op1 = tem, op0 = XEXP (op0, 0), any_change = true;
    7387              :         }
    7388       255961 :       else if (GET_CODE (op1) == NEG)
    7389              :         {
    7390         1082 :           tem = simplify_unary_operation (NEG, mode, op0, mode);
    7391         1082 :           if (tem)
    7392            0 :             op0 = tem, op1 = XEXP (op1, 0), any_change = true;
    7393              :         }
    7394              : 
    7395              :       /* Canonicalize the two multiplication operands.  */
    7396              :       /* a * -b + c  =>  -b * a + c.  */
    7397       337125 :       if (swap_commutative_operands_p (op0, op1))
    7398              :         std::swap (op0, op1), any_change = true;
    7399              : 
    7400       308779 :       if (any_change)
    7401        28596 :         return gen_rtx_FMA (mode, op0, op1, op2);
    7402              :       return NULL_RTX;
    7403              : 
    7404       667450 :     case SIGN_EXTRACT:
    7405       667450 :     case ZERO_EXTRACT:
    7406       667450 :       if (CONST_INT_P (op0)
    7407        17964 :           && CONST_INT_P (op1)
    7408        17964 :           && CONST_INT_P (op2)
    7409     42793411 :           && is_a <scalar_int_mode> (mode, &int_mode)
    7410           32 :           && INTVAL (op1) + INTVAL (op2) <= GET_MODE_PRECISION (int_mode)
    7411       667482 :           && HWI_COMPUTABLE_MODE_P (int_mode))
    7412              :         {
    7413              :           /* Extracting a bit-field from a constant */
    7414           32 :           unsigned HOST_WIDE_INT val = UINTVAL (op0);
    7415           32 :           HOST_WIDE_INT op1val = INTVAL (op1);
    7416           32 :           HOST_WIDE_INT op2val = INTVAL (op2);
    7417           32 :           if (!BITS_BIG_ENDIAN)
    7418           32 :             val >>= op2val;
    7419              :           else if (is_a <scalar_int_mode> (op0_mode, &int_op0_mode))
    7420              :             val >>= GET_MODE_PRECISION (int_op0_mode) - op2val - op1val;
    7421              :           else
    7422              :             /* Not enough information to calculate the bit position.  */
    7423              :             break;
    7424              : 
    7425           32 :           if (HOST_BITS_PER_WIDE_INT != op1val)
    7426              :             {
    7427              :               /* First zero-extend.  */
    7428           29 :               val &= (HOST_WIDE_INT_1U << op1val) - 1;
    7429              :               /* If desired, propagate sign bit.  */
    7430           29 :               if (code == SIGN_EXTRACT
    7431            5 :                   && (val & (HOST_WIDE_INT_1U << (op1val - 1)))
    7432            5 :                      != 0)
    7433            2 :                 val |= ~ ((HOST_WIDE_INT_1U << op1val) - 1);
    7434              :             }
    7435              : 
    7436           32 :           return gen_int_mode (val, int_mode);
    7437              :         }
    7438              :       break;
    7439              : 
    7440     41005896 :     case IF_THEN_ELSE:
    7441     41005896 :       if (CONST_INT_P (op0))
    7442       283877 :         return op0 != const0_rtx ? op1 : op2;
    7443              : 
    7444              :       /* Convert c ? a : a into "a".  */
    7445     40814661 :       if (rtx_equal_p (op1, op2) && ! side_effects_p (op0))
    7446              :         return op1;
    7447              : 
    7448              :       /* Convert a != b ? a : b into "a".  */
    7449     40811299 :       if (GET_CODE (op0) == NE
    7450     15794164 :           && ! side_effects_p (op0)
    7451     15752770 :           && ! HONOR_NANS (mode)
    7452     15746746 :           && ! HONOR_SIGNED_ZEROS (mode)
    7453     56558045 :           && ((rtx_equal_p (XEXP (op0, 0), op1)
    7454       106548 :                && rtx_equal_p (XEXP (op0, 1), op2))
    7455     15746402 :               || (rtx_equal_p (XEXP (op0, 0), op2)
    7456         5210 :                   && rtx_equal_p (XEXP (op0, 1), op1))))
    7457          561 :         return op1;
    7458              : 
    7459              :       /* Convert a == b ? a : b into "b".  */
    7460     40810738 :       if (GET_CODE (op0) == EQ
    7461     13013984 :           && ! side_effects_p (op0)
    7462     12987922 :           && ! HONOR_NANS (mode)
    7463     12850459 :           && ! HONOR_SIGNED_ZEROS (mode)
    7464     53661197 :           && ((rtx_equal_p (XEXP (op0, 0), op1)
    7465        14361 :                && rtx_equal_p (XEXP (op0, 1), op2))
    7466     12850449 :               || (rtx_equal_p (XEXP (op0, 0), op2)
    7467         7233 :                   && rtx_equal_p (XEXP (op0, 1), op1))))
    7468           26 :         return op2;
    7469              : 
    7470              :       /* Convert a != 0 ? -a : 0 into "-a".  */
    7471     40810712 :       if (GET_CODE (op0) == NE
    7472     15793603 :           && ! side_effects_p (op0)
    7473     15752209 :           && ! HONOR_NANS (mode)
    7474     15746185 :           && ! HONOR_SIGNED_ZEROS (mode)
    7475     15746185 :           && XEXP (op0, 1) == CONST0_RTX (mode)
    7476     11942278 :           && op2 == CONST0_RTX (mode)
    7477       181664 :           && GET_CODE (op1) == NEG
    7478     40810764 :           && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)))
    7479              :         return op1;
    7480              : 
    7481              :       /* Convert a == 0 ? 0 : -a into "-a".  */
    7482     40810703 :       if (GET_CODE (op0) == EQ
    7483     13013958 :           && ! side_effects_p (op0)
    7484     12987896 :           && ! HONOR_NANS (mode)
    7485     12850433 :           && ! HONOR_SIGNED_ZEROS (mode)
    7486     12850433 :           && op1 == CONST0_RTX (mode)
    7487        31706 :           && XEXP (op0, 1) == CONST0_RTX (mode)
    7488        14224 :           && GET_CODE (op2) == NEG
    7489     40810709 :           && rtx_equal_p (XEXP (op0, 0), XEXP (op2, 0)))
    7490              :         return op2;
    7491              : 
    7492              :       /* Convert (!c) != {0,...,0} ? a : b into
    7493              :          c != {0,...,0} ? b : a for vector modes.  */
    7494     40810697 :       if (VECTOR_MODE_P (GET_MODE (op1))
    7495        14895 :           && GET_CODE (op0) == NE
    7496          450 :           && GET_CODE (XEXP (op0, 0)) == NOT
    7497            0 :           && GET_CODE (XEXP (op0, 1)) == CONST_VECTOR)
    7498              :         {
    7499            0 :           rtx cv = XEXP (op0, 1);
    7500            0 :           int nunits;
    7501            0 :           bool ok = true;
    7502            0 :           if (!CONST_VECTOR_NUNITS (cv).is_constant (&nunits))
    7503              :             ok = false;
    7504              :           else
    7505            0 :             for (int i = 0; i < nunits; ++i)
    7506            0 :               if (CONST_VECTOR_ELT (cv, i) != const0_rtx)
    7507              :                 {
    7508              :                   ok = false;
    7509              :                   break;
    7510              :                 }
    7511            0 :           if (ok)
    7512              :             {
    7513            0 :               rtx new_op0 = gen_rtx_NE (GET_MODE (op0),
    7514              :                                         XEXP (XEXP (op0, 0), 0),
    7515              :                                         XEXP (op0, 1));
    7516            0 :               rtx retval = gen_rtx_IF_THEN_ELSE (mode, new_op0, op2, op1);
    7517            0 :               return retval;
    7518              :             }
    7519              :         }
    7520              : 
    7521              :       /* Convert x == 0 ? N : clz (x) into clz (x) when
    7522              :          CLZ_DEFINED_VALUE_AT_ZERO is defined to N for the mode of x.
    7523              :          Similarly for ctz (x).  */
    7524     40809699 :       if (COMPARISON_P (op0) && !side_effects_p (op0)
    7525     81520012 :           && XEXP (op0, 1) == const0_rtx)
    7526              :         {
    7527     30909385 :           rtx simplified
    7528     30909385 :             = simplify_cond_clz_ctz (XEXP (op0, 0), GET_CODE (op0),
    7529              :                                      op1, op2);
    7530     30909385 :           if (simplified)
    7531              :             return simplified;
    7532              :         }
    7533              : 
    7534     40810697 :       if (COMPARISON_P (op0) && ! side_effects_p (op0))
    7535              :         {
    7536     81511667 :           machine_mode cmp_mode = (GET_MODE (XEXP (op0, 0)) == VOIDmode
    7537     40709315 :                                         ? GET_MODE (XEXP (op0, 1))
    7538              :                                         : GET_MODE (XEXP (op0, 0)));
    7539     40709315 :           rtx temp;
    7540              : 
    7541              :           /* Look for happy constants in op1 and op2.  */
    7542     40709315 :           if (CONST_INT_P (op1) && CONST_INT_P (op2))
    7543              :             {
    7544       208518 :               HOST_WIDE_INT t = INTVAL (op1);
    7545       208518 :               HOST_WIDE_INT f = INTVAL (op2);
    7546              : 
    7547       208518 :               if (t == STORE_FLAG_VALUE && f == 0)
    7548        52601 :                 code = GET_CODE (op0);
    7549       155917 :               else if (t == 0 && f == STORE_FLAG_VALUE)
    7550              :                 {
    7551        31161 :                   enum rtx_code tmp;
    7552        31161 :                   tmp = reversed_comparison_code (op0, NULL);
    7553        31161 :                   if (tmp == UNKNOWN)
    7554              :                     break;
    7555              :                   code = tmp;
    7556              :                 }
    7557              :               else
    7558              :                 break;
    7559              : 
    7560        78379 :               return simplify_gen_relational (code, mode, cmp_mode,
    7561        78379 :                                               XEXP (op0, 0), XEXP (op0, 1));
    7562              :             }
    7563              : 
    7564     40500797 :           temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
    7565              :                                                 cmp_mode, XEXP (op0, 0),
    7566              :                                                 XEXP (op0, 1));
    7567              : 
    7568              :           /* See if any simplifications were possible.  */
    7569     40500797 :           if (temp)
    7570              :             {
    7571         6917 :               if (CONST_INT_P (temp))
    7572          872 :                 return temp == const0_rtx ? op2 : op1;
    7573         6090 :               else if (temp)
    7574         6090 :                 return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
    7575              :             }
    7576              :         }
    7577              :       break;
    7578              : 
    7579       782908 :     case VEC_MERGE:
    7580       782908 :       gcc_assert (GET_MODE (op0) == mode);
    7581       782908 :       gcc_assert (GET_MODE (op1) == mode);
    7582       782908 :       gcc_assert (VECTOR_MODE_P (mode));
    7583       782908 :       trueop2 = avoid_constant_pool_reference (op2);
    7584       782908 :       if (CONST_INT_P (trueop2)
    7585      1245700 :           && GET_MODE_NUNITS (mode).is_constant (&n_elts))
    7586              :         {
    7587       462792 :           unsigned HOST_WIDE_INT sel = UINTVAL (trueop2);
    7588       462792 :           unsigned HOST_WIDE_INT mask;
    7589       462792 :           if (n_elts == HOST_BITS_PER_WIDE_INT)
    7590              :             mask = -1;
    7591              :           else
    7592       460338 :             mask = (HOST_WIDE_INT_1U << n_elts) - 1;
    7593              : 
    7594       462792 :           if (!(sel & mask) && !side_effects_p (op0))
    7595              :             return op1;
    7596       462355 :           if ((sel & mask) == mask && !side_effects_p (op1))
    7597              :             return op0;
    7598              : 
    7599       451610 :           rtx trueop0 = avoid_constant_pool_reference (op0);
    7600       451610 :           rtx trueop1 = avoid_constant_pool_reference (op1);
    7601       451610 :           if (GET_CODE (trueop0) == CONST_VECTOR
    7602         9195 :               && GET_CODE (trueop1) == CONST_VECTOR)
    7603              :             {
    7604         4787 :               rtvec v = rtvec_alloc (n_elts);
    7605         4787 :               unsigned int i;
    7606              : 
    7607        54160 :               for (i = 0; i < n_elts; i++)
    7608        44586 :                 RTVEC_ELT (v, i) = ((sel & (HOST_WIDE_INT_1U << i))
    7609        44586 :                                     ? CONST_VECTOR_ELT (trueop0, i)
    7610        25019 :                                     : CONST_VECTOR_ELT (trueop1, i));
    7611         4787 :               return gen_rtx_CONST_VECTOR (mode, v);
    7612              :             }
    7613              : 
    7614       446823 :           if (swap_commutative_operands_p (op0, op1)
    7615              :               /* Two operands have same precedence, then first bit of mask
    7616              :                  select first operand.  */
    7617       446823 :               || (!swap_commutative_operands_p (op1, op0) && !(sel & 1)))
    7618        31117 :             return simplify_gen_ternary (code, mode, mode, op1, op0,
    7619        62234 :                                          GEN_INT (~sel & mask));
    7620              : 
    7621              :           /* Replace (vec_merge (vec_merge a b m) c n) with (vec_merge b c n)
    7622              :              if no element from a appears in the result.  */
    7623       415706 :           if (GET_CODE (op0) == VEC_MERGE)
    7624              :             {
    7625        17196 :               tem = avoid_constant_pool_reference (XEXP (op0, 2));
    7626        17196 :               if (CONST_INT_P (tem))
    7627              :                 {
    7628         1475 :                   unsigned HOST_WIDE_INT sel0 = UINTVAL (tem);
    7629         1475 :                   if (!(sel & sel0 & mask) && !side_effects_p (XEXP (op0, 0)))
    7630          104 :                     return simplify_gen_ternary (code, mode, mode,
    7631          104 :                                                  XEXP (op0, 1), op1, op2);
    7632         1371 :                   if (!(sel & ~sel0 & mask) && !side_effects_p (XEXP (op0, 1)))
    7633          834 :                     return simplify_gen_ternary (code, mode, mode,
    7634          834 :                                                  XEXP (op0, 0), op1, op2);
    7635              :                 }
    7636              :             }
    7637       414768 :           if (GET_CODE (op1) == VEC_MERGE)
    7638              :             {
    7639          588 :               tem = avoid_constant_pool_reference (XEXP (op1, 2));
    7640          588 :               if (CONST_INT_P (tem))
    7641              :                 {
    7642          557 :                   unsigned HOST_WIDE_INT sel1 = UINTVAL (tem);
    7643          557 :                   if (!(~sel & sel1 & mask) && !side_effects_p (XEXP (op1, 0)))
    7644          526 :                     return simplify_gen_ternary (code, mode, mode,
    7645          526 :                                                  op0, XEXP (op1, 1), op2);
    7646           31 :                   if (!(~sel & ~sel1 & mask) && !side_effects_p (XEXP (op1, 1)))
    7647            4 :                     return simplify_gen_ternary (code, mode, mode,
    7648            4 :                                                  op0, XEXP (op1, 0), op2);
    7649              :                 }
    7650              :             }
    7651              : 
    7652              :           /* Replace (vec_merge (vec_duplicate (vec_select a parallel (i))) a 1 << i)
    7653              :              with a.  */
    7654       414238 :           if (GET_CODE (op0) == VEC_DUPLICATE
    7655       133899 :               && GET_CODE (XEXP (op0, 0)) == VEC_SELECT
    7656          634 :               && GET_CODE (XEXP (XEXP (op0, 0), 1)) == PARALLEL
    7657       415506 :               && known_eq (GET_MODE_NUNITS (GET_MODE (XEXP (op0, 0))), 1))
    7658              :             {
    7659          566 :               tem = XVECEXP ((XEXP (XEXP (op0, 0), 1)), 0, 0);
    7660          566 :               if (CONST_INT_P (tem) && CONST_INT_P (op2))
    7661              :                 {
    7662          566 :                   if (XEXP (XEXP (op0, 0), 0) == op1
    7663            2 :                       && UINTVAL (op2) == HOST_WIDE_INT_1U << UINTVAL (tem))
    7664              :                     return op1;
    7665              :                 }
    7666              :             }
    7667              :           /* Replace (vec_merge (vec_duplicate (X)) (const_vector [A, B])
    7668              :              (const_int N))
    7669              :              with (vec_concat (X) (B)) if N == 1 or
    7670              :              (vec_concat (A) (X)) if N == 2.  */
    7671       414236 :           if (GET_CODE (op0) == VEC_DUPLICATE
    7672       133897 :               && GET_CODE (op1) == CONST_VECTOR
    7673       141992 :               && known_eq (CONST_VECTOR_NUNITS (op1), 2)
    7674         2344 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op0)), 2)
    7675       415408 :               && IN_RANGE (sel, 1, 2))
    7676              :             {
    7677         1170 :               rtx newop0 = XEXP (op0, 0);
    7678         1170 :               rtx newop1 = CONST_VECTOR_ELT (op1, 2 - sel);
    7679         1170 :               if (sel == 2)
    7680          123 :                 std::swap (newop0, newop1);
    7681         1170 :               return simplify_gen_binary (VEC_CONCAT, mode, newop0, newop1);
    7682              :             }
    7683              :           /* Replace (vec_merge (vec_duplicate x) (vec_concat (y) (z)) (const_int N))
    7684              :              with (vec_concat x z) if N == 1, or (vec_concat y x) if N == 2.
    7685              :              Only applies for vectors of two elements.  */
    7686       413066 :           if (GET_CODE (op0) == VEC_DUPLICATE
    7687       132727 :               && GET_CODE (op1) == VEC_CONCAT
    7688            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op0)), 2)
    7689            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op1)), 2)
    7690       413066 :               && IN_RANGE (sel, 1, 2))
    7691              :             {
    7692            0 :               rtx newop0 = XEXP (op0, 0);
    7693            0 :               rtx newop1 = XEXP (op1, 2 - sel);
    7694            0 :               rtx otherop = XEXP (op1, sel - 1);
    7695            0 :               if (sel == 2)
    7696            0 :                 std::swap (newop0, newop1);
    7697              :               /* Don't want to throw away the other part of the vec_concat if
    7698              :                  it has side-effects.  */
    7699            0 :               if (!side_effects_p (otherop))
    7700            0 :                 return simplify_gen_binary (VEC_CONCAT, mode, newop0, newop1);
    7701              :             }
    7702              : 
    7703              :           /* Replace:
    7704              : 
    7705              :               (vec_merge:outer (vec_duplicate:outer x:inner)
    7706              :                                (subreg:outer y:inner 0)
    7707              :                                (const_int N))
    7708              : 
    7709              :              with (vec_concat:outer x:inner y:inner) if N == 1,
    7710              :              or (vec_concat:outer y:inner x:inner) if N == 2.
    7711              : 
    7712              :              Implicitly, this means we have a paradoxical subreg, but such
    7713              :              a check is cheap, so make it anyway.
    7714              : 
    7715              :              Only applies for vectors of two elements.  */
    7716       413066 :           if (GET_CODE (op0) == VEC_DUPLICATE
    7717       132727 :               && GET_CODE (op1) == SUBREG
    7718        40976 :               && GET_MODE (op1) == GET_MODE (op0)
    7719        40976 :               && GET_MODE (SUBREG_REG (op1)) == GET_MODE (XEXP (op0, 0))
    7720            0 :               && paradoxical_subreg_p (op1)
    7721            0 :               && subreg_lowpart_p (op1)
    7722            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op0)), 2)
    7723            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op1)), 2)
    7724       413066 :               && IN_RANGE (sel, 1, 2))
    7725              :             {
    7726            0 :               rtx newop0 = XEXP (op0, 0);
    7727            0 :               rtx newop1 = SUBREG_REG (op1);
    7728            0 :               if (sel == 2)
    7729            0 :                 std::swap (newop0, newop1);
    7730            0 :               return simplify_gen_binary (VEC_CONCAT, mode, newop0, newop1);
    7731              :             }
    7732              : 
    7733              :           /* Same as above but with switched operands:
    7734              :                 Replace (vec_merge:outer (subreg:outer x:inner 0)
    7735              :                                          (vec_duplicate:outer y:inner)
    7736              :                                (const_int N))
    7737              : 
    7738              :              with (vec_concat:outer x:inner y:inner) if N == 1,
    7739              :              or (vec_concat:outer y:inner x:inner) if N == 2.  */
    7740       413066 :           if (GET_CODE (op1) == VEC_DUPLICATE
    7741        27357 :               && GET_CODE (op0) == SUBREG
    7742        24349 :               && GET_MODE (op0) == GET_MODE (op1)
    7743        24349 :               && GET_MODE (SUBREG_REG (op0)) == GET_MODE (XEXP (op1, 0))
    7744            0 :               && paradoxical_subreg_p (op0)
    7745            0 :               && subreg_lowpart_p (op0)
    7746            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op1)), 2)
    7747            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op0)), 2)
    7748       413066 :               && IN_RANGE (sel, 1, 2))
    7749              :             {
    7750            0 :               rtx newop0 = SUBREG_REG (op0);
    7751            0 :               rtx newop1 = XEXP (op1, 0);
    7752            0 :               if (sel == 2)
    7753            0 :                 std::swap (newop0, newop1);
    7754            0 :               return simplify_gen_binary (VEC_CONCAT, mode, newop0, newop1);
    7755              :             }
    7756              : 
    7757              :           /* Replace (vec_merge (vec_duplicate x) (vec_duplicate y)
    7758              :                                  (const_int n))
    7759              :              with (vec_concat x y) or (vec_concat y x) depending on value
    7760              :              of N.  */
    7761       413066 :           if (GET_CODE (op0) == VEC_DUPLICATE
    7762       132727 :               && GET_CODE (op1) == VEC_DUPLICATE
    7763          198 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op0)), 2)
    7764            0 :               && known_eq (GET_MODE_NUNITS (GET_MODE (op1)), 2)
    7765       413066 :               && IN_RANGE (sel, 1, 2))
    7766              :             {
    7767            0 :               rtx newop0 = XEXP (op0, 0);
    7768            0 :               rtx newop1 = XEXP (op1, 0);
    7769            0 :               if (sel == 2)
    7770            0 :                 std::swap (newop0, newop1);
    7771              : 
    7772            0 :               return simplify_gen_binary (VEC_CONCAT, mode, newop0, newop1);
    7773              :             }
    7774              :         }
    7775              : 
    7776       733182 :       if (rtx_equal_p (op0, op1)
    7777       733182 :           && !side_effects_p (op2) && !side_effects_p (op1))
    7778              :         return op0;
    7779              : 
    7780       732889 :       if (!side_effects_p (op2))
    7781              :         {
    7782       729211 :           rtx top0
    7783       729211 :             = may_trap_p (op0) ? NULL_RTX : simplify_merge_mask (op0, op2, 0);
    7784       729211 :           rtx top1
    7785       729211 :             = may_trap_p (op1) ? NULL_RTX : simplify_merge_mask (op1, op2, 1);
    7786       729211 :           if (top0 || top1)
    7787          984 :             return simplify_gen_ternary (code, mode, mode,
    7788              :                                          top0 ? top0 : op0,
    7789          812 :                                          top1 ? top1 : op1, op2);
    7790              :         }
    7791              : 
    7792              :       break;
    7793              : 
    7794            0 :     default:
    7795            0 :       gcc_unreachable ();
    7796              :     }
    7797              : 
    7798              :   return 0;
    7799              : }
    7800              : 
    7801              : /* Try to calculate NUM_BYTES bytes of the target memory image of X,
    7802              :    starting at byte FIRST_BYTE.  Return true on success and add the
    7803              :    bytes to BYTES, such that each byte has BITS_PER_UNIT bits and such
    7804              :    that the bytes follow target memory order.  Leave BYTES unmodified
    7805              :    on failure.
    7806              : 
    7807              :    MODE is the mode of X.  The caller must reserve NUM_BYTES bytes in
    7808              :    BYTES before calling this function.  */
    7809              : 
    7810              : bool
    7811     13186962 : native_encode_rtx (machine_mode mode, rtx x, vec<target_unit> &bytes,
    7812              :                    unsigned int first_byte, unsigned int num_bytes)
    7813              : {
    7814              :   /* Check the mode is sensible.  */
    7815     13186962 :   gcc_assert (GET_MODE (x) == VOIDmode
    7816              :               ? is_a <scalar_int_mode> (mode)
    7817              :               : mode == GET_MODE (x));
    7818              : 
    7819     13186962 :   if (GET_CODE (x) == CONST_VECTOR)
    7820              :     {
    7821              :       /* CONST_VECTOR_ELT follows target memory order, so no shuffling
    7822              :          is necessary.  The only complication is that MODE_VECTOR_BOOL
    7823              :          vectors can have several elements per byte.  */
    7824       973318 :       unsigned int elt_bits = vector_element_size (GET_MODE_PRECISION (mode),
    7825              :                                                    GET_MODE_NUNITS (mode));
    7826       486659 :       unsigned int elt = first_byte * BITS_PER_UNIT / elt_bits;
    7827       486659 :       if (elt_bits < BITS_PER_UNIT)
    7828              :         {
    7829              :           /* This is the only case in which elements can be smaller than
    7830              :              a byte.  */
    7831            0 :           gcc_assert (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL);
    7832            0 :           auto mask = GET_MODE_MASK (GET_MODE_INNER (mode));
    7833            0 :           for (unsigned int i = 0; i < num_bytes; ++i)
    7834              :             {
    7835            0 :               target_unit value = 0;
    7836            0 :               for (unsigned int j = 0; j < BITS_PER_UNIT; j += elt_bits)
    7837              :                 {
    7838            0 :                   if (INTVAL (CONST_VECTOR_ELT (x, elt)))
    7839            0 :                     value |= mask << j;
    7840            0 :                   elt += 1;
    7841              :                 }
    7842            0 :               bytes.quick_push (value);
    7843              :             }
    7844              :           return true;
    7845              :         }
    7846              : 
    7847       486659 :       unsigned int start = bytes.length ();
    7848       486659 :       unsigned int elt_bytes = GET_MODE_UNIT_SIZE (mode);
    7849              :       /* Make FIRST_BYTE relative to ELT.  */
    7850       486659 :       first_byte %= elt_bytes;
    7851      2553002 :       while (num_bytes > 0)
    7852              :         {
    7853              :           /* Work out how many bytes we want from element ELT.  */
    7854      2066343 :           unsigned int chunk_bytes = MIN (num_bytes, elt_bytes - first_byte);
    7855      4132686 :           if (!native_encode_rtx (GET_MODE_INNER (mode),
    7856              :                                   CONST_VECTOR_ELT (x, elt), bytes,
    7857              :                                   first_byte, chunk_bytes))
    7858              :             {
    7859            0 :               bytes.truncate (start);
    7860            0 :               return false;
    7861              :             }
    7862      2066343 :           elt += 1;
    7863      2066343 :           first_byte = 0;
    7864      2066343 :           num_bytes -= chunk_bytes;
    7865              :         }
    7866              :       return true;
    7867              :     }
    7868              : 
    7869              :   /* All subsequent cases are limited to scalars.  */
    7870     12700303 :   scalar_mode smode;
    7871     12731236 :   if (!is_a <scalar_mode> (mode, &smode))
    7872              :     return false;
    7873              : 
    7874              :   /* Make sure that the region is in range.  */
    7875     12700303 :   unsigned int end_byte = first_byte + num_bytes;
    7876     12700303 :   unsigned int mode_bytes = GET_MODE_SIZE (smode);
    7877     12700303 :   gcc_assert (end_byte <= mode_bytes);
    7878              : 
    7879     12700303 :   if (CONST_SCALAR_INT_P (x))
    7880              :     {
    7881              :       /* The target memory layout is affected by both BYTES_BIG_ENDIAN
    7882              :          and WORDS_BIG_ENDIAN.  Use the subreg machinery to get the lsb
    7883              :          position of each byte.  */
    7884     12033162 :       rtx_mode_t value (x, smode);
    7885     12033162 :       wide_int_ref value_wi (value);
    7886     51409497 :       for (unsigned int byte = first_byte; byte < end_byte; ++byte)
    7887              :         {
    7888              :           /* Always constant because the inputs are.  */
    7889     39376335 :           unsigned int lsb
    7890     39376335 :             = subreg_size_lsb (1, mode_bytes, byte).to_constant ();
    7891              :           /* Operate directly on the encoding rather than using
    7892              :              wi::extract_uhwi, so that we preserve the sign or zero
    7893              :              extension for modes that are not a whole number of bits in
    7894              :              size.  (Zero extension is only used for the combination of
    7895              :              innermode == BImode && STORE_FLAG_VALUE == 1).  */
    7896     39376335 :           unsigned int elt = lsb / HOST_BITS_PER_WIDE_INT;
    7897     39376335 :           unsigned int shift = lsb % HOST_BITS_PER_WIDE_INT;
    7898     39376335 :           unsigned HOST_WIDE_INT uhwi = value_wi.elt (elt);
    7899     39376335 :           bytes.quick_push (uhwi >> shift);
    7900              :         }
    7901     12033162 :       return true;
    7902              :     }
    7903              : 
    7904       667141 :   if (CONST_DOUBLE_P (x))
    7905              :     {
    7906              :       /* real_to_target produces an array of integers in target memory order.
    7907              :          All integers before the last one have 32 bits; the last one may
    7908              :          have 32 bits or fewer, depending on whether the mode bitsize
    7909              :          is divisible by 32.  Each of these integers is then laid out
    7910              :          in target memory as any other integer would be.  */
    7911       636208 :       long el32[MAX_BITSIZE_MODE_ANY_MODE / 32];
    7912       636208 :       real_to_target (el32, CONST_DOUBLE_REAL_VALUE (x), smode);
    7913              : 
    7914              :       /* The (maximum) number of target bytes per element of el32.  */
    7915       636208 :       unsigned int bytes_per_el32 = 32 / BITS_PER_UNIT;
    7916       636208 :       gcc_assert (bytes_per_el32 != 0);
    7917              : 
    7918              :       /* Build up the integers in a similar way to the CONST_SCALAR_INT_P
    7919              :          handling above.  */
    7920      4356723 :       for (unsigned int byte = first_byte; byte < end_byte; ++byte)
    7921              :         {
    7922      3720515 :           unsigned int index = byte / bytes_per_el32;
    7923      3720515 :           unsigned int subbyte = byte % bytes_per_el32;
    7924      3720515 :           unsigned int int_bytes = MIN (bytes_per_el32,
    7925              :                                         mode_bytes - index * bytes_per_el32);
    7926              :           /* Always constant because the inputs are.  */
    7927      3720515 :           unsigned int lsb
    7928      3720515 :             = subreg_size_lsb (1, int_bytes, subbyte).to_constant ();
    7929      3720515 :           bytes.quick_push ((unsigned long) el32[index] >> lsb);
    7930              :         }
    7931       636208 :       return true;
    7932              :     }
    7933              : 
    7934        30933 :   if (GET_CODE (x) == CONST_FIXED)
    7935              :     {
    7936            0 :       for (unsigned int byte = first_byte; byte < end_byte; ++byte)
    7937              :         {
    7938              :           /* Always constant because the inputs are.  */
    7939            0 :           unsigned int lsb
    7940            0 :             = subreg_size_lsb (1, mode_bytes, byte).to_constant ();
    7941            0 :           unsigned HOST_WIDE_INT piece = CONST_FIXED_VALUE_LOW (x);
    7942            0 :           if (lsb >= HOST_BITS_PER_WIDE_INT)
    7943              :             {
    7944            0 :               lsb -= HOST_BITS_PER_WIDE_INT;
    7945            0 :               piece = CONST_FIXED_VALUE_HIGH (x);
    7946              :             }
    7947            0 :           bytes.quick_push (piece >> lsb);
    7948              :         }
    7949              :       return true;
    7950              :     }
    7951              : 
    7952              :   return false;
    7953              : }
    7954              : 
    7955              : /* Read a vector of mode MODE from the target memory image given by BYTES,
    7956              :    starting at byte FIRST_BYTE.  The vector is known to be encodable using
    7957              :    NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each,
    7958              :    and BYTES is known to have enough bytes to supply NPATTERNS *
    7959              :    NELTS_PER_PATTERN vector elements.  Each element of BYTES contains
    7960              :    BITS_PER_UNIT bits and the bytes are in target memory order.
    7961              : 
    7962              :    Return the vector on success, otherwise return NULL_RTX.  */
    7963              : 
    7964              : rtx
    7965       224848 : native_decode_vector_rtx (machine_mode mode, const vec<target_unit> &bytes,
    7966              :                           unsigned int first_byte, unsigned int npatterns,
    7967              :                           unsigned int nelts_per_pattern)
    7968              : {
    7969       224848 :   rtx_vector_builder builder (mode, npatterns, nelts_per_pattern);
    7970              : 
    7971       449696 :   unsigned int elt_bits = vector_element_size (GET_MODE_PRECISION (mode),
    7972              :                                                GET_MODE_NUNITS (mode));
    7973       224848 :   if (elt_bits < BITS_PER_UNIT)
    7974              :     {
    7975              :       /* This is the only case in which elements can be smaller than a byte.
    7976              :          Element 0 is always in the lsb of the containing byte.  */
    7977            0 :       gcc_assert (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL);
    7978            0 :       for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
    7979              :         {
    7980            0 :           unsigned int bit_index = first_byte * BITS_PER_UNIT + i * elt_bits;
    7981            0 :           unsigned int byte_index = bit_index / BITS_PER_UNIT;
    7982            0 :           unsigned int lsb = bit_index % BITS_PER_UNIT;
    7983            0 :           unsigned int value = bytes[byte_index] >> lsb;
    7984            0 :           builder.quick_push (gen_int_mode (value, GET_MODE_INNER (mode)));
    7985              :         }
    7986              :     }
    7987              :   else
    7988              :     {
    7989       900233 :       for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
    7990              :         {
    7991      1350770 :           rtx x = native_decode_rtx (GET_MODE_INNER (mode), bytes, first_byte);
    7992       675385 :           if (!x)
    7993            0 :             return NULL_RTX;
    7994       675385 :           builder.quick_push (x);
    7995       675385 :           first_byte += elt_bits / BITS_PER_UNIT;
    7996              :         }
    7997              :     }
    7998       224848 :   return builder.build ();
    7999       224848 : }
    8000              : 
    8001              : /* Extract a PRECISION-bit integer from bytes [FIRST_BYTE, FIRST_BYTE + SIZE)
    8002              :    of target memory image BYTES.  */
    8003              : 
    8004              : wide_int
    8005     11093587 : native_decode_int (const vec<target_unit> &bytes, unsigned int first_byte,
    8006              :                    unsigned int size, unsigned int precision)
    8007              : {
    8008              :   /* Pull the bytes msb first, so that we can use simple
    8009              :      shift-and-insert wide_int operations.  */
    8010     11093587 :   wide_int result (wi::zero (precision));
    8011     50790982 :   for (unsigned int i = 0; i < size; ++i)
    8012              :     {
    8013     39697395 :       unsigned int lsb = (size - i - 1) * BITS_PER_UNIT;
    8014              :       /* Always constant because the inputs are.  */
    8015     39697395 :       unsigned int subbyte
    8016     39697395 :         = subreg_size_offset_from_lsb (1, size, lsb).to_constant ();
    8017     39697395 :       result <<= BITS_PER_UNIT;
    8018     39697395 :       result |= bytes[first_byte + subbyte];
    8019              :     }
    8020     11093587 :   return result;
    8021              : }
    8022              : 
/* Read an rtx of mode MODE from the target memory image given by BYTES,
   starting at byte FIRST_BYTE.  Each element of BYTES contains BITS_PER_UNIT
   bits and the bytes are in target memory order.  The image has enough
   values to specify all bytes of MODE.

   Return the rtx on success, otherwise return NULL_RTX.  */

rtx
native_decode_rtx (machine_mode mode, const vec<target_unit> &bytes,
                   unsigned int first_byte)
{
  if (VECTOR_MODE_P (mode))
    {
      /* If we know at compile time how many elements there are,
         pull each element directly from BYTES.  */
      unsigned int nelts;
      if (GET_MODE_NUNITS (mode).is_constant (&nelts))
        return native_decode_vector_rtx (mode, bytes, first_byte, nelts, 1);
      /* Variable-length vectors have no fixed per-element byte layout
         to read from.  */
      return NULL_RTX;
    }

  scalar_int_mode imode;
  if (is_a <scalar_int_mode> (mode, &imode)
      && GET_MODE_PRECISION (imode) <= MAX_BITSIZE_MODE_ANY_INT)
    {
      /* Integer modes: accumulate the bytes into a wide_int and wrap
         it in a constant of the requested mode.  */
      auto result = native_decode_int (bytes, first_byte,
                                       GET_MODE_SIZE (imode),
                                       GET_MODE_PRECISION (imode));
      return immed_wide_int_const (result, imode);
    }

  scalar_float_mode fmode;
  if (is_a <scalar_float_mode> (mode, &fmode))
    {
      /* We need to build an array of integers in target memory order.
         All integers before the last one have 32 bits; the last one may
         have 32 bits or fewer, depending on whether the mode bitsize
         is divisible by 32.  */
      long el32[MAX_BITSIZE_MODE_ANY_MODE / 32];
      unsigned int num_el32 = CEIL (GET_MODE_BITSIZE (fmode), 32);
      memset (el32, 0, num_el32 * sizeof (long));

      /* The (maximum) number of target bytes per element of el32.  */
      unsigned int bytes_per_el32 = 32 / BITS_PER_UNIT;
      gcc_assert (bytes_per_el32 != 0);

      unsigned int mode_bytes = GET_MODE_SIZE (fmode);
      for (unsigned int byte = 0; byte < mode_bytes; ++byte)
        {
          /* Locate the 32-bit element that holds this byte and the
             byte's position within that element.  */
          unsigned int index = byte / bytes_per_el32;
          unsigned int subbyte = byte % bytes_per_el32;
          unsigned int int_bytes = MIN (bytes_per_el32,
                                        mode_bytes - index * bytes_per_el32);
          /* Always constant because the inputs are.  */
          unsigned int lsb
            = subreg_size_lsb (1, int_bytes, subbyte).to_constant ();
          el32[index] |= (unsigned long) bytes[first_byte + byte] << lsb;
        }
      /* Convert the target-format image into a host real value.  */
      REAL_VALUE_TYPE r;
      real_from_target (&r, el32, fmode);
      return const_double_from_real_value (r, fmode);
    }

  if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
    {
      /* Fixed-point modes: accumulate the bytes directly into the
         low/high HOST_WIDE_INT pair of a FIXED_VALUE_TYPE.  */
      scalar_mode smode = as_a <scalar_mode> (mode);
      FIXED_VALUE_TYPE f;
      f.data.low = 0;
      f.data.high = 0;
      f.mode = smode;

      unsigned int mode_bytes = GET_MODE_SIZE (smode);
      for (unsigned int byte = 0; byte < mode_bytes; ++byte)
        {
          /* Always constant because the inputs are.  */
          unsigned int lsb
            = subreg_size_lsb (1, mode_bytes, byte).to_constant ();
          unsigned HOST_WIDE_INT unit = bytes[first_byte + byte];
          if (lsb >= HOST_BITS_PER_WIDE_INT)
            /* This byte lands in the upper HOST_WIDE_INT.  */
            f.data.high |= unit << (lsb - HOST_BITS_PER_WIDE_INT);
          else
            f.data.low |= unit << lsb;
        }
      return CONST_FIXED_FROM_FIXED_VALUE (f, mode);
    }

  /* No other constant forms can be decoded from a byte image.  */
  return NULL_RTX;
}
    8111              : 
/* Simplify a byte offset BYTE into CONST_VECTOR X.  The main purpose
   is to convert a runtime BYTE value into a constant one.  */

static poly_uint64
simplify_const_vector_byte_offset (rtx x, poly_uint64 byte)
{
  /* Cope with MODE_VECTOR_BOOL by operating on bits rather than bytes.  */
  machine_mode mode = GET_MODE (x);
  unsigned int elt_bits = vector_element_size (GET_MODE_PRECISION (mode),
                                               GET_MODE_NUNITS (mode));
  /* The number of bits needed to encode one element from each pattern.  */
  unsigned int sequence_bits = CONST_VECTOR_NPATTERNS (x) * elt_bits;

  /* Identify the start point in terms of a sequence number and a byte offset
     within that sequence.  If the division cannot be resolved at compile
     time, BYTE is returned unchanged.  */
  poly_uint64 first_sequence;
  unsigned HOST_WIDE_INT subbit;
  if (can_div_trunc_p (byte * BITS_PER_UNIT, sequence_bits,
                       &first_sequence, &subbit))
    {
      unsigned int nelts_per_pattern = CONST_VECTOR_NELTS_PER_PATTERN (x);
      if (nelts_per_pattern == 1)
        /* This is a duplicated vector, so the value of FIRST_SEQUENCE
           doesn't matter.  */
        byte = subbit / BITS_PER_UNIT;
      else if (nelts_per_pattern == 2 && known_gt (first_sequence, 0U))
        {
          /* The subreg drops the first element from each pattern and
             only uses the second element.  Find the first sequence
             that starts on a byte boundary.  */
          subbit += least_common_multiple (sequence_bits, BITS_PER_UNIT);
          byte = subbit / BITS_PER_UNIT;
        }
      /* NOTE(review): nelts_per_pattern == 3 (stepped encodings) is
         deliberately left alone here — the offset stays symbolic.  */
    }
  return byte;
}
    8148              : 
/* Subroutine of simplify_subreg in which:

   - X is known to be a CONST_VECTOR
   - OUTERMODE is known to be a vector mode

   Try to handle the subreg by operating on the CONST_VECTOR encoding
   rather than on each individual element of the CONST_VECTOR.

   Return the simplified subreg on success, otherwise return NULL_RTX.  */

static rtx
simplify_const_vector_subreg (machine_mode outermode, rtx x,
                              machine_mode innermode, unsigned int first_byte)
{
  /* Paradoxical subregs of vectors have dubious semantics.  */
  if (paradoxical_subreg_p (outermode, innermode))
    return NULL_RTX;

  /* We can only preserve the semantics of a stepped pattern if the new
     vector element is the same as the original one.  */
  if (CONST_VECTOR_STEPPED_P (x)
      && GET_MODE_INNER (outermode) != GET_MODE_INNER (innermode))
    return NULL_RTX;

  /* Cope with MODE_VECTOR_BOOL by operating on bits rather than bytes.  */
  unsigned int x_elt_bits
    = vector_element_size (GET_MODE_PRECISION (innermode),
                           GET_MODE_NUNITS (innermode));
  unsigned int out_elt_bits
    = vector_element_size (GET_MODE_PRECISION (outermode),
                           GET_MODE_NUNITS (outermode));

  /* The number of bits needed to encode one element from every pattern
     of the original vector.  */
  unsigned int x_sequence_bits = CONST_VECTOR_NPATTERNS (x) * x_elt_bits;

  /* The number of bits needed to encode one element from every pattern
     of the result.  */
  unsigned int out_sequence_bits
    = least_common_multiple (x_sequence_bits, out_elt_bits);

  /* Work out the number of interleaved patterns in the output vector
     and the number of encoded elements per pattern.  */
  unsigned int out_npatterns = out_sequence_bits / out_elt_bits;
  unsigned int nelts_per_pattern = CONST_VECTOR_NELTS_PER_PATTERN (x);

  /* The encoding scheme requires the number of elements to be a multiple
     of the number of patterns, so that each pattern appears at least once
     and so that the same number of elements appear from each pattern.  */
  bool ok_p = multiple_p (GET_MODE_NUNITS (outermode), out_npatterns);
  unsigned int const_nunits;
  if (GET_MODE_NUNITS (outermode).is_constant (&const_nunits)
      && (!ok_p || out_npatterns * nelts_per_pattern > const_nunits))
    {
      /* Either the encoding is invalid, or applying it would give us
         more elements than we need.  Just encode each element directly.  */
      out_npatterns = const_nunits;
      nelts_per_pattern = 1;
    }
  else if (!ok_p)
    return NULL_RTX;

  /* Get enough bytes of X to form the new encoding.  */
  unsigned int buffer_bits = out_npatterns * nelts_per_pattern * out_elt_bits;
  unsigned int buffer_bytes = CEIL (buffer_bits, BITS_PER_UNIT);
  auto_vec<target_unit, 128> buffer (buffer_bytes);
  if (!native_encode_rtx (innermode, x, buffer, first_byte, buffer_bytes))
    return NULL_RTX;

  /* Reencode the bytes as OUTERMODE.  */
  return native_decode_vector_rtx (outermode, buffer, 0, out_npatterns,
                                   nelts_per_pattern);
}
    8222              : 
/* Try to simplify a subreg of a constant by encoding the subreg region
   as a sequence of target bytes and reading them back in the new mode.
   Return the new value on success, otherwise return null.

   The subreg has outer mode OUTERMODE, inner mode INNERMODE, inner value X
   and byte offset FIRST_BYTE.  */

static rtx
simplify_immed_subreg (fixed_size_mode outermode, rtx x,
                       machine_mode innermode, unsigned int first_byte)
{
  unsigned int buffer_bytes = GET_MODE_SIZE (outermode);
  auto_vec<target_unit, 128> buffer (buffer_bytes);

  /* Some ports misuse CCmode.  */
  if (GET_MODE_CLASS (outermode) == MODE_CC && CONST_INT_P (x))
    return x;

  /* Paradoxical subregs read undefined values for bytes outside of the
     inner value.  However, we have traditionally always sign-extended
     integer constants and zero-extended others.  */
  unsigned int inner_bytes = buffer_bytes;
  if (paradoxical_subreg_p (outermode, innermode))
    {
      /* Both modes must have a compile-time constant size here.  */
      if (!GET_MODE_SIZE (innermode).is_constant (&inner_bytes))
        return NULL_RTX;

      /* FILLER is the byte used to pad beyond the inner value:
         all-ones for negative integer constants (sign extension),
         zero otherwise.  */
      target_unit filler = 0;
      if (CONST_SCALAR_INT_P (x) && wi::neg_p (rtx_mode_t (x, innermode)))
        filler = -1;

      /* Add any leading bytes due to big-endian layout.  The number of
         bytes must be constant because both modes have constant size.  */
      unsigned int leading_bytes
        = -byte_lowpart_offset (outermode, innermode).to_constant ();
      for (unsigned int i = 0; i < leading_bytes; ++i)
        buffer.quick_push (filler);

      if (!native_encode_rtx (innermode, x, buffer, first_byte, inner_bytes))
        return NULL_RTX;

      /* Add any trailing bytes due to little-endian layout.  */
      while (buffer.length () < buffer_bytes)
        buffer.quick_push (filler);
    }
  else if (!native_encode_rtx (innermode, x, buffer, first_byte, inner_bytes))
    return NULL_RTX;
  rtx ret = native_decode_rtx (outermode, buffer, 0);
  if (ret && FLOAT_MODE_P (outermode))
    {
      /* Check that the decoded float round-trips: re-encode it and
         compare against the original bytes, giving up if any byte
         differs (i.e. if the bit pattern is not exactly representable
         as a float constant in OUTERMODE).  */
      auto_vec<target_unit, 128> buffer2 (buffer_bytes);
      if (!native_encode_rtx (outermode, ret, buffer2, 0, buffer_bytes))
        return NULL_RTX;
      for (unsigned int i = 0; i < buffer_bytes; ++i)
        if (buffer[i] != buffer2[i])
          return NULL_RTX;
    }
  return ret;
}
    8282              : 
    8283              : /* Simplify SUBREG:OUTERMODE(OP:INNERMODE, BYTE)
    8284              :    Return 0 if no simplifications are possible.  */
    8285              : rtx
    8286     67747280 : simplify_context::simplify_subreg (machine_mode outermode, rtx op,
    8287              :                                    machine_mode innermode, poly_uint64 byte)
    8288              : {
    8289              :   /* Little bit of sanity checking.  */
    8290     67747280 :   gcc_assert (innermode != VOIDmode);
    8291     67747280 :   gcc_assert (outermode != VOIDmode);
    8292     67747280 :   gcc_assert (innermode != BLKmode);
    8293     67747280 :   gcc_assert (outermode != BLKmode);
    8294              : 
    8295     67747280 :   gcc_assert (GET_MODE (op) == innermode
    8296              :               || GET_MODE (op) == VOIDmode);
    8297              : 
    8298    135494560 :   poly_uint64 outersize = GET_MODE_SIZE (outermode);
    8299     67747280 :   if (!multiple_p (byte, outersize))
    8300              :     return NULL_RTX;
    8301              : 
    8302    135494520 :   poly_uint64 innersize = GET_MODE_SIZE (innermode);
    8303     67747260 :   if (maybe_ge (byte, innersize))
    8304              :     return NULL_RTX;
    8305              : 
    8306     67747260 :   if (outermode == innermode && known_eq (byte, 0U))
    8307      4597388 :     return op;
    8308              : 
    8309     63149872 :   if (GET_CODE (op) == CONST_VECTOR)
    8310       275150 :     byte = simplify_const_vector_byte_offset (op, byte);
    8311              : 
    8312    126299744 :   if (multiple_p (byte, GET_MODE_UNIT_SIZE (innermode)))
    8313              :     {
    8314     57354126 :       rtx elt;
    8315              : 
    8316     48873299 :       if (VECTOR_MODE_P (outermode)
    8317     25442481 :           && GET_MODE_INNER (outermode) == GET_MODE_INNER (innermode)
    8318     59056282 :           && vec_duplicate_p (op, &elt))
    8319        12185 :         return gen_vec_duplicate (outermode, elt);
    8320              : 
    8321     57350238 :       if (outermode == GET_MODE_INNER (innermode)
    8322     57350238 :           && vec_duplicate_p (op, &elt))
    8323         8297 :         return elt;
    8324              :     }
    8325              : 
    8326     63137687 :   if (CONST_SCALAR_INT_P (op)
    8327     52870562 :       || CONST_DOUBLE_AS_FLOAT_P (op)
    8328     52813625 :       || CONST_FIXED_P (op)
    8329     52813625 :       || GET_CODE (op) == CONST_VECTOR)
    8330              :     {
    8331     10592290 :       unsigned HOST_WIDE_INT cbyte;
    8332     10592290 :       if (byte.is_constant (&cbyte))
    8333              :         {
    8334     10592290 :           if (GET_CODE (op) == CONST_VECTOR && VECTOR_MODE_P (outermode))
    8335              :             {
    8336       173381 :               rtx tmp = simplify_const_vector_subreg (outermode, op,
    8337              :                                                       innermode, cbyte);
    8338       173381 :               if (tmp)
    8339     10592290 :                 return tmp;
    8340              :             }
    8341              : 
    8342     10426649 :           fixed_size_mode fs_outermode;
    8343     10426649 :           if (is_a <fixed_size_mode> (outermode, &fs_outermode))
    8344     10426649 :             return simplify_immed_subreg (fs_outermode, op, innermode, cbyte);
    8345              :         }
    8346              :     }
    8347              : 
    8348              :   /* Changing mode twice with SUBREG => just change it once,
    8349              :      or not at all if changing back op starting mode.  */
    8350     52545397 :   if (GET_CODE (op) == SUBREG)
    8351              :     {
    8352      1300877 :       machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
    8353      2601754 :       poly_uint64 innermostsize = GET_MODE_SIZE (innermostmode);
    8354      1300877 :       rtx newx;
    8355              : 
    8356              :       /* Make sure that the relationship between the two subregs is
    8357              :          known at compile time.  */
    8358      1300877 :       if (!ordered_p (outersize, innermostsize))
    8359              :         return NULL_RTX;
    8360              : 
    8361      1300877 :       if (outermode == innermostmode
    8362       732711 :           && known_eq (byte, subreg_lowpart_offset (outermode, innermode))
    8363      2033587 :           && known_eq (SUBREG_BYTE (op),
    8364              :                        subreg_lowpart_offset (innermode, innermostmode)))
    8365       732710 :         return SUBREG_REG (op);
    8366              : 
    8367              :       /* Work out the memory offset of the final OUTERMODE value relative
    8368              :          to the inner value of OP.  */
    8369       568167 :       poly_int64 mem_offset = subreg_memory_offset (outermode,
    8370              :                                                     innermode, byte);
    8371       568167 :       poly_int64 op_mem_offset = subreg_memory_offset (op);
    8372       568167 :       poly_int64 final_offset = mem_offset + op_mem_offset;
    8373              : 
    8374              :       /* See whether resulting subreg will be paradoxical.  */
    8375       568167 :       if (!paradoxical_subreg_p (outermode, innermostmode))
    8376              :         {
    8377              :           /* Bail out in case resulting subreg would be incorrect.  */
    8378       934950 :           if (maybe_lt (final_offset, 0)
    8379       934941 :               || maybe_ge (poly_uint64 (final_offset), innermostsize)
    8380       934949 :               || !multiple_p (final_offset, outersize))
    8381            9 :             return NULL_RTX;
    8382              :         }
    8383              :       else
    8384              :         {
    8385       100692 :           poly_int64 required_offset = subreg_memory_offset (outermode,
    8386              :                                                              innermostmode, 0);
    8387       100692 :           if (maybe_ne (final_offset, required_offset))
    8388            0 :             return NULL_RTX;
    8389              :           /* Paradoxical subregs always have byte offset 0.  */
    8390       100692 :           final_offset = 0;
    8391              :         }
    8392              : 
    8393              :       /* Recurse for further possible simplifications.  */
    8394       568158 :       newx = simplify_subreg (outermode, SUBREG_REG (op), innermostmode,
    8395       568158 :                               final_offset);
    8396       568158 :       if (newx)
    8397              :         return newx;
    8398       567779 :       if (validate_subreg (outermode, innermostmode,
    8399       567779 :                            SUBREG_REG (op), final_offset))
    8400              :         {
    8401       509128 :           newx = gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset);
    8402       509128 :           if (SUBREG_PROMOTED_VAR_P (op)
    8403          365 :               && SUBREG_PROMOTED_SIGN (op) >= 0
    8404          365 :               && GET_MODE_CLASS (outermode) == MODE_INT
    8405          361 :               && known_ge (outersize, innersize)
    8406          336 :               && known_le (outersize, innermostsize)
    8407       509132 :               && subreg_lowpart_p (newx))
    8408              :             {
    8409            4 :               SUBREG_PROMOTED_VAR_P (newx) = 1;
    8410            4 :               SUBREG_PROMOTED_SET (newx, SUBREG_PROMOTED_GET (op));
    8411              :             }
    8412       509128 :           return newx;
    8413              :         }
    8414              :       return NULL_RTX;
    8415              :     }
    8416              : 
    8417              :   /* SUBREG of a hard register => just change the register number
    8418              :      and/or mode.  If the hard register is not valid in that mode,
    8419              :      suppress this simplification.  If the hard register is the stack,
    8420              :      frame, or argument pointer, leave this as a SUBREG.  */
    8421              : 
    8422     51244520 :   if (REG_P (op) && HARD_REGISTER_P (op))
    8423              :     {
    8424     10632443 :       unsigned int regno, final_regno;
    8425              : 
    8426     10632443 :       regno = REGNO (op);
    8427     10632443 :       final_regno = simplify_subreg_regno (regno, innermode, byte, outermode);
    8428     10632443 :       if (HARD_REGISTER_NUM_P (final_regno))
    8429              :         {
    8430     10608296 :           rtx x = gen_rtx_REG_offset (op, outermode, final_regno,
    8431              :                                       subreg_memory_offset (outermode,
    8432              :                                                             innermode, byte));
    8433              : 
    8434              :           /* Propagate original regno.  We don't have any way to specify
    8435              :              the offset inside original regno, so do so only for lowpart.
    8436              :              The information is used only by alias analysis that cannot
     8437              :              grok partial register anyway.  */
    8438              : 
    8439     10608296 :           if (known_eq (subreg_lowpart_offset (outermode, innermode), byte))
    8440      7952589 :             ORIGINAL_REGNO (x) = ORIGINAL_REGNO (op);
    8441     10608296 :           return x;
    8442              :         }
    8443              :     }
    8444              : 
    8445              :   /* If we have a SUBREG of a register that we are replacing and we are
    8446              :      replacing it with a MEM, make a new MEM and try replacing the
    8447              :      SUBREG with it.  Don't do this if the MEM has a mode-dependent address
    8448              :      or if we would be widening it.  */
    8449              : 
    8450     40636224 :   if (MEM_P (op)
    8451      1716218 :       && ! mode_dependent_address_p (XEXP (op, 0), MEM_ADDR_SPACE (op))
    8452              :       /* Allow splitting of volatile memory references in case we don't
    8453              :          have instruction to move the whole thing.  */
    8454      1716215 :       && (! MEM_VOLATILE_P (op)
    8455        44289 :           || ! have_insn_for (SET, innermode))
    8456              :       && !(STRICT_ALIGNMENT && MEM_ALIGN (op) < GET_MODE_ALIGNMENT (outermode))
    8457     42308150 :       && known_le (outersize, innersize))
    8458       811146 :     return adjust_address_nv (op, outermode, byte);
    8459              : 
    8460              :   /* Handle complex or vector values represented as CONCAT or VEC_CONCAT
    8461              :      of two parts.  */
    8462     39825078 :   if (GET_CODE (op) == CONCAT
    8463     39825078 :       || GET_CODE (op) == VEC_CONCAT)
    8464              :     {
    8465       192261 :       poly_uint64 final_offset;
    8466       192261 :       rtx part, res;
    8467              : 
    8468       192261 :       machine_mode part_mode = GET_MODE (XEXP (op, 0));
    8469       192261 :       if (part_mode == VOIDmode)
    8470           11 :         part_mode = GET_MODE_INNER (GET_MODE (op));
    8471       384522 :       poly_uint64 part_size = GET_MODE_SIZE (part_mode);
    8472       192261 :       if (known_lt (byte, part_size))
    8473              :         {
    8474       190706 :           part = XEXP (op, 0);
    8475       190706 :           final_offset = byte;
    8476              :         }
    8477         1555 :       else if (known_ge (byte, part_size))
    8478              :         {
    8479         1555 :           part = XEXP (op, 1);
    8480         1555 :           final_offset = byte - part_size;
    8481              :         }
    8482              :       else
    8483              :         return NULL_RTX;
    8484              : 
    8485       192261 :       if (maybe_gt (final_offset + outersize, part_size))
    8486              :         return NULL_RTX;
    8487              : 
    8488       128469 :       part_mode = GET_MODE (part);
    8489       128469 :       if (part_mode == VOIDmode)
    8490            0 :         part_mode = GET_MODE_INNER (GET_MODE (op));
    8491       128469 :       res = simplify_subreg (outermode, part, part_mode, final_offset);
    8492       128469 :       if (res)
    8493              :         return res;
    8494          295 :       if (validate_subreg (outermode, part_mode, part, final_offset))
    8495          295 :         return gen_rtx_SUBREG (outermode, part, final_offset);
    8496              :       return NULL_RTX;
    8497              :     }
    8498              : 
    8499              :   /* Simplify
    8500              :         (subreg (vec_merge (X)
    8501              :                            (vector)
    8502              :                            (const_int ((1 << N) | M)))
    8503              :                 (N * sizeof (outermode)))
    8504              :      to
    8505              :         (subreg (X) (N * sizeof (outermode)))
    8506              :    */
    8507     39632817 :   unsigned int idx;
    8508     79265634 :   if (constant_multiple_p (byte, GET_MODE_SIZE (outermode), &idx)
    8509     39632817 :       && idx < HOST_BITS_PER_WIDE_INT
    8510     39632817 :       && GET_CODE (op) == VEC_MERGE
    8511       614616 :       && GET_MODE_INNER (innermode) == outermode
    8512         4891 :       && CONST_INT_P (XEXP (op, 2))
    8513     39637126 :       && (UINTVAL (XEXP (op, 2)) & (HOST_WIDE_INT_1U << idx)) != 0)
    8514         4300 :     return simplify_gen_subreg (outermode, XEXP (op, 0), innermode, byte);
    8515              : 
    8516              :   /* A SUBREG resulting from a zero extension may fold to zero if
    8517              :      it extracts higher bits that the ZERO_EXTEND's source bits.  */
    8518     39628517 :   if (GET_CODE (op) == ZERO_EXTEND && SCALAR_INT_MODE_P (innermode))
    8519              :     {
    8520       213603 :       poly_uint64 bitpos = subreg_lsb_1 (outermode, innermode, byte);
    8521       213603 :       if (known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (XEXP (op, 0)))))
    8522        54918 :         return CONST0_RTX (outermode);
    8523              :     }
    8524              : 
    8525              :   /* Optimize SUBREGS of scalar integral ASHIFT by a valid constant.  */
    8526     39573599 :   if (GET_CODE (op) == ASHIFT
    8527       699624 :       && SCALAR_INT_MODE_P (innermode)
    8528       672583 :       && CONST_INT_P (XEXP (op, 1))
    8529       598327 :       && INTVAL (XEXP (op, 1)) > 0
    8530     40871500 :       && known_gt (GET_MODE_BITSIZE (innermode), INTVAL (XEXP (op, 1))))
    8531              :     {
    8532       598277 :       HOST_WIDE_INT val = INTVAL (XEXP (op, 1));
    8533              :       /* A lowpart SUBREG of a ASHIFT by a constant may fold to zero.  */
    8534       598277 :       if (known_eq (subreg_lowpart_offset (outermode, innermode), byte)
    8535      1159072 :           && known_le (GET_MODE_BITSIZE (outermode), val))
    8536       193892 :         return CONST0_RTX (outermode);
    8537              :       /* Optimize the highpart SUBREG of a suitable ASHIFT (ZERO_EXTEND).  */
    8538       439340 :       if (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
    8539        35634 :           && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
    8540        70954 :           && known_eq (GET_MODE_BITSIZE (outermode), val)
    8541        69910 :           && known_eq (GET_MODE_BITSIZE (innermode), 2 * val)
    8542       474974 :           && known_eq (subreg_highpart_offset (outermode, innermode), byte))
    8543        34955 :         return XEXP (XEXP (op, 0), 0);
    8544              :     }
    8545              : 
    8546     40829730 :   auto distribute_subreg = [&](rtx op)
    8547              :     {
    8548      1450023 :       return simplify_subreg (outermode, op, innermode, byte);
    8549     39379707 :     };
    8550              : 
    8551              :   /* Try distributing the subreg through logic operations, if that
    8552              :      leads to all subexpressions being simplified.  For example,
    8553              :      distributing the outer subreg in:
    8554              : 
    8555              :        (subreg:SI (not:QI (subreg:QI (reg:SI X) <lowpart>)) 0)
    8556              : 
    8557              :      gives:
    8558              : 
    8559              :        (not:SI (reg:SI X))
    8560              : 
    8561              :      This should be a win if the outermode is word_mode, since logical
    8562              :      operations on word_mode should (a) be no more expensive than logical
    8563              :      operations on subword modes and (b) are likely to be cheaper than
    8564              :      logical operations on multiword modes.
    8565              : 
    8566              :      Otherwise, handle the case where the subreg is non-narrowing and does
    8567              :      not change the number of words.  The non-narrowing condition ensures
    8568              :      that we don't convert word_mode operations to subword operations.  */
    8569     39379707 :   scalar_int_mode int_outermode, int_innermode;
    8570     39379707 :   if (is_a <scalar_int_mode> (outermode, &int_outermode)
    8571     32632108 :       && is_a <scalar_int_mode> (innermode, &int_innermode)
    8572     70838736 :       && (outermode == word_mode
    8573     17704112 :           || ((GET_MODE_PRECISION (int_outermode)
    8574     17704112 :                >= GET_MODE_PRECISION (int_innermode))
    8575      4505226 :               && (CEIL (GET_MODE_SIZE (int_outermode), UNITS_PER_WORD)
    8576      4423786 :                   <= CEIL (GET_MODE_SIZE (int_innermode), UNITS_PER_WORD)))))
    8577     18119084 :     switch (GET_CODE (op))
    8578              :       {
    8579        30862 :       case NOT:
    8580        30862 :         if (rtx op0 = distribute_subreg (XEXP (op, 0)))
    8581         1439 :           return simplify_gen_unary (GET_CODE (op), outermode, op0, outermode);
    8582              :         break;
    8583              : 
    8584       456171 :       case AND:
    8585       456171 :       case IOR:
    8586       456171 :       case XOR:
    8587       456171 :         if (rtx op0 = distribute_subreg (XEXP (op, 0)))
    8588       205436 :           if (rtx op1 = distribute_subreg (XEXP (op, 1)))
    8589       200435 :             return simplify_gen_binary (GET_CODE (op), outermode, op0, op1);
    8590              :         break;
    8591              : 
    8592              :       default:
    8593              :         break;
    8594              :       }
    8595              : 
    8596     39177833 :   if (is_a <scalar_int_mode> (outermode, &int_outermode)
    8597     32430234 :       && is_a <scalar_int_mode> (innermode, &int_innermode)
    8598     71608067 :       && known_eq (byte, subreg_lowpart_offset (int_outermode, int_innermode)))
    8599              :     {
    8600              :       /* Handle polynomial integers.  The upper bits of a paradoxical
    8601              :          subreg are undefined, so this is safe regardless of whether
    8602              :          we're truncating or extending.  */
    8603     29089042 :       if (CONST_POLY_INT_P (op))
    8604              :         {
    8605              :           poly_wide_int val
    8606              :             = poly_wide_int::from (const_poly_int_value (op),
    8607              :                                    GET_MODE_PRECISION (int_outermode),
    8608              :                                    SIGNED);
    8609              :           return immed_wide_int_const (val, int_outermode);
    8610              :         }
    8611              : 
    8612     29089042 :       if (GET_MODE_PRECISION (int_outermode)
    8613     29089042 :           < GET_MODE_PRECISION (int_innermode))
    8614              :         {
    8615     16533425 :           rtx tem = simplify_truncation (int_outermode, op, int_innermode);
    8616     16533425 :           if (tem)
    8617              :             return tem;
    8618              :         }
    8619              :     }
    8620              : 
    8621              :   /* If the outer mode is not integral, try taking a subreg with the equivalent
    8622              :      integer outer mode and then bitcasting the result.
    8623              :      Other simplifications rely on integer to integer subregs and we'd
    8624              :      potentially miss out on optimizations otherwise.  */
    8625     76459396 :   if (known_gt (GET_MODE_SIZE (innermode),
    8626              :                 GET_MODE_SIZE (outermode))
    8627     18985382 :       && SCALAR_INT_MODE_P (innermode)
    8628     17842373 :       && !SCALAR_INT_MODE_P (outermode)
    8629     57393020 :       && int_mode_for_size (GET_MODE_BITSIZE (outermode),
    8630        88970 :                             0).exists (&int_outermode))
    8631              :     {
    8632        88970 :       rtx tem = simplify_subreg (int_outermode, op, innermode, byte);
    8633        88970 :       if (tem)
    8634         1984 :         return lowpart_subreg (outermode, tem, int_outermode);
    8635              :     }
    8636              : 
    8637              :   /* If OP is a vector comparison and the subreg is not changing the
    8638              :      number of elements or the size of the elements, change the result
    8639              :      of the comparison to the new mode.  */
    8640     38227714 :   if (COMPARISON_P (op)
    8641       263310 :       && VECTOR_MODE_P (outermode)
    8642       190836 :       && VECTOR_MODE_P (innermode)
    8643       572484 :       && known_eq (GET_MODE_NUNITS (outermode), GET_MODE_NUNITS (innermode))
    8644     38575906 :       && known_eq (GET_MODE_UNIT_SIZE (outermode),
    8645              :                    GET_MODE_UNIT_SIZE (innermode)))
    8646       115720 :     return simplify_gen_relational (GET_CODE (op), outermode, innermode,
    8647       115720 :                                     XEXP (op, 0), XEXP (op, 1));
    8648              : 
    8649              :   /* Distribute non-paradoxical subregs through logic ops in cases where
    8650              :      one term disappears.
    8651              : 
    8652              :      (subreg:M1 (and:M2 X C1)) -> (subreg:M1 X)
    8653              :      (subreg:M1 (ior:M2 X C1)) -> (subreg:M1 C1)
    8654              :      (subreg:M1 (xor:M2 X C1)) -> (subreg:M1 (not:M2 X))
    8655              : 
    8656              :      if M2 is no smaller than M1 and (subreg:M1 C1) is all-ones.
    8657              : 
    8658              :      (subreg:M1 (and:M2 X C2)) -> (subreg:M1 C2)
    8659              :      (subreg:M1 (ior/xor:M2 X C2)) -> (subreg:M1 X)
    8660              : 
    8661              :      if M2 is no smaller than M1 and (subreg:M1 C2) is zero.  */
    8662     38111994 :   if (known_ge (innersize, outersize)
    8663     25081869 :       && GET_MODE_CLASS (outermode) == GET_MODE_CLASS (innermode)
    8664     23063719 :       && (GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR)
    8665     39651655 :       && CONSTANT_P (XEXP (op, 1)))
    8666              :     {
    8667       750316 :       rtx op1_subreg = distribute_subreg (XEXP (op, 1));
    8668       750316 :       if (op1_subreg == CONSTM1_RTX (outermode))
    8669              :         {
    8670       115511 :           if (GET_CODE (op) == IOR)
    8671              :             return op1_subreg;
    8672       115277 :           rtx op0 = XEXP (op, 0);
    8673       115277 :           if (GET_CODE (op) == XOR)
    8674          793 :             op0 = simplify_gen_unary (NOT, innermode, op0, innermode);
    8675       115277 :           return simplify_gen_subreg (outermode, op0, innermode, byte);
    8676              :         }
    8677              : 
    8678       634805 :       if (op1_subreg == CONST0_RTX (outermode))
    8679        12535 :         return (GET_CODE (op) == AND
    8680        12535 :                 ? op1_subreg
    8681         7238 :                 : distribute_subreg (XEXP (op, 0)));
    8682              :     }
    8683              : 
    8684              :   return NULL_RTX;
    8685              : }
    8686              : 
    8687              : /* Make a SUBREG operation or equivalent if it folds.  */
    8688              : 
    8689              : rtx
    8690     42615533 : simplify_context::simplify_gen_subreg (machine_mode outermode, rtx op,
    8691              :                                        machine_mode innermode,
    8692              :                                        poly_uint64 byte)
    8693              : {
    8694     42615533 :   rtx newx;
    8695              : 
    8696     42615533 :   newx = simplify_subreg (outermode, op, innermode, byte);
    8697     42615533 :   if (newx)
    8698              :     return newx;
    8699              : 
    8700     19890459 :   if (GET_CODE (op) == SUBREG
    8701     19890459 :       || GET_CODE (op) == CONCAT
    8702     19855805 :       || CONST_SCALAR_INT_P (op)
    8703     19855779 :       || CONST_DOUBLE_AS_FLOAT_P (op)
    8704     19855779 :       || CONST_FIXED_P (op)
    8705     19855779 :       || GET_CODE (op) == CONST_VECTOR)
    8706              :     return NULL_RTX;
    8707              : 
    8708     19855769 :   if (validate_subreg (outermode, innermode, op, byte))
    8709     19824900 :     return gen_rtx_SUBREG (outermode, op, byte);
    8710              : 
    8711              :   return NULL_RTX;
    8712              : }
    8713              : 
    8714              : /* Generates a subreg to get the least significant part of EXPR (in mode
    8715              :    INNER_MODE) to OUTER_MODE.  */
    8716              : 
    8717              : rtx
    8718     31736529 : simplify_context::lowpart_subreg (machine_mode outer_mode, rtx expr,
    8719              :                                   machine_mode inner_mode)
    8720              : {
    8721     31736529 :   return simplify_gen_subreg (outer_mode, expr, inner_mode,
    8722     31736529 :                               subreg_lowpart_offset (outer_mode, inner_mode));
    8723              : }
    8724              : 
    8725              : /* Generate RTX to select element at INDEX out of vector OP.  */
    8726              : 
    8727              : rtx
    8728       637445 : simplify_context::simplify_gen_vec_select (rtx op, unsigned int index)
    8729              : {
    8730       637445 :   gcc_assert (VECTOR_MODE_P (GET_MODE (op)));
    8731              : 
    8732       637445 :   scalar_mode imode = GET_MODE_INNER (GET_MODE (op));
    8733              : 
    8734      1274890 :   if (known_eq (index * GET_MODE_SIZE (imode),
    8735              :                 subreg_lowpart_offset (imode, GET_MODE (op))))
    8736              :     {
    8737       637295 :       rtx res = lowpart_subreg (imode, op, GET_MODE (op));
    8738       637295 :       if (res)
    8739              :         return res;
    8740              :     }
    8741              : 
    8742          218 :   rtx tmp = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (1, GEN_INT (index)));
    8743          218 :   return gen_rtx_VEC_SELECT (imode, op, tmp);
    8744              : }
    8745              : 
    8746              : 
/* Simplify X, an rtx expression.

   Return the simplified expression or NULL if no simplifications
   were possible.

   This is the preferred entry point into the simplification routines;
   however, we still allow passes to call the more specific routines.

   Right now GCC has three (yes, three) major bodies of RTL simplification
   code that need to be unified.

        1. fold_rtx in cse.cc.  This code uses various CSE specific
           information to aid in RTL simplification.

        2. simplify_rtx in combine.cc.  Similar to fold_rtx, except that
           it uses combine specific information to aid in RTL
           simplification.

        3. The routines in this file.


   Long term we want to only have one body of simplification code; to
   get to that state I recommend the following steps:

        1. Pour over fold_rtx & simplify_rtx and move any simplifications
           which are not pass dependent state into these routines.

        2. As code is moved by #1, change fold_rtx & simplify_rtx to
           use this routine whenever possible.

        3. Allow for pass dependent state to be provided to these
           routines and add simplifications based on the pass dependent
           state.  Remove code from cse.cc & combine.cc that becomes
           redundant/dead.

    It will take time, but ultimately the compiler will be easier to
    maintain and improve.  It's totally silly that when we add a
    simplification that it needs to be added to 4 places (3 for RTL
    simplification and 1 for tree simplification.  */

rtx
simplify_rtx (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);
  const machine_mode mode = GET_MODE (x);

  /* Dispatch on the rtx class and delegate to the matching
     simplify_*_operation routine.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      return simplify_unary_operation (code, mode,
                                       XEXP (x, 0), GET_MODE (XEXP (x, 0)));
    case RTX_COMM_ARITH:
      /* Canonicalize commutative operand order before trying the
         generic binary simplifier.  */
      if (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
        return simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));

      /* Fall through.  */

    case RTX_BIN_ARITH:
      return simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      return simplify_ternary_operation (code, mode, GET_MODE (XEXP (x, 0)),
                                         XEXP (x, 0), XEXP (x, 1),
                                         XEXP (x, 2));

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      /* The comparison mode comes from whichever operand has a mode;
         operand 0 may be VOIDmode (e.g. a constant).  */
      return simplify_relational_operation (code, mode,
                                            ((GET_MODE (XEXP (x, 0))
                                             != VOIDmode)
                                            ? GET_MODE (XEXP (x, 0))
                                            : GET_MODE (XEXP (x, 1))),
                                            XEXP (x, 0),
                                            XEXP (x, 1));

    case RTX_EXTRA:
      if (code == SUBREG)
        return simplify_subreg (mode, SUBREG_REG (x),
                                GET_MODE (SUBREG_REG (x)),
                                SUBREG_BYTE (x));
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
        {
          /* Convert (lo_sum (high FOO) FOO) to FOO.  */
          if (GET_CODE (XEXP (x, 0)) == HIGH
              && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
          return XEXP (x, 1);
        }
      break;

    default:
      break;
    }
  /* Nothing simplified.  */
  return NULL;
}
    8845              : 
    8846              : #if CHECKING_P
    8847              : 
    8848              : namespace selftest {
    8849              : 
    8850              : /* Make a unique pseudo REG of mode MODE for use by selftests.  */
    8851              : 
    8852              : static rtx
    8853         2672 : make_test_reg (machine_mode mode)
    8854              : {
    8855         2672 :   static int test_reg_num = LAST_VIRTUAL_REGISTER + 1;
    8856              : 
    8857         2672 :   return gen_rtx_REG (mode, test_reg_num++);
    8858              : }
    8859              : 
/* Verify that the simplification routines fold basic scalar integer
   identities, self-inverses, reflexive operations, distributivity and
   useless extensions for MODE.  */

static void
test_scalar_int_ops (machine_mode mode)
{
  /* Two distinct pseudo registers and a small constant operand.  */
  rtx op0 = make_test_reg (mode);
  rtx op1 = make_test_reg (mode);
  rtx six = GEN_INT (6);

  rtx neg_op0 = simplify_gen_unary (NEG, mode, op0, mode);
  rtx not_op0 = simplify_gen_unary (NOT, mode, op0, mode);
  rtx bswap_op0 = simplify_gen_unary (BSWAP, mode, op0, mode);

  rtx and_op0_op1 = simplify_gen_binary (AND, mode, op0, op1);
  rtx ior_op0_op1 = simplify_gen_binary (IOR, mode, op0, op1);
  rtx xor_op0_op1 = simplify_gen_binary (XOR, mode, op0, op1);

  rtx and_op0_6 = simplify_gen_binary (AND, mode, op0, six);
  rtx and_op1_6 = simplify_gen_binary (AND, mode, op1, six);

  /* Test some binary identities: identity elements for +, -, *, /,
     AND, IOR, XOR and zero shift/rotate counts must fold to op0.  */
  ASSERT_RTX_EQ (op0, simplify_gen_binary (PLUS, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (PLUS, mode, const0_rtx, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (MINUS, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (MULT, mode, op0, const1_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (MULT, mode, const1_rtx, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (DIV, mode, op0, const1_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (AND, mode, op0, constm1_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (AND, mode, constm1_rtx, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (IOR, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (IOR, mode, const0_rtx, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (XOR, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (XOR, mode, const0_rtx, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (ASHIFT, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (ROTATE, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (ASHIFTRT, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (LSHIFTRT, mode, op0, const0_rtx));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (ROTATERT, mode, op0, const0_rtx));

  /* Test some self-inverse operations: applying them twice must
     recover the original register.  */
  ASSERT_RTX_EQ (op0, simplify_gen_unary (NEG, mode, neg_op0, mode));
  ASSERT_RTX_EQ (op0, simplify_gen_unary (NOT, mode, not_op0, mode));
  ASSERT_RTX_EQ (op0, simplify_gen_unary (BSWAP, mode, bswap_op0, mode));

  /* Test some reflexive operations: OP (x, x) folds to x.  */
  ASSERT_RTX_EQ (op0, simplify_gen_binary (AND, mode, op0, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (IOR, mode, op0, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (SMIN, mode, op0, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (SMAX, mode, op0, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (UMIN, mode, op0, op0));
  ASSERT_RTX_EQ (op0, simplify_gen_binary (UMAX, mode, op0, op0));

  /* x - x and x ^ x fold to zero.  */
  ASSERT_RTX_EQ (const0_rtx, simplify_gen_binary (MINUS, mode, op0, op0));
  ASSERT_RTX_EQ (const0_rtx, simplify_gen_binary (XOR, mode, op0, op0));

  /* Test simplify_distributive_operation: AND distributes over
     XOR/IOR/AND, so both spellings must simplify to the same rtx.  */
  ASSERT_RTX_EQ (simplify_gen_binary (AND, mode, xor_op0_op1, six),
                 simplify_gen_binary (XOR, mode, and_op0_6, and_op1_6));
  ASSERT_RTX_EQ (simplify_gen_binary (AND, mode, ior_op0_op1, six),
                 simplify_gen_binary (IOR, mode, and_op0_6, and_op1_6));
  ASSERT_RTX_EQ (simplify_gen_binary (AND, mode, and_op0_op1, six),
                 simplify_gen_binary (AND, mode, and_op0_6, and_op1_6));

  /* Test useless extensions are eliminated (same-mode truncate/extend
     and a same-mode lowpart subreg are all no-ops).  */
  ASSERT_RTX_EQ (op0, simplify_gen_unary (TRUNCATE, mode, op0, mode));
  ASSERT_RTX_EQ (op0, simplify_gen_unary (ZERO_EXTEND, mode, op0, mode));
  ASSERT_RTX_EQ (op0, simplify_gen_unary (SIGN_EXTEND, mode, op0, mode));
  ASSERT_RTX_EQ (op0, lowpart_subreg (mode, op0, mode));
}
    8927              : 
/* Verify some simplifications of integer extension/truncation.
   Machine mode BMODE is guaranteed to be wider than SMODE.  */

static void
test_scalar_int_ext_ops (machine_mode bmode, machine_mode smode)
{
  rtx sreg = make_test_reg (smode);

  /* Check truncation of extension: truncating a zero extension, a sign
     extension, or a paradoxical lowpart subreg of SREG back to SMODE
     should give SREG itself.  */
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     simplify_gen_unary (ZERO_EXTEND, bmode,
                                                         sreg, smode),
                                     bmode),
                 sreg);
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     simplify_gen_unary (SIGN_EXTEND, bmode,
                                                         sreg, smode),
                                     bmode),
                 sreg);
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     lowpart_subreg (bmode, sreg, smode),
                                     bmode),
                 sreg);

  /* Test extensions, followed by logic ops, followed by truncations.
     SMASK covers exactly the SMODE bits of a BMODE value; INV_SMASK
     covers only the bits added by the extension.  */
  rtx bsubreg = lowpart_subreg (bmode, sreg, smode);
  rtx smask = gen_int_mode (GET_MODE_MASK (smode), bmode);
  rtx inv_smask = gen_int_mode (~GET_MODE_MASK (smode), bmode);
  /* AND with SMASK preserves the SMODE bits, so the lowpart is SREG.  */
  ASSERT_RTX_EQ (lowpart_subreg (smode,
                                 simplify_gen_binary (AND, bmode,
                                                      bsubreg, smask),
                                 bmode),
                 sreg);
  /* AND with INV_SMASK clears every SMODE bit, giving zero.  */
  ASSERT_RTX_EQ (lowpart_subreg (smode,
                                 simplify_gen_binary (AND, bmode,
                                                      bsubreg, inv_smask),
                                 bmode),
                 const0_rtx);
  /* IOR with SMASK sets every SMODE bit, giving all-ones.  */
  ASSERT_RTX_EQ (lowpart_subreg (smode,
                                 simplify_gen_binary (IOR, bmode,
                                                      bsubreg, smask),
                                 bmode),
                 constm1_rtx);
  /* IOR with INV_SMASK leaves the SMODE bits untouched.  */
  ASSERT_RTX_EQ (lowpart_subreg (smode,
                                 simplify_gen_binary (IOR, bmode,
                                                      bsubreg, inv_smask),
                                 bmode),
                 sreg);
  /* XOR with SMASK inverts the SMODE bits.  */
  ASSERT_RTX_EQ (lowpart_subreg (smode,
                                 simplify_gen_binary (XOR, bmode,
                                                      bsubreg, smask),
                                 bmode),
                 lowpart_subreg (smode,
                                 gen_rtx_NOT (bmode, bsubreg),
                                 bmode));
  /* XOR with INV_SMASK leaves the SMODE bits unchanged.  */
  ASSERT_RTX_EQ (lowpart_subreg (smode,
                                 simplify_gen_binary (XOR, bmode,
                                                      bsubreg, inv_smask),
                                 bmode),
                 sreg);

  if (known_le (GET_MODE_PRECISION (bmode), BITS_PER_WORD))
    {
      /* Check that a BMODE lowpart of a logic operation on SMODE
         lowparts of BMODE registers simplifies to the same operation
         performed directly on the BMODE registers.  */
      rtx breg1 = make_test_reg (bmode);
      rtx breg2 = make_test_reg (bmode);
      rtx ssubreg1 = lowpart_subreg (smode, breg1, bmode);
      rtx ssubreg2 = lowpart_subreg (smode, breg2, bmode);
      rtx not_1 = simplify_gen_unary (NOT, smode, ssubreg1, smode);
      rtx and_12 = simplify_gen_binary (AND, smode, ssubreg1, ssubreg2);
      rtx ior_12 = simplify_gen_binary (IOR, smode, ssubreg1, ssubreg2);
      rtx xor_12 = simplify_gen_binary (XOR, smode, ssubreg1, ssubreg2);
      rtx and_n12 = simplify_gen_binary (AND, smode, not_1, ssubreg2);
      rtx ior_n12 = simplify_gen_binary (IOR, smode, not_1, ssubreg2);
      rtx xor_12_c = simplify_gen_binary (XOR, smode, xor_12, const1_rtx);
      ASSERT_RTX_EQ (lowpart_subreg (bmode, not_1, smode),
                     gen_rtx_NOT (bmode, breg1));
      ASSERT_RTX_EQ (lowpart_subreg (bmode, and_12, smode),
                     gen_rtx_AND (bmode, breg1, breg2));
      ASSERT_RTX_EQ (lowpart_subreg (bmode, ior_12, smode),
                     gen_rtx_IOR (bmode, breg1, breg2));
      ASSERT_RTX_EQ (lowpart_subreg (bmode, xor_12, smode),
                     gen_rtx_XOR (bmode, breg1, breg2));
      ASSERT_RTX_EQ (lowpart_subreg (bmode, and_n12, smode),
                     gen_rtx_AND (bmode, gen_rtx_NOT (bmode, breg1), breg2));
      ASSERT_RTX_EQ (lowpart_subreg (bmode, ior_n12, smode),
                     gen_rtx_IOR (bmode, gen_rtx_NOT (bmode, breg1), breg2));
      ASSERT_RTX_EQ (lowpart_subreg (bmode, xor_12_c, smode),
                     gen_rtx_XOR (bmode,
                                  gen_rtx_XOR (bmode, breg1, breg2),
                                  const1_rtx));
    }
}
    9020              : 
/* Verify more simplifications of integer extension/truncation.
   BMODE is wider than MMODE which is wider than SMODE.  */

static void
test_scalar_int_ext_ops2 (machine_mode bmode, machine_mode mmode,
                          machine_mode smode)
{
  rtx breg = make_test_reg (bmode);
  rtx mreg = make_test_reg (mmode);
  rtx sreg = make_test_reg (smode);

  /* Check truncate of truncate: two successive truncations collapse
     into a single truncation to the narrowest mode.  */
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     simplify_gen_unary (TRUNCATE, mmode,
                                                         breg, bmode),
                                     mmode),
                 simplify_gen_unary (TRUNCATE, smode, breg, bmode));

  /* Check extension of extension: two like-signed extensions collapse
     into one, and a sign extension of a zero extension collapses into
     a single zero extension (the intermediate zero extension leaves
     the new sign bit clear).  */
  ASSERT_RTX_EQ (simplify_gen_unary (ZERO_EXTEND, bmode,
                                     simplify_gen_unary (ZERO_EXTEND, mmode,
                                                         sreg, smode),
                                     mmode),
                 simplify_gen_unary (ZERO_EXTEND, bmode, sreg, smode));
  ASSERT_RTX_EQ (simplify_gen_unary (SIGN_EXTEND, bmode,
                                     simplify_gen_unary (SIGN_EXTEND, mmode,
                                                         sreg, smode),
                                     mmode),
                 simplify_gen_unary (SIGN_EXTEND, bmode, sreg, smode));
  ASSERT_RTX_EQ (simplify_gen_unary (SIGN_EXTEND, bmode,
                                     simplify_gen_unary (ZERO_EXTEND, mmode,
                                                         sreg, smode),
                                     mmode),
                 simplify_gen_unary (ZERO_EXTEND, bmode, sreg, smode));

  /* Check truncation of extension: truncating an extension (or a
     paradoxical lowpart subreg) of MREG to a mode narrower than MMODE
     is just a truncation of MREG itself.  */
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     simplify_gen_unary (ZERO_EXTEND, bmode,
                                                         mreg, mmode),
                                     bmode),
                 simplify_gen_unary (TRUNCATE, smode, mreg, mmode));
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     simplify_gen_unary (SIGN_EXTEND, bmode,
                                                         mreg, mmode),
                                     bmode),
                 simplify_gen_unary (TRUNCATE, smode, mreg, mmode));
  ASSERT_RTX_EQ (simplify_gen_unary (TRUNCATE, smode,
                                     lowpart_subreg (bmode, mreg, mmode),
                                     bmode),
                 simplify_gen_unary (TRUNCATE, smode, mreg, mmode));
}
    9072              : 
/* Test comparisons of comparisons, with the inner comparisons being
   between values of mode MODE2 and producing results of mode MODE1,
   and with the outer comparisons producing results of mode MODE0.  */

static void
test_comparisons (machine_mode mode0, machine_mode mode1, machine_mode mode2)
{
  rtx reg0 = make_test_reg (mode2);
  rtx reg1 = make_test_reg (mode2);

  /* From index 2 onwards the codes come in signed/unsigned pairs, so
     an odd index >= 2 is always the unsigned form of the ordering at
     the preceding even index.  */
  static const rtx_code codes[] = {
    EQ, NE, LT, LTU, LE, LEU, GE, GEU, GT, GTU
  };
  constexpr auto num_codes = ARRAY_SIZE (codes);
  rtx cmps[num_codes];
  /* Sample values for REG0 and REG1 used to cross-check the symbolic
     simplification against constant folding.  */
  rtx vals[] = { constm1_rtx, const0_rtx, const1_rtx };

  /* CMPS[I] is (codes[I] REG0 REG1) in MODE1.  */
  for (unsigned int i = 0; i < num_codes; ++i)
    cmps[i] = gen_rtx_fmt_ee (codes[i], mode1, reg0, reg1);

  for (auto code : codes)
    for (unsigned int i0 = 0; i0 < num_codes; ++i0)
      for (unsigned int i1 = 0; i1 < num_codes; ++i1)
        {
          rtx cmp_res = simplify_relational_operation (code, mode0, mode1,
                                                       cmps[i0], cmps[i1]);
          /* When both inner comparisons are orderings (index >= 2) of
             different signedness (the indices differ in parity), no
             simplification is expected.  */
          if (i0 >= 2 && i1 >= 2 && (i0 ^ i1) & 1)
            ASSERT_TRUE (cmp_res == NULL_RTX);
          else
            {
              /* Otherwise the result must fold either to a constant or
                 to a single MODE0 comparison of the two registers.  */
              ASSERT_TRUE (cmp_res != NULL_RTX
                           && (CONSTANT_P (cmp_res)
                               || (COMPARISON_P (cmp_res)
                                   && GET_MODE (cmp_res) == mode0
                                   && REG_P (XEXP (cmp_res, 0))
                                   && REG_P (XEXP (cmp_res, 1)))));
              /* Verify the simplified form agrees with full constant
                 folding for every pair of register values in VALS.  */
              for (rtx reg0_val : vals)
                for (rtx reg1_val : vals)
                  {
                    rtx val0 = simplify_const_relational_operation
                      (codes[i0], mode1, reg0_val, reg1_val);
                    rtx val1 = simplify_const_relational_operation
                      (codes[i1], mode1, reg0_val, reg1_val);
                    rtx val = simplify_const_relational_operation
                      (code, mode0, val0, val1);
                    rtx folded = cmp_res;
                    if (COMPARISON_P (cmp_res))
                      folded = simplify_const_relational_operation
                        (GET_CODE (cmp_res), mode0,
                         XEXP (cmp_res, 0) == reg0 ? reg0_val : reg1_val,
                         XEXP (cmp_res, 1) == reg0 ? reg0_val : reg1_val);
                    ASSERT_RTX_EQ (val, folded);
                  }
            }
        }
}
    9129              : 
    9130              : 
    9131              : /* Verify some simplifications involving scalar expressions.  */
    9132              : 
    9133              : static void
    9134            4 : test_scalar_ops ()
    9135              : {
    9136          500 :   for (unsigned int i = 0; i < NUM_MACHINE_MODES; ++i)
    9137              :     {
    9138          496 :       machine_mode mode = (machine_mode) i;
    9139          496 :       if (SCALAR_INT_MODE_P (mode) && mode != BImode)
    9140           40 :         test_scalar_int_ops (mode);
    9141              :     }
    9142              : 
    9143            4 :   test_scalar_int_ext_ops (HImode, QImode);
    9144            4 :   test_scalar_int_ext_ops (SImode, QImode);
    9145            4 :   test_scalar_int_ext_ops (SImode, HImode);
    9146            4 :   test_scalar_int_ext_ops (DImode, QImode);
    9147            4 :   test_scalar_int_ext_ops (DImode, HImode);
    9148            4 :   test_scalar_int_ext_ops (DImode, SImode);
    9149              : 
    9150            4 :   test_scalar_int_ext_ops2 (SImode, HImode, QImode);
    9151            4 :   test_scalar_int_ext_ops2 (DImode, HImode, QImode);
    9152            4 :   test_scalar_int_ext_ops2 (DImode, SImode, QImode);
    9153            4 :   test_scalar_int_ext_ops2 (DImode, SImode, HImode);
    9154              : 
    9155            4 :   test_comparisons (QImode, HImode, SImode);
    9156            4 : }
    9157              : 
/* Test vector simplifications involving VEC_DUPLICATE in which the
   operands and result have vector mode MODE.  SCALAR_REG is a pseudo
   register that holds one element of MODE.  */

static void
test_vector_ops_duplicate (machine_mode mode, rtx scalar_reg)
{
  scalar_mode inner_mode = GET_MODE_INNER (mode);
  rtx duplicate = gen_rtx_VEC_DUPLICATE (mode, scalar_reg);
  poly_uint64 nunits = GET_MODE_NUNITS (mode);
  if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT)
    {
      /* Test some simple unary cases with VEC_DUPLICATE arguments:
         a unary op of a duplicate of the inverse op folds back to
         the plain duplicate.  */
      rtx not_scalar_reg = gen_rtx_NOT (inner_mode, scalar_reg);
      rtx duplicate_not = gen_rtx_VEC_DUPLICATE (mode, not_scalar_reg);
      ASSERT_RTX_EQ (duplicate,
                     simplify_unary_operation (NOT, mode,
                                               duplicate_not, mode));

      rtx neg_scalar_reg = gen_rtx_NEG (inner_mode, scalar_reg);
      rtx duplicate_neg = gen_rtx_VEC_DUPLICATE (mode, neg_scalar_reg);
      ASSERT_RTX_EQ (duplicate,
                     simplify_unary_operation (NEG, mode,
                                               duplicate_neg, mode));

      /* Test some simple binary cases with VEC_DUPLICATE arguments:
         adding/subtracting zero is a no-op, X - X is zero.  */
      ASSERT_RTX_EQ (duplicate,
                     simplify_binary_operation (PLUS, mode, duplicate,
                                                CONST0_RTX (mode)));

      ASSERT_RTX_EQ (duplicate,
                     simplify_binary_operation (MINUS, mode, duplicate,
                                                CONST0_RTX (mode)));

      ASSERT_RTX_PTR_EQ (CONST0_RTX (mode),
                         simplify_binary_operation (MINUS, mode, duplicate,
                                                    duplicate));
    }

  /* Test a scalar VEC_SELECT of a VEC_DUPLICATE: element 0 of the
     duplicate is the scalar itself.  */
  rtx zero_par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (1, const0_rtx));
  ASSERT_RTX_PTR_EQ (scalar_reg,
                     simplify_binary_operation (VEC_SELECT, inner_mode,
                                                duplicate, zero_par));

  unsigned HOST_WIDE_INT const_nunits;
  if (nunits.is_constant (&const_nunits))
    {
      /* And again with the final element.  */
      rtx last_index = gen_int_mode (const_nunits - 1, word_mode);
      rtx last_par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (1, last_index));
      ASSERT_RTX_PTR_EQ (scalar_reg,
                         simplify_binary_operation (VEC_SELECT, inner_mode,
                                                    duplicate, last_par));

      /* Test a scalar subreg of a VEC_MERGE of a VEC_DUPLICATE.  */
      /* Skip this test for vectors of booleans, because offset is in bytes,
         while vec_merge indices are in elements (usually bits).  */
      if (GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL)
        {
          rtx vector_reg = make_test_reg (mode);
          for (unsigned HOST_WIDE_INT i = 0; i < const_nunits; i++)
            {
              /* Element indices beyond the HOST_WIDE_INT width cannot
                 be represented in the merge mask; stop there.  */
              if (i >= HOST_BITS_PER_WIDE_INT)
                break;
              /* Bit I of the mask selects the duplicate for element I;
                 the extra low bits (I + 1) just vary the mask between
                 iterations.  */
              rtx mask = GEN_INT ((HOST_WIDE_INT_1U << i) | (i + 1));
              rtx vm = gen_rtx_VEC_MERGE (mode, duplicate, vector_reg, mask);
              poly_uint64 offset = i * GET_MODE_SIZE (inner_mode);

              ASSERT_RTX_EQ (scalar_reg,
                             simplify_gen_subreg (inner_mode, vm,
                                                  mode, offset));
            }
        }
    }

  /* Test a scalar subreg of a VEC_DUPLICATE.  */
  poly_uint64 offset = subreg_lowpart_offset (inner_mode, mode);
  ASSERT_RTX_EQ (scalar_reg,
                 simplify_gen_subreg (inner_mode, duplicate,
                                      mode, offset));

  /* The remaining tests need a two-element vector mode with the same
     element type as MODE, distinct from MODE itself.  */
  machine_mode narrower_mode;
  if (maybe_ne (nunits, 2U)
      && multiple_p (nunits, 2)
      && mode_for_vector (inner_mode, 2).exists (&narrower_mode)
      && VECTOR_MODE_P (narrower_mode))
    {
      /* Test VEC_DUPLICATE of a vector: duplicating the constant
         { 0, 1 } should give the stepped constant built directly
         in MODE with the same 2-element pattern.  */
      rtx_vector_builder nbuilder (narrower_mode, 2, 1);
      nbuilder.quick_push (const0_rtx);
      nbuilder.quick_push (const1_rtx);
      rtx_vector_builder builder (mode, 2, 1);
      builder.quick_push (const0_rtx);
      builder.quick_push (const1_rtx);
      ASSERT_RTX_EQ (builder.build (),
                     simplify_unary_operation (VEC_DUPLICATE, mode,
                                               nbuilder.build (),
                                               narrower_mode));

      /* Test VEC_SELECT of a vector: any 2-element selection from a
         duplicate is a narrower duplicate of the same scalar.  */
      rtx vec_par
        = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, const1_rtx, const0_rtx));
      rtx narrower_duplicate
        = gen_rtx_VEC_DUPLICATE (narrower_mode, scalar_reg);
      ASSERT_RTX_EQ (narrower_duplicate,
                     simplify_binary_operation (VEC_SELECT, narrower_mode,
                                                duplicate, vec_par));

      /* Test a vector subreg of a VEC_DUPLICATE.  */
      poly_uint64 offset = subreg_lowpart_offset (narrower_mode, mode);
      ASSERT_RTX_EQ (narrower_duplicate,
                     simplify_gen_subreg (narrower_mode, duplicate,
                                          mode, offset));
    }
}
    9274              : 
/* Test vector simplifications involving VEC_SERIES in which the
   operands and result have vector mode MODE.  SCALAR_REG is a pseudo
   register that holds one element of MODE.

   Naming convention: series_A_B is the series with base A and step B,
   where "r" stands for SCALAR_REG, "nr" for its negation, and numbers
   for the corresponding constants.  */

static void
test_vector_ops_series (machine_mode mode, rtx scalar_reg)
{
  /* Test unary cases with VEC_SERIES arguments.  */
  scalar_mode inner_mode = GET_MODE_INNER (mode);
  rtx duplicate = gen_rtx_VEC_DUPLICATE (mode, scalar_reg);
  rtx neg_scalar_reg = gen_rtx_NEG (inner_mode, scalar_reg);
  rtx series_0_r = gen_rtx_VEC_SERIES (mode, const0_rtx, scalar_reg);
  rtx series_0_nr = gen_rtx_VEC_SERIES (mode, const0_rtx, neg_scalar_reg);
  rtx series_nr_1 = gen_rtx_VEC_SERIES (mode, neg_scalar_reg, const1_rtx);
  rtx series_r_m1 = gen_rtx_VEC_SERIES (mode, scalar_reg, constm1_rtx);
  rtx series_r_r = gen_rtx_VEC_SERIES (mode, scalar_reg, scalar_reg);
  rtx series_nr_nr = gen_rtx_VEC_SERIES (mode, neg_scalar_reg,
                                         neg_scalar_reg);
  /* Negating a series negates both the base and the step.  */
  ASSERT_RTX_EQ (series_0_r,
                 simplify_unary_operation (NEG, mode, series_0_nr, mode));
  ASSERT_RTX_EQ (series_r_m1,
                 simplify_unary_operation (NEG, mode, series_nr_1, mode));
  ASSERT_RTX_EQ (series_r_r,
                 simplify_unary_operation (NEG, mode, series_nr_nr, mode));

  /* Test that a VEC_SERIES with a zero step is simplified away.  */
  ASSERT_RTX_EQ (duplicate,
                 simplify_binary_operation (VEC_SERIES, mode,
                                            scalar_reg, const0_rtx));

  /* Test PLUS and MINUS with VEC_SERIES: adding or subtracting a
     duplicate shifts only the base of the series.  */
  rtx series_0_1 = gen_const_vec_series (mode, const0_rtx, const1_rtx);
  rtx series_0_m1 = gen_const_vec_series (mode, const0_rtx, constm1_rtx);
  rtx series_r_1 = gen_rtx_VEC_SERIES (mode, scalar_reg, const1_rtx);
  ASSERT_RTX_EQ (series_r_r,
                 simplify_binary_operation (PLUS, mode, series_0_r,
                                            duplicate));
  ASSERT_RTX_EQ (series_r_1,
                 simplify_binary_operation (PLUS, mode, duplicate,
                                            series_0_1));
  ASSERT_RTX_EQ (series_r_m1,
                 simplify_binary_operation (PLUS, mode, duplicate,
                                            series_0_m1));
  ASSERT_RTX_EQ (series_0_r,
                 simplify_binary_operation (MINUS, mode, series_r_r,
                                            duplicate));
  ASSERT_RTX_EQ (series_r_m1,
                 simplify_binary_operation (MINUS, mode, duplicate,
                                            series_0_1));
  ASSERT_RTX_EQ (series_r_1,
                 simplify_binary_operation (MINUS, mode, duplicate,
                                            series_0_m1));
  /* A VEC_SERIES of two constants folds to a constant vector.  */
  ASSERT_RTX_EQ (series_0_m1,
                 simplify_binary_operation (VEC_SERIES, mode, const0_rtx,
                                            constm1_rtx));

  /* Test NEG on constant vector series.  */
  ASSERT_RTX_EQ (series_0_m1,
                 simplify_unary_operation (NEG, mode, series_0_1, mode));
  ASSERT_RTX_EQ (series_0_1,
                 simplify_unary_operation (NEG, mode, series_0_m1, mode));

  /* Test PLUS and MINUS on constant vector series: bases and steps
     combine elementwise.  */
  rtx scalar2 = gen_int_mode (2, inner_mode);
  rtx scalar3 = gen_int_mode (3, inner_mode);
  rtx series_1_1 = gen_const_vec_series (mode, const1_rtx, const1_rtx);
  rtx series_0_2 = gen_const_vec_series (mode, const0_rtx, scalar2);
  rtx series_1_3 = gen_const_vec_series (mode, const1_rtx, scalar3);
  ASSERT_RTX_EQ (series_1_1,
                 simplify_binary_operation (PLUS, mode, series_0_1,
                                            CONST1_RTX (mode)));
  ASSERT_RTX_EQ (series_0_m1,
                 simplify_binary_operation (PLUS, mode, CONST0_RTX (mode),
                                            series_0_m1));
  ASSERT_RTX_EQ (series_1_3,
                 simplify_binary_operation (PLUS, mode, series_1_1,
                                            series_0_2));
  ASSERT_RTX_EQ (series_0_1,
                 simplify_binary_operation (MINUS, mode, series_1_1,
                                            CONST1_RTX (mode)));
  ASSERT_RTX_EQ (series_1_1,
                 simplify_binary_operation (MINUS, mode, CONST1_RTX (mode),
                                            series_0_m1));
  ASSERT_RTX_EQ (series_1_1,
                 simplify_binary_operation (MINUS, mode, series_1_3,
                                            series_0_2));

  /* Test MULT between constant vectors: multiplying a series by a
     duplicate scales both base and step.  */
  rtx vec2 = gen_const_vec_duplicate (mode, scalar2);
  rtx vec3 = gen_const_vec_duplicate (mode, scalar3);
  rtx scalar9 = gen_int_mode (9, inner_mode);
  rtx series_3_9 = gen_const_vec_series (mode, scalar3, scalar9);
  ASSERT_RTX_EQ (series_0_2,
                 simplify_binary_operation (MULT, mode, series_0_1, vec2));
  ASSERT_RTX_EQ (series_3_9,
                 simplify_binary_operation (MULT, mode, vec3, series_1_3));
  /* For variable-length vectors the elementwise product of two series
     cannot be enumerated, so no simplification is expected.  */
  if (!GET_MODE_NUNITS (mode).is_constant ())
    ASSERT_FALSE (simplify_binary_operation (MULT, mode, series_0_1,
                                             series_0_1));

  /* Test ASHIFT between constant vectors: shifting a series left by a
     constant duplicate scales base and step.  */
  ASSERT_RTX_EQ (series_0_2,
                 simplify_binary_operation (ASHIFT, mode, series_0_1,
                                            CONST1_RTX (mode)));
  /* Likewise, shifting a duplicate by a variable-length series has no
     closed form, so no simplification is expected.  */
  if (!GET_MODE_NUNITS (mode).is_constant ())
    ASSERT_FALSE (simplify_binary_operation (ASHIFT, mode, CONST1_RTX (mode),
                                             series_0_1));
}
    9383              : 
    9384              : static rtx
    9385         3136 : simplify_merge_mask (rtx x, rtx mask, int op)
    9386              : {
    9387            0 :   return simplify_context ().simplify_merge_mask (x, mask, op);
    9388              : }
    9389              : 
    9390              : /* Verify simplify_merge_mask works correctly.  */
    9391              : 
    9392              : static void
    9393          224 : test_vec_merge (machine_mode mode)
    9394              : {
    9395          224 :   rtx op0 = make_test_reg (mode);
    9396          224 :   rtx op1 = make_test_reg (mode);
    9397          224 :   rtx op2 = make_test_reg (mode);
    9398          224 :   rtx op3 = make_test_reg (mode);
    9399          224 :   rtx op4 = make_test_reg (mode);
    9400          224 :   rtx op5 = make_test_reg (mode);
    9401          224 :   rtx mask1 = make_test_reg (SImode);
    9402          224 :   rtx mask2 = make_test_reg (SImode);
    9403          224 :   rtx vm1 = gen_rtx_VEC_MERGE (mode, op0, op1, mask1);
    9404          224 :   rtx vm2 = gen_rtx_VEC_MERGE (mode, op2, op3, mask1);
    9405          224 :   rtx vm3 = gen_rtx_VEC_MERGE (mode, op4, op5, mask1);
    9406              : 
    9407              :   /* Simple vec_merge.  */
    9408          224 :   ASSERT_EQ (op0, simplify_merge_mask (vm1, mask1, 0));
    9409          224 :   ASSERT_EQ (op1, simplify_merge_mask (vm1, mask1, 1));
    9410          224 :   ASSERT_EQ (NULL_RTX, simplify_merge_mask (vm1, mask2, 0));
    9411          224 :   ASSERT_EQ (NULL_RTX, simplify_merge_mask (vm1, mask2, 1));
    9412              : 
    9413              :   /* Nested vec_merge.
    9414              :      It's tempting to make this simplify right down to opN, but we don't
    9415              :      because all the simplify_* functions assume that the operands have
    9416              :      already been simplified.  */
    9417          224 :   rtx nvm = gen_rtx_VEC_MERGE (mode, vm1, vm2, mask1);
    9418          224 :   ASSERT_EQ (vm1, simplify_merge_mask (nvm, mask1, 0));
    9419          224 :   ASSERT_EQ (vm2, simplify_merge_mask (nvm, mask1, 1));
    9420              : 
    9421              :   /* Intermediate unary op. */
    9422          224 :   rtx unop = gen_rtx_NOT (mode, vm1);
    9423          224 :   ASSERT_RTX_EQ (gen_rtx_NOT (mode, op0),
    9424              :                  simplify_merge_mask (unop, mask1, 0));
    9425          224 :   ASSERT_RTX_EQ (gen_rtx_NOT (mode, op1),
    9426              :                  simplify_merge_mask (unop, mask1, 1));
    9427              : 
    9428              :   /* Intermediate binary op. */
    9429          224 :   rtx binop = gen_rtx_PLUS (mode, vm1, vm2);
    9430          224 :   ASSERT_RTX_EQ (gen_rtx_PLUS (mode, op0, op2),
    9431              :                  simplify_merge_mask (binop, mask1, 0));
    9432          224 :   ASSERT_RTX_EQ (gen_rtx_PLUS (mode, op1, op3),
    9433              :                  simplify_merge_mask (binop, mask1, 1));
    9434              : 
    9435              :   /* Intermediate ternary op. */
    9436          224 :   rtx tenop = gen_rtx_FMA (mode, vm1, vm2, vm3);
    9437          224 :   ASSERT_RTX_EQ (gen_rtx_FMA (mode, op0, op2, op4),
    9438              :                  simplify_merge_mask (tenop, mask1, 0));
    9439          224 :   ASSERT_RTX_EQ (gen_rtx_FMA (mode, op1, op3, op5),
    9440              :                  simplify_merge_mask (tenop, mask1, 1));
    9441              : 
    9442              :   /* Side effects.  */
    9443          224 :   rtx badop0 = gen_rtx_PRE_INC (mode, op0);
    9444          224 :   rtx badvm = gen_rtx_VEC_MERGE (mode, badop0, op1, mask1);
    9445          224 :   ASSERT_EQ (badop0, simplify_merge_mask (badvm, mask1, 0));
    9446          224 :   ASSERT_EQ (NULL_RTX, simplify_merge_mask (badvm, mask1, 1));
    9447              : 
    9448              :   /* Called indirectly.  */
    9449          224 :   ASSERT_RTX_EQ (gen_rtx_VEC_MERGE (mode, op0, op3, mask1),
    9450              :                  simplify_rtx (nvm));
    9451          224 : }
    9452              : 
    9453              : /* Test that vector rotate formation works at RTL level.  Try various
    9454              :    combinations of (REG << C) [|,^,+] (REG >> (<bitwidth> - C)).  */
    9455              : 
    9456              : static void
    9457           92 : test_vector_rotate (rtx reg)
    9458              : {
    9459           92 :   machine_mode mode = GET_MODE (reg);
    9460           92 :   unsigned bitwidth = GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT;
    9461           92 :   rtx plus_rtx = gen_rtx_PLUS (mode, reg, reg);
    9462           92 :   rtx lshftrt_amnt = GEN_INT (bitwidth - 1);
    9463           92 :   lshftrt_amnt = gen_const_vec_duplicate (mode, lshftrt_amnt);
    9464           92 :   rtx lshiftrt_rtx = gen_rtx_LSHIFTRT (mode, reg, lshftrt_amnt);
    9465           92 :   rtx rotate_rtx = gen_rtx_ROTATE (mode, reg, CONST1_RTX (mode));
    9466              :   /* Test explicitly the case where ASHIFT (x, 1) is a PLUS (x, x).  */
    9467           92 :   ASSERT_RTX_EQ (rotate_rtx,
    9468              :              simplify_rtx (gen_rtx_IOR (mode, plus_rtx, lshiftrt_rtx)));
    9469           92 :   ASSERT_RTX_EQ (rotate_rtx,
    9470              :              simplify_rtx (gen_rtx_XOR (mode, plus_rtx, lshiftrt_rtx)));
    9471           92 :   ASSERT_RTX_EQ (rotate_rtx,
    9472              :              simplify_rtx (gen_rtx_PLUS (mode, plus_rtx, lshiftrt_rtx)));
    9473              : 
    9474              :   /* Don't go through every possible rotate amount to save execution time.
    9475              :      Multiple of BITS_PER_UNIT amounts could conceivably be simplified to
    9476              :      other bswap operations sometimes. Go through just the odd amounts.  */
    9477         1380 :   for (unsigned i = 3; i < bitwidth - 2; i += 2)
    9478              :     {
    9479         1288 :       rtx rot_amnt = gen_const_vec_duplicate (mode, GEN_INT (i));
    9480         1288 :       rtx ashift_rtx = gen_rtx_ASHIFT (mode, reg, rot_amnt);
    9481         1288 :       lshftrt_amnt = gen_const_vec_duplicate (mode, GEN_INT (bitwidth - i));
    9482         1288 :       lshiftrt_rtx = gen_rtx_LSHIFTRT (mode, reg, lshftrt_amnt);
    9483         1288 :       rotate_rtx = gen_rtx_ROTATE (mode, reg, rot_amnt);
    9484         1288 :       ASSERT_RTX_EQ (rotate_rtx,
    9485              :                  simplify_rtx (gen_rtx_IOR (mode, ashift_rtx, lshiftrt_rtx)));
    9486         1288 :       ASSERT_RTX_EQ (rotate_rtx,
    9487              :                  simplify_rtx (gen_rtx_XOR (mode, ashift_rtx, lshiftrt_rtx)));
    9488         1288 :       ASSERT_RTX_EQ (rotate_rtx,
    9489              :                  simplify_rtx (gen_rtx_PLUS (mode, ashift_rtx, lshiftrt_rtx)));
    9490              :     }
    9491           92 : }
    9492              : 
    9493              : /* Test subregs of integer vector constant X, trying elements in
    9494              :    the range [ELT_BIAS, ELT_BIAS + constant_lower_bound (NELTS)),
    9495              :    where NELTS is the number of elements in X.  Subregs involving
    9496              :    elements [ELT_BIAS, ELT_BIAS + FIRST_VALID) are expected to fail.  */
    9497              : 
static void
test_vector_subregs_modes (rtx x, poly_uint64 elt_bias = 0,
                           unsigned int first_valid = 0)
{
  machine_mode inner_mode = GET_MODE (x);
  scalar_mode int_mode = GET_MODE_INNER (inner_mode);

  /* Pair X's mode with every vector mode known to the target.  */
  for (unsigned int modei = 0; modei < NUM_MACHINE_MODES; ++modei)
    {
      machine_mode outer_mode = (machine_mode) modei;
      if (!VECTOR_MODE_P (outer_mode))
        continue;

      unsigned int outer_nunits;
      if (GET_MODE_INNER (outer_mode) == int_mode
          && GET_MODE_NUNITS (outer_mode).is_constant (&outer_nunits)
          && multiple_p (GET_MODE_NUNITS (inner_mode), outer_nunits))
        {
          /* Test subregs in which the outer mode is a smaller,
             constant-sized vector of the same element type.  */
          unsigned int limit
            = constant_lower_bound (GET_MODE_NUNITS (inner_mode));
          for (unsigned int elt = 0; elt < limit; elt += outer_nunits)
            {
              /* Subregs starting before FIRST_VALID are expected to
                 fail, so EXPECTED stays null for them.  */
              rtx expected = NULL_RTX;
              if (elt >= first_valid)
                {
                  /* Build the slice of X that the subreg should
                     extract.  */
                  rtx_vector_builder builder (outer_mode, outer_nunits, 1);
                  for (unsigned int i = 0; i < outer_nunits; ++i)
                    builder.quick_push (CONST_VECTOR_ELT (x, elt + i));
                  expected = builder.build ();
                }
              poly_uint64 byte = (elt_bias + elt) * GET_MODE_SIZE (int_mode);
              ASSERT_RTX_EQ (expected,
                             simplify_subreg (outer_mode, x,
                                              inner_mode, byte));
            }
        }
      /* Same-size "bitcast" subregs, restricted to combinations whose
         representation should survive a round trip: no element bias,
         no boolean vectors with more than one bit per element, no
         float formats with unused bits, and no stepped encodings of
         variable-length vectors.  */
      else if (known_eq (GET_MODE_SIZE (outer_mode),
                         GET_MODE_SIZE (inner_mode))
               && known_eq (elt_bias, 0U)
               && (GET_MODE_CLASS (outer_mode) != MODE_VECTOR_BOOL
                   || known_eq (GET_MODE_BITSIZE (outer_mode),
                                GET_MODE_NUNITS (outer_mode)))
               && (!FLOAT_MODE_P (outer_mode)
                   || (FLOAT_MODE_FORMAT (outer_mode)->ieee_bits
                       == GET_MODE_UNIT_PRECISION (outer_mode)))
               && (GET_MODE_SIZE (inner_mode).is_constant ()
                   || !CONST_VECTOR_STEPPED_P (x)))
        {
          /* Try converting to OUTER_MODE and back.  */
          rtx outer_x = simplify_subreg (outer_mode, x, inner_mode, 0);
          ASSERT_TRUE (outer_x != NULL_RTX);
          ASSERT_RTX_EQ (x, simplify_subreg (inner_mode, outer_x,
                                             outer_mode, 0));
        }
    }

  if (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
    {
      /* Test each byte in the element range.  */
      unsigned int limit
        = constant_lower_bound (GET_MODE_SIZE (inner_mode));
      for (unsigned int i = 0; i < limit; ++i)
        {
          unsigned int elt = i / GET_MODE_SIZE (int_mode);
          rtx expected = NULL_RTX;
          if (elt >= first_valid)
            {
              /* Work out which byte of the element byte I refers to,
                 accounting for byte endianness, then shift that byte
                 down to the low bits.  */
              unsigned int byte_shift = i % GET_MODE_SIZE (int_mode);
              if (BYTES_BIG_ENDIAN)
                byte_shift = GET_MODE_SIZE (int_mode) - byte_shift - 1;
              rtx_mode_t vec_elt (CONST_VECTOR_ELT (x, elt), int_mode);
              wide_int shifted_elt
                = wi::lrshift (vec_elt, byte_shift * BITS_PER_UNIT);
              expected = immed_wide_int_const (shifted_elt, QImode);
            }
          poly_uint64 byte = elt_bias * GET_MODE_SIZE (int_mode) + i;
          ASSERT_RTX_EQ (expected,
                         simplify_subreg (QImode, x, inner_mode, byte));
        }
    }
}
    9581              : 
    9582              : /* Test constant subregs of integer vector mode INNER_MODE, using 1
    9583              :    element per pattern.  */
    9584              : 
    9585              : static void
    9586           92 : test_vector_subregs_repeating (machine_mode inner_mode)
    9587              : {
    9588          184 :   poly_uint64 nunits = GET_MODE_NUNITS (inner_mode);
    9589           92 :   unsigned int min_nunits = constant_lower_bound (nunits);
    9590           92 :   scalar_mode int_mode = GET_MODE_INNER (inner_mode);
    9591           92 :   unsigned int count = gcd (min_nunits, 8);
    9592              : 
    9593           92 :   rtx_vector_builder builder (inner_mode, count, 1);
    9594          684 :   for (unsigned int i = 0; i < count; ++i)
    9595          592 :     builder.quick_push (gen_int_mode (8 - i, int_mode));
    9596           92 :   rtx x = builder.build ();
    9597              : 
    9598           92 :   test_vector_subregs_modes (x);
    9599           92 :   if (!nunits.is_constant ())
    9600              :     test_vector_subregs_modes (x, nunits - min_nunits);
    9601           92 : }
    9602              : 
    9603              : /* Test constant subregs of integer vector mode INNER_MODE, using 2
    9604              :    elements per pattern.  */
    9605              : 
    9606              : static void
    9607           92 : test_vector_subregs_fore_back (machine_mode inner_mode)
    9608              : {
    9609          184 :   poly_uint64 nunits = GET_MODE_NUNITS (inner_mode);
    9610           92 :   unsigned int min_nunits = constant_lower_bound (nunits);
    9611           92 :   scalar_mode int_mode = GET_MODE_INNER (inner_mode);
    9612           92 :   unsigned int count = gcd (min_nunits, 4);
    9613              : 
    9614           92 :   rtx_vector_builder builder (inner_mode, count, 2);
    9615          444 :   for (unsigned int i = 0; i < count; ++i)
    9616          352 :     builder.quick_push (gen_int_mode (i, int_mode));
    9617          444 :   for (unsigned int i = 0; i < count; ++i)
    9618          352 :     builder.quick_push (gen_int_mode (-1 - (int) i, int_mode));
    9619           92 :   rtx x = builder.build ();
    9620              : 
    9621           92 :   test_vector_subregs_modes (x);
    9622           92 :   if (!nunits.is_constant ())
    9623              :     test_vector_subregs_modes (x, nunits - min_nunits, count);
    9624           92 : }
    9625              : 
    9626              : /* Test constant subregs of integer vector mode INNER_MODE, using 3
    9627              :    elements per pattern.  */
    9628              : 
    9629              : static void
    9630           92 : test_vector_subregs_stepped (machine_mode inner_mode)
    9631              : {
    9632              :   /* Build { 0, 1, 2, 3, ... }.  */
    9633           92 :   scalar_mode int_mode = GET_MODE_INNER (inner_mode);
    9634           92 :   rtx_vector_builder builder (inner_mode, 1, 3);
    9635          368 :   for (unsigned int i = 0; i < 3; ++i)
    9636          276 :     builder.quick_push (gen_int_mode (i, int_mode));
    9637           92 :   rtx x = builder.build ();
    9638              : 
    9639           92 :   test_vector_subregs_modes (x);
    9640           92 : }
    9641              : 
    9642              : /* Test constant subregs of integer vector mode INNER_MODE.  */
    9643              : 
static void
test_vector_subregs (machine_mode inner_mode)
{
  /* Cover the three CONST_VECTOR encodings exercised by the helpers:
     one, two and three elements per pattern respectively.  */
  test_vector_subregs_repeating (inner_mode);
  test_vector_subregs_fore_back (inner_mode);
  test_vector_subregs_stepped (inner_mode);
}
    9651              : 
    9652              : /* Verify some simplifications involving vectors.  */
    9653              : 
    9654              : static void
    9655            4 : test_vector_ops ()
    9656              : {
    9657          500 :   for (unsigned int i = 0; i < NUM_MACHINE_MODES; ++i)
    9658              :     {
    9659          496 :       machine_mode mode = (machine_mode) i;
    9660          496 :       if (VECTOR_MODE_P (mode))
    9661              :         {
    9662          448 :           rtx scalar_reg = make_test_reg (GET_MODE_INNER (mode));
    9663          224 :           test_vector_ops_duplicate (mode, scalar_reg);
    9664          224 :           rtx vector_reg = make_test_reg (mode);
    9665          224 :           if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
    9666          348 :               && maybe_gt (GET_MODE_NUNITS (mode), 2))
    9667              :             {
    9668           92 :               test_vector_ops_series (mode, scalar_reg);
    9669           92 :               test_vector_subregs (mode);
    9670           92 :               test_vector_rotate (vector_reg);
    9671              :             }
    9672          224 :           test_vec_merge (mode);
    9673              :         }
    9674              :     }
    9675            4 : }
    9676              : 
/* Dispatcher for the CONST_POLY_INT tests below, parameterized by the
   number of poly_int coefficients N.  */
template<unsigned int N>
struct simplify_const_poly_int_tests
{
  static void run ();
};

/* Specialization for targets with a single poly_int coefficient:
   there is nothing to run in that case.  */
template<>
struct simplify_const_poly_int_tests<1>
{
  static void run () {}
};
    9688              : 
    9689              : /* Test various CONST_POLY_INT properties.  */
    9690              : 
template<unsigned int N>
void
simplify_const_poly_int_tests<N>::run ()
{
  /* poly_int64 (A, B) represents A + B * X for a runtime indeterminate
     X; gen_int_mode wraps each coefficient to the given mode.  */
  using poly_int64 = poly_int<N, HOST_WIDE_INT>;
  rtx x1 = gen_int_mode (poly_int64 (1, 1), QImode);
  rtx x2 = gen_int_mode (poly_int64 (-80, 127), QImode);
  /* x3 == x1 + x2: the coefficient sum 1 + 127 == 128 wraps to -128
     in QImode.  */
  rtx x3 = gen_int_mode (poly_int64 (-79, -128), QImode);
  rtx x4 = gen_int_mode (poly_int64 (5, 4), QImode);
  rtx x5 = gen_int_mode (poly_int64 (30, 24), QImode);   /* x4 * 6.  */
  rtx x6 = gen_int_mode (poly_int64 (20, 16), QImode);   /* x4 << 2.  */
  rtx x7 = gen_int_mode (poly_int64 (7, 4), QImode);     /* x4 | 2.  */
  rtx x8 = gen_int_mode (poly_int64 (30, 24), HImode);   /* x5 in HImode.  */
  rtx x9 = gen_int_mode (poly_int64 (-30, -24), HImode); /* -x8.  */
  rtx x10 = gen_int_mode (poly_int64 (-31, -24), HImode); /* ~x8.  */
  rtx two = GEN_INT (2);
  rtx six = GEN_INT (6);
  poly_uint64 offset = subreg_lowpart_offset (QImode, HImode);

  /* These tests only try limited operation combinations.  Fuller arithmetic
     testing is done directly on poly_ints.  */
  ASSERT_EQ (simplify_unary_operation (NEG, HImode, x8, HImode), x9);
  ASSERT_EQ (simplify_unary_operation (NOT, HImode, x8, HImode), x10);
  ASSERT_EQ (simplify_unary_operation (TRUNCATE, QImode, x8, HImode), x5);
  ASSERT_EQ (simplify_binary_operation (PLUS, QImode, x1, x2), x3);
  ASSERT_EQ (simplify_binary_operation (MINUS, QImode, x3, x1), x2);
  ASSERT_EQ (simplify_binary_operation (MULT, QImode, x4, six), x5);
  ASSERT_EQ (simplify_binary_operation (MULT, QImode, six, x4), x5);
  ASSERT_EQ (simplify_binary_operation (ASHIFT, QImode, x4, two), x6);
  ASSERT_EQ (simplify_binary_operation (IOR, QImode, x4, two), x7);
  /* Subregs between the QImode and HImode views of the same value.  */
  ASSERT_EQ (simplify_subreg (HImode, x5, QImode, 0), x8);
  ASSERT_EQ (simplify_subreg (QImode, x8, HImode, offset), x5);
}
    9724              : 
    9725              : /* Run all of the selftests within this file.  */
    9726              : 
void
simplify_rtx_cc_tests ()
{
  test_scalar_ops ();
  test_vector_ops ();
  /* Instantiated with the target's coefficient count; the <1>
     specialization above is a no-op.  */
  simplify_const_poly_int_tests<NUM_POLY_INT_COEFFS>::run ();
}
    9734              : 
    9735              : } // namespace selftest
    9736              : 
    9737              : #endif /* CHECKING_P */
        

Generated by: LCOV version 2.4-beta

The LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.