LCOV - code coverage report
Current view: top level - gcc - internal-fn.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 71.6 % 2544 1821
Test Date: 2026-04-20 14:57:17 Functions: 59.8 % 174 104
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Internal functions.
       2              :    Copyright (C) 2011-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify it under
       7              : the terms of the GNU General Public License as published by the Free
       8              : Software Foundation; either version 3, or (at your option) any later
       9              : version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12              : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13              : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14              : for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #define INCLUDE_MEMORY
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "target.h"
      26              : #include "rtl.h"
      27              : #include "tree.h"
      28              : #include "gimple.h"
      29              : #include "predict.h"
      30              : #include "stringpool.h"
      31              : #include "tree-vrp.h"
      32              : #include "tree-ssanames.h"
      33              : #include "expmed.h"
      34              : #include "memmodel.h"
      35              : #include "optabs.h"
      36              : #include "emit-rtl.h"
      37              : #include "diagnostic-core.h"
      38              : #include "fold-const.h"
      39              : #include "internal-fn.h"
      40              : #include "stor-layout.h"
      41              : #include "dojump.h"
      42              : #include "expr.h"
      43              : #include "stringpool.h"
      44              : #include "attribs.h"
      45              : #include "asan.h"
      46              : #include "ubsan.h"
      47              : #include "recog.h"
      48              : #include "builtins.h"
      49              : #include "optabs-tree.h"
      50              : #include "gimple-ssa.h"
      51              : #include "tree-phinodes.h"
      52              : #include "ssa-iterators.h"
      53              : #include "explow.h"
      54              : #include "rtl-iter.h"
      55              : #include "gimple-range.h"
      56              : #include "fold-const-call.h"
      57              : #include "tree-ssa-live.h"
      58              : #include "tree-outof-ssa.h"
      59              : #include "gcc-urlifier.h"
      60              : 
      61              : /* For lang_hooks.types.type_for_mode.  */
      62              : #include "langhooks.h"
      63              : 
      64              : /* The names of each internal function, indexed by function number.  */
      65              : const char *const internal_fn_name_array[] = {
      66              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
      67              : #include "internal-fn.def"
      68              :   "<invalid-fn>"
      69              : };
      70              : 
      71              : /* The ECF_* flags of each internal function, indexed by function number.  */
      72              : const int internal_fn_flags_array[] = {
      73              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
      74              : #include "internal-fn.def"
      75              :   0
      76              : };
      77              : 
      78              : /* Return the internal function called NAME, or IFN_LAST if there's
      79              :    no such function.  */
      80              : 
      81              : internal_fn
      82            5 : lookup_internal_fn (const char *name)
      83              : {
      84            5 :   typedef hash_map<nofree_string_hash, internal_fn> name_to_fn_map_type;
      85            5 :   static name_to_fn_map_type *name_to_fn_map;
      86              : 
      87            5 :   if (!name_to_fn_map)
      88              :     {
      89            3 :       name_to_fn_map = new name_to_fn_map_type (IFN_LAST);
      90          882 :       for (unsigned int i = 0; i < IFN_LAST; ++i)
      91         1758 :         name_to_fn_map->put (internal_fn_name (internal_fn (i)),
      92          879 :                              internal_fn (i));
      93              :     }
      94            5 :   internal_fn *entry = name_to_fn_map->get (name);
      95            5 :   return entry ? *entry : IFN_LAST;
      96              : }
      97              : 
      98              : /* Geven an internal_fn IFN that is a widening function, return its
      99              :    corresponding LO and HI internal_fns.  */
     100              : 
     101              : extern void
     102        51949 : lookup_hilo_internal_fn (internal_fn ifn, internal_fn *lo, internal_fn *hi)
     103              : {
     104        51949 :   gcc_assert (widening_fn_p (ifn));
     105              : 
     106        51949 :   switch (ifn)
     107              :     {
     108            0 :     default:
     109            0 :       gcc_unreachable ();
     110              : #define DEF_INTERNAL_FN(NAME, FLAGS, TYPE)
     111              : #define DEF_INTERNAL_WIDENING_OPTAB_FN(NAME, F, S, SO, UO, T)   \
     112              :     case IFN_##NAME:                                            \
     113              :       *lo = internal_fn (IFN_##NAME##_LO);                      \
     114              :       *hi = internal_fn (IFN_##NAME##_HI);                      \
     115              :       break;
     116              : #include "internal-fn.def"
     117              :     }
     118        51949 : }
     119              : 
     120              : /* Given an internal_fn IFN that is a widening function, return its
     121              :    corresponding _EVEN and _ODD internal_fns in *EVEN and *ODD.  */
     122              : 
     123              : extern void
     124        51949 : lookup_evenodd_internal_fn (internal_fn ifn, internal_fn *even,
     125              :                             internal_fn *odd)
     126              : {
     127        51949 :   gcc_assert (widening_fn_p (ifn));
     128              : 
     129        51949 :   switch (ifn)
     130              :     {
     131            0 :     default:
     132            0 :       gcc_unreachable ();
     133              : #define DEF_INTERNAL_FN(NAME, FLAGS, TYPE)
     134              : #define DEF_INTERNAL_WIDENING_OPTAB_FN(NAME, F, S, SO, UO, T)   \
     135              :     case IFN_##NAME:                                            \
     136              :       *even = internal_fn (IFN_##NAME##_EVEN);                  \
     137              :       *odd = internal_fn (IFN_##NAME##_ODD);                    \
     138              :       break;
     139              : #include "internal-fn.def"
     140              :     }
     141        51949 : }
     142              : 
     143              : 
     144              : /* Fnspec of each internal function, indexed by function number.  */
     145              : const_tree internal_fn_fnspec_array[IFN_LAST + 1];
     146              : 
     147              : void
     148       288452 : init_internal_fns ()
     149              : {
     150              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
     151              :   if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
     152              :     build_string ((int) sizeof (FNSPEC) - 1, FNSPEC ? FNSPEC : "");
     153              : #include "internal-fn.def"
     154       288452 :   internal_fn_fnspec_array[IFN_LAST] = 0;
     155       288452 : }
     156              : 
     157              : /* Create static initializers for the information returned by
     158              :    direct_internal_fn.  */
     159              : #define not_direct { -2, -2, false }
     160              : #define mask_load_direct { -1, 2, false }
     161              : #define load_lanes_direct { -1, -1, false }
     162              : #define mask_load_lanes_direct { -1, -1, false }
     163              : #define gather_load_direct { 3, 1, false }
     164              : #define strided_load_direct { -1, -1, false }
     165              : #define len_load_direct { -1, -1, false }
     166              : #define mask_len_load_direct { -1, 4, false }
     167              : #define mask_store_direct { 3, 2, false }
     168              : #define store_lanes_direct { 0, 0, false }
     169              : #define mask_store_lanes_direct { 0, 0, false }
     170              : #define vec_cond_mask_direct { 1, 0, false }
     171              : #define vec_cond_mask_len_direct { 1, 1, false }
     172              : #define vec_cond_direct { 2, 0, false }
     173              : #define scatter_store_direct { 3, 1, false }
     174              : #define strided_store_direct { 1, 1, false }
     175              : #define len_store_direct { 3, 3, false }
     176              : #define mask_len_store_direct { 4, 5, false }
     177              : #define vec_set_direct { 3, 3, false }
     178              : #define vec_extract_direct { 0, -1, false }
     179              : #define unary_direct { 0, 0, true }
     180              : #define unary_convert_direct { -1, 0, true }
     181              : #define binary_direct { 0, 0, true }
     182              : #define ternary_direct { 0, 0, true }
     183              : #define cond_unary_direct { 1, 1, true }
     184              : #define cond_binary_direct { 1, 1, true }
     185              : #define cond_ternary_direct { 1, 1, true }
     186              : #define cond_len_unary_direct { 1, 1, true }
     187              : #define cond_len_binary_direct { 1, 1, true }
     188              : #define cond_len_ternary_direct { 1, 1, true }
     189              : #define while_direct { 0, 2, false }
     190              : #define fold_extract_direct { 2, 2, false }
     191              : #define fold_len_extract_direct { 2, 2, false }
     192              : #define fold_left_direct { 1, 1, false }
     193              : #define mask_fold_left_direct { 1, 1, false }
     194              : #define mask_len_fold_left_direct { 1, 1, false }
     195              : #define check_ptrs_direct { 0, 0, false }
     196              : #define crc_direct { 1, -1, true }
     197              : #define reduc_sbool_direct { 0, 0, true }
     198              : #define select_vl_direct { 2, 0, false }
     199              : 
     200              : const direct_internal_fn_info direct_internal_fn_array[IFN_LAST + 1] = {
     201              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) not_direct,
     202              : #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) TYPE##_direct,
     203              : #define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
     204              :                                      UNSIGNED_OPTAB, TYPE) TYPE##_direct,
     205              : #include "internal-fn.def"
     206              :   not_direct
     207              : };
     208              : 
     209              : /* Like create_output_operand, but for callers that will use
     210              :    assign_call_lhs afterwards.  */
     211              : 
     212              : static void
     213        88818 : create_call_lhs_operand (expand_operand *op, rtx lhs_rtx, machine_mode mode)
     214              : {
     215              :   /* Do not assign directly to a promoted subreg, since there is no
     216              :      guarantee that the instruction will leave the upper bits of the
     217              :      register in the state required by SUBREG_PROMOTED_SIGN.  */
     218        88818 :   rtx dest = lhs_rtx;
     219        88818 :   if (dest && GET_CODE (dest) == SUBREG && SUBREG_PROMOTED_VAR_P (dest))
     220              :     dest = NULL_RTX;
     221        88818 :   create_output_operand (op, dest, mode);
     222        88818 : }
     223              : 
     224              : /* Move the result of an expanded instruction into the lhs of a gimple call.
     225              :    LHS is the lhs of the call, LHS_RTX is its expanded form, and OP is the
     226              :    result of the expanded instruction.  OP should have been set up by
     227              :    create_call_lhs_operand.  */
     228              : 
     229              : static void
     230        88818 : assign_call_lhs (tree lhs, rtx lhs_rtx, expand_operand *op)
     231              : {
     232        88818 :   if (rtx_equal_p (lhs_rtx, op->value))
     233              :     return;
     234              : 
     235              :   /* If the return value has an integral type, convert the instruction
     236              :      result to that type.  This is useful for things that return an
     237              :      int regardless of the size of the input.  If the instruction result
     238              :      is smaller than required, assume that it is signed.
     239              : 
     240              :      If the return value has a nonintegral type, its mode must match
     241              :      the instruction result.  */
     242         3230 :   if (GET_CODE (lhs_rtx) == SUBREG && SUBREG_PROMOTED_VAR_P (lhs_rtx))
     243              :     {
     244              :       /* If this is a scalar in a register that is stored in a wider
     245              :          mode than the declared mode, compute the result into its
     246              :          declared mode and then convert to the wider mode.  */
     247            0 :       gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (lhs)));
     248            0 :       rtx tmp = convert_to_mode (GET_MODE (lhs_rtx), op->value, 0);
     249            0 :       convert_move (SUBREG_REG (lhs_rtx), tmp,
     250            0 :                     SUBREG_PROMOTED_SIGN (lhs_rtx));
     251              :     }
     252         3230 :   else if (GET_MODE (lhs_rtx) == GET_MODE (op->value))
     253           39 :     emit_move_insn (lhs_rtx, op->value);
     254              :   else
     255              :     {
     256         3191 :       gcc_checking_assert (INTEGRAL_TYPE_P (TREE_TYPE (lhs)));
     257         3191 :       convert_move (lhs_rtx, op->value, 0);
     258              :     }
     259              : }
     260              : 
     261              : /* Expand STMT using instruction ICODE.  The instruction has NOUTPUTS
     262              :    output operands and NINPUTS input operands, where NOUTPUTS is either
     263              :    0 or 1.  The output operand (if any) comes first, followed by the
     264              :    NINPUTS input operands.  */
     265              : 
     266              : static void
     267        78568 : expand_fn_using_insn (gcall *stmt, insn_code icode, unsigned int noutputs,
     268              :                       unsigned int ninputs)
     269              : {
     270        78568 :   gcc_assert (icode != CODE_FOR_nothing);
     271              : 
     272        78568 :   expand_operand *ops = XALLOCAVEC (expand_operand, noutputs + ninputs);
     273        78568 :   unsigned int opno = 0;
     274        78568 :   rtx lhs_rtx = NULL_RTX;
     275        78568 :   tree lhs = gimple_call_lhs (stmt);
     276              : 
     277        78568 :   if (noutputs)
     278              :     {
     279        78568 :       gcc_assert (noutputs == 1);
     280        78568 :       if (lhs)
     281        78568 :         lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     282        78568 :       create_call_lhs_operand (&ops[opno], lhs_rtx,
     283        78568 :                                insn_data[icode].operand[opno].mode);
     284        78568 :       opno += 1;
     285              :     }
     286              :   else
     287            0 :     gcc_assert (!lhs);
     288              : 
     289       222535 :   for (unsigned int i = 0; i < ninputs; ++i)
     290              :     {
     291       143967 :       tree rhs = gimple_call_arg (stmt, i);
     292       143967 :       tree rhs_type = TREE_TYPE (rhs);
     293       143967 :       rtx rhs_rtx = expand_normal (rhs);
     294       143967 :       if (INTEGRAL_TYPE_P (rhs_type))
     295         6123 :         create_convert_operand_from (&ops[opno], rhs_rtx,
     296         6123 :                                      TYPE_MODE (rhs_type),
     297         6123 :                                      TYPE_UNSIGNED (rhs_type));
     298       137844 :       else if (TREE_CODE (rhs) == SSA_NAME
     299       112995 :                && SSA_NAME_IS_DEFAULT_DEF (rhs)
     300       142089 :                && VAR_P (SSA_NAME_VAR (rhs)))
     301          249 :         create_undefined_input_operand (&ops[opno], TYPE_MODE (rhs_type));
     302        57589 :       else if (VECTOR_BOOLEAN_TYPE_P (rhs_type)
     303         1531 :                && SCALAR_INT_MODE_P (TYPE_MODE (rhs_type))
     304       139126 :                && maybe_ne (GET_MODE_PRECISION (TYPE_MODE (rhs_type)),
     305         1531 :                             TYPE_VECTOR_SUBPARTS (rhs_type).to_constant ()))
     306              :         {
     307              :           /* Ensure that the vector bitmasks do not have excess bits.  */
     308          432 :           int nunits = TYPE_VECTOR_SUBPARTS (rhs_type).to_constant ();
     309          432 :           rtx tmp = expand_binop (TYPE_MODE (rhs_type), and_optab, rhs_rtx,
     310          432 :                                   GEN_INT ((HOST_WIDE_INT_1U << nunits) - 1),
     311              :                                   NULL_RTX, true, OPTAB_WIDEN);
     312          432 :           create_input_operand (&ops[opno], tmp, TYPE_MODE (rhs_type));
     313              :         }
     314              :       else
     315       137163 :         create_input_operand (&ops[opno], rhs_rtx, TYPE_MODE (rhs_type));
     316       143967 :       opno += 1;
     317              :     }
     318              : 
     319        78568 :   gcc_assert (opno == noutputs + ninputs);
     320        78568 :   expand_insn (icode, opno, ops);
     321        78568 :   if (lhs_rtx)
     322        78568 :     assign_call_lhs (lhs, lhs_rtx, &ops[0]);
     323        78568 : }
     324              : 
     325              : /* ARRAY_TYPE is an array of vector modes.  Return the associated insn
     326              :    for load-lanes-style optab OPTAB, or CODE_FOR_nothing if none.  */
     327              : 
     328              : static enum insn_code
     329            0 : get_multi_vector_move (tree array_type, convert_optab optab)
     330              : {
     331            0 :   machine_mode imode;
     332            0 :   machine_mode vmode;
     333              : 
     334            0 :   gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
     335            0 :   imode = TYPE_MODE (array_type);
     336            0 :   vmode = TYPE_MODE (TREE_TYPE (array_type));
     337              : 
     338            0 :   return convert_optab_handler (optab, imode, vmode);
     339              : }
     340              : 
     341              : /* Add mask, else, and len arguments according to the STMT.  */
     342              : 
     343              : static unsigned int
     344         1479 : add_mask_else_and_len_args (expand_operand *ops, unsigned int opno, gcall *stmt)
     345              : {
     346         1479 :   internal_fn ifn = gimple_call_internal_fn (stmt);
     347         1479 :   int len_index = internal_fn_len_index (ifn);
     348              :   /* BIAS is always consecutive next of LEN.  */
     349         1479 :   int bias_index = len_index + 1;
     350         1479 :   int mask_index = internal_fn_mask_index (ifn);
     351              : 
     352              :   /* The order of arguments is always {mask, else, len, bias}.  */
     353         1479 :   if (mask_index >= 0)
     354              :     {
     355         1479 :       tree mask = gimple_call_arg (stmt, mask_index);
     356         1479 :       rtx mask_rtx = expand_normal (mask);
     357              : 
     358         1479 :       tree mask_type = TREE_TYPE (mask);
     359         1479 :       if (VECTOR_BOOLEAN_TYPE_P (mask_type)
     360         1479 :           && SCALAR_INT_MODE_P (TYPE_MODE (mask_type))
     361         2515 :           && maybe_ne (GET_MODE_PRECISION (TYPE_MODE (mask_type)),
     362         2294 :                        TYPE_VECTOR_SUBPARTS (mask_type).to_constant ()))
     363              :         {
     364              :           /* Ensure that the vector bitmasks do not have excess bits.  */
     365          221 :           int nunits = TYPE_VECTOR_SUBPARTS (mask_type).to_constant ();
     366          221 :           mask_rtx = expand_binop (TYPE_MODE (mask_type), and_optab, mask_rtx,
     367          221 :                                    GEN_INT ((HOST_WIDE_INT_1U << nunits) - 1),
     368              :                                    NULL_RTX, true, OPTAB_WIDEN);
     369              :         }
     370              : 
     371         1479 :       create_input_operand (&ops[opno++], mask_rtx,
     372         1479 :                             TYPE_MODE (TREE_TYPE (mask)));
     373              :     }
     374              : 
     375         1479 :   int els_index = internal_fn_else_index (ifn);
     376         1479 :   if (els_index >= 0)
     377              :     {
     378          721 :       tree els = gimple_call_arg (stmt, els_index);
     379          721 :       tree els_type = TREE_TYPE (els);
     380          721 :       if (TREE_CODE (els) == SSA_NAME
     381            0 :           && SSA_NAME_IS_DEFAULT_DEF (els)
     382          721 :           && VAR_P (SSA_NAME_VAR (els)))
     383            0 :         create_undefined_input_operand (&ops[opno++], TYPE_MODE (els_type));
     384              :       else
     385              :         {
     386          721 :           rtx els_rtx = expand_normal (els);
     387          721 :           create_input_operand (&ops[opno++], els_rtx, TYPE_MODE (els_type));
     388              :         }
     389              :     }
     390         1479 :   if (len_index >= 0)
     391              :     {
     392            0 :       tree len = gimple_call_arg (stmt, len_index);
     393            0 :       rtx len_rtx = expand_normal (len);
     394            0 :       create_convert_operand_from (&ops[opno++], len_rtx,
     395            0 :                                    TYPE_MODE (TREE_TYPE (len)),
     396            0 :                                    TYPE_UNSIGNED (TREE_TYPE (len)));
     397            0 :       tree biast = gimple_call_arg (stmt, bias_index);
     398            0 :       rtx bias = expand_normal (biast);
     399            0 :       create_input_operand (&ops[opno++], bias, QImode);
     400              :     }
     401         1479 :   return opno;
     402              : }
     403              : 
     404              : /* Expand LOAD_LANES call STMT using optab OPTAB.  */
     405              : 
     406              : static void
     407            0 : expand_load_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
     408              : {
     409            0 :   class expand_operand ops[2];
     410            0 :   tree type, lhs, rhs;
     411            0 :   rtx target, mem;
     412              : 
     413            0 :   lhs = gimple_call_lhs (stmt);
     414            0 :   rhs = gimple_call_arg (stmt, 0);
     415            0 :   type = TREE_TYPE (lhs);
     416              : 
     417            0 :   target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     418            0 :   mem = expand_normal (rhs);
     419              : 
     420            0 :   gcc_assert (MEM_P (mem));
     421            0 :   PUT_MODE (mem, TYPE_MODE (type));
     422              : 
     423            0 :   create_call_lhs_operand (&ops[0], target, TYPE_MODE (type));
     424            0 :   create_fixed_operand (&ops[1], mem);
     425            0 :   expand_insn (get_multi_vector_move (type, optab), 2, ops);
     426            0 :   assign_call_lhs (lhs, target, &ops[0]);
     427            0 : }
     428              : 
     429              : /* Expand STORE_LANES call STMT using optab OPTAB.  */
     430              : 
     431              : static void
     432            0 : expand_store_lanes_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
     433              : {
     434            0 :   class expand_operand ops[2];
     435            0 :   tree type, lhs, rhs;
     436            0 :   rtx target, reg;
     437              : 
     438            0 :   lhs = gimple_call_lhs (stmt);
     439            0 :   rhs = gimple_call_arg (stmt, 0);
     440            0 :   type = TREE_TYPE (rhs);
     441              : 
     442            0 :   target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     443            0 :   reg = expand_normal (rhs);
     444              : 
     445            0 :   gcc_assert (MEM_P (target));
     446            0 :   PUT_MODE (target, TYPE_MODE (type));
     447              : 
     448            0 :   create_fixed_operand (&ops[0], target);
     449            0 :   create_input_operand (&ops[1], reg, TYPE_MODE (type));
     450            0 :   expand_insn (get_multi_vector_move (type, optab), 2, ops);
     451            0 : }
     452              : 
     453              : static void
     454            0 : expand_ANNOTATE (internal_fn, gcall *)
     455              : {
     456            0 :   gcc_unreachable ();
     457              : }
     458              : 
     459              : /* This should get expanded in omp_device_lower pass.  */
     460              : 
     461              : static void
     462            0 : expand_GOMP_USE_SIMT (internal_fn, gcall *)
     463              : {
     464            0 :   gcc_unreachable ();
     465              : }
     466              : 
     467              : /* This should get expanded in omp_device_lower pass.  */
     468              : 
     469              : static void
     470            0 : expand_GOMP_SIMT_ENTER (internal_fn, gcall *)
     471              : {
     472            0 :   gcc_unreachable ();
     473              : }
     474              : 
     475              : /* Allocate per-lane storage and begin non-uniform execution region.  */
     476              : 
     477              : static void
     478            0 : expand_GOMP_SIMT_ENTER_ALLOC (internal_fn, gcall *stmt)
     479              : {
     480            0 :   rtx target;
     481            0 :   tree lhs = gimple_call_lhs (stmt);
     482            0 :   if (lhs)
     483            0 :     target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     484              :   else
     485            0 :     target = gen_reg_rtx (Pmode);
     486            0 :   rtx size = expand_normal (gimple_call_arg (stmt, 0));
     487            0 :   rtx align = expand_normal (gimple_call_arg (stmt, 1));
     488            0 :   class expand_operand ops[3];
     489            0 :   create_call_lhs_operand (&ops[0], target, Pmode);
     490            0 :   create_input_operand (&ops[1], size, Pmode);
     491            0 :   create_input_operand (&ops[2], align, Pmode);
     492            0 :   gcc_assert (targetm.have_omp_simt_enter ());
     493            0 :   expand_insn (targetm.code_for_omp_simt_enter, 3, ops);
     494            0 :   assign_call_lhs (lhs, target, &ops[0]);
     495            0 : }
     496              : 
     497              : /* Deallocate per-lane storage and leave non-uniform execution region.  */
     498              : 
     499              : static void
     500            0 : expand_GOMP_SIMT_EXIT (internal_fn, gcall *stmt)
     501              : {
     502            0 :   gcc_checking_assert (!gimple_call_lhs (stmt));
     503            0 :   rtx arg = expand_normal (gimple_call_arg (stmt, 0));
     504            0 :   class expand_operand ops[1];
     505            0 :   create_input_operand (&ops[0], arg, Pmode);
     506            0 :   gcc_assert (targetm.have_omp_simt_exit ());
     507            0 :   expand_insn (targetm.code_for_omp_simt_exit, 1, ops);
     508            0 : }
     509              : 
     510              : /* Lane index on SIMT targets: thread index in the warp on NVPTX.  On targets
     511              :    without SIMT execution this should be expanded in omp_device_lower pass.  */
     512              : 
     513              : static void
     514            0 : expand_GOMP_SIMT_LANE (internal_fn, gcall *stmt)
     515              : {
     516            0 :   tree lhs = gimple_call_lhs (stmt);
     517            0 :   if (!lhs)
     518              :     return;
     519              : 
     520            0 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     521            0 :   gcc_assert (targetm.have_omp_simt_lane ());
     522            0 :   emit_insn (targetm.gen_omp_simt_lane (target));
     523              : }
     524              : 
     525              : /* This should get expanded in omp_device_lower pass.  */
     526              : 
     527              : static void
     528            0 : expand_GOMP_SIMT_VF (internal_fn, gcall *)
     529              : {
     530            0 :   gcc_unreachable ();
     531              : }
     532              : 
     533              : /* This should get expanded in omp_device_lower pass.  */
     534              : 
     535              : static void
     536            0 : expand_GOMP_MAX_VF (internal_fn, gcall *)
     537              : {
     538            0 :   gcc_unreachable ();
     539              : }
     540              : 
     541              : /* This should get expanded in omp_device_lower pass.  */
     542              : 
     543              : static void
     544            0 : expand_GOMP_TARGET_REV (internal_fn, gcall *)
     545              : {
     546            0 :   gcc_unreachable ();
     547              : }
     548              : 
     549              : /* Lane index of the first SIMT lane that supplies a non-zero argument.
     550              :    This is a SIMT counterpart to GOMP_SIMD_LAST_LANE, used to represent the
     551              :    lane that executed the last iteration for handling OpenMP lastprivate.  */
     552              : 
     553              : static void
     554            0 : expand_GOMP_SIMT_LAST_LANE (internal_fn, gcall *stmt)
     555              : {
     556            0 :   tree lhs = gimple_call_lhs (stmt);
     557            0 :   if (!lhs)
     558            0 :     return;
     559              : 
     560            0 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     561            0 :   rtx cond = expand_normal (gimple_call_arg (stmt, 0));
     562            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
     563            0 :   class expand_operand ops[2];
     564            0 :   create_call_lhs_operand (&ops[0], target, mode);
     565            0 :   create_input_operand (&ops[1], cond, mode);
     566            0 :   gcc_assert (targetm.have_omp_simt_last_lane ());
     567            0 :   expand_insn (targetm.code_for_omp_simt_last_lane, 2, ops);
     568            0 :   assign_call_lhs (lhs, target, &ops[0]);
     569              : }
     570              : 
     571              : /* Non-transparent predicate used in SIMT lowering of OpenMP "ordered".  */
     572              : 
     573              : static void
     574            0 : expand_GOMP_SIMT_ORDERED_PRED (internal_fn, gcall *stmt)
     575              : {
     576            0 :   tree lhs = gimple_call_lhs (stmt);
     577            0 :   if (!lhs)
     578            0 :     return;
     579              : 
     580            0 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     581            0 :   rtx ctr = expand_normal (gimple_call_arg (stmt, 0));
     582            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
     583            0 :   class expand_operand ops[2];
     584            0 :   create_call_lhs_operand (&ops[0], target, mode);
     585            0 :   create_input_operand (&ops[1], ctr, mode);
     586            0 :   gcc_assert (targetm.have_omp_simt_ordered ());
     587            0 :   expand_insn (targetm.code_for_omp_simt_ordered, 2, ops);
     588            0 :   assign_call_lhs (lhs, target, &ops[0]);
     589              : }
     590              : 
     591              : /* "Or" boolean reduction across SIMT lanes: return non-zero in all lanes if
     592              :    any lane supplies a non-zero argument.  */
     593              : 
     594              : static void
     595            0 : expand_GOMP_SIMT_VOTE_ANY (internal_fn, gcall *stmt)
     596              : {
     597            0 :   tree lhs = gimple_call_lhs (stmt);
     598            0 :   if (!lhs)
     599            0 :     return;
     600              : 
     601            0 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     602            0 :   rtx cond = expand_normal (gimple_call_arg (stmt, 0));
     603            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
     604            0 :   class expand_operand ops[2];
     605            0 :   create_call_lhs_operand (&ops[0], target, mode);
     606            0 :   create_input_operand (&ops[1], cond, mode);
     607            0 :   gcc_assert (targetm.have_omp_simt_vote_any ());
     608            0 :   expand_insn (targetm.code_for_omp_simt_vote_any, 2, ops);
     609            0 :   assign_call_lhs (lhs, target, &ops[0]);
     610              : }
     611              : 
     612              : /* Exchange between SIMT lanes with a "butterfly" pattern: source lane index
     613              :    is destination lane index XOR given offset.  */
     614              : 
     615              : static void
     616            0 : expand_GOMP_SIMT_XCHG_BFLY (internal_fn, gcall *stmt)
     617              : {
     618            0 :   tree lhs = gimple_call_lhs (stmt);
     619            0 :   if (!lhs)
     620            0 :     return;
     621              : 
     622            0 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     623            0 :   rtx src = expand_normal (gimple_call_arg (stmt, 0));
     624            0 :   rtx idx = expand_normal (gimple_call_arg (stmt, 1));
     625            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
     626            0 :   class expand_operand ops[3];
     627            0 :   create_call_lhs_operand (&ops[0], target, mode);
     628            0 :   create_input_operand (&ops[1], src, mode);
     629            0 :   create_input_operand (&ops[2], idx, SImode);
     630            0 :   gcc_assert (targetm.have_omp_simt_xchg_bfly ());
     631            0 :   expand_insn (targetm.code_for_omp_simt_xchg_bfly, 3, ops);
     632            0 :   assign_call_lhs (lhs, target, &ops[0]);
     633              : }
     634              : 
     635              : /* Exchange between SIMT lanes according to given source lane index.  */
     636              : 
     637              : static void
     638            0 : expand_GOMP_SIMT_XCHG_IDX (internal_fn, gcall *stmt)
     639              : {
     640            0 :   tree lhs = gimple_call_lhs (stmt);
     641            0 :   if (!lhs)
     642            0 :     return;
     643              : 
     644            0 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
     645            0 :   rtx src = expand_normal (gimple_call_arg (stmt, 0));
     646            0 :   rtx idx = expand_normal (gimple_call_arg (stmt, 1));
     647            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
     648            0 :   class expand_operand ops[3];
     649            0 :   create_call_lhs_operand (&ops[0], target, mode);
     650            0 :   create_input_operand (&ops[1], src, mode);
     651            0 :   create_input_operand (&ops[2], idx, SImode);
     652            0 :   gcc_assert (targetm.have_omp_simt_xchg_idx ());
     653            0 :   expand_insn (targetm.code_for_omp_simt_xchg_idx, 3, ops);
     654            0 :   assign_call_lhs (lhs, target, &ops[0]);
     655              : }
     656              : 
     657              : /* This should get expanded in adjust_simduid_builtins.  */
     658              : 
     659              : static void
     660            0 : expand_GOMP_SIMD_LANE (internal_fn, gcall *)
     661              : {
     662            0 :   gcc_unreachable ();
     663              : }
     664              : 
     665              : /* This should get expanded in adjust_simduid_builtins.  */
     666              : 
     667              : static void
     668            0 : expand_GOMP_SIMD_VF (internal_fn, gcall *)
     669              : {
     670            0 :   gcc_unreachable ();
     671              : }
     672              : 
     673              : /* This should get expanded in adjust_simduid_builtins.  */
     674              : 
     675              : static void
     676            0 : expand_GOMP_SIMD_LAST_LANE (internal_fn, gcall *)
     677              : {
     678            0 :   gcc_unreachable ();
     679              : }
     680              : 
     681              : /* This should get expanded in adjust_simduid_builtins.  */
     682              : 
     683              : static void
     684            0 : expand_GOMP_SIMD_ORDERED_START (internal_fn, gcall *)
     685              : {
     686            0 :   gcc_unreachable ();
     687              : }
     688              : 
     689              : /* This should get expanded in adjust_simduid_builtins.  */
     690              : 
     691              : static void
     692            0 : expand_GOMP_SIMD_ORDERED_END (internal_fn, gcall *)
     693              : {
     694            0 :   gcc_unreachable ();
     695              : }
     696              : 
     697              : /* This should get expanded in gimplify_omp_dispatch.  */
     698              : 
     699              : static void
     700            0 : expand_GOMP_DISPATCH (internal_fn, gcall *)
     701              : {
     702            0 :   gcc_unreachable ();
     703              : }
     704              : 
     705              : /* This should get expanded in the sanopt pass.  */
     706              : 
     707              : static void
     708            0 : expand_UBSAN_NULL (internal_fn, gcall *)
     709              : {
     710            0 :   gcc_unreachable ();
     711              : }
     712              : 
     713              : /* This should get expanded in the sanopt pass.  */
     714              : 
     715              : static void
     716            0 : expand_UBSAN_BOUNDS (internal_fn, gcall *)
     717              : {
     718            0 :   gcc_unreachable ();
     719              : }
     720              : 
     721              : /* This should get expanded in the sanopt pass.  */
     722              : 
     723              : static void
     724            0 : expand_UBSAN_VPTR (internal_fn, gcall *)
     725              : {
     726            0 :   gcc_unreachable ();
     727              : }
     728              : 
     729              : /* This should get expanded in the sanopt pass.  */
     730              : 
     731              : static void
     732            0 : expand_UBSAN_PTR (internal_fn, gcall *)
     733              : {
     734            0 :   gcc_unreachable ();
     735              : }
     736              : 
     737              : /* This should get expanded in the sanopt pass.  */
     738              : 
     739              : static void
     740            0 : expand_UBSAN_OBJECT_SIZE (internal_fn, gcall *)
     741              : {
     742            0 :   gcc_unreachable ();
     743              : }
     744              : 
     745              : /* This should get expanded in the sanopt pass.  */
     746              : 
     747              : static void
     748            0 : expand_HWASAN_CHECK (internal_fn, gcall *)
     749              : {
     750            0 :   gcc_unreachable ();
     751              : }
     752              : 
     753              : /* For hwasan stack tagging:
     754              :    Tag memory which is dynamically allocated.  */
     755              : static void
     756            0 : expand_HWASAN_ALLOCA_POISON (internal_fn, gcall *gc)
     757              : {
     758            0 :   gcc_assert (ptr_mode == Pmode);
     759            0 :   tree g_target = gimple_call_lhs (gc);
     760            0 :   tree g_ptr = gimple_call_arg (gc, 0);
     761            0 :   tree g_size = gimple_call_arg (gc, 1);
     762              : 
     763              :   /* There is no target; this happens, usually, when we have an alloca of zero
     764              :      size.  */
     765            0 :   if (!g_target)
     766              :     return;
     767            0 :   rtx target = expand_normal (g_target);
     768            0 :   rtx ptr = expand_normal (g_ptr);
     769            0 :   rtx size = expand_normal (g_size);
     770              : 
     771              :   /* No size, nothing to do.  */
     772            0 :   if (size == const0_rtx)
     773              :     return;
     774              : 
     775              :   /* Get new tag for the alloca'd memory.
     776              :      Doing a regular add_tag () like so:
     777              :         rtx tag = targetm.memtag.add_tag (hwasan_frame_base (), 0,
     778              :                                           hwasan_current_frame_tag ());
     779              :      gets a new tag, which can be used for tagging memory.  But for alloca, we
     780              :      need both tagged memory and a tagged pointer to pass to consumers.  Invoke
     781              :      insert_random_tag () instead to add a random tag to ptr to get a tagged
     782              :      pointer that will work for both purposes.  */
     783            0 :   rtx tagged_ptr
     784            0 :     = force_reg (Pmode, targetm.memtag.insert_random_tag (ptr, NULL_RTX));
     785            0 :   rtx tag = targetm.memtag.extract_tag (tagged_ptr, NULL_RTX);
     786              : 
     787            0 :   if (memtag_sanitize_p ())
     788              :     {
     789              :       /* Need to put the tagged ptr into the `target` RTX for consumers
     790              :          of alloca'd memory.  */
     791            0 :       if (tagged_ptr != target)
     792            0 :         emit_move_insn (target, tagged_ptr);
     793              :       /* Tag the memory.  */
     794            0 :       emit_insn (targetm.gen_tag_memory (ptr, tag, size));
     795            0 :       hwasan_increment_frame_tag ();
     796              :     }
     797              :   else
     798            0 :     gcc_unreachable ();
     799              : }
     800              : 
     801              : /* For hwasan stack tagging:
     802              :    Clear tags on the dynamically allocated space.
     803              :    For use after an object dynamically allocated on the stack goes out of
     804              :    scope.  */
     805              : static void
     806            0 : expand_HWASAN_ALLOCA_UNPOISON (internal_fn, gcall *gc)
     807              : {
     808            0 :   gcc_assert (Pmode == ptr_mode);
     809            0 :   tree restored_position = gimple_call_arg (gc, 0);
     810            0 :   rtx restored_rtx = expand_expr (restored_position, NULL_RTX, VOIDmode,
     811              :                                   EXPAND_NORMAL);
     812            0 :   rtx off = expand_simple_binop (Pmode, MINUS, restored_rtx,
     813              :                                  stack_pointer_rtx, NULL_RTX, 0,
     814              :                                  OPTAB_WIDEN);
     815              : 
     816            0 :   if (memtag_sanitize_p ())
     817            0 :     emit_insn (targetm.gen_tag_memory (virtual_stack_dynamic_rtx,
     818              :                                        HWASAN_STACK_BACKGROUND,
     819              :                                        off));
     820              :   else
     821              :     {
     822            0 :       rtx func = init_one_libfunc ("__hwasan_tag_memory");
     823            0 :       emit_library_call_value (func, NULL_RTX, LCT_NORMAL, VOIDmode,
     824            0 :                                virtual_stack_dynamic_rtx, Pmode,
     825              :                                HWASAN_STACK_BACKGROUND, QImode,
     826            0 :                                off, Pmode);
     827              :     }
     828            0 : }
     829              : 
     830              : /* For hwasan stack tagging:
     831              :    Return a tag to be used for a dynamic allocation.  */
     832              : static void
     833            0 : expand_HWASAN_CHOOSE_TAG (internal_fn, gcall *gc)
     834              : {
     835            0 :   tree tag = gimple_call_lhs (gc);
     836            0 :   rtx target = expand_expr (tag, NULL_RTX, VOIDmode, EXPAND_NORMAL);
     837            0 :   machine_mode mode = GET_MODE (target);
     838            0 :   gcc_assert (mode == QImode);
     839              : 
     840            0 :   rtx base_tag = targetm.memtag.extract_tag (hwasan_frame_base (), NULL_RTX);
     841            0 :   gcc_assert (base_tag);
     842            0 :   rtx tag_offset = gen_int_mode (hwasan_current_frame_tag (), QImode);
     843            0 :   rtx chosen_tag = expand_simple_binop (QImode, PLUS, base_tag, tag_offset,
     844              :                                         target, /* unsignedp = */1,
     845              :                                         OPTAB_WIDEN);
     846            0 :   chosen_tag = hwasan_truncate_to_tag_size (chosen_tag, target);
     847              : 
     848              :   /* Really need to put the tag into the `target` RTX.  */
     849            0 :   if (chosen_tag != target)
     850              :     {
     851            0 :       rtx temp = chosen_tag;
     852            0 :       gcc_assert (GET_MODE (chosen_tag) == mode);
     853            0 :       emit_move_insn (target, temp);
     854              :     }
     855              : 
     856            0 :   hwasan_increment_frame_tag ();
     857            0 : }
     858              : 
     859              : /* For hwasan stack tagging:
     860              :    Tag a region of space in the shadow stack according to the base pointer of
     861              :    an object on the stack.  N.b. the length provided in the internal call is
     862              :    required to be aligned to HWASAN_TAG_GRANULE_SIZE.  */
     863              : static void
     864            0 : expand_HWASAN_MARK (internal_fn, gcall *gc)
     865              : {
     866            0 :   gcc_assert (ptr_mode == Pmode);
     867            0 :   HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (gc, 0));
     868            0 :   bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
     869              : 
     870            0 :   tree base = gimple_call_arg (gc, 1);
     871            0 :   gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
     872            0 :   rtx base_rtx = expand_normal (base);
     873              : 
     874            0 :   rtx tag = is_poison ? HWASAN_STACK_BACKGROUND
     875            0 :     : targetm.memtag.extract_tag (base_rtx, NULL_RTX);
     876            0 :   rtx address = targetm.memtag.untagged_pointer (base_rtx, NULL_RTX);
     877              : 
     878            0 :   tree len = gimple_call_arg (gc, 2);
     879            0 :   rtx r_len = expand_normal (len);
     880              : 
     881            0 :   if (memtag_sanitize_p ())
     882            0 :     emit_insn (targetm.gen_tag_memory (address, tag, r_len));
     883              :   else
     884              :     {
     885            0 :       rtx func = init_one_libfunc ("__hwasan_tag_memory");
     886            0 :       emit_library_call (func, LCT_NORMAL, VOIDmode, address, Pmode,
     887            0 :                          tag, QImode, r_len, Pmode);
     888              :     }
     889            0 : }
     890              : 
     891              : /* For hwasan stack tagging:
     892              :    Store a tag into a pointer.  */
     893              : static void
     894            0 : expand_HWASAN_SET_TAG (internal_fn, gcall *gc)
     895              : {
     896            0 :   gcc_assert (ptr_mode == Pmode);
     897            0 :   tree g_target = gimple_call_lhs (gc);
     898            0 :   tree g_ptr = gimple_call_arg (gc, 0);
     899            0 :   tree g_tag = gimple_call_arg (gc, 1);
     900              : 
     901            0 :   rtx ptr = expand_normal (g_ptr);
     902            0 :   rtx tag = expand_expr (g_tag, NULL_RTX, QImode, EXPAND_NORMAL);
     903            0 :   rtx target = expand_normal (g_target);
     904              : 
     905            0 :   rtx untagged = targetm.memtag.untagged_pointer (ptr, target);
     906            0 :   rtx tagged_value = targetm.memtag.set_tag (untagged, tag, target);
     907            0 :   if (tagged_value != target)
     908            0 :     emit_move_insn (target, tagged_value);
     909            0 : }
     910              : 
     911              : /* This should get expanded in the sanopt pass.  */
     912              : 
     913              : static void
     914            0 : expand_ASAN_CHECK (internal_fn, gcall *)
     915              : {
     916            0 :   gcc_unreachable ();
     917              : }
     918              : 
     919              : /* This should get expanded in the sanopt pass.  */
     920              : 
     921              : static void
     922            0 : expand_ASAN_MARK (internal_fn, gcall *)
     923              : {
     924            0 :   gcc_unreachable ();
     925              : }
     926              : 
     927              : /* This should get expanded in the sanopt pass.  */
     928              : 
     929              : static void
     930            0 : expand_ASAN_POISON (internal_fn, gcall *)
     931              : {
     932            0 :   gcc_unreachable ();
     933              : }
     934              : 
     935              : /* This should get expanded in the sanopt pass.  */
     936              : 
     937              : static void
     938            0 : expand_ASAN_POISON_USE (internal_fn, gcall *)
     939              : {
     940            0 :   gcc_unreachable ();
     941              : }
     942              : 
     943              : /* This should get expanded in the tsan pass.  */
     944              : 
     945              : static void
     946            0 : expand_TSAN_FUNC_EXIT (internal_fn, gcall *)
     947              : {
     948            0 :   gcc_unreachable ();
     949              : }
     950              : 
     951              : /* This should get expanded in the lower pass.  */
     952              : 
     953              : static void
     954           25 : expand_FALLTHROUGH (internal_fn, gcall *call)
     955              : {
     956           25 :   auto_urlify_attributes sentinel;
     957           25 :   error_at (gimple_location (call),
     958              :             "invalid use of attribute %<fallthrough%>");
     959           25 : }
     960              : 
     961              : /* Return minimum precision needed to represent all values
     962              :    of ARG in SIGNed integral type.  */
     963              : 
     964              : static int
     965       155602 : get_min_precision (tree arg, signop sign)
     966              : {
     967       155602 :   int prec = TYPE_PRECISION (TREE_TYPE (arg));
     968       155602 :   int cnt = 0;
     969       155602 :   signop orig_sign = sign;
     970       155602 :   if (TREE_CODE (arg) == INTEGER_CST)
     971              :     {
     972        40501 :       int p;
     973        40501 :       if (TYPE_SIGN (TREE_TYPE (arg)) != sign)
     974              :         {
     975        14411 :           widest_int w = wi::to_widest (arg);
     976        14411 :           w = wi::ext (w, prec, sign);
     977        14411 :           p = wi::min_precision (w, sign);
     978        14411 :         }
     979              :       else
     980        26090 :         p = wi::min_precision (wi::to_wide (arg), sign);
     981        40501 :       return MIN (p, prec);
     982              :     }
     983       115041 :   while (CONVERT_EXPR_P (arg)
     984         6563 :          && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
     985       128167 :          && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
     986              :     {
     987         6503 :       arg = TREE_OPERAND (arg, 0);
     988         6503 :       if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
     989              :         {
     990          342 :           if (TYPE_UNSIGNED (TREE_TYPE (arg)))
     991              :             sign = UNSIGNED;
     992          319 :           else if (sign == UNSIGNED
     993          319 :                    && (get_range_pos_neg (arg,
     994              :                                           currently_expanding_gimple_stmt)
     995              :                        != 1))
     996            0 :             return prec + (orig_sign != sign);
     997          342 :           prec = TYPE_PRECISION (TREE_TYPE (arg));
     998              :         }
     999         6503 :       if (++cnt > 30)
    1000            0 :         return prec + (orig_sign != sign);
    1001              :     }
    1002       115041 :   if (CONVERT_EXPR_P (arg)
    1003           60 :       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
    1004       115161 :       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) > prec)
    1005              :     {
    1006              :       /* We have e.g. (unsigned short) y_2 where int y_2 = (int) x_1(D);
    1007              :          If y_2's min precision is smaller than prec, return that.  */
    1008           60 :       int oprec = get_min_precision (TREE_OPERAND (arg, 0), sign);
    1009           60 :       if (oprec < prec)
    1010           60 :         return oprec + (orig_sign != sign);
    1011              :     }
    1012       115041 :   if (TREE_CODE (arg) != SSA_NAME)
    1013            0 :     return prec + (orig_sign != sign);
    1014       115041 :   int_range_max r;
    1015       115041 :   gimple *cg = currently_expanding_gimple_stmt;
    1016       230423 :   while (!get_range_query (cfun)->range_of_expr (r, arg, cg)
    1017       115382 :          || r.varying_p ()
    1018       257856 :          || r.undefined_p ())
    1019              :     {
    1020        88291 :       gimple *g = SSA_NAME_DEF_STMT (arg);
    1021        88291 :       if (is_gimple_assign (g)
    1022        88291 :           && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
    1023              :         {
    1024         1331 :           tree t = gimple_assign_rhs1 (g);
    1025         2662 :           if (INTEGRAL_TYPE_P (TREE_TYPE (t))
    1026         1694 :               && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
    1027              :             {
    1028          341 :               arg = t;
    1029          341 :               if (TYPE_PRECISION (TREE_TYPE (arg)) < prec)
    1030              :                 {
    1031            0 :                   if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    1032              :                     sign = UNSIGNED;
    1033            0 :                   else if (sign == UNSIGNED
    1034            0 :                            && get_range_pos_neg (arg, g) != 1)
    1035            0 :                     return prec + (orig_sign != sign);
    1036            0 :                   prec = TYPE_PRECISION (TREE_TYPE (arg));
    1037              :                 }
    1038          341 :               if (++cnt > 30)
    1039            0 :                 return prec + (orig_sign != sign);
    1040          341 :               continue;
    1041              :             }
    1042              :         }
    1043        87950 :       return prec + (orig_sign != sign);
    1044              :     }
    1045        27091 :   if (sign == TYPE_SIGN (TREE_TYPE (arg)))
    1046              :     {
    1047        19741 :       int p1 = wi::min_precision (r.lower_bound (), sign);
    1048        19741 :       int p2 = wi::min_precision (r.upper_bound (), sign);
    1049        19741 :       p1 = MAX (p1, p2);
    1050        19741 :       prec = MIN (prec, p1);
    1051              :     }
    1052        14597 :   else if (sign == UNSIGNED && !wi::neg_p (r.lower_bound (), SIGNED))
    1053              :     {
    1054         7243 :       int p = wi::min_precision (r.upper_bound (), UNSIGNED);
    1055         7350 :       prec = MIN (prec, p);
    1056              :     }
    1057        27091 :   return prec + (orig_sign != sign);
    1058       115041 : }
    1059              : 
    1060              : /* Helper for expand_*_overflow.  Set the __imag__ part to true
    1061              :    (1 except for signed:1 type, in which case store -1).  */
    1062              : 
    1063              : static void
    1064        80477 : expand_arith_set_overflow (tree lhs, rtx target)
    1065              : {
    1066        80477 :   if (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (lhs))) == 1
    1067        80477 :       && !TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs))))
    1068          194 :     write_complex_part (target, constm1_rtx, true, false);
    1069              :   else
    1070        80283 :     write_complex_part (target, const1_rtx, true, false);
    1071        80477 : }
    1072              : 
    1073              : /* Helper for expand_*_overflow.  Store RES into the __real__ part
    1074              :    of TARGET.  If RES has larger MODE than __real__ part of TARGET,
    1075              :    set the __imag__ part to 1 if RES doesn't fit into it.  Similarly
    1076              :    if LHS has smaller precision than its mode.  */
    1077              : 
    1078              : static void
    1079        71845 : expand_arith_overflow_result_store (tree lhs, rtx target,
    1080              :                                     scalar_int_mode mode, rtx res)
    1081              : {
    1082        71845 :   scalar_int_mode tgtmode
    1083       143690 :     = as_a <scalar_int_mode> (GET_MODE_INNER (GET_MODE (target)));
    1084        71845 :   rtx lres = res;
    1085        71845 :   if (tgtmode != mode)
    1086              :     {
    1087        17934 :       rtx_code_label *done_label = gen_label_rtx ();
    1088        17934 :       int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
    1089        17934 :       lres = convert_modes (tgtmode, mode, res, uns);
    1090        17934 :       gcc_assert (GET_MODE_PRECISION (tgtmode) < GET_MODE_PRECISION (mode));
    1091        17934 :       do_compare_rtx_and_jump (res, convert_modes (mode, tgtmode, lres, uns),
    1092              :                                EQ, true, mode, NULL_RTX, NULL, done_label,
    1093              :                                profile_probability::very_likely ());
    1094        17934 :       expand_arith_set_overflow (lhs, target);
    1095        17934 :       emit_label (done_label);
    1096              :     }
    1097        71845 :   int prec = TYPE_PRECISION (TREE_TYPE (TREE_TYPE (lhs)));
    1098        71845 :   int tgtprec = GET_MODE_PRECISION (tgtmode);
    1099        71845 :   if (prec < tgtprec)
    1100              :     {
    1101         3543 :       rtx_code_label *done_label = gen_label_rtx ();
    1102         3543 :       int uns = TYPE_UNSIGNED (TREE_TYPE (TREE_TYPE (lhs)));
    1103         3543 :       res = lres;
    1104         3543 :       if (uns)
    1105              :         {
    1106         2008 :           rtx mask
    1107         2008 :             = immed_wide_int_const (wi::shifted_mask (0, prec, false, tgtprec),
    1108              :                                     tgtmode);
    1109         2008 :           lres = expand_simple_binop (tgtmode, AND, res, mask, NULL_RTX,
    1110              :                                       true, OPTAB_LIB_WIDEN);
    1111              :         }
    1112              :       else
    1113              :         {
    1114         1535 :           lres = expand_shift (LSHIFT_EXPR, tgtmode, res, tgtprec - prec,
    1115              :                                NULL_RTX, 1);
    1116         1535 :           lres = expand_shift (RSHIFT_EXPR, tgtmode, lres, tgtprec - prec,
    1117              :                                NULL_RTX, 0);
    1118              :         }
    1119         3543 :       do_compare_rtx_and_jump (res, lres,
    1120              :                                EQ, true, tgtmode, NULL_RTX, NULL, done_label,
    1121              :                                profile_probability::very_likely ());
    1122         3543 :       expand_arith_set_overflow (lhs, target);
    1123         3543 :       emit_label (done_label);
    1124              :     }
    1125        71845 :   write_complex_part (target, lres, false, false);
    1126        71845 : }
    1127              : 
    1128              : /* Helper for expand_*_overflow.  Store RES into TARGET.  */
    1129              : 
    1130              : static void
    1131         4659 : expand_ubsan_result_store (tree lhs, rtx target, scalar_int_mode mode,
    1132              :                            rtx res, rtx_code_label *do_error)
    1133              : {
    1134         4659 :   if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
    1135         4659 :       && TYPE_PRECISION (TREE_TYPE (lhs)) < GET_MODE_PRECISION (mode))
    1136              :     {
    1137           54 :       int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
    1138           54 :       int prec = TYPE_PRECISION (TREE_TYPE (lhs));
    1139           54 :       int tgtprec = GET_MODE_PRECISION (mode);
    1140           54 :       rtx resc = gen_reg_rtx (mode), lres;
    1141           54 :       emit_move_insn (resc, res);
    1142           54 :       if (uns)
    1143              :         {
    1144            0 :           rtx mask
    1145            0 :             = immed_wide_int_const (wi::shifted_mask (0, prec, false, tgtprec),
    1146              :                                     mode);
    1147            0 :           lres = expand_simple_binop (mode, AND, res, mask, NULL_RTX,
    1148              :                                       true, OPTAB_LIB_WIDEN);
    1149              :         }
    1150              :       else
    1151              :         {
    1152           54 :           lres = expand_shift (LSHIFT_EXPR, mode, res, tgtprec - prec,
    1153              :                                NULL_RTX, 1);
    1154           54 :           lres = expand_shift (RSHIFT_EXPR, mode, lres, tgtprec - prec,
    1155              :                                NULL_RTX, 0);
    1156              :         }
    1157           54 :       if (lres != res)
    1158           54 :         emit_move_insn (res, lres);
    1159           54 :       do_compare_rtx_and_jump (res, resc,
    1160              :                                NE, true, mode, NULL_RTX, NULL, do_error,
    1161              :                                profile_probability::very_unlikely ());
    1162              :     }
    1163         4659 :   if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    1164              :     /* If this is a scalar in a register that is stored in a wider mode
    1165              :        than the declared mode, compute the result into its declared mode
    1166              :        and then convert to the wider mode.  Our value is the computed
    1167              :        expression.  */
    1168            0 :     convert_move (SUBREG_REG (target), res, SUBREG_PROMOTED_SIGN (target));
    1169              :   else
    1170         4659 :     emit_move_insn (target, res);
    1171         4659 : }
    1172              : 
    1173              : /* Add sub/add overflow checking to the statement STMT.
    1174              :    CODE says whether the operation is +, or -.  */
    1175              : 
    1176              : void
    1177        39577 : expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
    1178              :                         tree arg0, tree arg1, bool unsr_p, bool uns0_p,
    1179              :                         bool uns1_p, bool is_ubsan, tree *datap)
    1180              : {
    1181        39577 :   rtx res, target = NULL_RTX;
    1182        39577 :   tree fn;
    1183        39577 :   rtx_code_label *done_label = gen_label_rtx ();
    1184        39577 :   rtx_code_label *do_error = gen_label_rtx ();
    1185        39577 :   do_pending_stack_adjust ();
    1186        39577 :   rtx op0 = expand_normal (arg0);
    1187        39577 :   rtx op1 = expand_normal (arg1);
    1188        39577 :   scalar_int_mode mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (arg0));
    1189        39577 :   int prec = GET_MODE_PRECISION (mode);
    1190        39577 :   rtx sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
    1191        39577 :   bool do_xor = false;
    1192              : 
    1193        39577 :   if (is_ubsan)
    1194         3386 :     gcc_assert (!unsr_p && !uns0_p && !uns1_p);
    1195              : 
    1196        39577 :   if (lhs)
    1197              :     {
    1198        39049 :       target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    1199        39049 :       if (!is_ubsan)
    1200        36191 :         write_complex_part (target, const0_rtx, true, false);
    1201              :     }
    1202              : 
    1203              :   /* We assume both operands and result have the same precision
    1204              :      here (GET_MODE_BITSIZE (mode)), S stands for signed type
    1205              :      with that precision, U for unsigned type with that precision,
    1206              :      sgn for unsigned most significant bit in that precision.
    1207              :      s1 is signed first operand, u1 is unsigned first operand,
    1208              :      s2 is signed second operand, u2 is unsigned second operand,
    1209              :      sr is signed result, ur is unsigned result and the following
    1210              :      rules say how to compute result (which is always result of
    1211              :      the operands as if both were unsigned, cast to the right
    1212              :      signedness) and how to compute whether operation overflowed.
    1213              : 
    1214              :      s1 + s2 -> sr
    1215              :         res = (S) ((U) s1 + (U) s2)
    1216              :         ovf = s2 < 0 ? res > s1 : res < s1 (or jump on overflow)
    1217              :      s1 - s2 -> sr
    1218              :         res = (S) ((U) s1 - (U) s2)
    1219              :         ovf = s2 < 0 ? res < s1 : res > s2 (or jump on overflow)
    1220              :      u1 + u2 -> ur
    1221              :         res = u1 + u2
    1222              :         ovf = res < u1 (or jump on carry, but RTL opts will handle it)
    1223              :      u1 - u2 -> ur
    1224              :         res = u1 - u2
    1225              :         ovf = res > u1 (or jump on carry, but RTL opts will handle it)
    1226              :      s1 + u2 -> sr
    1227              :         res = (S) ((U) s1 + u2)
    1228              :         ovf = ((U) res ^ sgn) < u2
    1229              :      s1 + u2 -> ur
    1230              :         t1 = (S) (u2 ^ sgn)
    1231              :         t2 = s1 + t1
    1232              :         res = (U) t2 ^ sgn
    1233              :         ovf = t1 < 0 ? t2 > s1 : t2 < s1 (or jump on overflow)
    1234              :      s1 - u2 -> sr
    1235              :         res = (S) ((U) s1 - u2)
    1236              :         ovf = u2 > ((U) s1 ^ sgn)
    1237              :      s1 - u2 -> ur
    1238              :         res = (U) s1 - u2
    1239              :         ovf = s1 < 0 || u2 > (U) s1
    1240              :      u1 - s2 -> sr
    1241              :         res = u1 - (U) s2
    1242              :         ovf = u1 >= ((U) s2 ^ sgn)
    1243              :      u1 - s2 -> ur
    1244              :         t1 = u1 ^ sgn
    1245              :         t2 = t1 - (U) s2
    1246              :         res = t2 ^ sgn
    1247              :         ovf = s2 < 0 ? (S) t2 < (S) t1 : (S) t2 > (S) t1 (or jump on overflow)
    1248              :      s1 + s2 -> ur
    1249              :         res = (U) s1 + (U) s2
    1250              :         ovf = s2 < 0 ? (s1 | (S) res) < 0) : (s1 & (S) res) < 0)
    1251              :      u1 + u2 -> sr
    1252              :         res = (S) (u1 + u2)
    1253              :         ovf = (U) res < u2 || res < 0
    1254              :      u1 - u2 -> sr
    1255              :         res = (S) (u1 - u2)
    1256              :         ovf = u1 >= u2 ? res < 0 : res >= 0
    1257              :      s1 - s2 -> ur
    1258              :         res = (U) s1 - (U) s2
    1259              :         ovf = s2 >= 0 ? ((s1 | (S) res) < 0) : ((s1 & (S) res) < 0)  */
    1260              : 
    1261        39577 :   if (code == PLUS_EXPR && uns0_p && !uns1_p)
    1262              :     {
    1263              :       /* PLUS_EXPR is commutative, if operand signedness differs,
    1264              :          canonicalize to the first operand being signed and second
    1265              :          unsigned to simplify following code.  */
    1266              :       std::swap (op0, op1);
    1267              :       std::swap (arg0, arg1);
    1268              :       uns0_p = false;
    1269              :       uns1_p = true;
    1270              :     }
    1271              : 
    1272              :   /* u1 +- u2 -> ur  */
    1273        38742 :   if (uns0_p && uns1_p && unsr_p)
    1274              :     {
    1275        21401 :       insn_code icode = optab_handler (code == PLUS_EXPR ? uaddv4_optab
    1276              :                                        : usubv4_optab, mode);
    1277        15961 :       if (icode != CODE_FOR_nothing)
    1278              :         {
    1279        15056 :           class expand_operand ops[4];
    1280        15056 :           rtx_insn *last = get_last_insn ();
    1281              : 
    1282        15056 :           res = gen_reg_rtx (mode);
    1283        15056 :           create_output_operand (&ops[0], res, mode);
    1284        15056 :           create_input_operand (&ops[1], op0, mode);
    1285        15056 :           create_input_operand (&ops[2], op1, mode);
    1286        15056 :           create_fixed_operand (&ops[3], do_error);
    1287        15056 :           if (maybe_expand_insn (icode, 4, ops))
    1288              :             {
    1289        15056 :               last = get_last_insn ();
    1290        15056 :               if (profile_status_for_fn (cfun) != PROFILE_ABSENT
    1291        12461 :                   && JUMP_P (last)
    1292        12461 :                   && any_condjump_p (last)
    1293        27517 :                   && !find_reg_note (last, REG_BR_PROB, 0))
    1294        12461 :                 add_reg_br_prob_note (last,
    1295              :                                       profile_probability::very_unlikely ());
    1296        15056 :               emit_jump (done_label);
    1297        15056 :               goto do_error_label;
    1298              :             }
    1299              : 
    1300            0 :           delete_insns_since (last);
    1301              :         }
    1302              : 
    1303              :       /* Compute the operation.  On RTL level, the addition is always
    1304              :          unsigned.  */
    1305         1810 :       res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
    1306              :                           op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
    1307          905 :       rtx tem = op0;
    1308              :       /* For PLUS_EXPR, the operation is commutative, so we can pick
    1309              :          operand to compare against.  For prec <= BITS_PER_WORD, I think
    1310              :          preferring REG operand is better over CONST_INT, because
    1311              :          the CONST_INT might enlarge the instruction or CSE would need
    1312              :          to figure out we'd already loaded it into a register before.
    1313              :          For prec > BITS_PER_WORD, I think CONST_INT might be more beneficial,
    1314              :          as then the multi-word comparison can be perhaps simplified.  */
    1315          905 :       if (code == PLUS_EXPR
    1316            0 :           && (prec <= BITS_PER_WORD
    1317            0 :               ? (CONST_SCALAR_INT_P (op0) && REG_P (op1))
    1318            0 :               : CONST_SCALAR_INT_P (op1)))
    1319          905 :         tem = op1;
    1320         1810 :       do_compare_rtx_and_jump (res, tem, code == PLUS_EXPR ? GEU : LEU,
    1321              :                                true, mode, NULL_RTX, NULL, done_label,
    1322              :                                profile_probability::very_likely ());
    1323          905 :       goto do_error_label;
    1324              :     }
    1325              : 
    1326              :   /* s1 +- u2 -> sr  */
    1327        23616 :   if (!uns0_p && uns1_p && !unsr_p)
    1328              :     {
    1329              :       /* Compute the operation.  On RTL level, the addition is always
    1330              :          unsigned.  */
    1331         2111 :       res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
    1332              :                           op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
    1333         2111 :       rtx tem = expand_binop (mode, add_optab,
    1334              :                               code == PLUS_EXPR ? res : op0, sgn,
    1335              :                               NULL_RTX, false, OPTAB_LIB_WIDEN);
    1336         1309 :       do_compare_rtx_and_jump (tem, op1, GEU, true, mode, NULL_RTX, NULL,
    1337              :                                done_label, profile_probability::very_likely ());
    1338         1309 :       goto do_error_label;
    1339              :     }
    1340              : 
    1341              :   /* s1 + u2 -> ur  */
    1342        22307 :   if (code == PLUS_EXPR && !uns0_p && uns1_p && unsr_p)
    1343              :     {
    1344         1919 :       op1 = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
    1345              :                           OPTAB_LIB_WIDEN);
    1346              :       /* As we've changed op1, we have to avoid using the value range
    1347              :          for the original argument.  */
    1348         1919 :       arg1 = error_mark_node;
    1349         1919 :       do_xor = true;
    1350         1919 :       goto do_signed;
    1351              :     }
    1352              : 
    1353              :   /* u1 - s2 -> ur  */
    1354        20388 :   if (code == MINUS_EXPR && uns0_p && !uns1_p && unsr_p)
    1355              :     {
    1356          820 :       op0 = expand_binop (mode, add_optab, op0, sgn, NULL_RTX, false,
    1357              :                           OPTAB_LIB_WIDEN);
    1358              :       /* As we've changed op0, we have to avoid using the value range
    1359              :          for the original argument.  */
    1360          820 :       arg0 = error_mark_node;
    1361          820 :       do_xor = true;
    1362          820 :       goto do_signed;
    1363              :     }
    1364              : 
    1365              :   /* s1 - u2 -> ur  */
    1366        19568 :   if (code == MINUS_EXPR && !uns0_p && uns1_p && unsr_p)
    1367              :     {
    1368              :       /* Compute the operation.  On RTL level, the addition is always
    1369              :          unsigned.  */
    1370         1358 :       res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
    1371              :                           OPTAB_LIB_WIDEN);
    1372         1358 :       int pos_neg = get_range_pos_neg (arg0, currently_expanding_gimple_stmt);
    1373         1358 :       if (pos_neg == 2)
    1374              :         /* If ARG0 is known to be always negative, this is always overflow.  */
    1375          148 :         emit_jump (do_error);
    1376         1210 :       else if (pos_neg == 3)
    1377              :         /* If ARG0 is not known to be always positive, check at runtime.  */
    1378         1210 :         do_compare_rtx_and_jump (op0, const0_rtx, LT, false, mode, NULL_RTX,
    1379              :                                  NULL, do_error, profile_probability::very_unlikely ());
    1380         1358 :       do_compare_rtx_and_jump (op1, op0, LEU, true, mode, NULL_RTX, NULL,
    1381              :                                done_label, profile_probability::very_likely ());
    1382         1358 :       goto do_error_label;
    1383              :     }
    1384              : 
    1385              :   /* u1 - s2 -> sr  */
    1386        18210 :   if (code == MINUS_EXPR && uns0_p && !uns1_p && !unsr_p)
    1387              :     {
    1388              :       /* Compute the operation.  On RTL level, the addition is always
    1389              :          unsigned.  */
    1390          390 :       res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
    1391              :                           OPTAB_LIB_WIDEN);
    1392          390 :       rtx tem = expand_binop (mode, add_optab, op1, sgn, NULL_RTX, false,
    1393              :                               OPTAB_LIB_WIDEN);
    1394          390 :       do_compare_rtx_and_jump (op0, tem, LTU, true, mode, NULL_RTX, NULL,
    1395              :                                done_label, profile_probability::very_likely ());
    1396          390 :       goto do_error_label;
    1397              :     }
    1398              : 
    1399              :   /* u1 + u2 -> sr  */
    1400        17820 :   if (code == PLUS_EXPR && uns0_p && uns1_p && !unsr_p)
    1401              :     {
    1402              :       /* Compute the operation.  On RTL level, the addition is always
    1403              :          unsigned.  */
    1404         1393 :       res = expand_binop (mode, add_optab, op0, op1, NULL_RTX, false,
    1405              :                           OPTAB_LIB_WIDEN);
    1406         1393 :       do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
    1407              :                                NULL, do_error, profile_probability::very_unlikely ());
    1408         1393 :       rtx tem = op1;
    1409              :       /* The operation is commutative, so we can pick operand to compare
    1410              :          against.  For prec <= BITS_PER_WORD, I think preferring REG operand
    1411              :          is better over CONST_INT, because the CONST_INT might enlarge the
    1412              :          instruction or CSE would need to figure out we'd already loaded it
    1413              :          into a register before.  For prec > BITS_PER_WORD, I think CONST_INT
    1414              :          might be more beneficial, as then the multi-word comparison can be
    1415              :          perhaps simplified.  */
    1416         1393 :       if (prec <= BITS_PER_WORD
    1417         1393 :           ? (CONST_SCALAR_INT_P (op1) && REG_P (op0))
    1418          448 :           : CONST_SCALAR_INT_P (op0))
    1419         1393 :         tem = op0;
    1420         1393 :       do_compare_rtx_and_jump (res, tem, GEU, true, mode, NULL_RTX, NULL,
    1421              :                                done_label, profile_probability::very_likely ());
    1422         1393 :       goto do_error_label;
    1423              :     }
    1424              : 
    1425              :   /* s1 +- s2 -> ur  */
    1426        16427 :   if (!uns0_p && !uns1_p && unsr_p)
    1427              :     {
    1428              :       /* Compute the operation.  On RTL level, the addition is always
    1429              :          unsigned.  */
    1430         3516 :       res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
    1431              :                           op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
    1432         2359 :       int pos_neg = get_range_pos_neg (arg1, currently_expanding_gimple_stmt);
    1433         2359 :       if (code == PLUS_EXPR)
    1434              :         {
    1435         1202 :           int pos_neg0 = get_range_pos_neg (arg0,
    1436              :                                             currently_expanding_gimple_stmt);
    1437         1202 :           if (pos_neg0 != 3 && pos_neg == 3)
    1438              :             {
    1439              :               std::swap (op0, op1);
    1440              :               pos_neg = pos_neg0;
    1441              :             }
    1442              :         }
    1443         2290 :       rtx tem;
    1444         2290 :       if (pos_neg != 3)
    1445              :         {
    1446         1941 :           tem = expand_binop (mode, ((pos_neg == 1) ^ (code == MINUS_EXPR))
    1447              :                                     ? and_optab : ior_optab,
    1448              :                               op0, res, NULL_RTX, false, OPTAB_LIB_WIDEN);
    1449         1161 :           do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL,
    1450              :                                    NULL, done_label, profile_probability::very_likely ());
    1451              :         }
    1452              :       else
    1453              :         {
    1454         1198 :           rtx_code_label *do_ior_label = gen_label_rtx ();
    1455         1620 :           do_compare_rtx_and_jump (op1, const0_rtx,
    1456              :                                    code == MINUS_EXPR ? GE : LT, false, mode,
    1457              :                                    NULL_RTX, NULL, do_ior_label,
    1458              :                                    profile_probability::even ());
    1459         1198 :           tem = expand_binop (mode, and_optab, op0, res, NULL_RTX, false,
    1460              :                               OPTAB_LIB_WIDEN);
    1461         1198 :           do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
    1462              :                                    NULL, done_label, profile_probability::very_likely ());
    1463         1198 :           emit_jump (do_error);
    1464         1198 :           emit_label (do_ior_label);
    1465         1198 :           tem = expand_binop (mode, ior_optab, op0, res, NULL_RTX, false,
    1466              :                               OPTAB_LIB_WIDEN);
    1467         1198 :           do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
    1468              :                                    NULL, done_label, profile_probability::very_likely ());
    1469              :         }
    1470         2359 :       goto do_error_label;
    1471              :     }
    1472              : 
    1473              :   /* u1 - u2 -> sr  */
    1474        14068 :   if (code == MINUS_EXPR && uns0_p && uns1_p && !unsr_p)
    1475              :     {
    1476              :       /* Compute the operation.  On RTL level, the addition is always
    1477              :          unsigned.  */
    1478         1960 :       res = expand_binop (mode, sub_optab, op0, op1, NULL_RTX, false,
    1479              :                           OPTAB_LIB_WIDEN);
    1480         1960 :       rtx_code_label *op0_geu_op1 = gen_label_rtx ();
    1481         1960 :       do_compare_rtx_and_jump (op0, op1, GEU, true, mode, NULL_RTX, NULL,
    1482              :                                op0_geu_op1, profile_probability::even ());
    1483         1960 :       do_compare_rtx_and_jump (res, const0_rtx, LT, false, mode, NULL_RTX,
    1484              :                                NULL, done_label, profile_probability::very_likely ());
    1485         1960 :       emit_jump (do_error);
    1486         1960 :       emit_label (op0_geu_op1);
    1487         1960 :       do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
    1488              :                                NULL, done_label, profile_probability::very_likely ());
    1489         1960 :       goto do_error_label;
    1490              :     }
    1491              : 
    1492        12108 :   gcc_assert (!uns0_p && !uns1_p && !unsr_p);
    1493              : 
    1494              :   /* s1 +- s2 -> sr  */
    1495        12108 :  do_signed:
    1496        14847 :   {
    1497        21471 :     insn_code icode = optab_handler (code == PLUS_EXPR ? addv4_optab
    1498              :                                      : subv4_optab, mode);
    1499        14847 :     if (icode != CODE_FOR_nothing)
    1500              :       {
    1501        14847 :         class expand_operand ops[4];
    1502        14847 :         rtx_insn *last = get_last_insn ();
    1503              : 
    1504        14847 :         res = gen_reg_rtx (mode);
    1505        14847 :         create_output_operand (&ops[0], res, mode);
    1506        14847 :         create_input_operand (&ops[1], op0, mode);
    1507        14847 :         create_input_operand (&ops[2], op1, mode);
    1508        14847 :         create_fixed_operand (&ops[3], do_error);
    1509        14847 :         if (maybe_expand_insn (icode, 4, ops))
    1510              :           {
    1511        14847 :             last = get_last_insn ();
    1512        14847 :             if (profile_status_for_fn (cfun) != PROFILE_ABSENT
    1513         9647 :                 && JUMP_P (last)
    1514         9647 :                 && any_condjump_p (last)
    1515        24494 :                 && !find_reg_note (last, REG_BR_PROB, 0))
    1516         9647 :               add_reg_br_prob_note (last,
    1517              :                                     profile_probability::very_unlikely ());
    1518        14847 :             emit_jump (done_label);
    1519        14847 :             goto do_error_label;
    1520              :           }
    1521              : 
    1522            0 :         delete_insns_since (last);
    1523              :       }
    1524              : 
    1525              :     /* Compute the operation.  On RTL level, the addition is always
    1526              :        unsigned.  */
    1527            0 :     res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
    1528              :                         op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
    1529              : 
    1530              :     /* If we can prove that one of the arguments (for MINUS_EXPR only
    1531              :        the second operand, as subtraction is not commutative) is always
    1532              :        non-negative or always negative, we can do just one comparison
    1533              :        and conditional jump.  */
    1534            0 :     int pos_neg = get_range_pos_neg (arg1, currently_expanding_gimple_stmt);
    1535            0 :     if (code == PLUS_EXPR)
    1536              :       {
    1537            0 :         int pos_neg0 = get_range_pos_neg (arg0,
    1538              :                                           currently_expanding_gimple_stmt);
    1539            0 :         if (pos_neg0 != 3 && pos_neg == 3)
    1540              :           {
    1541              :             std::swap (op0, op1);
    1542              :             pos_neg = pos_neg0;
    1543              :           }
    1544              :       }
    1545              : 
    1546              :     /* Addition overflows if and only if the two operands have the same sign,
    1547              :        and the result has the opposite sign.  Subtraction overflows if and
    1548              :        only if the two operands have opposite sign, and the subtrahend has
    1549              :        the same sign as the result.  Here 0 is counted as positive.  */
    1550            0 :     if (pos_neg == 3)
    1551              :       {
    1552              :         /* Compute op0 ^ op1 (operands have opposite sign).  */
    1553            0 :         rtx op_xor = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
    1554              :                                    OPTAB_LIB_WIDEN);
    1555              : 
    1556              :         /* Compute res ^ op1 (result and 2nd operand have opposite sign).  */
    1557            0 :         rtx res_xor = expand_binop (mode, xor_optab, res, op1, NULL_RTX, false,
    1558              :                                     OPTAB_LIB_WIDEN);
    1559              : 
    1560            0 :         rtx tem;
    1561            0 :         if (code == PLUS_EXPR)
    1562              :           {
    1563              :             /* Compute (res ^ op1) & ~(op0 ^ op1).  */
    1564            0 :             tem = expand_unop (mode, one_cmpl_optab, op_xor, NULL_RTX, false);
    1565            0 :             tem = expand_binop (mode, and_optab, res_xor, tem, NULL_RTX, false,
    1566              :                                 OPTAB_LIB_WIDEN);
    1567              :           }
    1568              :         else
    1569              :           {
    1570              :             /* Compute (op0 ^ op1) & ~(res ^ op1).  */
    1571            0 :             tem = expand_unop (mode, one_cmpl_optab, res_xor, NULL_RTX, false);
    1572            0 :             tem = expand_binop (mode, and_optab, op_xor, tem, NULL_RTX, false,
    1573              :                                 OPTAB_LIB_WIDEN);
    1574              :           }
    1575              : 
    1576              :         /* No overflow if the result has bit sign cleared.  */
    1577            0 :         do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
    1578              :                                  NULL, done_label, profile_probability::very_likely ());
    1579              :       }
    1580              : 
    1581              :     /* Compare the result of the operation with the first operand.
    1582              :        No overflow for addition if second operand is positive and result
    1583              :        is larger or second operand is negative and result is smaller.
    1584              :        Likewise for subtraction with sign of second operand flipped.  */
    1585              :     else
    1586            0 :       do_compare_rtx_and_jump (res, op0,
    1587            0 :                                (pos_neg == 1) ^ (code == MINUS_EXPR) ? GE : LE,
    1588              :                                false, mode, NULL_RTX, NULL, done_label,
    1589              :                                profile_probability::very_likely ());
    1590              :   }
    1591              : 
    1592        39577 :  do_error_label:
    1593        39577 :   emit_label (do_error);
    1594        39577 :   if (is_ubsan)
    1595              :     {
    1596              :       /* Expand the ubsan builtin call.  */
    1597         3386 :       push_temp_slots ();
    1598         3386 :       fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
    1599              :                                          arg0, arg1, datap);
    1600         3386 :       expand_normal (fn);
    1601         3386 :       pop_temp_slots ();
    1602         3386 :       do_pending_stack_adjust ();
    1603              :     }
    1604        36191 :   else if (lhs)
    1605        36191 :     expand_arith_set_overflow (lhs, target);
    1606              : 
    1607              :   /* We're done.  */
    1608        39577 :   emit_label (done_label);
    1609              : 
    1610        39577 :   if (lhs)
    1611              :     {
    1612        39049 :       if (is_ubsan)
    1613         2858 :         expand_ubsan_result_store (lhs, target, mode, res, do_error);
    1614              :       else
    1615              :         {
    1616        36191 :           if (do_xor)
    1617         2739 :             res = expand_binop (mode, add_optab, res, sgn, NULL_RTX, false,
    1618              :                                 OPTAB_LIB_WIDEN);
    1619              : 
    1620        36191 :           expand_arith_overflow_result_store (lhs, target, mode, res);
    1621              :         }
    1622              :     }
    1623        39577 : }
    1624              : 
    1625              : /* Add negate overflow checking to the statement STMT.  */
    1626              : 
    1627              : static void
    1628          973 : expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan,
    1629              :                      tree *datap)
    1630              : {
    1631          973 :   rtx res, op1;
    1632          973 :   tree fn;
    1633          973 :   rtx_code_label *done_label, *do_error;
    1634          973 :   rtx target = NULL_RTX;
    1635              : 
    1636          973 :   done_label = gen_label_rtx ();
    1637          973 :   do_error = gen_label_rtx ();
    1638              : 
    1639          973 :   do_pending_stack_adjust ();
    1640          973 :   op1 = expand_normal (arg1);
    1641              : 
    1642          973 :   scalar_int_mode mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (arg1));
    1643          973 :   if (lhs)
    1644              :     {
    1645          841 :       target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    1646          841 :       if (!is_ubsan)
    1647          402 :         write_complex_part (target, const0_rtx, true, false);
    1648              :     }
    1649              : 
    1650          973 :   enum insn_code icode = optab_handler (negv3_optab, mode);
    1651          973 :   if (icode != CODE_FOR_nothing)
    1652              :     {
    1653          895 :       class expand_operand ops[3];
    1654          895 :       rtx_insn *last = get_last_insn ();
    1655              : 
    1656          895 :       res = gen_reg_rtx (mode);
    1657          895 :       create_output_operand (&ops[0], res, mode);
    1658          895 :       create_input_operand (&ops[1], op1, mode);
    1659          895 :       create_fixed_operand (&ops[2], do_error);
    1660          895 :       if (maybe_expand_insn (icode, 3, ops))
    1661              :         {
    1662          895 :           last = get_last_insn ();
    1663          895 :           if (profile_status_for_fn (cfun) != PROFILE_ABSENT
    1664          606 :               && JUMP_P (last)
    1665          606 :               && any_condjump_p (last)
    1666         1501 :               && !find_reg_note (last, REG_BR_PROB, 0))
    1667          606 :             add_reg_br_prob_note (last,
    1668              :                                   profile_probability::very_unlikely ());
    1669          895 :           emit_jump (done_label);
    1670              :         }
    1671              :       else
    1672              :         {
    1673            0 :           delete_insns_since (last);
    1674            0 :           icode = CODE_FOR_nothing;
    1675              :         }
    1676              :     }
    1677              : 
    1678          895 :   if (icode == CODE_FOR_nothing)
    1679              :     {
    1680              :       /* Compute the operation.  On RTL level, the addition is always
    1681              :          unsigned.  */
    1682           78 :       res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
    1683              : 
    1684              :       /* Compare the operand with the most negative value.  */
    1685           78 :       rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
    1686           78 :       do_compare_rtx_and_jump (op1, minv, NE, true, mode, NULL_RTX, NULL,
    1687              :                                done_label, profile_probability::very_likely ());
    1688              :     }
    1689              : 
    1690          973 :   emit_label (do_error);
    1691          973 :   if (is_ubsan)
    1692              :     {
    1693              :       /* Expand the ubsan builtin call.  */
    1694          571 :       push_temp_slots ();
    1695          571 :       fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
    1696              :                                          arg1, NULL_TREE, datap);
    1697          571 :       expand_normal (fn);
    1698          571 :       pop_temp_slots ();
    1699          571 :       do_pending_stack_adjust ();
    1700              :     }
    1701          402 :   else if (lhs)
    1702          402 :     expand_arith_set_overflow (lhs, target);
    1703              : 
    1704              :   /* We're done.  */
    1705          973 :   emit_label (done_label);
    1706              : 
    1707          973 :   if (lhs)
    1708              :     {
    1709          841 :       if (is_ubsan)
    1710          439 :         expand_ubsan_result_store (lhs, target, mode, res, do_error);
    1711              :       else
    1712          402 :         expand_arith_overflow_result_store (lhs, target, mode, res);
    1713              :     }
    1714          973 : }
    1715              : 
    1716              : /* Return true if UNS WIDEN_MULT_EXPR with result mode WMODE and operand
    1717              :    mode MODE can be expanded without using a libcall.  */
    1718              : 
    1719              : static bool
    1720            0 : can_widen_mult_without_libcall (scalar_int_mode wmode, scalar_int_mode mode,
    1721              :                                 rtx op0, rtx op1, bool uns)
    1722              : {
    1723            0 :   if (find_widening_optab_handler (umul_widen_optab, wmode, mode)
    1724              :       != CODE_FOR_nothing)
    1725              :     return true;
    1726              : 
    1727            0 :   if (find_widening_optab_handler (smul_widen_optab, wmode, mode)
    1728              :       != CODE_FOR_nothing)
    1729              :     return true;
    1730              : 
    1731            0 :   rtx_insn *last = get_last_insn ();
    1732            0 :   if (CONSTANT_P (op0))
    1733            0 :     op0 = convert_modes (wmode, mode, op0, uns);
    1734              :   else
    1735            0 :     op0 = gen_raw_REG (wmode, LAST_VIRTUAL_REGISTER + 1);
    1736            0 :   if (CONSTANT_P (op1))
    1737            0 :     op1 = convert_modes (wmode, mode, op1, uns);
    1738              :   else
    1739            0 :     op1 = gen_raw_REG (wmode, LAST_VIRTUAL_REGISTER + 2);
    1740            0 :   rtx ret = expand_mult (wmode, op0, op1, NULL_RTX, uns, true);
    1741            0 :   delete_insns_since (last);
    1742            0 :   return ret != NULL_RTX;
    1743              : }
    1744              : 
    1745              : /* Add mul overflow checking to the statement STMT.  */
    1746              : 
    1747              : static void
    1748        18456 : expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
    1749              :                      bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan,
    1750              :                      tree *datap)
    1751              : {
    1752        18456 :   rtx res, op0, op1;
    1753        18456 :   tree fn, type;
    1754        18456 :   rtx_code_label *done_label, *do_error;
    1755        18456 :   rtx target = NULL_RTX;
    1756        18456 :   signop sign;
    1757        18456 :   enum insn_code icode;
    1758        18456 :   int save_flag_trapv = flag_trapv;
    1759              : 
    1760              :   /* We don't want any __mulv?i3 etc. calls from the expansion of
    1761              :      these internal functions, so disable -ftrapv temporarily.  */
    1762        18456 :   flag_trapv = 0;
    1763        18456 :   done_label = gen_label_rtx ();
    1764        18456 :   do_error = gen_label_rtx ();
    1765              : 
    1766        18456 :   do_pending_stack_adjust ();
    1767        18456 :   op0 = expand_normal (arg0);
    1768        18456 :   op1 = expand_normal (arg1);
    1769              : 
    1770        18456 :   scalar_int_mode mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (arg0));
    1771        18456 :   bool uns = unsr_p;
    1772        18456 :   if (lhs)
    1773              :     {
    1774        18312 :       target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    1775        18312 :       if (!is_ubsan)
    1776        16950 :         write_complex_part (target, const0_rtx, true, false);
    1777              :     }
    1778              : 
    1779        17094 :   if (is_ubsan)
    1780         1506 :     gcc_assert (!unsr_p && !uns0_p && !uns1_p);
    1781              : 
    1782              :   /* We assume both operands and result have the same precision
    1783              :      here (GET_MODE_BITSIZE (mode)), S stands for signed type
    1784              :      with that precision, U for unsigned type with that precision,
    1785              :      sgn for unsigned most significant bit in that precision.
    1786              :      s1 is signed first operand, u1 is unsigned first operand,
    1787              :      s2 is signed second operand, u2 is unsigned second operand,
    1788              :      sr is signed result, ur is unsigned result and the following
    1789              :      rules say how to compute result (which is always result of
    1790              :      the operands as if both were unsigned, cast to the right
    1791              :      signedness) and how to compute whether operation overflowed.
    1792              :      main_ovf (false) stands for jump on signed multiplication
    1793              :      overflow or the main algorithm with uns == false.
    1794              :      main_ovf (true) stands for jump on unsigned multiplication
    1795              :      overflow or the main algorithm with uns == true.
    1796              : 
    1797              :      s1 * s2 -> sr
    1798              :         res = (S) ((U) s1 * (U) s2)
    1799              :         ovf = main_ovf (false)
    1800              :      u1 * u2 -> ur
    1801              :         res = u1 * u2
    1802              :         ovf = main_ovf (true)
    1803              :      s1 * u2 -> ur
    1804              :         res = (U) s1 * u2
    1805              :         ovf = (s1 < 0 && u2) || main_ovf (true)
    1806              :      u1 * u2 -> sr
    1807              :         res = (S) (u1 * u2)
    1808              :         ovf = res < 0 || main_ovf (true)
    1809              :      s1 * u2 -> sr
    1810              :         res = (S) ((U) s1 * u2)
    1811              :         ovf = (S) u2 >= 0 ? main_ovf (false)
    1812              :                           : (s1 != 0 && (s1 != -1 || u2 != (U) res))
    1813              :      s1 * s2 -> ur
    1814              :         t1 = (s1 & s2) < 0 ? (-(U) s1) : ((U) s1)
    1815              :         t2 = (s1 & s2) < 0 ? (-(U) s2) : ((U) s2)
    1816              :         res = t1 * t2
    1817              :         ovf = (s1 ^ s2) < 0 ? (s1 && s2) : main_ovf (true)  */
    1818              : 
    1819        18456 :   if (uns0_p && !uns1_p)
    1820              :     {
    1821              :       /* Multiplication is commutative, if operand signedness differs,
    1822              :          canonicalize to the first operand being signed and second
    1823              :          unsigned to simplify following code.  */
    1824         1479 :       std::swap (op0, op1);
    1825         1479 :       std::swap (arg0, arg1);
    1826         1479 :       uns0_p = false;
    1827         1479 :       uns1_p = true;
    1828              :     }
    1829              : 
    1830        18456 :   int pos_neg0 = get_range_pos_neg (arg0, currently_expanding_gimple_stmt);
    1831        18456 :   int pos_neg1 = get_range_pos_neg (arg1, currently_expanding_gimple_stmt);
    1832              :   /* Unsigned types with smaller than mode precision, even if they have most
    1833              :      significant bit set, are still zero-extended.  */
    1834        18456 :   if (uns0_p && TYPE_PRECISION (TREE_TYPE (arg0)) < GET_MODE_PRECISION (mode))
    1835              :     pos_neg0 = 1;
    1836        18456 :   if (uns1_p && TYPE_PRECISION (TREE_TYPE (arg1)) < GET_MODE_PRECISION (mode))
    1837              :     pos_neg1 = 1;
    1838              : 
    1839              :   /* s1 * u2 -> ur  */
    1840        18456 :   if (!uns0_p && uns1_p && unsr_p)
    1841              :     {
    1842         2403 :       switch (pos_neg0)
    1843              :         {
    1844            0 :         case 1:
    1845              :           /* If s1 is non-negative, just perform normal u1 * u2 -> ur.  */
    1846         1676 :           goto do_main;
    1847          531 :         case 2:
    1848              :           /* If s1 is negative, avoid the main code, just multiply and
    1849              :              signal overflow if op1 is not 0.  */
    1850          531 :           struct separate_ops ops;
    1851          531 :           ops.code = MULT_EXPR;
    1852          531 :           ops.type = TREE_TYPE (arg1);
    1853          531 :           ops.op0 = make_tree (ops.type, op0);
    1854          531 :           ops.op1 = make_tree (ops.type, op1);
    1855          531 :           ops.op2 = NULL_TREE;
    1856          531 :           ops.location = loc;
    1857          531 :           res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    1858          531 :           do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
    1859              :                                    NULL, done_label, profile_probability::very_likely ());
    1860          727 :           goto do_error_label;
    1861         1872 :         case 3:
    1862         3744 :           if (get_min_precision (arg1, UNSIGNED)
    1863         1872 :               + get_min_precision (arg0, SIGNED) <= GET_MODE_PRECISION (mode))
    1864              :             {
    1865              :               /* If the first operand is sign extended from narrower type, the
    1866              :                  second operand is zero extended from narrower type and
    1867              :                  the sum of the two precisions is smaller or equal to the
    1868              :                  result precision: if the first argument is at runtime
    1869              :                  non-negative, maximum result will be 0x7e81 or 0x7f..fe80..01
    1870              :                  and there will be no overflow, if the first argument is
    1871              :                  negative and the second argument zero, the result will be
    1872              :                  0 and there will be no overflow, if the first argument is
    1873              :                  negative and the second argument positive, the result when
    1874              :                  treated as signed will be negative (minimum -0x7f80 or
    1875              :                  -0x7f..f80..0) there will be always overflow.  So, do
    1876              :                  res = (U) (s1 * u2)
    1877              :                  ovf = (S) res < 0  */
    1878          196 :               struct separate_ops ops;
    1879          196 :               ops.code = MULT_EXPR;
    1880          196 :               ops.type
    1881          196 :                 = build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
    1882              :                                                   1);
    1883          196 :               ops.op0 = make_tree (ops.type, op0);
    1884          196 :               ops.op1 = make_tree (ops.type, op1);
    1885          196 :               ops.op2 = NULL_TREE;
    1886          196 :               ops.location = loc;
    1887          196 :               res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    1888          196 :               do_compare_rtx_and_jump (res, const0_rtx, GE, false,
    1889              :                                        mode, NULL_RTX, NULL, done_label,
    1890              :                                        profile_probability::very_likely ());
    1891          196 :               goto do_error_label;
    1892              :             }
    1893         1676 :           rtx_code_label *do_main_label;
    1894         1676 :           do_main_label = gen_label_rtx ();
    1895         1676 :           do_compare_rtx_and_jump (op0, const0_rtx, GE, false, mode, NULL_RTX,
    1896              :                                    NULL, do_main_label, profile_probability::very_likely ());
    1897         1676 :           do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
    1898              :                                    NULL, do_main_label, profile_probability::very_likely ());
    1899         1676 :           expand_arith_set_overflow (lhs, target);
    1900         1676 :           emit_label (do_main_label);
    1901         1676 :           goto do_main;
    1902            0 :         default:
    1903            0 :           gcc_unreachable ();
    1904              :         }
    1905              :     }
    1906              : 
    1907              :   /* u1 * u2 -> sr  */
    1908        16053 :   if (uns0_p && uns1_p && !unsr_p)
    1909              :     {
    1910         1490 :       if ((pos_neg0 | pos_neg1) == 1)
    1911              :         {
    1912              :           /* If both arguments are zero extended from narrower types,
    1913              :              the MSB will be clear on both and so we can pretend it is
    1914              :              a normal s1 * s2 -> sr multiplication.  */
    1915              :           uns0_p = false;
    1916              :           uns1_p = false;
    1917              :         }
    1918              :       else
    1919         1037 :         uns = true;
    1920              :       /* Rest of handling of this case after res is computed.  */
    1921         1490 :       goto do_main;
    1922              :     }
    1923              : 
    1924              :   /* s1 * u2 -> sr  */
    1925        14563 :   if (!uns0_p && uns1_p && !unsr_p)
    1926              :     {
    1927         1553 :       switch (pos_neg1)
    1928              :         {
    1929           48 :         case 1:
    1930         1016 :           goto do_main;
    1931          537 :         case 2:
    1932              :           /* If (S) u2 is negative (i.e. u2 is larger than maximum of S,
    1933              :              avoid the main code, just multiply and signal overflow
    1934              :              unless 0 * u2 or -1 * ((U) Smin).  */
    1935          537 :           struct separate_ops ops;
    1936          537 :           ops.code = MULT_EXPR;
    1937          537 :           ops.type = TREE_TYPE (arg1);
    1938          537 :           ops.op0 = make_tree (ops.type, op0);
    1939          537 :           ops.op1 = make_tree (ops.type, op1);
    1940          537 :           ops.op2 = NULL_TREE;
    1941          537 :           ops.location = loc;
    1942          537 :           res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    1943          537 :           do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
    1944              :                                    NULL, done_label, profile_probability::very_likely ());
    1945          537 :           do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
    1946              :                                    NULL, do_error, profile_probability::very_unlikely ());
    1947          537 :           int prec;
    1948          537 :           prec = GET_MODE_PRECISION (mode);
    1949          537 :           rtx sgn;
    1950          537 :           sgn = immed_wide_int_const (wi::min_value (prec, SIGNED), mode);
    1951          537 :           do_compare_rtx_and_jump (op1, sgn, EQ, true, mode, NULL_RTX,
    1952              :                                    NULL, done_label, profile_probability::very_likely ());
    1953          537 :           goto do_error_label;
    1954          968 :         case 3:
    1955              :           /* Rest of handling of this case after res is computed.  */
    1956          968 :           goto do_main;
    1957            0 :         default:
    1958            0 :           gcc_unreachable ();
    1959              :         }
    1960              :     }
    1961              : 
    1962              :   /* s1 * s2 -> ur  */
    1963        13010 :   if (!uns0_p && !uns1_p && unsr_p)
    1964              :     {
    1965         2078 :       rtx tem;
    1966         2078 :       switch (pos_neg0 | pos_neg1)
    1967              :         {
    1968            0 :         case 1: /* Both operands known to be non-negative.  */
    1969            0 :           goto do_main;
    1970           77 :         case 2: /* Both operands known to be negative.  */
    1971           77 :           op0 = expand_unop (mode, neg_optab, op0, NULL_RTX, false);
    1972           77 :           op1 = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
    1973              :           /* Avoid looking at arg0/arg1 ranges, as we've changed
    1974              :              the arguments.  */
    1975           77 :           arg0 = error_mark_node;
    1976           77 :           arg1 = error_mark_node;
    1977           77 :           goto do_main;
    1978         2001 :         case 3:
    1979         2001 :           if ((pos_neg0 ^ pos_neg1) == 3)
    1980              :             {
    1981              :               /* If one operand is known to be negative and the other
    1982              :                  non-negative, this overflows always, unless the non-negative
    1983              :                  one is 0.  Just do normal multiply and set overflow
    1984              :                  unless one of the operands is 0.  */
    1985            0 :               struct separate_ops ops;
    1986            0 :               ops.code = MULT_EXPR;
    1987            0 :               ops.type
    1988            0 :                 = build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
    1989              :                                                   1);
    1990            0 :               ops.op0 = make_tree (ops.type, op0);
    1991            0 :               ops.op1 = make_tree (ops.type, op1);
    1992            0 :               ops.op2 = NULL_TREE;
    1993            0 :               ops.location = loc;
    1994            0 :               res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    1995            0 :               do_compare_rtx_and_jump (pos_neg0 == 1 ? op0 : op1, const0_rtx, EQ,
    1996              :                                        true, mode, NULL_RTX, NULL, done_label,
    1997              :                                        profile_probability::very_likely ());
    1998            0 :               goto do_error_label;
    1999              :             }
    2000         4002 :           if (get_min_precision (arg0, SIGNED)
    2001         2001 :               + get_min_precision (arg1, SIGNED) <= GET_MODE_PRECISION (mode))
    2002              :             {
    2003              :               /* If both operands are sign extended from narrower types and
    2004              :                  the sum of the two precisions is smaller or equal to the
    2005              :                  result precision: if both arguments are at runtime
    2006              :                  non-negative, maximum result will be 0x3f01 or 0x3f..f0..01
    2007              :                  and there will be no overflow, if both arguments are negative,
    2008              :                  maximum result will be 0x40..00 and there will be no overflow
    2009              :                  either, if one argument is positive and the other argument
    2010              :                  negative, the result when treated as signed will be negative
    2011              :                  and there will be always overflow, and if one argument is
    2012              :                  zero and the other negative the result will be zero and no
    2013              :                  overflow.  So, do
    2014              :                  res = (U) (s1 * s2)
    2015              :                  ovf = (S) res < 0  */
    2016          225 :               struct separate_ops ops;
    2017          225 :               ops.code = MULT_EXPR;
    2018          225 :               ops.type
    2019          225 :                 = build_nonstandard_integer_type (GET_MODE_PRECISION (mode),
    2020              :                                                   1);
    2021          225 :               ops.op0 = make_tree (ops.type, op0);
    2022          225 :               ops.op1 = make_tree (ops.type, op1);
    2023          225 :               ops.op2 = NULL_TREE;
    2024          225 :               ops.location = loc;
    2025          225 :               res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2026          225 :               do_compare_rtx_and_jump (res, const0_rtx, GE, false,
    2027              :                                        mode, NULL_RTX, NULL, done_label,
    2028              :                                        profile_probability::very_likely ());
    2029          225 :               goto do_error_label;
    2030              :             }
    2031              :           /* The general case, do all the needed comparisons at runtime.  */
    2032         1776 :           rtx_code_label *do_main_label, *after_negate_label;
    2033         1776 :           rtx rop0, rop1;
    2034         1776 :           rop0 = gen_reg_rtx (mode);
    2035         1776 :           rop1 = gen_reg_rtx (mode);
    2036         1776 :           emit_move_insn (rop0, op0);
    2037         1776 :           emit_move_insn (rop1, op1);
    2038         1776 :           op0 = rop0;
    2039         1776 :           op1 = rop1;
    2040         1776 :           do_main_label = gen_label_rtx ();
    2041         1776 :           after_negate_label = gen_label_rtx ();
    2042         1776 :           tem = expand_binop (mode, and_optab, op0, op1, NULL_RTX, false,
    2043              :                               OPTAB_LIB_WIDEN);
    2044         1776 :           do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
    2045              :                                    NULL, after_negate_label, profile_probability::very_likely ());
    2046              :           /* Both arguments negative here, negate them and continue with
    2047              :              normal unsigned overflow checking multiplication.  */
    2048         1776 :           emit_move_insn (op0, expand_unop (mode, neg_optab, op0,
    2049              :                                             NULL_RTX, false));
    2050         1776 :           emit_move_insn (op1, expand_unop (mode, neg_optab, op1,
    2051              :                                             NULL_RTX, false));
    2052              :           /* Avoid looking at arg0/arg1 ranges, as we might have changed
    2053              :              the arguments.  */
    2054         1776 :           arg0 = error_mark_node;
    2055         1776 :           arg1 = error_mark_node;
    2056         1776 :           emit_jump (do_main_label);
    2057         1776 :           emit_label (after_negate_label);
    2058         1776 :           tem = expand_binop (mode, xor_optab, op0, op1, NULL_RTX, false,
    2059              :                               OPTAB_LIB_WIDEN);
    2060         1776 :           do_compare_rtx_and_jump (tem, const0_rtx, GE, false, mode, NULL_RTX,
    2061              :                                    NULL, do_main_label,
    2062              :                                    profile_probability::very_likely ());
    2063              :           /* One argument is negative here, the other positive.  This
    2064              :              overflows always, unless one of the arguments is 0.  But
    2065              :              if e.g. s2 is 0, (U) s1 * 0 doesn't overflow, whatever s1
    2066              :              is, thus we can keep do_main code oring in overflow as is.  */
    2067         1776 :           if (pos_neg0 != 2)
    2068         1689 :             do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
    2069              :                                      NULL, do_main_label,
    2070              :                                      profile_probability::very_unlikely ());
    2071         1776 :           if (pos_neg1 != 2)
    2072          699 :             do_compare_rtx_and_jump (op1, const0_rtx, EQ, true, mode, NULL_RTX,
    2073              :                                      NULL, do_main_label,
    2074              :                                      profile_probability::very_unlikely ());
    2075         1776 :           expand_arith_set_overflow (lhs, target);
    2076         1776 :           emit_label (do_main_label);
    2077         1776 :           goto do_main;
    2078            0 :         default:
    2079            0 :           gcc_unreachable ();
    2080              :         }
    2081              :     }
    2082              : 
    2083        10932 :  do_main:
    2084        16967 :   type = build_nonstandard_integer_type (GET_MODE_PRECISION (mode), uns);
    2085        16967 :   sign = uns ? UNSIGNED : SIGNED;
    2086        16967 :   icode = optab_handler (uns ? umulv4_optab : mulv4_optab, mode);
    2087        16967 :   if (uns
    2088        10042 :       && (integer_pow2p (arg0) || integer_pow2p (arg1))
    2089        17833 :       && (optimize_insn_for_speed_p () || icode == CODE_FOR_nothing))
    2090              :     {
    2091              :       /* Optimize unsigned multiplication by power of 2 constant
    2092              :          using 2 shifts, one for result, one to extract the shifted
    2093              :          out bits to see if they are all zero.
    2094              :          Don't do this if optimizing for size and we have umulv4_optab,
    2095              :          in that case assume multiplication will be shorter.
    2096              :          This is heuristics based on the single target that provides
    2097              :          umulv4 right now (i?86/x86_64), if further targets add it, this
    2098              :          might need to be revisited.
    2099              :          Cases where both operands are constant should be folded already
    2100              :          during GIMPLE, and cases where one operand is constant but not
    2101              :          power of 2 are questionable, either the WIDEN_MULT_EXPR case
    2102              :          below can be done without multiplication, just by shifts and adds,
    2103              :          or we'd need to divide the result (and hope it actually doesn't
    2104              :          really divide nor multiply) and compare the result of the division
    2105              :          with the original operand.  */
    2106          865 :       rtx opn0 = op0;
    2107          865 :       rtx opn1 = op1;
    2108          865 :       tree argn0 = arg0;
    2109          865 :       tree argn1 = arg1;
    2110          865 :       if (integer_pow2p (arg0))
    2111              :         {
    2112            0 :           std::swap (opn0, opn1);
    2113            0 :           std::swap (argn0, argn1);
    2114              :         }
    2115          865 :       int cnt = tree_log2 (argn1);
    2116          865 :       if (cnt >= 0 && cnt < GET_MODE_PRECISION (mode))
    2117              :         {
    2118          865 :           rtx upper = const0_rtx;
    2119          865 :           res = expand_shift (LSHIFT_EXPR, mode, opn0, cnt, NULL_RTX, uns);
    2120          865 :           if (cnt != 0)
    2121          693 :             upper = expand_shift (RSHIFT_EXPR, mode, opn0,
    2122          693 :                                   GET_MODE_PRECISION (mode) - cnt,
    2123              :                                   NULL_RTX, uns);
    2124          865 :           do_compare_rtx_and_jump (upper, const0_rtx, EQ, true, mode,
    2125              :                                    NULL_RTX, NULL, done_label,
    2126              :                                    profile_probability::very_likely ());
    2127          865 :           goto do_error_label;
    2128              :         }
    2129              :     }
    2130        16102 :   if (icode != CODE_FOR_nothing)
    2131              :     {
    2132        13181 :       class expand_operand ops[4];
    2133        13181 :       rtx_insn *last = get_last_insn ();
    2134              : 
    2135        13181 :       res = gen_reg_rtx (mode);
    2136        13181 :       create_output_operand (&ops[0], res, mode);
    2137        13181 :       create_input_operand (&ops[1], op0, mode);
    2138        13181 :       create_input_operand (&ops[2], op1, mode);
    2139        13181 :       create_fixed_operand (&ops[3], do_error);
    2140        13181 :       if (maybe_expand_insn (icode, 4, ops))
    2141              :         {
    2142        13181 :           last = get_last_insn ();
    2143        13181 :           if (profile_status_for_fn (cfun) != PROFILE_ABSENT
    2144         8988 :               && JUMP_P (last)
    2145         8988 :               && any_condjump_p (last)
    2146        22169 :               && !find_reg_note (last, REG_BR_PROB, 0))
    2147         8988 :             add_reg_br_prob_note (last,
    2148              :                                   profile_probability::very_unlikely ());
    2149        13181 :           emit_jump (done_label);
    2150              :         }
    2151              :       else
    2152              :         {
    2153            0 :           delete_insns_since (last);
    2154            0 :           icode = CODE_FOR_nothing;
    2155              :         }
    2156              :     }
    2157              : 
    2158            0 :   if (icode == CODE_FOR_nothing)
    2159              :     {
    2160         2921 :       struct separate_ops ops;
    2161         2921 :       int prec = GET_MODE_PRECISION (mode);
    2162         2921 :       scalar_int_mode hmode, wmode;
    2163         2921 :       ops.op0 = make_tree (type, op0);
    2164         2921 :       ops.op1 = make_tree (type, op1);
    2165         2921 :       ops.op2 = NULL_TREE;
    2166         2921 :       ops.location = loc;
    2167              : 
    2168              :       /* Optimize unsigned overflow check where we don't use the
    2169              :          multiplication result, just whether overflow happened.
    2170              :          If we can do MULT_HIGHPART_EXPR, that followed by
    2171              :          comparison of the result against zero is cheapest.
    2172              :          We'll still compute res, but it should be DCEd later.  */
    2173         2921 :       use_operand_p use;
    2174         2921 :       gimple *use_stmt;
    2175         2921 :       if (!is_ubsan
    2176         2921 :           && lhs
    2177         2901 :           && uns
    2178         1859 :           && !(uns0_p && uns1_p && !unsr_p)
    2179         1574 :           && can_mult_highpart_p (mode, uns) == 1
    2180            0 :           && single_imm_use (lhs, &use, &use_stmt)
    2181            0 :           && is_gimple_assign (use_stmt)
    2182         2921 :           && gimple_assign_rhs_code (use_stmt) == IMAGPART_EXPR)
    2183            0 :         goto highpart;
    2184              : 
    2185         2921 :       if (GET_MODE_2XWIDER_MODE (mode).exists (&wmode)
    2186         2921 :           && targetm.scalar_mode_supported_p (wmode)
    2187            0 :           && can_widen_mult_without_libcall (wmode, mode, op0, op1, uns))
    2188              :         {
    2189            0 :         twoxwider:
    2190            0 :           ops.code = WIDEN_MULT_EXPR;
    2191            0 :           ops.type
    2192            0 :             = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), uns);
    2193              : 
    2194            0 :           res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
    2195            0 :           rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res, prec,
    2196              :                                      NULL_RTX, uns);
    2197            0 :           hipart = convert_modes (mode, wmode, hipart, uns);
    2198            0 :           res = convert_modes (mode, wmode, res, uns);
    2199            0 :           if (uns)
    2200              :             /* For the unsigned multiplication, there was overflow if
    2201              :                HIPART is non-zero.  */
    2202            0 :             do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
    2203              :                                      NULL_RTX, NULL, done_label,
    2204              :                                      profile_probability::very_likely ());
    2205              :           else
    2206              :             {
    2207              :               /* RES is used more than once, place it in a pseudo.  */
    2208            0 :               res = force_reg (mode, res);
    2209              : 
    2210            0 :               rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
    2211              :                                           NULL_RTX, 0);
    2212              :               /* RES is low half of the double width result, HIPART
    2213              :                  the high half.  There was overflow if
    2214              :                  HIPART is different from RES < 0 ? -1 : 0.  */
    2215            0 :               do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
    2216              :                                        NULL_RTX, NULL, done_label,
    2217              :                                        profile_probability::very_likely ());
    2218              :             }
    2219              :         }
    2220         2921 :       else if (can_mult_highpart_p (mode, uns) == 1)
    2221              :         {
    2222            0 :         highpart:
    2223            0 :           ops.code = MULT_HIGHPART_EXPR;
    2224            0 :           ops.type = type;
    2225              : 
    2226            0 :           rtx hipart = expand_expr_real_2 (&ops, NULL_RTX, mode,
    2227              :                                            EXPAND_NORMAL);
    2228            0 :           ops.code = MULT_EXPR;
    2229            0 :           res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2230            0 :           if (uns)
    2231              :             /* For the unsigned multiplication, there was overflow if
    2232              :                HIPART is non-zero.  */
    2233            0 :             do_compare_rtx_and_jump (hipart, const0_rtx, EQ, true, mode,
    2234              :                                      NULL_RTX, NULL, done_label,
    2235              :                                      profile_probability::very_likely ());
    2236              :           else
    2237              :             {
    2238            0 :               rtx signbit = expand_shift (RSHIFT_EXPR, mode, res, prec - 1,
    2239              :                                           NULL_RTX, 0);
    2240              :               /* RES is low half of the double width result, HIPART
    2241              :                  the high half.  There was overflow if
    2242              :                  HIPART is different from RES < 0 ? -1 : 0.  */
    2243            0 :               do_compare_rtx_and_jump (signbit, hipart, EQ, true, mode,
    2244              :                                        NULL_RTX, NULL, done_label,
    2245              :                                        profile_probability::very_likely ());
    2246              :             }
    2247              : 
    2248              :         }
    2249         2921 :       else if (int_mode_for_size (prec / 2, 1).exists (&hmode)
    2250         2921 :                && 2 * GET_MODE_PRECISION (hmode) == prec)
    2251              :         {
    2252         2921 :           rtx_code_label *large_op0 = gen_label_rtx ();
    2253         2921 :           rtx_code_label *small_op0_large_op1 = gen_label_rtx ();
    2254         2921 :           rtx_code_label *one_small_one_large = gen_label_rtx ();
    2255         2921 :           rtx_code_label *both_ops_large = gen_label_rtx ();
    2256         2921 :           rtx_code_label *after_hipart_neg = uns ? NULL : gen_label_rtx ();
    2257         1062 :           rtx_code_label *after_lopart_neg = uns ? NULL : gen_label_rtx ();
    2258         2921 :           rtx_code_label *do_overflow = gen_label_rtx ();
    2259         2921 :           rtx_code_label *hipart_different = uns ? NULL : gen_label_rtx ();
    2260              : 
    2261         2921 :           unsigned int hprec = GET_MODE_PRECISION (hmode);
    2262         2921 :           rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
    2263              :                                       NULL_RTX, uns);
    2264         2921 :           hipart0 = convert_modes (hmode, mode, hipart0, uns);
    2265         2921 :           rtx lopart0 = convert_modes (hmode, mode, op0, uns);
    2266         2921 :           rtx signbit0 = const0_rtx;
    2267         2921 :           if (!uns)
    2268         1062 :             signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
    2269              :                                      NULL_RTX, 0);
    2270         2921 :           rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
    2271              :                                       NULL_RTX, uns);
    2272         2921 :           hipart1 = convert_modes (hmode, mode, hipart1, uns);
    2273         2921 :           rtx lopart1 = convert_modes (hmode, mode, op1, uns);
    2274         2921 :           rtx signbit1 = const0_rtx;
    2275         2921 :           if (!uns)
    2276         1062 :             signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
    2277              :                                      NULL_RTX, 0);
    2278              : 
    2279         2921 :           res = gen_reg_rtx (mode);
    2280              : 
    2281              :           /* True if op0 resp. op1 are known to be in the range of
    2282              :              halfstype.  */
    2283         2921 :           bool op0_small_p = false;
    2284         2921 :           bool op1_small_p = false;
    2285              :           /* True if op0 resp. op1 are known to have all zeros or all ones
    2286              :              in the upper half of bits, but are not known to be
    2287              :              op{0,1}_small_p.  */
    2288         2921 :           bool op0_medium_p = false;
    2289         2921 :           bool op1_medium_p = false;
    2290              :           /* -1 if op{0,1} is known to be negative, 0 if it is known to be
    2291              :              nonnegative, 1 if unknown.  */
    2292         2921 :           int op0_sign = 1;
    2293         2921 :           int op1_sign = 1;
    2294              : 
    2295         2921 :           if (pos_neg0 == 1)
    2296              :             op0_sign = 0;
    2297         2595 :           else if (pos_neg0 == 2)
    2298          299 :             op0_sign = -1;
    2299         2921 :           if (pos_neg1 == 1)
    2300              :             op1_sign = 0;
    2301         1634 :           else if (pos_neg1 == 2)
    2302          499 :             op1_sign = -1;
    2303              : 
    2304         2921 :           unsigned int mprec0 = prec;
    2305         2921 :           if (arg0 != error_mark_node)
    2306         2277 :             mprec0 = get_min_precision (arg0, sign);
    2307         2921 :           if (mprec0 <= hprec)
    2308              :             op0_small_p = true;
    2309         2673 :           else if (!uns && mprec0 <= hprec + 1)
    2310         2921 :             op0_medium_p = true;
    2311         2921 :           unsigned int mprec1 = prec;
    2312         2921 :           if (arg1 != error_mark_node)
    2313         2277 :             mprec1 = get_min_precision (arg1, sign);
    2314         2921 :           if (mprec1 <= hprec)
    2315              :             op1_small_p = true;
    2316         2227 :           else if (!uns && mprec1 <= hprec + 1)
    2317         2921 :             op1_medium_p = true;
    2318              : 
    2319         2921 :           int smaller_sign = 1;
    2320         2921 :           int larger_sign = 1;
    2321         2921 :           if (op0_small_p)
    2322              :             {
    2323              :               smaller_sign = op0_sign;
    2324              :               larger_sign = op1_sign;
    2325              :             }
    2326         2673 :           else if (op1_small_p)
    2327              :             {
    2328              :               smaller_sign = op1_sign;
    2329              :               larger_sign = op0_sign;
    2330              :             }
    2331         1979 :           else if (op0_sign == op1_sign)
    2332              :             {
    2333          923 :               smaller_sign = op0_sign;
    2334          923 :               larger_sign = op0_sign;
    2335              :             }
    2336              : 
    2337         2673 :           if (!op0_small_p)
    2338         2673 :             do_compare_rtx_and_jump (signbit0, hipart0, NE, true, hmode,
    2339              :                                      NULL_RTX, NULL, large_op0,
    2340              :                                      profile_probability::unlikely ());
    2341              : 
    2342         2921 :           if (!op1_small_p)
    2343         2227 :             do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
    2344              :                                      NULL_RTX, NULL, small_op0_large_op1,
    2345              :                                      profile_probability::unlikely ());
    2346              : 
    2347              :           /* If both op0 and op1 are sign (!uns) or zero (uns) extended from
    2348              :              hmode to mode, the multiplication will never overflow.  We can
    2349              :              do just one hmode x hmode => mode widening multiplication.  */
    2350         2921 :           tree halfstype = build_nonstandard_integer_type (hprec, uns);
    2351         2921 :           ops.op0 = make_tree (halfstype, lopart0);
    2352         2921 :           ops.op1 = make_tree (halfstype, lopart1);
    2353         2921 :           ops.code = WIDEN_MULT_EXPR;
    2354         2921 :           ops.type = type;
    2355         2921 :           rtx thisres
    2356         2921 :             = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2357         2921 :           emit_move_insn (res, thisres);
    2358         2921 :           emit_jump (done_label);
    2359              : 
    2360         2921 :           emit_label (small_op0_large_op1);
    2361              : 
    2362              :           /* If op0 is sign (!uns) or zero (uns) extended from hmode to mode,
    2363              :              but op1 is not, just swap the arguments and handle it as op1
    2364              :              sign/zero extended, op0 not.  */
    2365         2921 :           rtx larger = gen_reg_rtx (mode);
    2366         2921 :           rtx hipart = gen_reg_rtx (hmode);
    2367         2921 :           rtx lopart = gen_reg_rtx (hmode);
    2368         2921 :           emit_move_insn (larger, op1);
    2369         2921 :           emit_move_insn (hipart, hipart1);
    2370         2921 :           emit_move_insn (lopart, lopart0);
    2371         2921 :           emit_jump (one_small_one_large);
    2372              : 
    2373         2921 :           emit_label (large_op0);
    2374              : 
    2375         2921 :           if (!op1_small_p)
    2376         2227 :             do_compare_rtx_and_jump (signbit1, hipart1, NE, true, hmode,
    2377              :                                      NULL_RTX, NULL, both_ops_large,
    2378              :                                      profile_probability::unlikely ());
    2379              : 
    2380              :           /* If op1 is sign (!uns) or zero (uns) extended from hmode to mode,
    2381              :              but op0 is not, prepare larger, hipart and lopart pseudos and
    2382              :              handle it together with small_op0_large_op1.  */
    2383         2921 :           emit_move_insn (larger, op0);
    2384         2921 :           emit_move_insn (hipart, hipart0);
    2385         2921 :           emit_move_insn (lopart, lopart1);
    2386              : 
    2387         2921 :           emit_label (one_small_one_large);
    2388              : 
    2389              :           /* lopart is the low part of the operand that is sign extended
    2390              :              to mode, larger is the other operand, hipart is the
    2391              :              high part of larger and lopart0 and lopart1 are the low parts
    2392              :              of both operands.
    2393              :              We perform lopart0 * lopart1 and lopart * hipart widening
    2394              :              multiplications.  */
    2395         2921 :           tree halfutype = build_nonstandard_integer_type (hprec, 1);
    2396         2921 :           ops.op0 = make_tree (halfutype, lopart0);
    2397         2921 :           ops.op1 = make_tree (halfutype, lopart1);
    2398         2921 :           rtx lo0xlo1
    2399         2921 :             = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2400              : 
    2401         2921 :           ops.op0 = make_tree (halfutype, lopart);
    2402         2921 :           ops.op1 = make_tree (halfutype, hipart);
    2403         2921 :           rtx loxhi = gen_reg_rtx (mode);
    2404         2921 :           rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2405         2921 :           emit_move_insn (loxhi, tem);
    2406              : 
    2407         2921 :           if (!uns)
    2408              :             {
    2409              :               /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
    2410         1062 :               if (larger_sign == 0)
    2411          111 :                 emit_jump (after_hipart_neg);
    2412          951 :               else if (larger_sign != -1)
    2413          946 :                 do_compare_rtx_and_jump (hipart, const0_rtx, GE, false, hmode,
    2414              :                                          NULL_RTX, NULL, after_hipart_neg,
    2415              :                                          profile_probability::even ());
    2416              : 
    2417         1062 :               tem = convert_modes (mode, hmode, lopart, 1);
    2418         1062 :               tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
    2419         1062 :               tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
    2420              :                                          1, OPTAB_WIDEN);
    2421         1062 :               emit_move_insn (loxhi, tem);
    2422              : 
    2423         1062 :               emit_label (after_hipart_neg);
    2424              : 
    2425              :               /* if (lopart < 0) loxhi -= larger;  */
    2426         1062 :               if (smaller_sign == 0)
    2427          327 :                 emit_jump (after_lopart_neg);
    2428          735 :               else if (smaller_sign != -1)
    2429          530 :                 do_compare_rtx_and_jump (lopart, const0_rtx, GE, false, hmode,
    2430              :                                          NULL_RTX, NULL, after_lopart_neg,
    2431              :                                          profile_probability::even ());
    2432              : 
    2433         1062 :               tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
    2434              :                                          1, OPTAB_WIDEN);
    2435         1062 :               emit_move_insn (loxhi, tem);
    2436              : 
    2437         1062 :               emit_label (after_lopart_neg);
    2438              :             }
    2439              : 
    2440              :           /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
    2441         2921 :           tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
    2442         2921 :           tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
    2443              :                                      1, OPTAB_WIDEN);
    2444         2921 :           emit_move_insn (loxhi, tem);
    2445              : 
    2446              :           /* if (loxhi >> (bitsize / 2)
    2447              :                  == (hmode) loxhi >> (bitsize / 2 - 1))  (if !uns)
    2448              :              if (loxhi >> (bitsize / 2) == 0               (if uns).  */
    2449         2921 :           rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
    2450              :                                           NULL_RTX, 0);
    2451         2921 :           hipartloxhi = convert_modes (hmode, mode, hipartloxhi, 0);
    2452         2921 :           rtx signbitloxhi = const0_rtx;
    2453         2921 :           if (!uns)
    2454         1062 :             signbitloxhi = expand_shift (RSHIFT_EXPR, hmode,
    2455              :                                          convert_modes (hmode, mode,
    2456              :                                                         loxhi, 0),
    2457         1062 :                                          hprec - 1, NULL_RTX, 0);
    2458              : 
    2459         2921 :           do_compare_rtx_and_jump (signbitloxhi, hipartloxhi, NE, true, hmode,
    2460              :                                    NULL_RTX, NULL, do_overflow,
    2461              :                                    profile_probability::very_unlikely ());
    2462              : 
    2463              :           /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
    2464         2921 :           rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
    2465              :                                            NULL_RTX, 1);
    2466         2921 :           tem = convert_modes (mode, hmode,
    2467              :                                convert_modes (hmode, mode, lo0xlo1, 1), 1);
    2468              : 
    2469         2921 :           tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
    2470              :                                      1, OPTAB_WIDEN);
    2471         2921 :           if (tem != res)
    2472            0 :             emit_move_insn (res, tem);
    2473         2921 :           emit_jump (done_label);
    2474              : 
    2475         2921 :           emit_label (both_ops_large);
    2476              : 
    2477              :           /* If both operands are large (not sign (!uns) or zero (uns)
    2478              :              extended from hmode), then perform the full multiplication
    2479              :              which will be the result of the operation.
    2480              :              The only cases which don't overflow are for signed multiplication
    2481              :              some cases where both hipart0 and highpart1 are 0 or -1.
    2482              :              For unsigned multiplication when high parts are both non-zero
    2483              :              this overflows always.  */
    2484         2921 :           ops.code = MULT_EXPR;
    2485         2921 :           ops.op0 = make_tree (type, op0);
    2486         2921 :           ops.op1 = make_tree (type, op1);
    2487         2921 :           tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2488         2921 :           emit_move_insn (res, tem);
    2489              : 
    2490         2921 :           if (!uns)
    2491              :             {
    2492         1062 :               if (!op0_medium_p)
    2493              :                 {
    2494         1062 :                   tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
    2495              :                                              NULL_RTX, 1, OPTAB_WIDEN);
    2496         1062 :                   do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
    2497              :                                            NULL_RTX, NULL, do_error,
    2498              :                                            profile_probability::very_unlikely ());
    2499              :                 }
    2500              : 
    2501         1062 :               if (!op1_medium_p)
    2502              :                 {
    2503         1062 :                   tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
    2504              :                                              NULL_RTX, 1, OPTAB_WIDEN);
    2505         1062 :                   do_compare_rtx_and_jump (tem, const1_rtx, GTU, true, hmode,
    2506              :                                            NULL_RTX, NULL, do_error,
    2507              :                                            profile_probability::very_unlikely ());
    2508              :                 }
    2509              : 
    2510              :               /* At this point hipart{0,1} are both in [-1, 0].  If they are
    2511              :                  the same, overflow happened if res is non-positive, if they
    2512              :                  are different, overflow happened if res is positive.  */
    2513         1062 :               if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
    2514           23 :                 emit_jump (hipart_different);
    2515         1039 :               else if (op0_sign == 1 || op1_sign == 1)
    2516          937 :                 do_compare_rtx_and_jump (hipart0, hipart1, NE, true, hmode,
    2517              :                                          NULL_RTX, NULL, hipart_different,
    2518              :                                          profile_probability::even ());
    2519              : 
    2520         1062 :               do_compare_rtx_and_jump (res, const0_rtx, LE, false, mode,
    2521              :                                        NULL_RTX, NULL, do_error,
    2522              :                                        profile_probability::very_unlikely ());
    2523         1062 :               emit_jump (done_label);
    2524              : 
    2525         1062 :               emit_label (hipart_different);
    2526              : 
    2527         1062 :               do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode,
    2528              :                                        NULL_RTX, NULL, do_error,
    2529              :                                        profile_probability::very_unlikely ());
    2530         1062 :               emit_jump (done_label);
    2531              :             }
    2532              : 
    2533         2921 :           emit_label (do_overflow);
    2534              : 
    2535              :           /* Overflow, do full multiplication and fallthru into do_error.  */
    2536         2921 :           ops.op0 = make_tree (type, op0);
    2537         2921 :           ops.op1 = make_tree (type, op1);
    2538         2921 :           tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2539         2921 :           emit_move_insn (res, tem);
    2540              :         }
    2541            0 :       else if (GET_MODE_2XWIDER_MODE (mode).exists (&wmode)
    2542            0 :                && targetm.scalar_mode_supported_p (wmode))
    2543              :         /* Even emitting a libcall is better than not detecting overflow
    2544              :            at all.  */
    2545            0 :         goto twoxwider;
    2546              :       else
    2547              :         {
    2548            0 :           gcc_assert (!is_ubsan);
    2549            0 :           ops.code = MULT_EXPR;
    2550            0 :           ops.type = type;
    2551            0 :           res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2552            0 :           emit_jump (done_label);
    2553              :         }
    2554              :     }
    2555              : 
    2556        13181 :  do_error_label:
    2557        18456 :   emit_label (do_error);
    2558        18456 :   if (is_ubsan)
    2559              :     {
    2560              :       /* Expand the ubsan builtin call.  */
    2561         1506 :       push_temp_slots ();
    2562         1506 :       fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
    2563              :                                          arg0, arg1, datap);
    2564         1506 :       expand_normal (fn);
    2565         1506 :       pop_temp_slots ();
    2566         1506 :       do_pending_stack_adjust ();
    2567              :     }
    2568        16950 :   else if (lhs)
    2569        16950 :     expand_arith_set_overflow (lhs, target);
    2570              : 
    2571              :   /* We're done.  */
    2572        18456 :   emit_label (done_label);
    2573              : 
    2574              :   /* u1 * u2 -> sr  */
    2575        18456 :   if (uns0_p && uns1_p && !unsr_p)
    2576              :     {
    2577         1037 :       rtx_code_label *all_done_label = gen_label_rtx ();
    2578         1037 :       do_compare_rtx_and_jump (res, const0_rtx, GE, false, mode, NULL_RTX,
    2579              :                                NULL, all_done_label, profile_probability::very_likely ());
    2580         1037 :       expand_arith_set_overflow (lhs, target);
    2581         1037 :       emit_label (all_done_label);
    2582              :     }
    2583              : 
    2584              :   /* s1 * u2 -> sr  */
    2585        18456 :   if (!uns0_p && uns1_p && !unsr_p && pos_neg1 == 3)
    2586              :     {
    2587          968 :       rtx_code_label *all_done_label = gen_label_rtx ();
    2588          968 :       rtx_code_label *set_noovf = gen_label_rtx ();
    2589          968 :       do_compare_rtx_and_jump (op1, const0_rtx, GE, false, mode, NULL_RTX,
    2590              :                                NULL, all_done_label, profile_probability::very_likely ());
    2591          968 :       expand_arith_set_overflow (lhs, target);
    2592          968 :       do_compare_rtx_and_jump (op0, const0_rtx, EQ, true, mode, NULL_RTX,
    2593              :                                NULL, set_noovf, profile_probability::very_likely ());
    2594          968 :       do_compare_rtx_and_jump (op0, constm1_rtx, NE, true, mode, NULL_RTX,
    2595              :                                NULL, all_done_label, profile_probability::very_unlikely ());
    2596          968 :       do_compare_rtx_and_jump (op1, res, NE, true, mode, NULL_RTX, NULL,
    2597              :                                all_done_label, profile_probability::very_unlikely ());
    2598          968 :       emit_label (set_noovf);
    2599          968 :       write_complex_part (target, const0_rtx, true, false);
    2600          968 :       emit_label (all_done_label);
    2601              :     }
    2602              : 
    2603        18456 :   if (lhs)
    2604              :     {
    2605        18312 :       if (is_ubsan)
    2606         1362 :         expand_ubsan_result_store (lhs, target, mode, res, do_error);
    2607              :       else
    2608        16950 :         expand_arith_overflow_result_store (lhs, target, mode, res);
    2609              :     }
    2610        18456 :   flag_trapv = save_flag_trapv;
    2611        18456 : }
    2612              : 
    2613              : /* Expand UBSAN_CHECK_* internal function if it has vector operands.  */
    2614              : 
    2615              : static void
    2616          549 : expand_vector_ubsan_overflow (location_t loc, enum tree_code code, tree lhs,
    2617              :                               tree arg0, tree arg1)
    2618              : {
    2619          549 :   poly_uint64 cnt = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
    2620          549 :   rtx_code_label *loop_lab = NULL;
    2621          549 :   rtx cntvar = NULL_RTX;
    2622          549 :   tree cntv = NULL_TREE;
    2623          549 :   tree eltype = TREE_TYPE (TREE_TYPE (arg0));
    2624          549 :   tree sz = TYPE_SIZE (eltype);
    2625          549 :   tree data = NULL_TREE;
    2626          549 :   tree resv = NULL_TREE;
    2627          549 :   rtx lhsr = NULL_RTX;
    2628          549 :   rtx resvr = NULL_RTX;
    2629          549 :   unsigned HOST_WIDE_INT const_cnt = 0;
    2630          549 :   bool use_loop_p = (!cnt.is_constant (&const_cnt) || const_cnt > 4);
    2631          549 :   int save_flag_trapv = flag_trapv;
    2632              : 
    2633              :   /* We don't want any __mulv?i3 etc. calls from the expansion of
    2634              :      these internal functions, so disable -ftrapv temporarily.  */
    2635          549 :   flag_trapv = 0;
    2636          549 :   if (lhs)
    2637              :     {
    2638          549 :       optab op;
    2639          549 :       lhsr = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    2640          141 :       if (!VECTOR_MODE_P (GET_MODE (lhsr))
    2641          408 :           || (op = optab_for_tree_code (code, TREE_TYPE (arg0),
    2642              :                                         optab_default)) == unknown_optab
    2643          957 :           || (optab_handler (op, TYPE_MODE (TREE_TYPE (arg0)))
    2644              :               == CODE_FOR_nothing))
    2645              :         {
    2646          141 :           if (MEM_P (lhsr))
    2647          141 :             resv = make_tree (TREE_TYPE (lhs), lhsr);
    2648              :           else
    2649              :             {
    2650            0 :               resvr = assign_temp (TREE_TYPE (lhs), 1, 1);
    2651            0 :               resv = make_tree (TREE_TYPE (lhs), resvr);
    2652              :             }
    2653              :         }
    2654              :     }
    2655          549 :   if (use_loop_p)
    2656              :     {
    2657          417 :       do_pending_stack_adjust ();
    2658          417 :       loop_lab = gen_label_rtx ();
    2659          417 :       cntvar = gen_reg_rtx (TYPE_MODE (sizetype));
    2660          417 :       cntv = make_tree (sizetype, cntvar);
    2661          417 :       emit_move_insn (cntvar, const0_rtx);
    2662          417 :       emit_label (loop_lab);
    2663              :     }
    2664          549 :   if (TREE_CODE (arg0) != VECTOR_CST)
    2665              :     {
    2666          461 :       rtx arg0r = expand_normal (arg0);
    2667          461 :       arg0 = make_tree (TREE_TYPE (arg0), arg0r);
    2668              :     }
    2669          549 :   if (TREE_CODE (arg1) != VECTOR_CST)
    2670              :     {
    2671          537 :       rtx arg1r = expand_normal (arg1);
    2672          537 :       arg1 = make_tree (TREE_TYPE (arg1), arg1r);
    2673              :     }
    2674         2154 :   for (unsigned int i = 0; i < (use_loop_p ? 1 : const_cnt); i++)
    2675              :     {
    2676          945 :       tree op0, op1, res = NULL_TREE;
    2677          945 :       if (use_loop_p)
    2678              :         {
    2679          417 :           tree atype = build_array_type_nelts (eltype, cnt);
    2680          417 :           op0 = uniform_vector_p (arg0);
    2681          417 :           if (op0 == NULL_TREE)
    2682              :             {
    2683          351 :               op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg0);
    2684          351 :               op0 = build4_loc (loc, ARRAY_REF, eltype, op0, cntv,
    2685              :                                 NULL_TREE, NULL_TREE);
    2686              :             }
    2687          417 :           op1 = uniform_vector_p (arg1);
    2688          417 :           if (op1 == NULL_TREE)
    2689              :             {
    2690          411 :               op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, arg1);
    2691          411 :               op1 = build4_loc (loc, ARRAY_REF, eltype, op1, cntv,
    2692              :                                 NULL_TREE, NULL_TREE);
    2693              :             }
    2694          417 :           if (resv)
    2695              :             {
    2696          141 :               res = fold_build1_loc (loc, VIEW_CONVERT_EXPR, atype, resv);
    2697          141 :               res = build4_loc (loc, ARRAY_REF, eltype, res, cntv,
    2698              :                                 NULL_TREE, NULL_TREE);
    2699              :             }
    2700              :         }
    2701              :       else
    2702              :         {
    2703          528 :           tree bitpos = bitsize_int (tree_to_uhwi (sz) * i);
    2704          528 :           op0 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg0, sz, bitpos);
    2705          528 :           op1 = fold_build3_loc (loc, BIT_FIELD_REF, eltype, arg1, sz, bitpos);
    2706          528 :           if (resv)
    2707            0 :             res = fold_build3_loc (loc, BIT_FIELD_REF, eltype, resv, sz,
    2708              :                                    bitpos);
    2709              :         }
    2710          945 :       switch (code)
    2711              :         {
    2712          317 :         case PLUS_EXPR:
    2713          317 :           expand_addsub_overflow (loc, PLUS_EXPR, res, op0, op1,
    2714              :                                   false, false, false, true, &data);
    2715          317 :           break;
    2716          462 :         case MINUS_EXPR:
    2717          462 :           if (use_loop_p ? integer_zerop (arg0) : integer_zerop (op0))
    2718          154 :             expand_neg_overflow (loc, res, op1, true, &data);
    2719              :           else
    2720          308 :             expand_addsub_overflow (loc, MINUS_EXPR, res, op0, op1,
    2721              :                                     false, false, false, true, &data);
    2722              :           break;
    2723          166 :         case MULT_EXPR:
    2724          166 :           expand_mul_overflow (loc, res, op0, op1, false, false, false,
    2725              :                                true, &data);
    2726          166 :           break;
    2727            0 :         default:
    2728            0 :           gcc_unreachable ();
    2729              :         }
    2730              :     }
    2731          549 :   if (use_loop_p)
    2732              :     {
    2733          417 :       struct separate_ops ops;
    2734          417 :       ops.code = PLUS_EXPR;
    2735          417 :       ops.type = TREE_TYPE (cntv);
    2736          417 :       ops.op0 = cntv;
    2737          417 :       ops.op1 = build_int_cst (TREE_TYPE (cntv), 1);
    2738          417 :       ops.op2 = NULL_TREE;
    2739          417 :       ops.location = loc;
    2740          417 :       rtx ret = expand_expr_real_2 (&ops, cntvar, TYPE_MODE (sizetype),
    2741              :                                     EXPAND_NORMAL);
    2742          417 :       if (ret != cntvar)
    2743            0 :         emit_move_insn (cntvar, ret);
    2744          417 :       rtx cntrtx = gen_int_mode (cnt, TYPE_MODE (sizetype));
    2745          834 :       do_compare_rtx_and_jump (cntvar, cntrtx, NE, false,
    2746          417 :                                TYPE_MODE (sizetype), NULL_RTX, NULL, loop_lab,
    2747              :                                profile_probability::very_likely ());
    2748              :     }
    2749          549 :   if (lhs && resv == NULL_TREE)
    2750              :     {
    2751          408 :       struct separate_ops ops;
    2752          408 :       ops.code = code;
    2753          408 :       ops.type = TREE_TYPE (arg0);
    2754          408 :       ops.op0 = arg0;
    2755          408 :       ops.op1 = arg1;
    2756          408 :       ops.op2 = NULL_TREE;
    2757          408 :       ops.location = loc;
    2758          408 :       rtx ret = expand_expr_real_2 (&ops, lhsr, TYPE_MODE (TREE_TYPE (arg0)),
    2759              :                                     EXPAND_NORMAL);
    2760          408 :       if (ret != lhsr)
    2761            5 :         emit_move_insn (lhsr, ret);
    2762              :     }
    2763          141 :   else if (resvr)
    2764            0 :     emit_move_insn (lhsr, resvr);
    2765          549 :   flag_trapv = save_flag_trapv;
    2766          549 : }
    2767              : 
    2768              : /* Expand UBSAN_CHECK_ADD call STMT.  */
    2769              : 
    2770              : static void
    2771         1787 : expand_UBSAN_CHECK_ADD (internal_fn, gcall *stmt)
    2772              : {
    2773         1787 :   location_t loc = gimple_location (stmt);
    2774         1787 :   tree lhs = gimple_call_lhs (stmt);
    2775         1787 :   tree arg0 = gimple_call_arg (stmt, 0);
    2776         1787 :   tree arg1 = gimple_call_arg (stmt, 1);
    2777         1787 :   if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
    2778          185 :     expand_vector_ubsan_overflow (loc, PLUS_EXPR, lhs, arg0, arg1);
    2779              :   else
    2780         1602 :     expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
    2781              :                             false, false, false, true, NULL);
    2782         1787 : }
    2783              : 
    2784              : /* Expand UBSAN_CHECK_SUB call STMT.  */
    2785              : 
    2786              : static void
    2787         1840 : expand_UBSAN_CHECK_SUB (internal_fn, gcall *stmt)
    2788              : {
    2789         1840 :   location_t loc = gimple_location (stmt);
    2790         1840 :   tree lhs = gimple_call_lhs (stmt);
    2791         1840 :   tree arg0 = gimple_call_arg (stmt, 0);
    2792         1840 :   tree arg1 = gimple_call_arg (stmt, 1);
    2793         1840 :   if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
    2794          264 :     expand_vector_ubsan_overflow (loc, MINUS_EXPR, lhs, arg0, arg1);
    2795         1576 :   else if (integer_zerop (arg0))
    2796          417 :     expand_neg_overflow (loc, lhs, arg1, true, NULL);
    2797              :   else
    2798         1159 :     expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
    2799              :                             false, false, false, true, NULL);
    2800         1840 : }
    2801              : 
    2802              : /* Expand UBSAN_CHECK_MUL call STMT.  */
    2803              : 
    2804              : static void
    2805         1440 : expand_UBSAN_CHECK_MUL (internal_fn, gcall *stmt)
    2806              : {
    2807         1440 :   location_t loc = gimple_location (stmt);
    2808         1440 :   tree lhs = gimple_call_lhs (stmt);
    2809         1440 :   tree arg0 = gimple_call_arg (stmt, 0);
    2810         1440 :   tree arg1 = gimple_call_arg (stmt, 1);
    2811         1440 :   if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
    2812          100 :     expand_vector_ubsan_overflow (loc, MULT_EXPR, lhs, arg0, arg1);
    2813              :   else
    2814         1340 :     expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true,
    2815              :                          NULL);
    2816         1440 : }
    2817              : 
    2818              : /* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion.  */
    2819              : 
    2820              : static void
    2821        71621 : expand_arith_overflow (enum tree_code code, gimple *stmt)
    2822              : {
    2823        71621 :   tree lhs = gimple_call_lhs (stmt);
    2824        71621 :   if (lhs == NULL_TREE)
    2825              :     return;
    2826        71621 :   tree arg0 = gimple_call_arg (stmt, 0);
    2827        71621 :   tree arg1 = gimple_call_arg (stmt, 1);
    2828        71621 :   tree type = TREE_TYPE (TREE_TYPE (lhs));
    2829        71621 :   int uns0_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
    2830        71621 :   int uns1_p = TYPE_UNSIGNED (TREE_TYPE (arg1));
    2831        71621 :   int unsr_p = TYPE_UNSIGNED (type);
    2832        71621 :   int prec0 = TYPE_PRECISION (TREE_TYPE (arg0));
    2833        71621 :   int prec1 = TYPE_PRECISION (TREE_TYPE (arg1));
    2834        71621 :   int precres = TYPE_PRECISION (type);
    2835        71621 :   location_t loc = gimple_location (stmt);
    2836        71621 :   if (!uns0_p && get_range_pos_neg (arg0, stmt) == 1)
    2837              :     uns0_p = true;
    2838        71621 :   if (!uns1_p && get_range_pos_neg (arg1, stmt) == 1)
    2839              :     uns1_p = true;
    2840       103458 :   int pr = get_min_precision (arg0, uns0_p ? UNSIGNED : SIGNED);
    2841        71621 :   prec0 = MIN (prec0, pr);
    2842        94647 :   pr = get_min_precision (arg1, uns1_p ? UNSIGNED : SIGNED);
    2843        71621 :   prec1 = MIN (prec1, pr);
    2844        71621 :   int save_flag_trapv = flag_trapv;
    2845              : 
    2846              :   /* We don't want any __mulv?i3 etc. calls from the expansion of
    2847              :      these internal functions, so disable -ftrapv temporarily.  */
    2848        71621 :   flag_trapv = 0;
    2849              :   /* If uns0_p && uns1_p, precop is minimum needed precision
    2850              :      of unsigned type to hold the exact result, otherwise
    2851              :      precop is minimum needed precision of signed type to
    2852              :      hold the exact result.  */
    2853        71621 :   int precop;
    2854        71621 :   if (code == MULT_EXPR)
    2855        23183 :     precop = prec0 + prec1 + (uns0_p != uns1_p);
    2856              :   else
    2857              :     {
    2858        48438 :       if (uns0_p == uns1_p)
    2859        31041 :         precop = MAX (prec0, prec1) + 1;
    2860        17397 :       else if (uns0_p)
    2861         6123 :         precop = MAX (prec0 + 1, prec1) + 1;
    2862              :       else
    2863        11274 :         precop = MAX (prec0, prec1 + 1) + 1;
    2864              :     }
    2865        71621 :   int orig_precres = precres;
    2866              : 
    2867        90167 :   do
    2868              :     {
    2869        90167 :       if ((uns0_p && uns1_p)
    2870        90167 :           ? ((precop + !unsr_p) <= precres
    2871              :              /* u1 - u2 -> ur can overflow, no matter what precision
    2872              :                 the result has.  */
    2873        35421 :              && (code != MINUS_EXPR || !unsr_p))
    2874        54746 :           : (!unsr_p && precop <= precres))
    2875              :         {
    2876              :           /* The infinity precision result will always fit into result.  */
    2877        18302 :           rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    2878        18302 :           write_complex_part (target, const0_rtx, true, false);
    2879        18302 :           scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
    2880        18302 :           struct separate_ops ops;
    2881        18302 :           ops.code = code;
    2882        18302 :           ops.type = type;
    2883        18302 :           ops.op0 = fold_convert_loc (loc, type, arg0);
    2884        18302 :           ops.op1 = fold_convert_loc (loc, type, arg1);
    2885        18302 :           ops.op2 = NULL_TREE;
    2886        18302 :           ops.location = loc;
    2887        18302 :           rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    2888        18302 :           expand_arith_overflow_result_store (lhs, target, mode, tem);
    2889        18302 :           flag_trapv = save_flag_trapv;
    2890        18302 :           return;
    2891              :         }
    2892              : 
    2893              :       /* For operations with low precision, if target doesn't have them, start
    2894              :          with precres widening right away, otherwise do it only if the most
    2895              :          simple cases can't be used.  */
    2896        71865 :       const int min_precision = targetm.min_arithmetic_precision ();
    2897        71865 :       if (orig_precres == precres && precres < min_precision)
    2898              :         ;
    2899        70602 :       else if ((uns0_p && uns1_p && unsr_p && prec0 <= precres
    2900        21216 :                 && prec1 <= precres)
    2901        49389 :           || ((!uns0_p || !uns1_p) && !unsr_p
    2902        19341 :               && prec0 + uns0_p <= precres
    2903        16560 :               && prec1 + uns1_p <= precres))
    2904              :         {
    2905        34287 :           arg0 = fold_convert_loc (loc, type, arg0);
    2906        34287 :           arg1 = fold_convert_loc (loc, type, arg1);
    2907        34287 :           switch (code)
    2908              :             {
    2909        10179 :             case MINUS_EXPR:
    2910        10179 :               if (integer_zerop (arg0) && !unsr_p)
    2911              :                 {
    2912          402 :                   expand_neg_overflow (loc, lhs, arg1, false, NULL);
    2913          402 :                   flag_trapv = save_flag_trapv;
    2914          402 :                   return;
    2915              :                 }
    2916              :               /* FALLTHRU */
    2917        24459 :             case PLUS_EXPR:
    2918        24459 :               expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
    2919              :                                       unsr_p, unsr_p, false, NULL);
    2920        24459 :               flag_trapv = save_flag_trapv;
    2921        24459 :               return;
    2922         9426 :             case MULT_EXPR:
    2923         9426 :               expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
    2924              :                                    unsr_p, unsr_p, false, NULL);
    2925         9426 :               flag_trapv = save_flag_trapv;
    2926         9426 :               return;
    2927            0 :             default:
    2928            0 :               gcc_unreachable ();
    2929              :             }
    2930              :         }
    2931              : 
    2932              :       /* For sub-word operations, retry with a wider type first.  */
    2933        37579 :       if (orig_precres == precres && precop <= BITS_PER_WORD)
    2934              :         {
    2935        19641 :           int p = MAX (min_precision, precop);
    2936        19641 :           scalar_int_mode m = smallest_int_mode_for_size (p).require ();
    2937        19641 :           tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
    2938              :                                                         uns0_p && uns1_p
    2939        19641 :                                                         && unsr_p);
    2940        19641 :           p = TYPE_PRECISION (optype);
    2941        19641 :           if (p > precres)
    2942              :             {
    2943        17988 :               precres = p;
    2944        17988 :               unsr_p = TYPE_UNSIGNED (optype);
    2945        17988 :               type = optype;
    2946        17988 :               continue;
    2947              :             }
    2948              :         }
    2949              : 
    2950        19590 :       if (prec0 <= precres && prec1 <= precres)
    2951              :         {
    2952        19032 :           tree types[2];
    2953        19032 :           if (unsr_p)
    2954              :             {
    2955        10937 :               types[0] = build_nonstandard_integer_type (precres, 0);
    2956        10937 :               types[1] = type;
    2957              :             }
    2958              :           else
    2959              :             {
    2960         8095 :               types[0] = type;
    2961         8095 :               types[1] = build_nonstandard_integer_type (precres, 1);
    2962              :             }
    2963        19032 :           arg0 = fold_convert_loc (loc, types[uns0_p], arg0);
    2964        19032 :           arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
    2965        19032 :           if (code != MULT_EXPR)
    2966        11508 :             expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
    2967              :                                     uns0_p, uns1_p, false, NULL);
    2968              :           else
    2969         7524 :             expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
    2970              :                                  uns0_p, uns1_p, false, NULL);
    2971        19032 :           flag_trapv = save_flag_trapv;
    2972        19032 :           return;
    2973              :         }
    2974              : 
    2975              :       /* Retry with a wider type.  */
    2976          558 :       if (orig_precres == precres)
    2977              :         {
    2978          558 :           int p = MAX (prec0, prec1);
    2979          558 :           scalar_int_mode m = smallest_int_mode_for_size (p).require ();
    2980          558 :           tree optype = build_nonstandard_integer_type (GET_MODE_PRECISION (m),
    2981              :                                                         uns0_p && uns1_p
    2982          558 :                                                         && unsr_p);
    2983          558 :           p = TYPE_PRECISION (optype);
    2984          558 :           if (p > precres)
    2985              :             {
    2986          558 :               precres = p;
    2987          558 :               unsr_p = TYPE_UNSIGNED (optype);
    2988          558 :               type = optype;
    2989          558 :               continue;
    2990              :             }
    2991              :         }
    2992              : 
    2993            0 :       gcc_unreachable ();
    2994              :     }
    2995              :   while (1);
    2996              : }
    2997              : 
    2998              : /* Expand ADD_OVERFLOW STMT.  */
    2999              : 
    3000              : static void
    3001        24994 : expand_ADD_OVERFLOW (internal_fn, gcall *stmt)
    3002              : {
    3003        24994 :   expand_arith_overflow (PLUS_EXPR, stmt);
    3004        24994 : }
    3005              : 
    3006              : /* Expand SUB_OVERFLOW STMT.  */
    3007              : 
    3008              : static void
    3009        23444 : expand_SUB_OVERFLOW (internal_fn, gcall *stmt)
    3010              : {
    3011        23444 :   expand_arith_overflow (MINUS_EXPR, stmt);
    3012        23444 : }
    3013              : 
    3014              : /* Expand MUL_OVERFLOW STMT.  */
    3015              : 
    3016              : static void
    3017        23183 : expand_MUL_OVERFLOW (internal_fn, gcall *stmt)
    3018              : {
    3019        23183 :   expand_arith_overflow (MULT_EXPR, stmt);
    3020        23183 : }
    3021              : 
    3022              : /* Expand UADDC STMT.  */
    3023              : 
    3024              : static void
    3025        15324 : expand_UADDC (internal_fn ifn, gcall *stmt)
    3026              : {
    3027        15324 :   tree lhs = gimple_call_lhs (stmt);
    3028        15324 :   tree arg1 = gimple_call_arg (stmt, 0);
    3029        15324 :   tree arg2 = gimple_call_arg (stmt, 1);
    3030        15324 :   tree arg3 = gimple_call_arg (stmt, 2);
    3031        15324 :   tree type = TREE_TYPE (arg1);
    3032        15324 :   machine_mode mode = TYPE_MODE (type);
    3033        21541 :   insn_code icode = optab_handler (ifn == IFN_UADDC
    3034              :                                    ? uaddc5_optab : usubc5_optab, mode);
    3035        15324 :   rtx op1 = expand_normal (arg1);
    3036        15324 :   rtx op2 = expand_normal (arg2);
    3037        15324 :   rtx op3 = expand_normal (arg3);
    3038        15324 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3039        15324 :   rtx re = gen_reg_rtx (mode);
    3040        15324 :   rtx im = gen_reg_rtx (mode);
    3041        15324 :   class expand_operand ops[5];
    3042        15324 :   create_output_operand (&ops[0], re, mode);
    3043        15324 :   create_output_operand (&ops[1], im, mode);
    3044        15324 :   create_input_operand (&ops[2], op1, mode);
    3045        15324 :   create_input_operand (&ops[3], op2, mode);
    3046        15324 :   create_input_operand (&ops[4], op3, mode);
    3047        15324 :   expand_insn (icode, 5, ops);
    3048        15324 :   write_complex_part (target, re, false, false);
    3049        15324 :   write_complex_part (target, im, true, false);
    3050        15324 : }
    3051              : 
    3052              : /* Expand USUBC STMT.  */
    3053              : 
    3054              : static void
    3055         6217 : expand_USUBC (internal_fn ifn, gcall *stmt)
    3056              : {
    3057         6217 :   expand_UADDC (ifn, stmt);
    3058         6217 : }
    3059              : 
    3060              : /* This should get folded in tree-vectorizer.cc.  */
    3061              : 
    3062              : static void
    3063            0 : expand_LOOP_VECTORIZED (internal_fn, gcall *)
    3064              : {
    3065            0 :   gcc_unreachable ();
    3066              : }
    3067              : 
    3068              : /* This should get folded in tree-vectorizer.cc.  */
    3069              : 
    3070              : static void
    3071            0 : expand_LOOP_DIST_ALIAS (internal_fn, gcall *)
    3072              : {
    3073            0 :   gcc_unreachable ();
    3074              : }
    3075              : 
    3076              : /* Return a memory reference of type TYPE for argument INDEX of STMT.
    3077              :    Use argument INDEX + 1 to derive the second (TBAA) operand.  */
    3078              : 
    3079              : static tree
    3080         1479 : expand_call_mem_ref (tree type, gcall *stmt, int index)
    3081              : {
    3082         1479 :   tree addr = gimple_call_arg (stmt, index);
    3083         1479 :   tree alias_ptr_type = TREE_TYPE (gimple_call_arg (stmt, index + 1));
    3084         1479 :   unsigned int align = tree_to_shwi (gimple_call_arg (stmt, index + 1));
    3085         1479 :   if (TYPE_ALIGN (type) != align)
    3086         1213 :     type = build_aligned_type (type, align);
    3087              : 
    3088         1479 :   tree tmp = addr;
    3089         1479 :   if (TREE_CODE (tmp) == SSA_NAME)
    3090              :     {
    3091         1349 :       gimple *def = get_gimple_for_ssa_name (tmp);
    3092         1349 :       if (def && gimple_assign_single_p (def))
    3093          598 :         tmp = gimple_assign_rhs1 (def);
    3094              :     }
    3095              : 
    3096         1479 :   if (TREE_CODE (tmp) == ADDR_EXPR)
    3097              :     {
    3098          728 :       tree mem = TREE_OPERAND (tmp, 0);
    3099          728 :       if (TREE_CODE (mem) == TARGET_MEM_REF
    3100          728 :           && types_compatible_p (TREE_TYPE (mem), type))
    3101              :         {
    3102          598 :           tree offset = TMR_OFFSET (mem);
    3103          598 :           if (type != TREE_TYPE (mem)
    3104          144 :               || alias_ptr_type != TREE_TYPE (offset)
    3105          742 :               || !integer_zerop (offset))
    3106              :             {
    3107          481 :               mem = copy_node (mem);
    3108          962 :               TMR_OFFSET (mem) = wide_int_to_tree (alias_ptr_type,
    3109          481 :                                                    wi::to_poly_wide (offset));
    3110          481 :               TREE_TYPE (mem) = type;
    3111              :             }
    3112          598 :           return mem;
    3113              :         }
    3114              :     }
    3115              : 
    3116          881 :   return fold_build2 (MEM_REF, type, addr, build_int_cst (alias_ptr_type, 0));
    3117              : }
    3118              : 
    3119              : /* Expand MASK_LOAD{,_LANES}, MASK_LEN_LOAD or LEN_LOAD call STMT using optab
    3120              :  * OPTAB.  */
    3121              : 
    3122              : static void
    3123          721 : expand_partial_load_optab_fn (internal_fn ifn, gcall *stmt, convert_optab optab)
    3124              : {
    3125          721 :   int i = 0;
    3126          721 :   class expand_operand ops[6];
    3127          721 :   tree type, lhs, rhs, maskt;
    3128          721 :   rtx mem, target;
    3129          721 :   insn_code icode;
    3130              : 
    3131          721 :   maskt = gimple_call_arg (stmt, internal_fn_mask_index (ifn));
    3132          721 :   lhs = gimple_call_lhs (stmt);
    3133          721 :   if (lhs == NULL_TREE)
    3134            0 :     return;
    3135          721 :   type = TREE_TYPE (lhs);
    3136          721 :   rhs = expand_call_mem_ref (type, stmt, 0);
    3137              : 
    3138          721 :   if (optab == vec_mask_load_lanes_optab
    3139          721 :       || optab == vec_mask_len_load_lanes_optab)
    3140            0 :     icode = get_multi_vector_move (type, optab);
    3141          721 :   else if (optab == len_load_optab)
    3142            0 :     icode = direct_optab_handler (optab, TYPE_MODE (type));
    3143              :   else
    3144          721 :     icode = convert_optab_handler (optab, TYPE_MODE (type),
    3145          721 :                                    TYPE_MODE (TREE_TYPE (maskt)));
    3146              : 
    3147          721 :   mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3148          721 :   gcc_assert (MEM_P (mem));
    3149              :   /* The built MEM_REF does not accurately reflect that the load
    3150              :      is only partial.  Clear it.  */
    3151          721 :   set_mem_expr (mem, NULL_TREE);
    3152          721 :   clear_mem_offset (mem);
    3153          721 :   target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3154          721 :   create_call_lhs_operand (&ops[i++], target, TYPE_MODE (type));
    3155          721 :   create_fixed_operand (&ops[i++], mem);
    3156          721 :   i = add_mask_else_and_len_args (ops, i, stmt);
    3157          721 :   expand_insn (icode, i, ops);
    3158              : 
    3159          721 :   assign_call_lhs (lhs, target, &ops[0]);
    3160              : }
    3161              : 
    3162              : #define expand_mask_load_optab_fn expand_partial_load_optab_fn
    3163              : #define expand_mask_load_lanes_optab_fn expand_mask_load_optab_fn
    3164              : #define expand_len_load_optab_fn expand_partial_load_optab_fn
    3165              : #define expand_mask_len_load_optab_fn expand_partial_load_optab_fn
    3166              : 
    3167              : /* Expand MASK_STORE{,_LANES}, MASK_LEN_STORE or LEN_STORE call STMT using optab
    3168              :  * OPTAB.  */
    3169              : 
    3170              : static void
    3171          758 : expand_partial_store_optab_fn (internal_fn ifn, gcall *stmt, convert_optab optab)
    3172              : {
    3173          758 :   int i = 0;
    3174          758 :   class expand_operand ops[5];
    3175          758 :   tree type, lhs, rhs, maskt;
    3176          758 :   rtx mem, reg;
    3177          758 :   insn_code icode;
    3178              : 
    3179          758 :   maskt = gimple_call_arg (stmt, internal_fn_mask_index (ifn));
    3180          758 :   rhs = gimple_call_arg (stmt, internal_fn_stored_value_index (ifn));
    3181          758 :   type = TREE_TYPE (rhs);
    3182          758 :   lhs = expand_call_mem_ref (type, stmt, 0);
    3183              : 
    3184          758 :   if (optab == vec_mask_store_lanes_optab
    3185          758 :       || optab == vec_mask_len_store_lanes_optab)
    3186            0 :     icode = get_multi_vector_move (type, optab);
    3187          758 :   else if (optab == len_store_optab)
    3188            0 :     icode = direct_optab_handler (optab, TYPE_MODE (type));
    3189              :   else
    3190          758 :     icode = convert_optab_handler (optab, TYPE_MODE (type),
    3191          758 :                                    TYPE_MODE (TREE_TYPE (maskt)));
    3192              : 
    3193          758 :   mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3194          758 :   gcc_assert (MEM_P (mem));
    3195              :   /* The built MEM_REF does not accurately reflect that the store
    3196              :      is only partial.  Clear it.  */
    3197          758 :   set_mem_expr (mem, NULL_TREE);
    3198          758 :   clear_mem_offset (mem);
    3199          758 :   reg = expand_normal (rhs);
    3200          758 :   create_fixed_operand (&ops[i++], mem);
    3201          758 :   create_input_operand (&ops[i++], reg, TYPE_MODE (type));
    3202          758 :   i = add_mask_else_and_len_args (ops, i, stmt);
    3203          758 :   expand_insn (icode, i, ops);
    3204          758 : }
    3205              : 
    3206              : #define expand_mask_store_optab_fn expand_partial_store_optab_fn
    3207              : #define expand_mask_store_lanes_optab_fn expand_mask_store_optab_fn
    3208              : #define expand_len_store_optab_fn expand_partial_store_optab_fn
    3209              : #define expand_mask_len_store_optab_fn expand_partial_store_optab_fn
    3210              : 
    3211              : /* Expand VCOND_MASK optab internal function.
    3212              :    The expansion of STMT happens based on OPTAB table associated.  */
    3213              : 
    3214              : static void
    3215         8977 : expand_vec_cond_mask_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
    3216              : {
    3217         8977 :   class expand_operand ops[4];
    3218              : 
    3219         8977 :   tree lhs = gimple_call_lhs (stmt);
    3220         8977 :   tree op0 = gimple_call_arg (stmt, 0);
    3221         8977 :   tree op1 = gimple_call_arg (stmt, 1);
    3222         8977 :   tree op2 = gimple_call_arg (stmt, 2);
    3223         8977 :   tree vec_cond_type = TREE_TYPE (lhs);
    3224              : 
    3225         8977 :   machine_mode mode = TYPE_MODE (vec_cond_type);
    3226         8977 :   machine_mode mask_mode = TYPE_MODE (TREE_TYPE (op0));
    3227         8977 :   enum insn_code icode = convert_optab_handler (optab, mode, mask_mode);
    3228         8977 :   rtx mask, rtx_op1, rtx_op2;
    3229              : 
    3230         8977 :   gcc_assert (icode != CODE_FOR_nothing);
    3231              : 
    3232              :   /* Find the comparison generating the mask OP0.  */
    3233         8977 :   tree cmp_op0 = NULL_TREE;
    3234         8977 :   tree cmp_op1 = NULL_TREE;
    3235         8977 :   enum tree_code cmp_code = TREE_CODE (op0);
    3236         8977 :   if (TREE_CODE_CLASS (cmp_code) == tcc_comparison)
    3237              :     {
    3238            0 :       cmp_op0 = TREE_OPERAND (op0, 0);
    3239            0 :       cmp_op1 = TREE_OPERAND (op0, 1);
    3240              :     }
    3241         8977 :   else if (cmp_code == SSA_NAME)
    3242              :     {
    3243         7671 :       gimple *def_stmt = get_gimple_for_ssa_name (op0);
    3244         7671 :       if (def_stmt && is_gimple_assign (def_stmt))
    3245              :         {
    3246         6102 :           cmp_code = gimple_assign_rhs_code (def_stmt);
    3247         6102 :           if (TREE_CODE_CLASS (cmp_code) == tcc_comparison)
    3248              :             {
    3249         4405 :               cmp_op0 = gimple_assign_rhs1 (def_stmt);
    3250         4405 :               cmp_op1 = gimple_assign_rhs2 (def_stmt);
    3251              :             }
    3252              :         }
    3253              :     }
    3254              : 
    3255              :   /* Decide whether to invert comparison based on rtx_cost.  */
    3256         4405 :   if (cmp_op0)
    3257              :     {
    3258         4405 :       enum tree_code rev_code;
    3259         4405 :       tree op_type = TREE_TYPE (cmp_op0);
    3260         4405 :       int unsignedp = TYPE_UNSIGNED (op_type);
    3261         4405 :       rev_code = invert_tree_comparison (cmp_code, HONOR_NANS (op_type));
    3262              : 
    3263         4405 :       if (rev_code != ERROR_MARK)
    3264              :         {
    3265         3258 :           tree cmp_type = TREE_TYPE (op0);
    3266         3258 :           machine_mode cmp_mode = TYPE_MODE (cmp_type);
    3267         3258 :           machine_mode op_mode = TYPE_MODE (op_type);
    3268         3258 :           bool speed_p = optimize_insn_for_speed_p ();
    3269         3258 :           rtx reg = gen_raw_REG (op_mode, LAST_VIRTUAL_REGISTER + 1);
    3270         3258 :           enum rtx_code cmp_rtx_code = convert_tree_comp_to_rtx (cmp_code,
    3271              :                                                                  unsignedp);
    3272         3258 :           rtx veccmp = gen_rtx_fmt_ee (cmp_rtx_code, cmp_mode, reg, reg);
    3273         3258 :           int old_cost = rtx_cost (veccmp, cmp_mode, SET, 0, speed_p);
    3274         3258 :           enum rtx_code rev_rtx_code = convert_tree_comp_to_rtx (rev_code,
    3275              :                                                                  unsignedp);
    3276         3258 :           PUT_CODE (veccmp, rev_rtx_code);
    3277         3258 :           int new_cost = rtx_cost (veccmp, cmp_mode, SET, 0, speed_p);
    3278         3258 :           if (new_cost < old_cost)
    3279              :             {
    3280          336 :               op0 = fold_build2_loc (EXPR_LOCATION (op0), rev_code,
    3281              :                                      cmp_type, cmp_op0, cmp_op1);
    3282          336 :               std::swap (op1, op2);
    3283              :             }
    3284              : 
    3285         3258 :           if (dump_file && (dump_flags & TDF_DETAILS))
    3286              :             {
    3287            0 :               fprintf (dump_file,
    3288              :                        ";; %sswapping operands of .VCOND_MASK\n",
    3289              :                        new_cost >= old_cost ? "not " : "");
    3290            0 :               fprintf (dump_file,
    3291              :                        ";; cost of original %s: %d\n",
    3292            0 :                        GET_RTX_NAME (cmp_rtx_code), old_cost);
    3293            0 :               fprintf (dump_file,
    3294              :                        ";; cost of replacement %s: %d\n",
    3295            0 :                        GET_RTX_NAME (rev_rtx_code), new_cost);
    3296              :             }
    3297              :         }
    3298              :     }
    3299              : 
    3300         8977 :   mask = expand_normal (op0);
    3301         8977 :   rtx_op1 = expand_normal (op1);
    3302         8977 :   rtx_op2 = expand_normal (op2);
    3303              : 
    3304         8977 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3305         8977 :   create_call_lhs_operand (&ops[0], target, mode);
    3306         8977 :   create_input_operand (&ops[1], rtx_op1, mode);
    3307         8977 :   create_input_operand (&ops[2], rtx_op2, mode);
    3308         8977 :   create_input_operand (&ops[3], mask, mask_mode);
    3309         8977 :   expand_insn (icode, 4, ops);
    3310         8977 :   assign_call_lhs (lhs, target, &ops[0]);
    3311         8977 : }
    3312              : 
    3313              : /* Expand VEC_SET internal functions.  */
    3314              : 
    3315              : static void
    3316          123 : expand_vec_set_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
    3317              : {
    3318          123 :   tree lhs = gimple_call_lhs (stmt);
    3319          123 :   tree op0 = gimple_call_arg (stmt, 0);
    3320          123 :   tree op1 = gimple_call_arg (stmt, 1);
    3321          123 :   tree op2 = gimple_call_arg (stmt, 2);
    3322          123 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3323          123 :   rtx src = expand_normal (op0);
    3324              : 
    3325          123 :   machine_mode outermode = TYPE_MODE (TREE_TYPE (op0));
    3326          123 :   scalar_mode innermode = GET_MODE_INNER (outermode);
    3327              : 
    3328          123 :   rtx value = expand_normal (op1);
    3329          123 :   rtx pos = expand_normal (op2);
    3330              : 
    3331          123 :   class expand_operand ops[3];
    3332          123 :   enum insn_code icode = optab_handler (optab, outermode);
    3333              : 
    3334          123 :   if (icode != CODE_FOR_nothing)
    3335              :     {
    3336          123 :       rtx temp = gen_reg_rtx (outermode);
    3337          123 :       emit_move_insn (temp, src);
    3338              : 
    3339          123 :       create_fixed_operand (&ops[0], temp);
    3340          123 :       create_input_operand (&ops[1], value, innermode);
    3341          123 :       create_convert_operand_from (&ops[2], pos, TYPE_MODE (TREE_TYPE (op2)),
    3342              :                                    true);
    3343          123 :       if (maybe_expand_insn (icode, 3, ops))
    3344              :         {
    3345          123 :           emit_move_insn (target, temp);
    3346          123 :           return;
    3347              :         }
    3348              :     }
    3349            0 :   gcc_unreachable ();
    3350              : }
    3351              : 
    3352              : static void
    3353         1828 : expand_ABNORMAL_DISPATCHER (internal_fn, gcall *)
    3354              : {
    3355         1828 : }
    3356              : 
    3357              : static void
    3358            0 : expand_BUILTIN_EXPECT (internal_fn, gcall *stmt)
    3359              : {
    3360              :   /* When guessing was done, the hints should be already stripped away.  */
    3361            0 :   gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());
    3362              : 
    3363            0 :   rtx target;
    3364            0 :   tree lhs = gimple_call_lhs (stmt);
    3365            0 :   if (lhs)
    3366            0 :     target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3367              :   else
    3368            0 :     target = const0_rtx;
    3369            0 :   rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode, EXPAND_NORMAL);
    3370            0 :   if (lhs && val != target)
    3371            0 :     emit_move_insn (target, val);
    3372            0 : }
    3373              : 
    3374              : /* IFN_VA_ARG is supposed to be expanded at pass_stdarg.  So this dummy function
    3375              :    should never be called.  */
    3376              : 
    3377              : static void
    3378            0 : expand_VA_ARG (internal_fn, gcall *)
    3379              : {
    3380            0 :   gcc_unreachable ();
    3381              : }
    3382              : 
    3383              : /* IFN_VEC_CONVERT is supposed to be expanded at pass_lower_vector.  So this
    3384              :    dummy function should never be called.  */
    3385              : 
    3386              : static void
    3387            0 : expand_VEC_CONVERT (internal_fn, gcall *)
    3388              : {
    3389            0 :   gcc_unreachable ();
    3390              : }
    3391              : 
    3392              : /* Expand IFN_RAWMEMCHR internal function.  */
    3393              : 
    3394              : void
    3395            0 : expand_RAWMEMCHR (internal_fn, gcall *stmt)
    3396              : {
    3397            0 :   expand_operand ops[3];
    3398              : 
    3399            0 :   tree lhs = gimple_call_lhs (stmt);
    3400            0 :   if (!lhs)
    3401            0 :     return;
    3402            0 :   machine_mode lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
    3403            0 :   rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3404            0 :   create_call_lhs_operand (&ops[0], lhs_rtx, lhs_mode);
    3405              : 
    3406            0 :   tree mem = gimple_call_arg (stmt, 0);
    3407            0 :   rtx mem_rtx = get_memory_rtx (mem, NULL);
    3408            0 :   create_fixed_operand (&ops[1], mem_rtx);
    3409              : 
    3410            0 :   tree pattern = gimple_call_arg (stmt, 1);
    3411            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (pattern));
    3412            0 :   rtx pattern_rtx = expand_normal (pattern);
    3413            0 :   create_input_operand (&ops[2], pattern_rtx, mode);
    3414              : 
    3415            0 :   insn_code icode = direct_optab_handler (rawmemchr_optab, mode);
    3416              : 
    3417            0 :   expand_insn (icode, 3, ops);
    3418            0 :   assign_call_lhs (lhs, lhs_rtx, &ops[0]);
    3419              : }
    3420              : 
    3421              : /* Expand the IFN_UNIQUE function according to its first argument.  */
    3422              : 
    3423              : static void
    3424            0 : expand_UNIQUE (internal_fn, gcall *stmt)
    3425              : {
    3426            0 :   rtx pattern = NULL_RTX;
    3427            0 :   enum ifn_unique_kind kind
    3428            0 :     = (enum ifn_unique_kind) TREE_INT_CST_LOW (gimple_call_arg (stmt, 0));
    3429              : 
    3430            0 :   switch (kind)
    3431              :     {
    3432            0 :     default:
    3433            0 :       gcc_unreachable ();
    3434              : 
    3435            0 :     case IFN_UNIQUE_UNSPEC:
    3436            0 :       if (targetm.have_unique ())
    3437            0 :         pattern = targetm.gen_unique ();
    3438              :       break;
    3439              : 
    3440            0 :     case IFN_UNIQUE_OACC_FORK:
    3441            0 :     case IFN_UNIQUE_OACC_JOIN:
    3442            0 :       if (targetm.have_oacc_fork () && targetm.have_oacc_join ())
    3443              :         {
    3444            0 :           tree lhs = gimple_call_lhs (stmt);
    3445            0 :           rtx target = const0_rtx;
    3446              : 
    3447            0 :           if (lhs)
    3448            0 :             target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3449              : 
    3450            0 :           rtx data_dep = expand_normal (gimple_call_arg (stmt, 1));
    3451            0 :           rtx axis = expand_normal (gimple_call_arg (stmt, 2));
    3452              : 
    3453            0 :           if (kind == IFN_UNIQUE_OACC_FORK)
    3454            0 :             pattern = targetm.gen_oacc_fork (target, data_dep, axis);
    3455              :           else
    3456            0 :             pattern = targetm.gen_oacc_join (target, data_dep, axis);
    3457              :         }
    3458              :       else
    3459            0 :         gcc_unreachable ();
    3460              :       break;
    3461              :     }
    3462              : 
    3463            0 :   if (pattern)
    3464            0 :     emit_insn (pattern);
    3465            0 : }
    3466              : 
    3467              : /* Expand the IFN_DEFERRED_INIT function:
    3468              :    LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL);
    3469              : 
    3470              :    Initialize the LHS with zero/pattern according to its second argument
    3471              :    INIT_TYPE:
    3472              :    if INIT_TYPE is AUTO_INIT_ZERO, use zeroes to initialize;
    3473              :    if INIT_TYPE is AUTO_INIT_PATTERN, use 0xFE byte-repeatable pattern
    3474              :      to initialize;
    3475              :    The LHS variable is initialized including paddings.
    3476              :    The reasons to choose 0xFE for pattern initialization are:
    3477              :      1. It is a non-canonical virtual address on x86_64, and at the
    3478              :         high end of the i386 kernel address space.
    3479              :      2. It is a very large float value (-1.694739530317379e+38).
    3480              :      3. It is also an unusual number for integers.  */
    3481              : #define INIT_PATTERN_VALUE  0xFE
    3482              : static void
    3483        31451 : expand_DEFERRED_INIT (internal_fn, gcall *stmt)
    3484              : {
    3485        31451 :   tree lhs = gimple_call_lhs (stmt);
    3486        31451 :   tree var_size = gimple_call_arg (stmt, 0);
    3487        31451 :   enum auto_init_type init_type
    3488        31451 :     = (enum auto_init_type) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
    3489        31451 :   bool reg_lhs = true;
    3490              : 
    3491        31451 :   tree var_type = TREE_TYPE (lhs);
    3492        31451 :   gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
    3493              : 
    3494        31451 :   if (TREE_CODE (lhs) == SSA_NAME)
    3495              :     reg_lhs = true;
    3496              :   else
    3497              :     {
    3498              :       tree lhs_base = lhs;
    3499        20182 :       while (handled_component_p (lhs_base))
    3500            4 :         lhs_base = TREE_OPERAND (lhs_base, 0);
    3501        20178 :       reg_lhs = (mem_ref_refers_to_non_mem_p (lhs_base)
    3502        20178 :                  || non_mem_decl_p (lhs_base));
    3503              :       /* If this expands to a register and the underlying decl is wrapped in
    3504              :          a MEM_REF that just serves as an access type change expose the decl
    3505              :          if it is of correct size.  This avoids a situation as in PR103271
    3506              :          if the target does not support a direct move to the registers mode.  */
    3507         1677 :       if (reg_lhs
    3508         1677 :           && TREE_CODE (lhs_base) == MEM_REF
    3509            4 :           && TREE_CODE (TREE_OPERAND (lhs_base, 0)) == ADDR_EXPR
    3510            4 :           && DECL_P (TREE_OPERAND (TREE_OPERAND (lhs_base, 0), 0))
    3511            4 :           && integer_zerop (TREE_OPERAND (lhs_base, 1))
    3512            4 :           && tree_fits_uhwi_p (var_size)
    3513            4 :           && tree_int_cst_equal
    3514            4 :                (var_size,
    3515            4 :                 DECL_SIZE_UNIT (TREE_OPERAND (TREE_OPERAND (lhs_base, 0), 0))))
    3516              :         {
    3517            4 :           lhs = TREE_OPERAND (TREE_OPERAND (lhs_base, 0), 0);
    3518            4 :           var_type = TREE_TYPE (lhs);
    3519              :         }
    3520              :     }
    3521              : 
    3522        20178 :   if (!reg_lhs)
    3523              :     {
    3524              :       /* If the variable is not in register, expand to a memset
    3525              :          to initialize it.  */
    3526        18501 :       mark_addressable (lhs);
    3527        18501 :       tree var_addr = build_fold_addr_expr (lhs);
    3528              : 
    3529        18501 :       tree value = (init_type == AUTO_INIT_PATTERN)
    3530        18501 :                     ? build_int_cst (integer_type_node,
    3531              :                                      INIT_PATTERN_VALUE)
    3532        18425 :                     : integer_zero_node;
    3533        37002 :       tree m_call = build_call_expr (builtin_decl_implicit (BUILT_IN_MEMSET),
    3534              :                                      3, var_addr, value, var_size);
    3535              :       /* Expand this memset call.  */
    3536        18501 :       expand_builtin_memset (m_call, NULL_RTX, TYPE_MODE (var_type));
    3537              :     }
    3538              :   else
    3539              :     {
    3540              :       /* If this variable is in a register use expand_assignment.
    3541              :          For boolean scalars force zero-init.  */
    3542        12950 :       tree init;
    3543        12950 :       scalar_int_mode var_mode;
    3544        12950 :       if (TREE_CODE (TREE_TYPE (lhs)) != BOOLEAN_TYPE
    3545        12699 :           && tree_fits_uhwi_p (var_size)
    3546        12699 :           && (init_type == AUTO_INIT_PATTERN
    3547        12615 :               || !is_gimple_reg_type (var_type))
    3548         1761 :           && int_mode_for_size (tree_to_uhwi (var_size) * BITS_PER_UNIT,
    3549        12955 :                                 0).exists (&var_mode)
    3550        14711 :           && have_insn_for (SET, var_mode))
    3551              :         {
    3552         1756 :           unsigned HOST_WIDE_INT total_bytes = tree_to_uhwi (var_size);
    3553         1756 :           unsigned char *buf = XALLOCAVEC (unsigned char, total_bytes);
    3554         1756 :           memset (buf, (init_type == AUTO_INIT_PATTERN
    3555              :                         ? INIT_PATTERN_VALUE : 0), total_bytes);
    3556         1756 :           tree itype = build_nonstandard_integer_type
    3557         1756 :                          (total_bytes * BITS_PER_UNIT, 1);
    3558         1756 :           wide_int w = wi::from_buffer (buf, total_bytes);
    3559         1756 :           init = wide_int_to_tree (itype, w);
    3560              :           /* Pun the LHS to make sure its type has constant size
    3561              :              unless it is an SSA name where that's already known.  */
    3562         1756 :           if (TREE_CODE (lhs) != SSA_NAME)
    3563         1677 :             lhs = build1 (VIEW_CONVERT_EXPR, itype, lhs);
    3564              :           else
    3565           79 :             init = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), init);
    3566         1756 :         }
    3567              :       else
    3568              :         /* Use zero-init also for variable-length sizes.  */
    3569        11194 :         init = build_zero_cst (var_type);
    3570              : 
    3571        12950 :       expand_assignment (lhs, init, false);
    3572              :     }
    3573        31451 : }
    3574              : 
    3575              : /* Expand the IFN_ACCESS_WITH_SIZE function:
    3576              :    ACCESS_WITH_SIZE (REF_TO_OBJ, REF_TO_SIZE,
    3577              :                      TYPE_OF_SIZE + ACCESS_MODE, TYPE_SIZE_UNIT for element)
    3578              :    which returns the REF_TO_OBJ same as the 1st argument;
    3579              : 
    3580              :    1st argument REF_TO_OBJ: The reference to the object;
    3581              :    2nd argument REF_TO_SIZE: The reference to the size of the object,
    3582              :    3rd argument TYPE_OF_SIZE + ACCESS_MODE: An integer constant with a pointer
    3583              :      TYPE.
    3584              :      The pointee TYPE of the pointer TYPE is the TYPE of the object referenced
    3585              :         by REF_TO_SIZE.
    3586              :      The integer constant value represents the ACCESS_MODE:
    3587              :         0: none
    3588              :         1: read_only
    3589              :         2: write_only
    3590              :         3: read_write
    3591              : 
    3592              :    4th argument: The TYPE_SIZE_UNIT of the element TYPE of the array.
    3593              : 
    3594              :    Both the return type and the type of the first argument of this
    3595              :    function have been converted from the incomplete array type to
    3596              :    the corresponding pointer type.
    3597              : 
    3598              :    For each call to a .ACCESS_WITH_SIZE, replace it with its 1st argument.  */
    3599              : 
    3600              : static void
    3601          518 : expand_ACCESS_WITH_SIZE (internal_fn, gcall *stmt)
    3602              : {
    3603          518 :   tree lhs = gimple_call_lhs (stmt);
    3604          518 :   tree ref_to_obj = gimple_call_arg (stmt, 0);
    3605          518 :   if (lhs)
    3606          518 :     expand_assignment (lhs, ref_to_obj, false);
    3607          518 : }
    3608              : 
    3609              : /* The size of an OpenACC compute dimension.  */
    3610              : 
    3611              : static void
    3612         5695 : expand_GOACC_DIM_SIZE (internal_fn, gcall *stmt)
    3613              : {
    3614         5695 :   tree lhs = gimple_call_lhs (stmt);
    3615              : 
    3616         5695 :   if (!lhs)
    3617              :     return;
    3618              : 
    3619         5695 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3620         5695 :   if (targetm.have_oacc_dim_size ())
    3621              :     {
    3622            0 :       rtx dim = expand_expr (gimple_call_arg (stmt, 0), NULL_RTX,
    3623              :                              VOIDmode, EXPAND_NORMAL);
    3624            0 :       emit_insn (targetm.gen_oacc_dim_size (target, dim));
    3625              :     }
    3626              :   else
    3627         5695 :     emit_move_insn (target, GEN_INT (1));
    3628              : }
    3629              : 
    3630              : /* The position of an OpenACC execution engine along one compute axis.  */
    3631              : 
    3632              : static void
    3633         4567 : expand_GOACC_DIM_POS (internal_fn, gcall *stmt)
    3634              : {
    3635         4567 :   tree lhs = gimple_call_lhs (stmt);
    3636              : 
    3637         4567 :   if (!lhs)
    3638              :     return;
    3639              : 
    3640         4567 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3641         4567 :   if (targetm.have_oacc_dim_pos ())
    3642              :     {
    3643            0 :       rtx dim = expand_expr (gimple_call_arg (stmt, 0), NULL_RTX,
    3644              :                              VOIDmode, EXPAND_NORMAL);
    3645            0 :       emit_insn (targetm.gen_oacc_dim_pos (target, dim));
    3646              :     }
    3647              :   else
    3648         4567 :     emit_move_insn (target, const0_rtx);
    3649              : }
    3650              : 
    3651              : /* This is expanded by oacc_device_lower pass.  */
    3652              : 
    3653              : static void
    3654            0 : expand_GOACC_LOOP (internal_fn, gcall *)
    3655              : {
    3656            0 :   gcc_unreachable ();
    3657              : }
    3658              : 
    3659              : /* This is expanded by oacc_device_lower pass.  */
    3660              : 
    3661              : static void
    3662            0 : expand_GOACC_REDUCTION (internal_fn, gcall *)
    3663              : {
    3664            0 :   gcc_unreachable ();
    3665              : }
    3666              : 
    3667              : /* This is expanded by oacc_device_lower pass.  */
    3668              : 
    3669              : static void
    3670            0 : expand_GOACC_TILE (internal_fn, gcall *)
    3671              : {
    3672            0 :   gcc_unreachable ();
    3673              : }
    3674              : 
    3675              : /* Set errno to EDOM.  */
    3676              : 
    3677              : static void
    3678            0 : expand_SET_EDOM (internal_fn, gcall *)
    3679              : {
    3680              : #ifdef TARGET_EDOM
    3681              : #ifdef GEN_ERRNO_RTX
    3682              :   rtx errno_rtx = GEN_ERRNO_RTX;
    3683              : #else
    3684              :   rtx errno_rtx = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
    3685              : #endif
    3686              :   emit_move_insn (errno_rtx,
    3687              :                   gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
    3688              : #else
    3689            0 :   gcc_unreachable ();
    3690              : #endif
    3691              : }
    3692              : 
    3693              : /* Expand atomic bit test and set.  */
    3694              : 
    3695              : static void
    3696          146 : expand_ATOMIC_BIT_TEST_AND_SET (internal_fn, gcall *call)
    3697              : {
    3698          146 :   expand_ifn_atomic_bit_test_and (call);
    3699          146 : }
    3700              : 
    3701              : /* Expand atomic bit test and complement.  */
    3702              : 
    3703              : static void
    3704          146 : expand_ATOMIC_BIT_TEST_AND_COMPLEMENT (internal_fn, gcall *call)
    3705              : {
    3706          146 :   expand_ifn_atomic_bit_test_and (call);
    3707          146 : }
    3708              : 
    3709              : /* Expand atomic bit test and reset.  */
    3710              : 
    3711              : static void
    3712          135 : expand_ATOMIC_BIT_TEST_AND_RESET (internal_fn, gcall *call)
    3713              : {
    3714          135 :   expand_ifn_atomic_bit_test_and (call);
    3715          135 : }
    3716              : 
    3717              : /* Expand atomic bit test and set.  */
    3718              : 
    3719              : static void
    3720        13871 : expand_ATOMIC_COMPARE_EXCHANGE (internal_fn, gcall *call)
    3721              : {
    3722        13871 :   expand_ifn_atomic_compare_exchange (call);
    3723        13871 : }
    3724              : 
    3725              : /* Expand atomic add fetch and cmp with 0.  */
    3726              : 
    3727              : static void
    3728          886 : expand_ATOMIC_ADD_FETCH_CMP_0 (internal_fn, gcall *call)
    3729              : {
    3730          886 :   expand_ifn_atomic_op_fetch_cmp_0 (call);
    3731          886 : }
    3732              : 
    3733              : /* Expand atomic sub fetch and cmp with 0.  */
    3734              : 
    3735              : static void
    3736          738 : expand_ATOMIC_SUB_FETCH_CMP_0 (internal_fn, gcall *call)
    3737              : {
    3738          738 :   expand_ifn_atomic_op_fetch_cmp_0 (call);
    3739          738 : }
    3740              : 
    3741              : /* Expand atomic and fetch and cmp with 0.  */
    3742              : 
    3743              : static void
    3744          176 : expand_ATOMIC_AND_FETCH_CMP_0 (internal_fn, gcall *call)
    3745              : {
    3746          176 :   expand_ifn_atomic_op_fetch_cmp_0 (call);
    3747          176 : }
    3748              : 
    3749              : /* Expand atomic or fetch and cmp with 0.  */
    3750              : 
    3751              : static void
    3752          112 : expand_ATOMIC_OR_FETCH_CMP_0 (internal_fn, gcall *call)
    3753              : {
    3754          112 :   expand_ifn_atomic_op_fetch_cmp_0 (call);
    3755          112 : }
    3756              : 
    3757              : /* Expand atomic xor fetch and cmp with 0.  */
    3758              : 
    3759              : static void
    3760          176 : expand_ATOMIC_XOR_FETCH_CMP_0 (internal_fn, gcall *call)
    3761              : {
    3762          176 :   expand_ifn_atomic_op_fetch_cmp_0 (call);
    3763          176 : }
    3764              : 
    3765              : /* Expand LAUNDER to assignment, lhs = arg0.  */
    3766              : 
    3767              : static void
    3768           29 : expand_LAUNDER (internal_fn, gcall *call)
    3769              : {
    3770           29 :   tree lhs = gimple_call_lhs (call);
    3771              : 
    3772           29 :   if (!lhs)
    3773              :     return;
    3774              : 
    3775           28 :   expand_assignment (lhs, gimple_call_arg (call, 0), false);
    3776              : }
    3777              : 
    3778              : /* Expand {MASK_,}SCATTER_STORE{S,U} call CALL using optab OPTAB.  */
    3779              : 
    3780              : static void
    3781            0 : expand_scatter_store_optab_fn (internal_fn, gcall *stmt, direct_optab optab)
    3782              : {
    3783            0 :   internal_fn ifn = gimple_call_internal_fn (stmt);
    3784            0 :   int rhs_index = internal_fn_stored_value_index (ifn);
    3785            0 :   tree base = gimple_call_arg (stmt, 0);
    3786            0 :   tree offset = gimple_call_arg (stmt, internal_fn_offset_index (ifn));
    3787            0 :   tree scale = gimple_call_arg (stmt, internal_fn_scale_index (ifn));
    3788            0 :   tree rhs = gimple_call_arg (stmt, rhs_index);
    3789              : 
    3790            0 :   rtx base_rtx = expand_normal (base);
    3791            0 :   rtx offset_rtx = expand_normal (offset);
    3792            0 :   HOST_WIDE_INT scale_int = tree_to_shwi (scale);
    3793            0 :   rtx rhs_rtx = expand_normal (rhs);
    3794              : 
    3795            0 :   class expand_operand ops[8];
    3796            0 :   int i = 0;
    3797            0 :   create_address_operand (&ops[i++], base_rtx);
    3798            0 :   create_input_operand (&ops[i++], offset_rtx, TYPE_MODE (TREE_TYPE (offset)));
    3799            0 :   create_integer_operand (&ops[i++], TYPE_UNSIGNED (TREE_TYPE (offset)));
    3800            0 :   create_integer_operand (&ops[i++], scale_int);
    3801            0 :   create_input_operand (&ops[i++], rhs_rtx, TYPE_MODE (TREE_TYPE (rhs)));
    3802            0 :   i = add_mask_else_and_len_args (ops, i, stmt);
    3803              : 
    3804            0 :   insn_code icode = convert_optab_handler (optab, TYPE_MODE (TREE_TYPE (rhs)),
    3805            0 :                                            TYPE_MODE (TREE_TYPE (offset)));
    3806            0 :   expand_insn (icode, i, ops);
    3807            0 : }
    3808              : 
    3809              : /* Expand {MASK_,}GATHER_LOAD call CALL using optab OPTAB.  */
    3810              : 
    3811              : static void
    3812            0 : expand_gather_load_optab_fn (internal_fn ifn, gcall *stmt, direct_optab optab)
    3813              : {
    3814            0 :   tree lhs = gimple_call_lhs (stmt);
    3815            0 :   tree base = gimple_call_arg (stmt, 0);
    3816            0 :   tree offset = gimple_call_arg (stmt, internal_fn_offset_index (ifn));
    3817            0 :   tree scale = gimple_call_arg (stmt, internal_fn_scale_index (ifn));
    3818              : 
    3819            0 :   rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3820            0 :   rtx base_rtx = expand_normal (base);
    3821            0 :   rtx offset_rtx = expand_normal (offset);
    3822            0 :   HOST_WIDE_INT scale_int = tree_to_shwi (scale);
    3823              : 
    3824            0 :   int i = 0;
    3825            0 :   class expand_operand ops[9];
    3826            0 :   create_call_lhs_operand (&ops[i++], lhs_rtx, TYPE_MODE (TREE_TYPE (lhs)));
    3827            0 :   create_address_operand (&ops[i++], base_rtx);
    3828            0 :   create_input_operand (&ops[i++], offset_rtx, TYPE_MODE (TREE_TYPE (offset)));
    3829            0 :   create_integer_operand (&ops[i++], TYPE_UNSIGNED (TREE_TYPE (offset)));
    3830            0 :   create_integer_operand (&ops[i++], scale_int);
    3831            0 :   i = add_mask_else_and_len_args (ops, i, stmt);
    3832            0 :   insn_code icode = convert_optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs)),
    3833            0 :                                            TYPE_MODE (TREE_TYPE (offset)));
    3834            0 :   expand_insn (icode, i, ops);
    3835            0 :   assign_call_lhs (lhs, lhs_rtx, &ops[0]);
    3836            0 : }
    3837              : 
    3838              : /* Expand MASK_LEN_STRIDED_LOAD call CALL by optab OPTAB.  */
    3839              : 
    3840              : static void
    3841            0 : expand_strided_load_optab_fn (ATTRIBUTE_UNUSED internal_fn, gcall *stmt,
    3842              :                               direct_optab optab)
    3843              : {
    3844            0 :   tree lhs = gimple_call_lhs (stmt);
    3845            0 :   tree base = gimple_call_arg (stmt, 0);
    3846            0 :   tree stride = gimple_call_arg (stmt, 1);
    3847              : 
    3848            0 :   rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3849            0 :   rtx base_rtx = expand_normal (base);
    3850            0 :   rtx stride_rtx = expand_normal (stride);
    3851              : 
    3852            0 :   unsigned i = 0;
    3853            0 :   class expand_operand ops[7];
    3854            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
    3855              : 
    3856            0 :   create_output_operand (&ops[i++], lhs_rtx, mode);
    3857            0 :   create_address_operand (&ops[i++], base_rtx);
    3858            0 :   create_address_operand (&ops[i++], stride_rtx);
    3859              : 
    3860            0 :   i = add_mask_else_and_len_args (ops, i, stmt);
    3861            0 :   expand_insn (direct_optab_handler (optab, mode), i, ops);
    3862              : 
    3863            0 :   if (!rtx_equal_p (lhs_rtx, ops[0].value))
    3864            0 :     emit_move_insn (lhs_rtx, ops[0].value);
    3865            0 : }
    3866              : 
    3867              : /* Expand MASK_LEN_STRIDED_STORE call CALL by optab OPTAB.  */
    3868              : 
    3869              : static void
    3870            0 : expand_strided_store_optab_fn (ATTRIBUTE_UNUSED internal_fn, gcall *stmt,
    3871              :                                direct_optab optab)
    3872              : {
    3873            0 :   internal_fn fn = gimple_call_internal_fn (stmt);
    3874            0 :   int rhs_index = internal_fn_stored_value_index (fn);
    3875              : 
    3876            0 :   tree base = gimple_call_arg (stmt, 0);
    3877            0 :   tree stride = gimple_call_arg (stmt, 1);
    3878            0 :   tree rhs = gimple_call_arg (stmt, rhs_index);
    3879              : 
    3880            0 :   rtx base_rtx = expand_normal (base);
    3881            0 :   rtx stride_rtx = expand_normal (stride);
    3882            0 :   rtx rhs_rtx = expand_normal (rhs);
    3883              : 
    3884            0 :   unsigned i = 0;
    3885            0 :   class expand_operand ops[6];
    3886            0 :   machine_mode mode = TYPE_MODE (TREE_TYPE (rhs));
    3887              : 
    3888            0 :   create_address_operand (&ops[i++], base_rtx);
    3889            0 :   create_address_operand (&ops[i++], stride_rtx);
    3890            0 :   create_input_operand (&ops[i++], rhs_rtx, mode);
    3891              : 
    3892            0 :   i = add_mask_else_and_len_args (ops, i, stmt);
    3893            0 :   expand_insn (direct_optab_handler (optab, mode), i, ops);
    3894            0 : }
    3895              : 
    3896              : /* Helper for expand_DIVMOD.  Return true if the sequence starting with
    3897              :    INSN contains any call insns or insns with {,U}{DIV,MOD} rtxes.  */
    3898              : 
    3899              : static bool
    3900          844 : contains_call_div_mod (rtx_insn *insn)
    3901              : {
    3902          844 :   subrtx_iterator::array_type array;
    3903         4849 :   for (; insn; insn = NEXT_INSN (insn))
    3904         4849 :     if (CALL_P (insn))
    3905              :       return true;
    3906         4005 :     else if (INSN_P (insn))
    3907        17912 :       FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    3908        13907 :         switch (GET_CODE (*iter))
    3909              :           {
    3910            0 :           case CALL:
    3911            0 :           case DIV:
    3912            0 :           case UDIV:
    3913            0 :           case MOD:
    3914            0 :           case UMOD:
    3915            0 :             return true;
    3916        13907 :           default:
    3917        13907 :             break;
    3918              :           }
    3919              :   return false;
    3920          844 :  }
    3921              : 
    3922              : /* Expand DIVMOD() using:
    3923              :  a) optab handler for udivmod/sdivmod if it is available.
    3924              :  b) If optab_handler doesn't exist, generate call to
    3925              :     target-specific divmod libfunc.  */
    3926              : 
    3927              : static void
    3928        11543 : expand_DIVMOD (internal_fn, gcall *call_stmt)
    3929              : {
    3930        11543 :   tree lhs = gimple_call_lhs (call_stmt);
    3931        11543 :   tree arg0 = gimple_call_arg (call_stmt, 0);
    3932        11543 :   tree arg1 = gimple_call_arg (call_stmt, 1);
    3933              : 
    3934        11543 :   gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
    3935        11543 :   tree type = TREE_TYPE (TREE_TYPE (lhs));
    3936        11543 :   machine_mode mode = TYPE_MODE (type);
    3937        11543 :   bool unsignedp = TYPE_UNSIGNED (type);
    3938        11543 :   optab tab = (unsignedp) ? udivmod_optab : sdivmod_optab;
    3939              : 
    3940        11543 :   rtx op0 = expand_normal (arg0);
    3941        11543 :   rtx op1 = expand_normal (arg1);
    3942        11543 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    3943              : 
    3944        11543 :   rtx quotient = NULL_RTX, remainder = NULL_RTX;
    3945        11543 :   rtx_insn *insns = NULL;
    3946              : 
    3947        11543 :   if (TREE_CODE (arg1) == INTEGER_CST)
    3948              :     {
    3949              :       /* For DIVMOD by integral constants, there could be efficient code
    3950              :          expanded inline e.g. using shifts and plus/minus.  Try to expand
    3951              :          the division and modulo and if it emits any library calls or any
    3952              :          {,U}{DIV,MOD} rtxes throw it away and use a divmod optab or
    3953              :          divmod libcall.  */
    3954         1211 :       scalar_int_mode int_mode;
    3955         1211 :       if (remainder == NULL_RTX
    3956         1211 :           && optimize
    3957         1211 :           && CONST_INT_P (op1)
    3958         1205 :           && !pow2p_hwi (INTVAL (op1))
    3959         2410 :           && is_int_mode (TYPE_MODE (type), &int_mode)
    3960         1406 :           && GET_MODE_SIZE (int_mode) == 2 * UNITS_PER_WORD
    3961         1205 :           && optab_handler (and_optab, word_mode) != CODE_FOR_nothing
    3962         1205 :           && optab_handler (add_optab, word_mode) != CODE_FOR_nothing
    3963         1205 :           && optimize_insn_for_speed_p ())
    3964              :         {
    3965         1202 :           rtx_insn *last = get_last_insn ();
    3966         1202 :           remainder = NULL_RTX;
    3967         3606 :           quotient = expand_doubleword_divmod (int_mode, op0, op1, &remainder,
    3968         1202 :                                                TYPE_UNSIGNED (type));
    3969         1202 :           if (quotient != NULL_RTX)
    3970              :             {
    3971          367 :               if (optab_handler (mov_optab, int_mode) != CODE_FOR_nothing)
    3972              :                 {
    3973          367 :                   rtx_insn *move = emit_move_insn (quotient, quotient);
    3974          734 :                   set_dst_reg_note (move, REG_EQUAL,
    3975          367 :                                     gen_rtx_fmt_ee (TYPE_UNSIGNED (type)
    3976              :                                                     ? UDIV : DIV, int_mode,
    3977              :                                                     copy_rtx (op0), op1),
    3978              :                                     quotient);
    3979          367 :                   move = emit_move_insn (remainder, remainder);
    3980          734 :                   set_dst_reg_note (move, REG_EQUAL,
    3981          367 :                                     gen_rtx_fmt_ee (TYPE_UNSIGNED (type)
    3982              :                                                     ? UMOD : MOD, int_mode,
    3983              :                                                     copy_rtx (op0), op1),
    3984              :                                     quotient);
    3985              :                 }
    3986              :             }
    3987              :           else
    3988          835 :             delete_insns_since (last);
    3989              :         }
    3990              : 
    3991         1211 :       if (remainder == NULL_RTX)
    3992              :         {
    3993          844 :           struct separate_ops ops;
    3994          844 :           ops.code = TRUNC_DIV_EXPR;
    3995          844 :           ops.type = type;
    3996          844 :           ops.op0 = make_tree (ops.type, op0);
    3997          844 :           ops.op1 = arg1;
    3998          844 :           ops.op2 = NULL_TREE;
    3999          844 :           ops.location = gimple_location (call_stmt);
    4000          844 :           start_sequence ();
    4001          844 :           quotient = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
    4002          844 :           if (contains_call_div_mod (get_insns ()))
    4003          844 :             quotient = NULL_RTX;
    4004              :           else
    4005              :             {
    4006            0 :               ops.code = TRUNC_MOD_EXPR;
    4007            0 :               remainder = expand_expr_real_2 (&ops, NULL_RTX, mode,
    4008              :                                               EXPAND_NORMAL);
    4009            0 :               if (contains_call_div_mod (get_insns ()))
    4010            0 :                 remainder = NULL_RTX;
    4011              :             }
    4012          844 :           if (remainder)
    4013            0 :             insns = get_insns ();
    4014          844 :           end_sequence ();
    4015              :         }
    4016              :     }
    4017              : 
    4018        11543 :   if (remainder)
    4019          367 :     emit_insn (insns);
    4020              : 
    4021              :   /* Check if optab_handler exists for divmod_optab for given mode.  */
    4022        11176 :   else if (optab_handler (tab, mode) != CODE_FOR_nothing)
    4023              :     {
    4024        10280 :       quotient = gen_reg_rtx (mode);
    4025        10280 :       remainder = gen_reg_rtx (mode);
    4026        10280 :       expand_twoval_binop (tab, op0, op1, quotient, remainder, unsignedp);
    4027              :     }
    4028              : 
    4029              :   /* Generate call to divmod libfunc if it exists.  */
    4030          896 :   else if (rtx libfunc = optab_libfunc (tab, mode))
    4031          896 :     targetm.expand_divmod_libfunc (libfunc, mode, op0, op1,
    4032              :                                    &quotient, &remainder);
    4033              : 
    4034              :   else
    4035            0 :     gcc_unreachable ();
    4036              : 
    4037              :   /* Wrap the return value (quotient, remainder) within COMPLEX_EXPR.  */
    4038        23086 :   expand_expr (build2 (COMPLEX_EXPR, TREE_TYPE (lhs),
    4039        11543 :                        make_tree (TREE_TYPE (arg0), quotient),
    4040        11543 :                        make_tree (TREE_TYPE (arg1), remainder)),
    4041              :                target, VOIDmode, EXPAND_NORMAL);
    4042        11543 : }
    4043              : 
    4044              : /* Expand a NOP.  */
    4045              : 
    4046              : static void
    4047            1 : expand_NOP (internal_fn, gcall *)
    4048              : {
    4049              :   /* Nothing.  But it shouldn't really prevail.  */
    4050            1 : }
    4051              : 
    4052              : /* Coroutines, all should have been processed at this stage.  */
    4053              : 
    4054              : static void
    4055            0 : expand_CO_FRAME (internal_fn, gcall *)
    4056              : {
    4057            0 :   gcc_unreachable ();
    4058              : }
    4059              : 
    4060              : static void
    4061            0 : expand_CO_YIELD (internal_fn, gcall *)
    4062              : {
    4063            0 :   gcc_unreachable ();
    4064              : }
    4065              : 
    4066              : static void
    4067            0 : expand_CO_SUSPN (internal_fn, gcall *)
    4068              : {
    4069            0 :   gcc_unreachable ();
    4070              : }
    4071              : 
    4072              : static void
    4073            0 : expand_CO_ACTOR (internal_fn, gcall *)
    4074              : {
    4075            0 :   gcc_unreachable ();
    4076              : }
    4077              : 
    4078              : /* Expand a call to FN using the operands in STMT.  FN has a single
    4079              :    output operand and NARGS input operands.  */
    4080              : 
    4081              : static void
    4082        78124 : expand_direct_optab_fn (internal_fn fn, gcall *stmt, direct_optab optab,
    4083              :                         unsigned int nargs)
    4084              : {
    4085        78124 :   tree_pair types = direct_internal_fn_types (fn, stmt);
    4086        78124 :   insn_code icode = direct_optab_handler (optab, TYPE_MODE (types.first));
    4087        78124 :   expand_fn_using_insn (stmt, icode, 1, nargs);
    4088        78124 : }
    4089              : 
    4090              : /* Expand WHILE_ULT call STMT using optab OPTAB.  */
    4091              : 
    4092              : static void
    4093            0 : expand_while_optab_fn (internal_fn, gcall *stmt, convert_optab optab)
    4094              : {
    4095            0 :   expand_operand ops[4];
    4096            0 :   tree rhs_type[2];
    4097              : 
    4098            0 :   tree lhs = gimple_call_lhs (stmt);
    4099            0 :   tree lhs_type = TREE_TYPE (lhs);
    4100            0 :   rtx lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    4101            0 :   create_call_lhs_operand (&ops[0], lhs_rtx, TYPE_MODE (lhs_type));
    4102              : 
    4103            0 :   for (unsigned int i = 0; i < 2; ++i)
    4104              :     {
    4105            0 :       tree rhs = gimple_call_arg (stmt, i);
    4106            0 :       rhs_type[i] = TREE_TYPE (rhs);
    4107            0 :       rtx rhs_rtx = expand_normal (rhs);
    4108            0 :       create_input_operand (&ops[i + 1], rhs_rtx, TYPE_MODE (rhs_type[i]));
    4109              :     }
    4110              : 
    4111            0 :   int opcnt;
    4112            0 :   if (!VECTOR_MODE_P (TYPE_MODE (lhs_type)))
    4113              :     {
    4114              :       /* When the mask is an integer mode the exact vector length may not
    4115              :          be clear to the backend, so we pass it in operand[3].
    4116              :          Use the vector in arg2 for the most reliable intended size.  */
    4117            0 :       tree type = TREE_TYPE (gimple_call_arg (stmt, 2));
    4118            0 :       create_integer_operand (&ops[3], TYPE_VECTOR_SUBPARTS (type));
    4119            0 :       opcnt = 4;
    4120              :     }
    4121              :   else
    4122              :     /* The mask has a vector type so the length operand is unnecessary.  */
    4123              :     opcnt = 3;
    4124              : 
    4125            0 :   insn_code icode = convert_optab_handler (optab, TYPE_MODE (rhs_type[0]),
    4126            0 :                                            TYPE_MODE (lhs_type));
    4127              : 
    4128            0 :   expand_insn (icode, opcnt, ops);
    4129            0 :   assign_call_lhs (lhs, lhs_rtx, &ops[0]);
    4130            0 : }
    4131              : 
    4132              : /* Expand a call to a convert-like optab using the operands in STMT.
    4133              :    FN has a single output operand and NARGS input operands.  */
    4134              : 
    4135              : static void
    4136          444 : expand_convert_optab_fn (internal_fn fn, gcall *stmt, convert_optab optab,
    4137              :                          unsigned int nargs)
    4138              : {
    4139          444 :   tree_pair types = direct_internal_fn_types (fn, stmt);
    4140          444 :   insn_code icode = convert_optab_handler (optab, TYPE_MODE (types.first),
    4141          444 :                                           TYPE_MODE (types.second));
    4142          444 :   expand_fn_using_insn (stmt, icode, 1, nargs);
    4143          444 : }
    4144              : 
    4145              : /* Expand CRC call STMT.  */
    4146              : 
    4147              : static void
    4148          240 : expand_crc_optab_fn (internal_fn fn, gcall *stmt, convert_optab optab)
    4149              : {
    4150          240 :   tree lhs = gimple_call_lhs (stmt);
    4151          240 :   tree rhs1 = gimple_call_arg (stmt, 0); // crc
    4152          240 :   tree rhs2 = gimple_call_arg (stmt, 1); // data
    4153          240 :   tree rhs3 = gimple_call_arg (stmt, 2); // polynomial
    4154              : 
    4155          240 :   tree result_type = TREE_TYPE (lhs);
    4156          240 :   tree data_type = TREE_TYPE (rhs2);
    4157              : 
    4158          240 :   gcc_assert (TYPE_MODE (result_type) >= TYPE_MODE (data_type));
    4159              : 
    4160          240 :   rtx dest = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    4161          240 :   rtx crc = expand_normal (rhs1);
    4162          240 :   rtx data = expand_normal (rhs2);
    4163          240 :   rtx polynomial;
    4164          240 :   if (TREE_CODE (rhs3) != INTEGER_CST)
    4165              :     {
    4166            0 :       error ("third argument to %<crc%> builtins must be a constant");
    4167            0 :       polynomial = const0_rtx;
    4168              :     }
    4169              :   else
    4170          240 :     polynomial = convert_to_mode (TYPE_MODE (result_type), expand_normal (rhs3), 0);
    4171              : 
    4172              :   /* Use target specific expansion if it exists.
    4173              :      Otherwise, generate table-based CRC.  */
    4174          240 :   if (direct_internal_fn_supported_p (fn, tree_pair (data_type, result_type),
    4175              :                                       OPTIMIZE_FOR_SPEED))
    4176              :     {
    4177            5 :       class expand_operand ops[4];
    4178              : 
    4179            5 :       if (dump_file && (dump_flags & TDF_DETAILS))
    4180              :         {
    4181            0 :           fprintf (dump_file,
    4182              :                    ";; using optab for crc_%u_polynomial_"
    4183              :                    HOST_WIDE_INT_PRINT_HEX "\n",
    4184            0 :                    GET_MODE_BITSIZE (GET_MODE (dest)).to_constant (),
    4185            0 :                    TREE_INT_CST_LOW (rhs3));
    4186              :         }
    4187              : 
    4188            5 :       create_call_lhs_operand (&ops[0], dest, TYPE_MODE (result_type));
    4189            5 :       create_input_operand (&ops[1], crc, TYPE_MODE (result_type));
    4190            5 :       create_input_operand (&ops[2], data, TYPE_MODE (data_type));
    4191            5 :       create_input_operand (&ops[3], polynomial, TYPE_MODE (result_type));
    4192            5 :       insn_code icode = convert_optab_handler (optab, TYPE_MODE (data_type),
    4193            5 :                                                TYPE_MODE (result_type));
    4194            5 :       expand_insn (icode, 4, ops);
    4195            5 :       assign_call_lhs (lhs, dest, &ops[0]);
    4196              :     }
    4197              :   else
    4198              :     {
    4199              :       /* We're bypassing all the operand conversions that are done in the
    4200              :          case when we get an icode, operands and pass that off to expand_insn.
    4201              : 
    4202              :          That path has special case handling for promoted return values which
    4203              :          we must emulate here (is the same kind of special treatment ever
    4204              :          needed for input arguments here?).
    4205              : 
    4206              :          In particular we do not want to store directly into a promoted
    4207              :          SUBREG destination, instead store into a suitably sized pseudo.  */
    4208          235 :       rtx orig_dest = dest;
    4209          235 :       if (SUBREG_P (dest) && SUBREG_PROMOTED_VAR_P (dest))
    4210            0 :         dest = gen_reg_rtx (GET_MODE (dest));
    4211              : 
    4212              :       /* If it's IFN_CRC generate bit-forward CRC.  */
    4213          235 :       if (fn == IFN_CRC)
    4214          116 :         expand_crc_table_based (dest, crc, data, polynomial,
    4215          116 :                                 TYPE_MODE (data_type));
    4216              :       else
    4217              :         /* If it's IFN_CRC_REV generate bit-reversed CRC.  */
    4218          119 :         expand_reversed_crc_table_based (dest, crc, data, polynomial,
    4219          119 :                                          TYPE_MODE (data_type),
    4220              :                                          generate_reflecting_code_standard);
    4221              : 
    4222              :       /* Now get the return value where it needs to be, taking care to
    4223              :          ensure it's promoted appropriately if the ABI demands it.
    4224              : 
    4225              :          Re-use assign_call_lhs to handle the details.  */
    4226          235 :       class expand_operand ops[4];
    4227          235 :       create_call_lhs_operand (&ops[0], dest, TYPE_MODE (result_type));
    4228          235 :       ops[0].value = dest;
    4229          235 :       assign_call_lhs (lhs, orig_dest, &ops[0]);
    4230              :     }
    4231          240 : }
    4232              : 
    4233              : /* Expand .REDUC_SBOOL_{AND,IOR,XOR}.  */
    4234              : 
    4235              : static void
    4236           33 : expand_reduc_sbool_optab_fn (internal_fn fn, gcall *stmt, direct_optab optab)
    4237              : {
    4238           33 :   tree_pair types = direct_internal_fn_types (fn, stmt);
    4239           33 :   insn_code icode = direct_optab_handler (optab, TYPE_MODE (types.first));
    4240              : 
    4241              :   /* Below copied from expand_fn_using_insn.  */
    4242              : 
    4243           33 :   gcc_assert (icode != CODE_FOR_nothing);
    4244              : 
    4245           33 :   expand_operand *ops = XALLOCAVEC (expand_operand, 3);
    4246           33 :   rtx lhs_rtx = NULL_RTX;
    4247           33 :   tree lhs = gimple_call_lhs (stmt);
    4248           33 :   if (lhs)
    4249           33 :     lhs_rtx = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    4250           33 :   create_call_lhs_operand (&ops[0], lhs_rtx,
    4251           33 :                            insn_data[icode].operand[0].mode);
    4252              : 
    4253           33 :   tree rhs = gimple_call_arg (stmt, 0);
    4254           33 :   tree rhs_type = TREE_TYPE (rhs);
    4255           33 :   rtx rhs_rtx = expand_normal (rhs);
    4256           33 :   gcc_assert (VECTOR_BOOLEAN_TYPE_P (rhs_type));
    4257           33 :   create_input_operand (&ops[1], rhs_rtx, TYPE_MODE (rhs_type));
    4258           33 :   if (SCALAR_INT_MODE_P (TYPE_MODE (rhs_type)))
    4259              :     {
    4260           18 :       rtx nunits = GEN_INT (TYPE_VECTOR_SUBPARTS (rhs_type).to_constant ());
    4261           18 :       gcc_assert (insn_operand_matches (icode, 2, nunits));
    4262           18 :       create_input_operand (&ops[2], nunits, SImode);
    4263              :     }
    4264           51 :   expand_insn (icode, SCALAR_INT_MODE_P (TYPE_MODE (rhs_type)) ? 3 : 2, ops);
    4265           33 :   if (lhs_rtx)
    4266           33 :     assign_call_lhs (lhs, lhs_rtx, &ops[0]);
    4267           33 : }
    4268              : 
    4269              : /* Expanders for optabs that can use expand_direct_optab_fn.  */
    4270              : 
    4271              : #define expand_unary_optab_fn(FN, STMT, OPTAB) \
    4272              :   expand_direct_optab_fn (FN, STMT, OPTAB, 1)
    4273              : 
    4274              : #define expand_binary_optab_fn(FN, STMT, OPTAB) \
    4275              :   expand_direct_optab_fn (FN, STMT, OPTAB, 2)
    4276              : 
    4277              : #define expand_ternary_optab_fn(FN, STMT, OPTAB) \
    4278              :   expand_direct_optab_fn (FN, STMT, OPTAB, 3)
    4279              : 
    4280              : #define expand_cond_unary_optab_fn(FN, STMT, OPTAB) \
    4281              :   expand_direct_optab_fn (FN, STMT, OPTAB, 3)
    4282              : 
    4283              : #define expand_cond_binary_optab_fn(FN, STMT, OPTAB) \
    4284              :   expand_direct_optab_fn (FN, STMT, OPTAB, 4)
    4285              : 
    4286              : #define expand_cond_ternary_optab_fn(FN, STMT, OPTAB) \
    4287              :   expand_direct_optab_fn (FN, STMT, OPTAB, 5)
    4288              : 
    4289              : #define expand_cond_len_unary_optab_fn(FN, STMT, OPTAB) \
    4290              :   expand_direct_optab_fn (FN, STMT, OPTAB, 5)
    4291              : 
    4292              : #define expand_cond_len_binary_optab_fn(FN, STMT, OPTAB) \
    4293              :   expand_direct_optab_fn (FN, STMT, OPTAB, 6)
    4294              : 
    4295              : #define expand_cond_len_ternary_optab_fn(FN, STMT, OPTAB) \
    4296              :   expand_direct_optab_fn (FN, STMT, OPTAB, 7)
    4297              : 
    4298              : #define expand_fold_extract_optab_fn(FN, STMT, OPTAB) \
    4299              :   expand_direct_optab_fn (FN, STMT, OPTAB, 3)
    4300              : 
    4301              : #define expand_fold_len_extract_optab_fn(FN, STMT, OPTAB) \
    4302              :   expand_direct_optab_fn (FN, STMT, OPTAB, 5)
    4303              : 
    4304              : #define expand_fold_left_optab_fn(FN, STMT, OPTAB) \
    4305              :   expand_direct_optab_fn (FN, STMT, OPTAB, 2)
    4306              : 
    4307              : #define expand_mask_fold_left_optab_fn(FN, STMT, OPTAB) \
    4308              :   expand_direct_optab_fn (FN, STMT, OPTAB, 3)
    4309              : 
    4310              : #define expand_mask_len_fold_left_optab_fn(FN, STMT, OPTAB) \
    4311              :   expand_direct_optab_fn (FN, STMT, OPTAB, 5)
    4312              : 
    4313              : #define expand_check_ptrs_optab_fn(FN, STMT, OPTAB) \
    4314              :   expand_direct_optab_fn (FN, STMT, OPTAB, 4)
    4315              : 
    4316              : #define expand_select_vl_optab_fn(FN, STMT, OPTAB) \
    4317              :   expand_convert_optab_fn (FN, STMT, OPTAB, 3)
    4318              : 
    4319              : /* Expanders for optabs that can use expand_convert_optab_fn.  */
    4320              : 
    4321              : #define expand_unary_convert_optab_fn(FN, STMT, OPTAB) \
    4322              :   expand_convert_optab_fn (FN, STMT, OPTAB, 1)
    4323              : 
    4324              : #define expand_vec_extract_optab_fn(FN, STMT, OPTAB) \
    4325              :   expand_convert_optab_fn (FN, STMT, OPTAB, 2)
    4326              : 
    4327              : /* RETURN_TYPE and ARGS are a return type and argument list that are
    4328              :    in principle compatible with FN (which satisfies direct_internal_fn_p).
    4329              :    Return the types that should be used to determine whether the
    4330              :    target supports FN.  */
    4331              : 
    4332              : tree_pair
    4333         8205 : direct_internal_fn_types (internal_fn fn, tree return_type, tree *args)
    4334              : {
    4335         8205 :   const direct_internal_fn_info &info = direct_internal_fn (fn);
    4336         8205 :   tree type0 = (info.type0 < 0 ? return_type : TREE_TYPE (args[info.type0]));
    4337         8205 :   tree type1 = (info.type1 < 0 ? return_type : TREE_TYPE (args[info.type1]));
    4338         8205 :   return tree_pair (type0, type1);
    4339              : }
    4340              : 
    4341              : /* CALL is a call whose return type and arguments are in principle
    4342              :    compatible with FN (which satisfies direct_internal_fn_p).  Return the
    4343              :    types that should be used to determine whether the target supports FN.  */
    4344              : 
    4345              : tree_pair
    4346       137550 : direct_internal_fn_types (internal_fn fn, gcall *call)
    4347              : {
    4348       137550 :   const direct_internal_fn_info &info = direct_internal_fn (fn);
    4349       137550 :   tree op0 = (info.type0 < 0
    4350       137550 :               ? gimple_call_lhs (call)
    4351       136078 :               : gimple_call_arg (call, info.type0));
    4352       137550 :   tree op1 = (info.type1 < 0
    4353       137550 :               ? gimple_call_lhs (call)
    4354       137509 :               : gimple_call_arg (call, info.type1));
    4355       137550 :   return tree_pair (TREE_TYPE (op0), TREE_TYPE (op1));
    4356              : }
    4357              : 
    4358              : /* Return true if OPTAB is supported for TYPES (whose modes should be
    4359              :    the same) when the optimization type is OPT_TYPE.  Used for simple
    4360              :    direct optabs.  */
    4361              : 
    4362              : static bool
    4363      3991117 : direct_optab_supported_p (direct_optab optab, tree_pair types,
    4364              :                           optimization_type opt_type)
    4365              : {
    4366      3991117 :   machine_mode mode = TYPE_MODE (types.first);
    4367      3991117 :   gcc_checking_assert (mode == TYPE_MODE (types.second));
    4368      3991117 :   return direct_optab_handler (optab, mode, opt_type) != CODE_FOR_nothing;
    4369              : }
    4370              : 
    4371              : /* Return true if OPTAB is supported for TYPES, where the first type
    4372              :    is the destination and the second type is the source.  Used for
    4373              :    convert optabs.  */
    4374              : 
    4375              : static bool
    4376        80553 : convert_optab_supported_p (convert_optab optab, tree_pair types,
    4377              :                            optimization_type opt_type)
    4378              : {
    4379        80553 :   return (convert_optab_handler (optab, TYPE_MODE (types.first),
    4380        80553 :                                  TYPE_MODE (types.second), opt_type)
    4381        80553 :           != CODE_FOR_nothing);
    4382              : }
    4383              : 
    4384              : /* Return true if load/store lanes optab OPTAB is supported for
    4385              :    array type TYPES.first when the optimization type is OPT_TYPE.  */
    4386              : 
    4387              : static bool
    4388            0 : multi_vector_optab_supported_p (convert_optab optab, tree_pair types,
    4389              :                                 optimization_type opt_type)
    4390              : {
    4391            0 :   gcc_assert (TREE_CODE (types.first) == ARRAY_TYPE);
    4392            0 :   machine_mode imode = TYPE_MODE (types.first);
    4393            0 :   machine_mode vmode = TYPE_MODE (TREE_TYPE (types.first));
    4394            0 :   return (convert_optab_handler (optab, imode, vmode, opt_type)
    4395            0 :           != CODE_FOR_nothing);
    4396              : }
    4397              : 
    4398              : #define direct_unary_optab_supported_p direct_optab_supported_p
    4399              : #define direct_unary_convert_optab_supported_p convert_optab_supported_p
    4400              : #define direct_binary_optab_supported_p direct_optab_supported_p
    4401              : #define direct_ternary_optab_supported_p direct_optab_supported_p
    4402              : #define direct_cond_unary_optab_supported_p direct_optab_supported_p
    4403              : #define direct_cond_binary_optab_supported_p direct_optab_supported_p
    4404              : #define direct_cond_ternary_optab_supported_p direct_optab_supported_p
    4405              : #define direct_cond_len_unary_optab_supported_p direct_optab_supported_p
    4406              : #define direct_cond_len_binary_optab_supported_p direct_optab_supported_p
    4407              : #define direct_cond_len_ternary_optab_supported_p direct_optab_supported_p
    4408              : #define direct_crc_optab_supported_p convert_optab_supported_p
    4409              : #define direct_mask_load_optab_supported_p convert_optab_supported_p
    4410              : #define direct_load_lanes_optab_supported_p multi_vector_optab_supported_p
    4411              : #define direct_mask_load_lanes_optab_supported_p multi_vector_optab_supported_p
    4412              : #define direct_gather_load_optab_supported_p convert_optab_supported_p
    4413              : #define direct_strided_load_optab_supported_p direct_optab_supported_p
    4414              : #define direct_len_load_optab_supported_p direct_optab_supported_p
    4415              : #define direct_mask_len_load_optab_supported_p convert_optab_supported_p
    4416              : #define direct_mask_store_optab_supported_p convert_optab_supported_p
    4417              : #define direct_store_lanes_optab_supported_p multi_vector_optab_supported_p
    4418              : #define direct_mask_store_lanes_optab_supported_p multi_vector_optab_supported_p
    4419              : #define direct_vec_cond_mask_optab_supported_p convert_optab_supported_p
    4420              : #define direct_vec_cond_optab_supported_p convert_optab_supported_p
    4421              : #define direct_scatter_store_optab_supported_p convert_optab_supported_p
    4422              : #define direct_strided_store_optab_supported_p direct_optab_supported_p
    4423              : #define direct_len_store_optab_supported_p direct_optab_supported_p
    4424              : #define direct_mask_len_store_optab_supported_p convert_optab_supported_p
    4425              : #define direct_while_optab_supported_p convert_optab_supported_p
    4426              : #define direct_fold_extract_optab_supported_p direct_optab_supported_p
    4427              : #define direct_fold_len_extract_optab_supported_p direct_optab_supported_p
    4428              : #define direct_fold_left_optab_supported_p direct_optab_supported_p
    4429              : #define direct_mask_fold_left_optab_supported_p direct_optab_supported_p
    4430              : #define direct_mask_len_fold_left_optab_supported_p direct_optab_supported_p
    4431              : #define direct_check_ptrs_optab_supported_p direct_optab_supported_p
    4432              : #define direct_vec_set_optab_supported_p direct_optab_supported_p
    4433              : #define direct_vec_extract_optab_supported_p convert_optab_supported_p
    4434              : #define direct_reduc_sbool_optab_supported_p direct_optab_supported_p
    4435              : #define direct_select_vl_optab_supported_p convert_optab_supported_p
    4436              : 
    4437              : /* Return the optab used by internal function FN.  */
    4438              : 
    4439              : optab
    4440       208685 : direct_internal_fn_optab (internal_fn fn, tree_pair types)
    4441              : {
    4442       208685 :   switch (fn)
    4443              :     {
    4444              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
    4445              :     case IFN_##CODE: break;
    4446              : #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    4447              :     case IFN_##CODE: return OPTAB##_optab;
    4448              : #define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
    4449              :                                      UNSIGNED_OPTAB, TYPE)              \
    4450              :     case IFN_##CODE: return (TYPE_UNSIGNED (types.SELECTOR)             \
    4451              :                              ? UNSIGNED_OPTAB ## _optab                 \
    4452              :                              : SIGNED_OPTAB ## _optab);
    4453              : #include "internal-fn.def"
    4454              : 
    4455              :     case IFN_LAST:
    4456              :       break;
    4457              :     }
    4458            0 :   gcc_unreachable ();
    4459              : }
    4460              : 
    4461              : /* Return the optab used by internal function FN.  */
    4462              : 
    4463              : static optab
    4464       960404 : direct_internal_fn_optab (internal_fn fn)
    4465              : {
    4466       960404 :   switch (fn)
    4467              :     {
    4468              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
    4469              :     case IFN_##CODE: break;
    4470              : #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    4471              :     case IFN_##CODE: return OPTAB##_optab;
    4472              : #include "internal-fn.def"
    4473              : 
    4474              :     case IFN_LAST:
    4475              :       break;
    4476              :     }
    4477            0 :   gcc_unreachable ();
    4478              : }
    4479              : 
    4480              : /* Return true if TYPE's mode has the same format as TYPE, and if there is
    4481              :    a 1:1 correspondence between the values that the mode can store and the
    4482              :    values that the type can store.  */
    4483              : 
    4484              : static bool
    4485      8272297 : type_strictly_matches_mode_p (const_tree type)
    4486              : {
    4487              :   /* The masked vector operations have both vector data operands and vector
    4488              :      boolean operands.  The vector data operands are expected to have a vector
    4489              :      mode,  but the vector boolean operands can be an integer mode rather than
    4490              :      a vector mode,  depending on how TARGET_VECTORIZE_GET_MASK_MODE is
    4491              :      defined.  PR116103.  */
    4492      1075530 :   if (VECTOR_BOOLEAN_TYPE_P (type)
    4493       268488 :       && SCALAR_INT_MODE_P (TYPE_MODE (type))
    4494      8341243 :       && TYPE_PRECISION (TREE_TYPE (type)) == 1)
    4495              :     return true;
    4496              : 
    4497      8203352 :   if (VECTOR_TYPE_P (type))
    4498      1006585 :     return VECTOR_MODE_P (TYPE_MODE (type));
    4499              : 
    4500      7196767 :   if (INTEGRAL_TYPE_P (type))
    4501      6793705 :     return type_has_mode_precision_p (type);
    4502              : 
    4503       403062 :   if (SCALAR_FLOAT_TYPE_P (type) || COMPLEX_FLOAT_TYPE_P (type))
    4504              :     return true;
    4505              : 
    4506              :   return false;
    4507              : }
    4508              : 
    4509              : /* Returns true if both types of TYPE_PAIR strictly match their modes,
    4510              :    else returns false.  */
    4511              : 
    4512              : static bool
    4513      4200627 : type_pair_strictly_matches_mode_p (tree_pair type_pair)
    4514              : {
    4515      4200627 :   return type_strictly_matches_mode_p (type_pair.first)
    4516      4200627 :     && type_strictly_matches_mode_p (type_pair.second);
    4517              : }
    4518              : 
    4519              : /* Return true if FN is supported for the types in TYPES when the
    4520              :    optimization type is OPT_TYPE.  The types are those associated with
    4521              :    the "type0" and "type1" fields of FN's direct_internal_fn_info
    4522              :    structure.  */
    4523              : 
    4524              : bool
    4525      4200627 : direct_internal_fn_supported_p (internal_fn fn, tree_pair types,
    4526              :                                 optimization_type opt_type)
    4527              : {
    4528      4200627 :   if (!type_pair_strictly_matches_mode_p (types))
    4529              :     return false;
    4530              : 
    4531      4071670 :   switch (fn)
    4532              :     {
    4533              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
    4534              :     case IFN_##CODE: break;
    4535              : #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    4536              :     case IFN_##CODE: \
    4537              :       return direct_##TYPE##_optab_supported_p (OPTAB##_optab, types, \
    4538              :                                                 opt_type);
    4539              : #define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
    4540              :                                      UNSIGNED_OPTAB, TYPE)              \
    4541              :     case IFN_##CODE:                                                    \
    4542              :       {                                                                 \
    4543              :         optab which_optab = (TYPE_UNSIGNED (types.SELECTOR)             \
    4544              :                              ? UNSIGNED_OPTAB ## _optab                 \
    4545              :                              : SIGNED_OPTAB ## _optab);                 \
    4546              :         return direct_##TYPE##_optab_supported_p (which_optab, types,   \
    4547              :                                                   opt_type);            \
    4548              :       }
    4549              : #include "internal-fn.def"
    4550              : 
    4551              :     case IFN_LAST:
    4552              :       break;
    4553              :     }
    4554            0 :   gcc_unreachable ();
    4555              : }
    4556              : 
    4557              : /* Return true if FN is supported for type TYPE when the optimization
    4558              :    type is OPT_TYPE.  The caller knows that the "type0" and "type1"
    4559              :    fields of FN's direct_internal_fn_info structure are the same.  */
    4560              : 
    4561              : bool
    4562      4046513 : direct_internal_fn_supported_p (internal_fn fn, tree type,
    4563              :                                 optimization_type opt_type)
    4564              : {
    4565      4046513 :   const direct_internal_fn_info &info = direct_internal_fn (fn);
    4566      4046513 :   gcc_checking_assert (info.type0 == info.type1);
    4567      4046513 :   return direct_internal_fn_supported_p (fn, tree_pair (type, type), opt_type);
    4568              : }
    4569              : 
    4570              : /* Return true if the STMT is supported when the optimization type is OPT_TYPE,
    4571              :    given that STMT is a call to a direct internal function.  */
    4572              : 
    4573              : bool
    4574          384 : direct_internal_fn_supported_p (gcall *stmt, optimization_type opt_type)
    4575              : {
    4576          384 :   internal_fn fn = gimple_call_internal_fn (stmt);
    4577          384 :   tree_pair types = direct_internal_fn_types (fn, stmt);
    4578          384 :   return direct_internal_fn_supported_p (fn, types, opt_type);
    4579              : }
    4580              : 
    4581              : /* Return true if FN is a binary operation and if FN is commutative.  */
    4582              : 
    4583              : bool
    4584     33016688 : commutative_binary_fn_p (internal_fn fn)
    4585              : {
    4586     33016688 :   switch (fn)
    4587              :     {
    4588              :     case IFN_AVG_FLOOR:
    4589              :     case IFN_AVG_CEIL:
    4590              :     case IFN_MULH:
    4591              :     case IFN_MULHS:
    4592              :     case IFN_MULHRS:
    4593              :     case IFN_FMIN:
    4594              :     case IFN_FMAX:
    4595              :     case IFN_COMPLEX_MUL:
    4596              :     case IFN_UBSAN_CHECK_ADD:
    4597              :     case IFN_UBSAN_CHECK_MUL:
    4598              :     case IFN_ADD_OVERFLOW:
    4599              :     case IFN_MUL_OVERFLOW:
    4600              :     case IFN_SAT_ADD:
    4601              :     case IFN_SAT_MUL:
    4602              :     case IFN_VEC_WIDEN_PLUS:
    4603              :     case IFN_VEC_WIDEN_PLUS_LO:
    4604              :     case IFN_VEC_WIDEN_PLUS_HI:
    4605              :     case IFN_VEC_WIDEN_PLUS_EVEN:
    4606              :     case IFN_VEC_WIDEN_PLUS_ODD:
    4607              :       return true;
    4608              : 
    4609     32125998 :     default:
    4610     32125998 :       return false;
    4611              :     }
    4612              : }
    4613              : 
    4614              : /* Return true if FN is a ternary operation and if its first two arguments
    4615              :    are commutative.  */
    4616              : 
    4617              : bool
    4618     26808720 : commutative_ternary_fn_p (internal_fn fn)
    4619              : {
    4620     26808720 :   switch (fn)
    4621              :     {
    4622              :     case IFN_FMA:
    4623              :     case IFN_FMS:
    4624              :     case IFN_FNMA:
    4625              :     case IFN_FNMS:
    4626              :     case IFN_UADDC:
    4627              :       return true;
    4628              : 
    4629     26679979 :     default:
    4630     26679979 :       return false;
    4631              :     }
    4632              : }
    4633              : 
    4634              : /* Return true if FN is an associative binary operation.  */
    4635              : 
    4636              : bool
    4637           52 : associative_binary_fn_p (internal_fn fn)
    4638              : {
    4639           52 :   switch (fn)
    4640              :     {
    4641              :     case IFN_FMIN:
    4642              :     case IFN_FMAX:
    4643              :       return true;
    4644              : 
    4645            0 :     default:
    4646            0 :       return false;
    4647              :     }
    4648              : }
    4649              : 
    4650              : /* If FN is commutative in two consecutive arguments, return the
    4651              :    index of the first, otherwise return -1.  */
    4652              : 
    4653              : int
    4654     27135059 : first_commutative_argument (internal_fn fn)
    4655              : {
    4656     27135059 :   switch (fn)
    4657              :     {
    4658              :     case IFN_COND_ADD:
    4659              :     case IFN_COND_MUL:
    4660              :     case IFN_COND_MIN:
    4661              :     case IFN_COND_MAX:
    4662              :     case IFN_COND_FMIN:
    4663              :     case IFN_COND_FMAX:
    4664              :     case IFN_COND_AND:
    4665              :     case IFN_COND_IOR:
    4666              :     case IFN_COND_XOR:
    4667              :     case IFN_COND_FMA:
    4668              :     case IFN_COND_FMS:
    4669              :     case IFN_COND_FNMA:
    4670              :     case IFN_COND_FNMS:
    4671              :     case IFN_COND_LEN_ADD:
    4672              :     case IFN_COND_LEN_MUL:
    4673              :     case IFN_COND_LEN_MIN:
    4674              :     case IFN_COND_LEN_MAX:
    4675              :     case IFN_COND_LEN_FMIN:
    4676              :     case IFN_COND_LEN_FMAX:
    4677              :     case IFN_COND_LEN_AND:
    4678              :     case IFN_COND_LEN_IOR:
    4679              :     case IFN_COND_LEN_XOR:
    4680              :     case IFN_COND_LEN_FMA:
    4681              :     case IFN_COND_LEN_FMS:
    4682              :     case IFN_COND_LEN_FNMA:
    4683              :     case IFN_COND_LEN_FNMS:
    4684              :       return 1;
    4685              : 
    4686     27116605 :     default:
    4687     27116605 :       if (commutative_binary_fn_p (fn)
    4688     27116605 :           || commutative_ternary_fn_p (fn))
    4689       436650 :         return 0;
    4690              :       return -1;
    4691              :     }
    4692              : }
    4693              : 
    4694              : /* Return true if this CODE describes an internal_fn that returns a vector with
    4695              :    elements twice as wide as the element size of the input vectors.  */
    4696              : 
    4697              : bool
    4698      1770573 : widening_fn_p (code_helper code)
    4699              : {
    4700      1770573 :   if (!code.is_fn_code ())
    4701              :     return false;
    4702              : 
    4703       160206 :   if (!internal_fn_p ((combined_fn) code))
    4704              :     return false;
    4705              : 
    4706       160206 :   internal_fn fn = as_internal_fn ((combined_fn) code);
    4707       160206 :   switch (fn)
    4708              :     {
    4709              :     #define DEF_INTERNAL_WIDENING_OPTAB_FN(NAME, F, S, SO, UO, T) \
    4710              :     case IFN_##NAME:                                              \
    4711              :     case IFN_##NAME##_HI:                                         \
    4712              :     case IFN_##NAME##_LO:                                         \
    4713              :     case IFN_##NAME##_EVEN:                                       \
    4714              :     case IFN_##NAME##_ODD:                                        \
    4715              :       return true;
    4716              :     #include "internal-fn.def"
    4717              : 
    4718              :     default:
    4719              :       return false;
    4720              :     }
    4721              : }
    4722              : 
    4723              : /* Return true if this CODE describes an internal_fn that returns a vector with
    4724              :    elements twice as wide as the element size of the input vectors and operates
    4725              :    on even/odd parts of the input.  */
    4726              : 
    4727              : bool
    4728        17150 : widening_evenodd_fn_p (code_helper code)
    4729              : {
    4730        17150 :   if (!code.is_fn_code ())
    4731              :     return false;
    4732              : 
    4733            0 :   if (!internal_fn_p ((combined_fn) code))
    4734              :     return false;
    4735              : 
    4736            0 :   internal_fn fn = as_internal_fn ((combined_fn) code);
    4737            0 :   switch (fn)
    4738              :     {
    4739              :     #define DEF_INTERNAL_WIDENING_OPTAB_FN(NAME, F, S, SO, UO, T) \
    4740              :     case IFN_##NAME##_EVEN:                                       \
    4741              :     case IFN_##NAME##_ODD:                                        \
    4742              :       return true;
    4743              :     #include "internal-fn.def"
    4744              : 
    4745              :     default:
    4746              :       return false;
    4747              :     }
    4748              : }
    4749              : 
    4750              : /* Return true if IFN_SET_EDOM is supported.  */
    4751              : 
    4752              : bool
    4753          139 : set_edom_supported_p (void)
    4754              : {
    4755              : #ifdef TARGET_EDOM
    4756              :   return true;
    4757              : #else
    4758          139 :   return false;
    4759              : #endif
    4760              : }
    4761              : 
    4762              : #define DEF_INTERNAL_OPTAB_FN(CODE, FLAGS, OPTAB, TYPE) \
    4763              :   static void                                           \
    4764              :   expand_##CODE (internal_fn fn, gcall *stmt)           \
    4765              :   {                                                     \
    4766              :     expand_##TYPE##_optab_fn (fn, stmt, OPTAB##_optab); \
    4767              :   }
    4768              : #define DEF_INTERNAL_INT_EXT_FN(CODE, FLAGS, OPTAB, TYPE)
    4769              : #define DEF_INTERNAL_SIGNED_OPTAB_FN(CODE, FLAGS, SELECTOR, SIGNED_OPTAB, \
    4770              :                                      UNSIGNED_OPTAB, TYPE)              \
    4771              :   static void                                                           \
    4772              :   expand_##CODE (internal_fn fn, gcall *stmt)                           \
    4773              :   {                                                                     \
    4774              :     tree_pair types = direct_internal_fn_types (fn, stmt);              \
    4775              :     optab which_optab = direct_internal_fn_optab (fn, types);           \
    4776              :     expand_##TYPE##_optab_fn (fn, stmt, which_optab);                   \
    4777              :   }
    4778              : #include "internal-fn.def"
    4779              : 
    4780              : /* Routines to expand each internal function, indexed by function number.
    4781              :    Each routine has the prototype:
    4782              : 
    4783              :        expand_<NAME> (gcall *stmt)
    4784              : 
    4785              :    where STMT is the statement that performs the call. */
    4786              : static void (*const internal_fn_expanders[]) (internal_fn, gcall *) = {
    4787              : 
    4788              : #define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
    4789              : #include "internal-fn.def"
    4790              :   0
    4791              : };
    4792              : 
    4793              : /* Invoke T(CODE, SUFFIX) for each conditional function IFN_COND_##SUFFIX
    4794              :    that maps to a tree code CODE.  There is also an IFN_COND_LEN_##SUFFIX
    4795              :    for each such IFN_COND_##SUFFIX.  */
    4796              : #define FOR_EACH_CODE_MAPPING(T) \
    4797              :   T (PLUS_EXPR, ADD) \
    4798              :   T (MINUS_EXPR, SUB) \
    4799              :   T (MULT_EXPR, MUL) \
    4800              :   T (TRUNC_DIV_EXPR, DIV) \
    4801              :   T (TRUNC_MOD_EXPR, MOD) \
    4802              :   T (RDIV_EXPR, RDIV) \
    4803              :   T (MIN_EXPR, MIN) \
    4804              :   T (MAX_EXPR, MAX) \
    4805              :   T (BIT_AND_EXPR, AND) \
    4806              :   T (BIT_IOR_EXPR, IOR) \
    4807              :   T (BIT_XOR_EXPR, XOR) \
    4808              :   T (LSHIFT_EXPR, SHL) \
    4809              :   T (RSHIFT_EXPR, SHR) \
    4810              :   T (NEGATE_EXPR, NEG)
    4811              : 
    4812              : /* Return a function that only performs CODE when a certain condition is met
    4813              :    and that uses a given fallback value otherwise.  For example, if CODE is
    4814              :    a binary operation associated with conditional function FN:
    4815              : 
    4816              :      LHS = FN (COND, A, B, ELSE)
    4817              : 
    4818              :    is equivalent to the C expression:
    4819              : 
    4820              :      LHS = COND ? A CODE B : ELSE;
    4821              : 
    4822              :    operating elementwise if the operands are vectors.
    4823              : 
    4824              :    Return IFN_LAST if no such function exists.  */
    4825              : 
    4826              : internal_fn
    4827       555078 : get_conditional_internal_fn (tree_code code)
    4828              : {
    4829       555078 :   switch (code)
    4830              :     {
    4831              : #define CASE(CODE, IFN) case CODE: return IFN_COND_##IFN;
    4832              :       FOR_EACH_CODE_MAPPING(CASE)
    4833              : #undef CASE
    4834              :     default:
    4835              :       return IFN_LAST;
    4836              :     }
    4837              : }
    4838              : 
    4839              : /* If IFN implements the conditional form of a tree code, return that
    4840              :    tree code, otherwise return ERROR_MARK.  */
    4841              : 
    4842              : tree_code
    4843      3232278 : conditional_internal_fn_code (internal_fn ifn)
    4844              : {
    4845      3232278 :   switch (ifn)
    4846              :     {
    4847              : #define CASE(CODE, IFN)                                                        \
    4848              :   case IFN_COND_##IFN:                                                         \
    4849              :   case IFN_COND_LEN_##IFN:                                                     \
    4850              :     return CODE;
    4851              :       FOR_EACH_CODE_MAPPING (CASE)
    4852              : #undef CASE
    4853              :       default:
    4854              :         return ERROR_MARK;
    4855              :     }
    4856              : }
    4857              : 
    4858              : /* Like get_conditional_internal_fn, but return a function that
    4859              :    additionally restricts the operation to the leading elements
    4860              :    of a vector.  The number of elements to process is given by a length
    4861              :    and bias pair, as for IFN_LOAD_LEN.  The values of the remaining
    4862              :    elements are taken from the fallback ("else") argument.
    4863              : 
    4864              :    For example, if CODE is a binary operation associated with FN:
    4865              : 
    4866              :      LHS = FN (COND, A, B, ELSE, LEN, BIAS)
    4867              : 
    4868              :    is equivalent to the C code:
    4869              : 
    4870              :      for (int i = 0; i < NUNITS; i++)
    4871              :       {
    4872              :         if (i < LEN + BIAS && COND[i])
    4873              :           LHS[i] = A[i] CODE B[i];
    4874              :         else
    4875              :           LHS[i] = ELSE[i];
    4876              :       }
    4877              : */
    4878              : 
    4879              : internal_fn
    4880       532516 : get_conditional_len_internal_fn (tree_code code)
    4881              : {
    4882       532516 :   switch (code)
    4883              :     {
    4884              : #define CASE(CODE, IFN) case CODE: return IFN_COND_LEN_##IFN;
    4885              :       FOR_EACH_CODE_MAPPING(CASE)
    4886              : #undef CASE
    4887              :     default:
    4888              :       return IFN_LAST;
    4889              :     }
    4890              : }
    4891              : 
    4892              : /* Invoke T(IFN) for each internal function IFN that also has an
    4893              :    IFN_COND_* form.  */
    4894              : #define FOR_EACH_COND_FN_PAIR(T) \
    4895              :   T (FMAX) \
    4896              :   T (FMIN) \
    4897              :   T (FMA) \
    4898              :   T (FMS) \
    4899              :   T (FNMA) \
    4900              :   T (FNMS) \
    4901              :   T (SQRT) \
    4902              :   T (ROUND) \
    4903              :   T (FLOOR) \
    4904              :   T (RINT) \
    4905              :   T (CEIL)
    4906              : 
    4907              : /* Return a function that only performs internal function FN when a
    4908              :    certain condition is met and that uses a given fallback value otherwise.
    4909              :    In other words, the returned function FN' is such that:
    4910              : 
    4911              :      LHS = FN' (COND, A1, ... An, ELSE)
    4912              : 
    4913              :    is equivalent to the C expression:
    4914              : 
    4915              :      LHS = COND ? FN (A1, ..., An) : ELSE;
    4916              : 
    4917              :    operating elementwise if the operands are vectors.
    4918              : 
    4919              :    Return IFN_LAST if no such function exists.  */
    4920              : 
    4921              : internal_fn
    4922         8512 : get_conditional_internal_fn (internal_fn fn)
    4923              : {
    4924         8512 :   switch (fn)
    4925              :     {
    4926              : #define CASE(NAME) case IFN_##NAME: return IFN_COND_##NAME;
    4927         1850 :       FOR_EACH_COND_FN_PAIR(CASE)
    4928              : #undef CASE
    4929         6661 :     default:
    4930         6661 :       return IFN_LAST;
    4931              :     }
    4932              : }
    4933              : 
    4934              : /* If there exists an internal function like IFN that operates on vectors,
    4935              :    but with additional length and bias parameters, return the internal_fn
    4936              :    for that function, otherwise return IFN_LAST.  */
    4937              : internal_fn
    4938        10009 : get_len_internal_fn (internal_fn fn)
    4939              : {
    4940        10009 :   switch (fn)
    4941              :     {
    4942              : #define DEF_INTERNAL_COND_FN(NAME, ...)                                        \
    4943              :   case IFN_COND_##NAME:                                                        \
    4944              :     return IFN_COND_LEN_##NAME;
    4945              : #define DEF_INTERNAL_SIGNED_COND_FN(NAME, ...)                                 \
    4946              :   case IFN_COND_##NAME:                                                        \
    4947              :     return IFN_COND_LEN_##NAME;
    4948              : #include "internal-fn.def"
    4949         6623 :     default:
    4950         6623 :       break;
    4951              :     }
    4952              : 
    4953         6623 :   switch (fn)
    4954              :     {
    4955              :     case IFN_MASK_LOAD:
    4956              :       return IFN_MASK_LEN_LOAD;
    4957              :     case IFN_MASK_LOAD_LANES:
    4958              :       return IFN_MASK_LEN_LOAD_LANES;
    4959              :     case IFN_MASK_GATHER_LOAD:
    4960              :       return IFN_MASK_LEN_GATHER_LOAD;
    4961              :     default:
    4962              :       return IFN_LAST;
    4963              :     }
    4964              : }
    4965              : 
    4966              : /* If IFN implements the conditional form of an unconditional internal
    4967              :    function, return that unconditional function, otherwise return IFN_LAST.  */
    4968              : 
    4969              : internal_fn
    4970      3186338 : get_unconditional_internal_fn (internal_fn ifn)
    4971              : {
    4972      3186338 :   switch (ifn)
    4973              :     {
    4974              : #define CASE(NAME)                                                             \
    4975              :     case IFN_COND_##NAME:                                                      \
    4976              :     case IFN_COND_LEN_##NAME:                                                  \
    4977              :       return IFN_##NAME;
    4978              : FOR_EACH_COND_FN_PAIR (CASE)
    4979              : #undef CASE
    4980              :     default:
    4981              :       return IFN_LAST;
    4982              :     }
    4983              : }
    4984              : 
    4985              : /* Return true if STMT can be interpreted as a conditional tree code
    4986              :    operation of the form:
    4987              : 
    4988              :      LHS = COND ? OP (RHS1, ...) : ELSE;
    4989              : 
    4990              :    operating elementwise if the operands are vectors.  This includes
    4991              :    the case of an all-true COND, so that the operation always happens.
    4992              : 
    4993              :    There is an alternative approach to interpret the STMT when the operands
    4994              :    are vectors which is the operation predicated by both conditional mask
    4995              :    and loop control length, the equivalent C code:
    4996              : 
    4997              :      for (int i = 0; i < NUNTIS; i++)
    4998              :       {
    4999              :         if (i < LEN + BIAS && COND[i])
    5000              :           LHS[i] = A[i] CODE B[i];
    5001              :         else
    5002              :           LHS[i] = ELSE[i];
    5003              :       }
    5004              : 
    5005              :    When returning true, set:
    5006              : 
    5007              :    - *COND_OUT to the condition COND, or to NULL_TREE if the condition
    5008              :      is known to be all-true
    5009              :    - *CODE_OUT to the tree code
    5010              :    - OPS[I] to operand I of *CODE_OUT
    5011              :    - *ELSE_OUT to the fallback value ELSE, or to NULL_TREE if the
    5012              :      condition is known to be all true.
    5013              :    - *LEN to the len argument if it COND_LEN_* operations or to NULL_TREE.
    5014              :    - *BIAS to the bias argument if it COND_LEN_* operations or to NULL_TREE.  */
    5015              : 
    5016              : bool
    5017        40532 : can_interpret_as_conditional_op_p (gimple *stmt, tree *cond_out,
    5018              :                                    tree_code *code_out,
    5019              :                                    tree (&ops)[3], tree *else_out,
    5020              :                                    tree *len, tree *bias)
    5021              : {
    5022        40532 :   *len = NULL_TREE;
    5023        40532 :   *bias = NULL_TREE;
    5024        40532 :   if (gassign *assign = dyn_cast <gassign *> (stmt))
    5025              :     {
    5026        37725 :       *cond_out = NULL_TREE;
    5027        37725 :       *code_out = gimple_assign_rhs_code (assign);
    5028        37725 :       ops[0] = gimple_assign_rhs1 (assign);
    5029        37725 :       ops[1] = gimple_assign_rhs2 (assign);
    5030        37725 :       ops[2] = gimple_assign_rhs3 (assign);
    5031        37725 :       *else_out = NULL_TREE;
    5032        37725 :       return true;
    5033              :     }
    5034         2807 :   if (gcall *call = dyn_cast <gcall *> (stmt))
    5035         2229 :     if (gimple_call_internal_p (call))
    5036              :       {
    5037         2172 :         internal_fn ifn = gimple_call_internal_fn (call);
    5038         2172 :         tree_code code = conditional_internal_fn_code (ifn);
    5039         2172 :         int len_index = internal_fn_len_index (ifn);
    5040         2172 :         int cond_nargs = len_index >= 0 ? 4 : 2;
    5041         2172 :         if (code != ERROR_MARK)
    5042              :           {
    5043          216 :             *cond_out = gimple_call_arg (call, 0);
    5044          216 :             *code_out = code;
    5045          216 :             unsigned int nops = gimple_call_num_args (call) - cond_nargs;
    5046          864 :             for (unsigned int i = 0; i < 3; ++i)
    5047          648 :               ops[i] = i < nops ? gimple_call_arg (call, i + 1) : NULL_TREE;
    5048          216 :             *else_out = gimple_call_arg (call, nops + 1);
    5049          216 :             if (len_index < 0)
    5050              :               {
    5051          216 :                 if (integer_truep (*cond_out))
    5052              :                   {
    5053            0 :                     *cond_out = NULL_TREE;
    5054            0 :                     *else_out = NULL_TREE;
    5055              :                   }
    5056              :               }
    5057              :             else
    5058              :               {
    5059            0 :                 *len = gimple_call_arg (call, len_index);
    5060            0 :                 *bias = gimple_call_arg (call, len_index + 1);
    5061              :               }
    5062          216 :             return true;
    5063              :           }
    5064              :       }
    5065              :   return false;
    5066              : }
    5067              : 
    5068              : /* Return true if IFN is some form of load from memory.  */
    5069              : 
    5070              : bool
    5071      1002025 : internal_load_fn_p (internal_fn fn)
    5072              : {
    5073      1002025 :   switch (fn)
    5074              :     {
    5075              :     case IFN_MASK_LOAD:
    5076              :     case IFN_LOAD_LANES:
    5077              :     case IFN_MASK_LOAD_LANES:
    5078              :     case IFN_MASK_LEN_LOAD_LANES:
    5079              :     case IFN_GATHER_LOAD:
    5080              :     case IFN_MASK_GATHER_LOAD:
    5081              :     case IFN_MASK_LEN_GATHER_LOAD:
    5082              :     case IFN_LEN_LOAD:
    5083              :     case IFN_MASK_LEN_LOAD:
    5084              :       return true;
    5085              : 
    5086       272955 :     default:
    5087       272955 :       return false;
    5088              :     }
    5089              : }
    5090              : 
    5091              : /* Return true if IFN is some form of store to memory.  */
    5092              : 
    5093              : bool
    5094       502442 : internal_store_fn_p (internal_fn fn)
    5095              : {
    5096       502442 :   switch (fn)
    5097              :     {
    5098              :     case IFN_MASK_STORE:
    5099              :     case IFN_STORE_LANES:
    5100              :     case IFN_MASK_STORE_LANES:
    5101              :     case IFN_MASK_LEN_STORE_LANES:
    5102              :     case IFN_SCATTER_STORE:
    5103              :     case IFN_MASK_SCATTER_STORE:
    5104              :     case IFN_MASK_LEN_SCATTER_STORE:
    5105              :     case IFN_LEN_STORE:
    5106              :     case IFN_MASK_LEN_STORE:
    5107              :       return true;
    5108              : 
    5109       491869 :     default:
    5110       491869 :       return false;
    5111              :     }
    5112              : }
    5113              : 
    5114              : /* Return true if IFN is some form of gather load or scatter store.  */
    5115              : 
    5116              : bool
    5117        40189 : internal_gather_scatter_fn_p (internal_fn fn)
    5118              : {
    5119        40189 :   switch (fn)
    5120              :     {
    5121              :     case IFN_GATHER_LOAD:
    5122              :     case IFN_MASK_GATHER_LOAD:
    5123              :     case IFN_MASK_LEN_GATHER_LOAD:
    5124              :     case IFN_SCATTER_STORE:
    5125              :     case IFN_MASK_SCATTER_STORE:
    5126              :     case IFN_MASK_LEN_SCATTER_STORE:
    5127              :       return true;
    5128              : 
    5129        40189 :     default:
    5130        40189 :       return false;
    5131              :     }
    5132              : }
    5133              : 
    5134              : /* If FN takes a vector len argument, return the index of that argument,
    5135              :    otherwise return -1.  */
    5136              : 
    5137              : int
    5138      3080382 : internal_fn_len_index (internal_fn fn)
    5139              : {
    5140      3080382 :   switch (fn)
    5141              :     {
    5142              :     case IFN_LEN_LOAD:
    5143              :       return 3;
    5144              :     case IFN_LEN_STORE:
    5145              :       return 2;
    5146              : 
    5147              :     case IFN_MASK_LEN_SCATTER_STORE:
    5148              :       return 6;
    5149              : 
    5150              :     case IFN_MASK_LEN_STRIDED_LOAD:
    5151              :       return 5;
    5152              : 
    5153              :     case IFN_MASK_LEN_GATHER_LOAD:
    5154              :       return 7;
    5155              : 
    5156              :     case IFN_COND_LEN_FMA:
    5157              :     case IFN_COND_LEN_FMS:
    5158              :     case IFN_COND_LEN_FNMA:
    5159              :     case IFN_COND_LEN_FNMS:
    5160              :       return 5;
    5161              : 
    5162              :     case IFN_COND_LEN_ADD:
    5163              :     case IFN_COND_LEN_SUB:
    5164              :     case IFN_COND_LEN_MUL:
    5165              :     case IFN_COND_LEN_DIV:
    5166              :     case IFN_COND_LEN_MOD:
    5167              :     case IFN_COND_LEN_RDIV:
    5168              :     case IFN_COND_LEN_MIN:
    5169              :     case IFN_COND_LEN_MAX:
    5170              :     case IFN_COND_LEN_FMIN:
    5171              :     case IFN_COND_LEN_FMAX:
    5172              :     case IFN_COND_LEN_AND:
    5173              :     case IFN_COND_LEN_IOR:
    5174              :     case IFN_COND_LEN_XOR:
    5175              :     case IFN_COND_LEN_SHL:
    5176              :     case IFN_COND_LEN_SHR:
    5177              :     case IFN_MASK_LEN_STRIDED_STORE:
    5178              :       return 4;
    5179              : 
    5180              :     case IFN_COND_LEN_NEG:
    5181              :     case IFN_MASK_LEN_STORE:
    5182              :     case IFN_MASK_LEN_STORE_LANES:
    5183              :     case IFN_VCOND_MASK_LEN:
    5184              :       return 3;
    5185              : 
    5186              :     case IFN_MASK_LEN_LOAD:
    5187              :     case IFN_MASK_LEN_LOAD_LANES:
    5188              :       return 4;
    5189              : 
    5190              :     default:
    5191              :       return -1;
    5192              :     }
    5193              : }
    5194              : 
    5195              : /* If FN is an IFN_COND_* or IFN_COND_LEN_* function, return the index of the
    5196              :    argument that is used when the condition is false.  Return -1 otherwise.  */
    5197              : 
    5198              : int
    5199        62847 : internal_fn_else_index (internal_fn fn)
    5200              : {
    5201        62847 :   switch (fn)
    5202              :     {
    5203              :     case IFN_COND_NEG:
    5204              :     case IFN_COND_NOT:
    5205              :     case IFN_COND_SQRT:
    5206              :     case IFN_COND_CEIL:
    5207              :     case IFN_COND_FLOOR:
    5208              :     case IFN_COND_ROUND:
    5209              :     case IFN_COND_RINT:
    5210              :     case IFN_COND_LEN_NEG:
    5211              :     case IFN_COND_LEN_NOT:
    5212              :     case IFN_COND_LEN_SQRT:
    5213              :     case IFN_COND_LEN_CEIL:
    5214              :     case IFN_COND_LEN_FLOOR:
    5215              :     case IFN_COND_LEN_ROUND:
    5216              :     case IFN_COND_LEN_RINT:
    5217              :       return 2;
    5218              : 
    5219              :     case IFN_LEN_LOAD:
    5220              :       return 2;
    5221              : 
    5222              :     case IFN_COND_ADD:
    5223              :     case IFN_COND_SUB:
    5224              :     case IFN_COND_MUL:
    5225              :     case IFN_COND_DIV:
    5226              :     case IFN_COND_MOD:
    5227              :     case IFN_COND_MIN:
    5228              :     case IFN_COND_MAX:
    5229              :     case IFN_COND_FMIN:
    5230              :     case IFN_COND_FMAX:
    5231              :     case IFN_COND_AND:
    5232              :     case IFN_COND_IOR:
    5233              :     case IFN_COND_XOR:
    5234              :     case IFN_COND_SHL:
    5235              :     case IFN_COND_SHR:
    5236              :     case IFN_COND_LEN_ADD:
    5237              :     case IFN_COND_LEN_SUB:
    5238              :     case IFN_COND_LEN_MUL:
    5239              :     case IFN_COND_LEN_DIV:
    5240              :     case IFN_COND_LEN_MOD:
    5241              :     case IFN_COND_LEN_MIN:
    5242              :     case IFN_COND_LEN_MAX:
    5243              :     case IFN_COND_LEN_FMIN:
    5244              :     case IFN_COND_LEN_FMAX:
    5245              :     case IFN_COND_LEN_AND:
    5246              :     case IFN_COND_LEN_IOR:
    5247              :     case IFN_COND_LEN_XOR:
    5248              :     case IFN_COND_LEN_SHL:
    5249              :     case IFN_COND_LEN_SHR:
    5250              :       return 3;
    5251              : 
    5252              :     case IFN_MASK_LOAD:
    5253              :     case IFN_MASK_LEN_LOAD:
    5254              :     case IFN_MASK_LOAD_LANES:
    5255              :     case IFN_MASK_LEN_LOAD_LANES:
    5256              :       return 3;
    5257              : 
    5258              :     case IFN_COND_FMA:
    5259              :     case IFN_COND_FMS:
    5260              :     case IFN_COND_FNMA:
    5261              :     case IFN_COND_FNMS:
    5262              :     case IFN_COND_LEN_FMA:
    5263              :     case IFN_COND_LEN_FMS:
    5264              :     case IFN_COND_LEN_FNMA:
    5265              :     case IFN_COND_LEN_FNMS:
    5266              :     case IFN_MASK_LEN_STRIDED_LOAD:
    5267              :       return 4;
    5268              : 
    5269              :     case IFN_MASK_GATHER_LOAD:
    5270              :     case IFN_MASK_LEN_GATHER_LOAD:
    5271              :       return 6;
    5272              : 
    5273              :     default:
    5274              :       return -1;
    5275              :     }
    5276              : 
    5277              :   return -1;
    5278              : }
    5279              : 
    5280              : /* If FN takes a vector mask argument, return the index of that argument,
    5281              :    otherwise return -1.  */
    5282              : 
    5283              : int
    5284       193629 : internal_fn_mask_index (internal_fn fn)
    5285              : {
    5286       193629 :   switch (fn)
    5287              :     {
    5288              :     case IFN_MASK_LOAD:
    5289              :     case IFN_MASK_LOAD_LANES:
    5290              :     case IFN_MASK_LEN_LOAD_LANES:
    5291              :     case IFN_MASK_STORE:
    5292              :     case IFN_MASK_STORE_LANES:
    5293              :     case IFN_MASK_LEN_STORE_LANES:
    5294              :     case IFN_MASK_LEN_LOAD:
    5295              :     case IFN_MASK_LEN_STORE:
    5296              :       return 2;
    5297              : 
    5298            0 :     case IFN_MASK_LEN_STRIDED_LOAD:
    5299            0 :     case IFN_MASK_LEN_STRIDED_STORE:
    5300            0 :       return 3;
    5301              : 
    5302            0 :     case IFN_MASK_GATHER_LOAD:
    5303            0 :     case IFN_MASK_SCATTER_STORE:
    5304            0 :     case IFN_MASK_LEN_GATHER_LOAD:
    5305            0 :     case IFN_MASK_LEN_SCATTER_STORE:
    5306            0 :       return 5;
    5307              : 
    5308              :     case IFN_VCOND_MASK:
    5309              :     case IFN_VCOND_MASK_LEN:
    5310              :       return 0;
    5311              : 
    5312       154759 :     default:
    5313       154759 :       return (conditional_internal_fn_code (fn) != ERROR_MARK
    5314       154759 :               || get_unconditional_internal_fn (fn) != IFN_LAST ? 0 : -1);
    5315              :     }
    5316              : }
    5317              : 
    5318              : /* If FN takes a value that should be stored to memory, return the index
    5319              :    of that argument, otherwise return -1.  */
    5320              : 
    5321              : int
    5322        24747 : internal_fn_stored_value_index (internal_fn fn)
    5323              : {
    5324        24747 :   switch (fn)
    5325              :     {
    5326              :     case IFN_MASK_LEN_STRIDED_STORE:
    5327              :       return 2;
    5328              : 
    5329              :     case IFN_MASK_STORE:
    5330              :     case IFN_MASK_STORE_LANES:
    5331              :       return 3;
    5332              :     case IFN_SCATTER_STORE:
    5333              :     case IFN_MASK_SCATTER_STORE:
    5334              :     case IFN_MASK_LEN_SCATTER_STORE:
    5335              :       return 4;
    5336              : 
    5337              :     case IFN_LEN_STORE:
    5338              :       return 4;
    5339              : 
    5340              :     case IFN_MASK_LEN_STORE:
    5341              :     case IFN_MASK_LEN_STORE_LANES:
    5342              :       return 5;
    5343              : 
    5344              :     default:
    5345              :       return -1;
    5346              :     }
    5347              : }
    5348              : 
    5349              : /* If FN has an alias pointer return its index, otherwise return -1.  */
    5350              : 
    5351              : int
    5352            0 : internal_fn_alias_ptr_index (internal_fn fn)
    5353              : {
    5354            0 :   switch (fn)
    5355              :     {
    5356              :     case IFN_MASK_LOAD:
    5357              :     case IFN_MASK_LEN_LOAD:
    5358              :     case IFN_GATHER_LOAD:
    5359              :     case IFN_MASK_GATHER_LOAD:
    5360              :     case IFN_MASK_LEN_GATHER_LOAD:
    5361              :     case IFN_SCATTER_STORE:
    5362              :     case IFN_MASK_SCATTER_STORE:
    5363              :     case IFN_MASK_LEN_SCATTER_STORE:
    5364              :       return 1;
    5365              : 
    5366            0 :     default:
    5367            0 :       return -1;
    5368              :     }
    5369              : }
    5370              : 
    5371              : /* If FN is a gather/scatter return the index of its offset argument,
    5372              :    otherwise return -1.  */
    5373              : 
    5374              : int
    5375            0 : internal_fn_offset_index (internal_fn fn)
    5376              : {
    5377            0 :   if (!internal_gather_scatter_fn_p (fn))
    5378              :     return -1;
    5379              : 
    5380            0 :   switch (fn)
    5381              :     {
    5382              :     case IFN_GATHER_LOAD:
    5383              :     case IFN_MASK_GATHER_LOAD:
    5384              :     case IFN_MASK_LEN_GATHER_LOAD:
    5385              :     case IFN_SCATTER_STORE:
    5386              :     case IFN_MASK_SCATTER_STORE:
    5387              :     case IFN_MASK_LEN_SCATTER_STORE:
    5388              :       return 2;
    5389              : 
    5390              :     default:
    5391              :       return -1;
    5392              :     }
    5393              : }
    5394              : 
    5395              : /* If FN is a gather/scatter return the index of its scale argument,
    5396              :    otherwise return -1.  */
    5397              : 
    5398              : int
    5399            0 : internal_fn_scale_index (internal_fn fn)
    5400              : {
    5401            0 :   if (!internal_gather_scatter_fn_p (fn))
    5402              :     return -1;
    5403              : 
    5404            0 :   switch (fn)
    5405              :     {
    5406              :     case IFN_GATHER_LOAD:
    5407              :     case IFN_MASK_GATHER_LOAD:
    5408              :     case IFN_MASK_LEN_GATHER_LOAD:
    5409              :     case IFN_SCATTER_STORE:
    5410              :     case IFN_MASK_SCATTER_STORE:
    5411              :     case IFN_MASK_LEN_SCATTER_STORE:
    5412              :       return 3;
    5413              : 
    5414              :     default:
    5415              :       return -1;
    5416              :     }
    5417              : }
    5418              : 
    5419              : /* Store all supported else values for the optab referred to by ICODE
    5420              :    in ELSE_VALS.  The index of the else operand must be specified in
    5421              :    ELSE_INDEX.  */
    5422              : 
    5423              : void
    5424        45983 : get_supported_else_vals (enum insn_code icode, unsigned else_index,
    5425              :                          vec<int> &else_vals)
    5426              : {
    5427        45983 :   const struct insn_data_d *data = &insn_data[icode];
    5428        45983 :   if ((int) else_index >= data->n_operands || (int) else_index == -1)
    5429              :     return;
    5430              : 
    5431        45983 :   machine_mode else_mode = data->operand[else_index].mode;
    5432              : 
    5433        45983 :   else_vals.truncate (0);
    5434              : 
    5435              :   /* For now we only support else values of 0, -1, and "undefined".  */
    5436        45983 :   if (insn_operand_matches (icode, else_index, CONST0_RTX (else_mode)))
    5437        45983 :     else_vals.safe_push (MASK_LOAD_ELSE_ZERO);
    5438              : 
    5439        45983 :   if (insn_operand_matches (icode, else_index, gen_rtx_SCRATCH (else_mode)))
    5440            0 :     else_vals.safe_push (MASK_LOAD_ELSE_UNDEFINED);
    5441              : 
    5442        45983 :   if (GET_MODE_CLASS (else_mode) == MODE_VECTOR_INT
    5443        45983 :       && insn_operand_matches (icode, else_index, CONSTM1_RTX (else_mode)))
    5444            0 :     else_vals.safe_push (MASK_LOAD_ELSE_M1);
    5445              : }
    5446              : 
    5447              : /* Return true if the else value ELSE_VAL (one of MASK_LOAD_ELSE_ZERO,
    5448              :    MASK_LOAD_ELSE_M1, and MASK_LOAD_ELSE_UNDEFINED) is valid fo the optab
    5449              :    referred to by ICODE.  The index of the else operand must be specified
    5450              :    in ELSE_INDEX.  */
    5451              : 
    5452              : bool
    5453            0 : supported_else_val_p (enum insn_code icode, unsigned else_index, int else_val)
    5454              : {
    5455            0 :   if (else_val != MASK_LOAD_ELSE_ZERO && else_val != MASK_LOAD_ELSE_M1
    5456            0 :       && else_val != MASK_LOAD_ELSE_UNDEFINED)
    5457            0 :     gcc_unreachable ();
    5458              : 
    5459            0 :   auto_vec<int> else_vals;
    5460            0 :   get_supported_else_vals (icode, else_index, else_vals);
    5461            0 :   return else_vals.contains (else_val);
    5462            0 : }
    5463              : 
    5464              : /* Return true if the target supports gather load or scatter store function
    5465              :    IFN.  For loads, VECTOR_TYPE is the vector type of the load result,
    5466              :    while for stores it is the vector type of the stored data argument.
    5467              :    MEMORY_ELEMENT_TYPE is the type of the memory elements being loaded
    5468              :    or stored.  OFFSET_VECTOR_TYPE is the vector type that holds the
    5469              :    offset from the shared base address of each loaded or stored element.
    5470              :    SCALE is the amount by which these offsets should be multiplied
    5471              :    *after* they have been extended to address width.
    5472              :    If the target supports the gather load the supported else values
    5473              :    will be added to the vector ELSVAL points to if it is nonzero.  */
    5474              : 
    5475              : bool
    5476      3473672 : internal_gather_scatter_fn_supported_p (internal_fn ifn, tree vector_type,
    5477              :                                         tree memory_element_type,
    5478              :                                         tree offset_vector_type, int scale,
    5479              :                                         vec<int> *elsvals)
    5480              : {
    5481      3473672 :   if (!tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (vector_type)),
    5482      3473672 :                            TYPE_SIZE (memory_element_type)))
    5483              :     return false;
    5484      3473672 :   if (maybe_ne (TYPE_VECTOR_SUBPARTS (vector_type),
    5485      6947344 :                 TYPE_VECTOR_SUBPARTS (offset_vector_type)))
    5486              :     return false;
    5487       952654 :   optab optab = direct_internal_fn_optab (ifn);
    5488       952654 :   insn_code icode = convert_optab_handler (optab, TYPE_MODE (vector_type),
    5489       952654 :                                            TYPE_MODE (offset_vector_type));
    5490       952654 :   int output_ops = internal_load_fn_p (ifn) ? 1 : 0;
    5491       952654 :   bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (offset_vector_type));
    5492       952654 :   bool ok = icode != CODE_FOR_nothing
    5493            0 :     && insn_operand_matches (icode, 2 + output_ops, GEN_INT (unsigned_p))
    5494       952654 :     && insn_operand_matches (icode, 3 + output_ops, GEN_INT (scale));
    5495              : 
    5496       952654 :   if (ok && elsvals)
    5497            0 :     get_supported_else_vals
    5498            0 :       (icode, internal_fn_else_index (IFN_MASK_GATHER_LOAD), *elsvals);
    5499              : 
    5500              :   return ok;
    5501              : }
    5502              : 
    5503              : /* Return true if the target supports a strided load/store function IFN
    5504              :    with VECTOR_TYPE.  If supported and ELSVALS is nonzero the supported else
    5505              :    values will be added to the vector ELSVALS points to.  */
    5506              : 
    5507              : bool
    5508         2540 : internal_strided_fn_supported_p (internal_fn ifn, tree vector_type,
    5509              :                                  vec<int> *elsvals)
    5510              : {
    5511         2540 :   machine_mode mode = TYPE_MODE (vector_type);
    5512         2540 :   optab optab = direct_internal_fn_optab (ifn);
    5513         2540 :   insn_code icode = direct_optab_handler (optab, mode);
    5514              : 
    5515         2540 :   bool ok = icode != CODE_FOR_nothing;
    5516              : 
    5517         2540 :   if (ok && elsvals)
    5518            0 :     get_supported_else_vals (icode, internal_fn_else_index (ifn), *elsvals);
    5519              : 
    5520         2540 :   return ok;
    5521              : }
    5522              : 
    5523              : /* Return true if the target supports IFN_CHECK_{RAW,WAR}_PTRS function IFN
    5524              :    for pointers of type TYPE when the accesses have LENGTH bytes and their
    5525              :    common byte alignment is ALIGN.  */
    5526              : 
    5527              : bool
    5528         5210 : internal_check_ptrs_fn_supported_p (internal_fn ifn, tree type,
    5529              :                                     poly_uint64 length, unsigned int align)
    5530              : {
    5531         5210 :   machine_mode mode = TYPE_MODE (type);
    5532         5210 :   optab optab = direct_internal_fn_optab (ifn);
    5533         5210 :   insn_code icode = direct_optab_handler (optab, mode);
    5534         5210 :   if (icode == CODE_FOR_nothing)
    5535              :     return false;
    5536            0 :   rtx length_rtx = immed_wide_int_const (length, mode);
    5537            0 :   return (insn_operand_matches (icode, 3, length_rtx)
    5538            0 :           && insn_operand_matches (icode, 4, GEN_INT (align)));
    5539              : }
    5540              : 
    5541              : /* Return the supported bias for IFN which is either IFN_{LEN_,MASK_LEN_,}LOAD
    5542              :    or IFN_{LEN_,MASK_LEN_,}STORE.  For now we only support the biases of 0 and
    5543              :    -1 (in case 0 is not an allowable length for {len_,mask_len_}load or
    5544              :    {len_,mask_len_}store). If none of the biases match what the backend
    5545              :    provides, return VECT_PARTIAL_BIAS_UNSUPPORTED.  */
    5546              : 
    5547              : signed char
    5548            0 : internal_len_load_store_bias (internal_fn ifn, machine_mode mode)
    5549              : {
    5550            0 :   optab optab = direct_internal_fn_optab (ifn);
    5551            0 :   insn_code icode = direct_optab_handler (optab, mode);
    5552            0 :   int bias_idx = internal_fn_len_index (ifn) + 1;
    5553              : 
    5554            0 :   if (icode == CODE_FOR_nothing)
    5555              :     {
    5556            0 :       machine_mode mask_mode;
    5557            0 :       if (!targetm.vectorize.get_mask_mode (mode).exists (&mask_mode))
    5558            0 :         return VECT_PARTIAL_BIAS_UNSUPPORTED;
    5559            0 :       if (ifn == IFN_LEN_LOAD)
    5560              :         {
    5561              :           /* Try MASK_LEN_LOAD.  */
    5562            0 :           optab = direct_internal_fn_optab (IFN_MASK_LEN_LOAD);
    5563            0 :           bias_idx = internal_fn_len_index (IFN_MASK_LEN_LOAD) + 1;
    5564              :         }
    5565              :       else
    5566              :         {
    5567              :           /* Try MASK_LEN_STORE.  */
    5568            0 :           optab = direct_internal_fn_optab (IFN_MASK_LEN_STORE);
    5569            0 :           bias_idx = internal_fn_len_index (IFN_MASK_LEN_STORE) + 1;
    5570              :         }
    5571            0 :       icode = convert_optab_handler (optab, mode, mask_mode);
    5572              :     }
    5573              : 
    5574            0 :   if (icode != CODE_FOR_nothing)
    5575              :     {
    5576              :       /* For now we only support biases of 0 or -1.  Try both of them.  */
    5577            0 :       if (insn_operand_matches (icode, bias_idx, GEN_INT (0)))
    5578              :         return 0;
    5579            0 :       if (insn_operand_matches (icode, bias_idx, GEN_INT (-1)))
    5580              :         return -1;
    5581              :     }
    5582              : 
    5583              :   return VECT_PARTIAL_BIAS_UNSUPPORTED;
    5584              : }
    5585              : 
    5586              : /* Expand STMT as though it were a call to internal function FN.  */
    5587              : 
    5588              : void
    5589       255800 : expand_internal_call (internal_fn fn, gcall *stmt)
    5590              : {
    5591       255800 :   internal_fn_expanders[fn] (fn, stmt);
    5592       255800 : }
    5593              : 
    5594              : /* Expand STMT, which is a call to internal function FN.  */
    5595              : 
    5596              : void
    5597       223287 : expand_internal_call (gcall *stmt)
    5598              : {
    5599       223287 :   expand_internal_call (gimple_call_internal_fn (stmt), stmt);
    5600       223287 : }
    5601              : 
    5602              : /* If TYPE is a vector type, return true if IFN is a direct internal
    5603              :    function that is supported for that type.  If TYPE is a scalar type,
    5604              :    return true if IFN is a direct internal function that is supported for
    5605              :    the target's preferred vector version of TYPE.  */
    5606              : 
    5607              : bool
    5608        15601 : vectorized_internal_fn_supported_p (internal_fn ifn, tree type)
    5609              : {
    5610        15601 :   if (VECTOR_MODE_P (TYPE_MODE (type)))
    5611         9350 :     return direct_internal_fn_supported_p (ifn, type, OPTIMIZE_FOR_SPEED);
    5612              : 
    5613         6251 :   scalar_mode smode;
    5614         6251 :   if (VECTOR_TYPE_P (type)
    5615         6251 :       || !is_a <scalar_mode> (TYPE_MODE (type), &smode))
    5616           11 :     return false;
    5617              : 
    5618         6240 :   machine_mode vmode = targetm.vectorize.preferred_simd_mode (smode);
    5619         6240 :   if (VECTOR_MODE_P (vmode))
    5620              :     {
    5621         6203 :       tree vectype = build_vector_type_for_mode (type, vmode);
    5622         6203 :       if (direct_internal_fn_supported_p (ifn, vectype, OPTIMIZE_FOR_SPEED))
    5623              :         return true;
    5624              :     }
    5625              : 
    5626         4230 :   auto_vector_modes vector_modes;
    5627         4230 :   targetm.vectorize.autovectorize_vector_modes (&vector_modes, true);
    5628        26394 :   for (machine_mode base_mode : vector_modes)
    5629        13704 :     if (related_vector_mode (base_mode, smode).exists (&vmode))
    5630              :       {
    5631        12054 :         tree vectype = build_vector_type_for_mode (type, vmode);
    5632        12054 :         if (direct_internal_fn_supported_p (ifn, vectype, OPTIMIZE_FOR_SPEED))
    5633              :           return true;
    5634              :       }
    5635              : 
    5636              :   return false;
    5637         4230 : }
    5638              : 
    5639              : void
    5640            0 : expand_SHUFFLEVECTOR (internal_fn, gcall *)
    5641              : {
    5642            0 :   gcc_unreachable ();
    5643              : }
    5644              : 
    5645              : void
    5646            0 : expand_PHI (internal_fn, gcall *)
    5647              : {
    5648            0 :   gcc_unreachable ();
    5649              : }
    5650              : 
    5651              : void
    5652          279 : expand_SPACESHIP (internal_fn, gcall *stmt)
    5653              : {
    5654          279 :   tree lhs = gimple_call_lhs (stmt);
    5655          279 :   tree rhs1 = gimple_call_arg (stmt, 0);
    5656          279 :   tree rhs2 = gimple_call_arg (stmt, 1);
    5657          279 :   tree rhs3 = gimple_call_arg (stmt, 2);
    5658          279 :   tree type = TREE_TYPE (rhs1);
    5659              : 
    5660          279 :   do_pending_stack_adjust ();
    5661              : 
    5662          279 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    5663          279 :   rtx op1 = expand_normal (rhs1);
    5664          279 :   rtx op2 = expand_normal (rhs2);
    5665          279 :   rtx op3 = expand_normal (rhs3);
    5666              : 
    5667          279 :   class expand_operand ops[4];
    5668          279 :   create_call_lhs_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (lhs)));
    5669          279 :   create_input_operand (&ops[1], op1, TYPE_MODE (type));
    5670          279 :   create_input_operand (&ops[2], op2, TYPE_MODE (type));
    5671          279 :   create_input_operand (&ops[3], op3, TYPE_MODE (TREE_TYPE (rhs3)));
    5672          279 :   insn_code icode = optab_handler (spaceship_optab, TYPE_MODE (type));
    5673          279 :   expand_insn (icode, 4, ops);
    5674          279 :   assign_call_lhs (lhs, target, &ops[0]);
    5675          279 : }
    5676              : 
    5677              : void
    5678            0 : expand_ASSUME (internal_fn, gcall *)
    5679              : {
    5680            0 : }
    5681              : 
    5682              : void
    5683            0 : expand_MASK_CALL (internal_fn, gcall *)
    5684              : {
    5685              :   /* This IFN should only exist between ifcvt and vect passes.  */
    5686            0 :   gcc_unreachable ();
    5687              : }
    5688              : 
    5689              : void
    5690         1576 : expand_MULBITINT (internal_fn, gcall *stmt)
    5691              : {
    5692         1576 :   rtx_mode_t args[6];
    5693        11032 :   for (int i = 0; i < 6; i++)
    5694         9456 :     args[i] = rtx_mode_t (expand_normal (gimple_call_arg (stmt, i)),
    5695        18912 :                           (i & 1) ? SImode : ptr_mode);
    5696         1576 :   rtx fun = init_one_libfunc ("__mulbitint3");
    5697         1576 :   emit_library_call_value_1 (0, fun, NULL_RTX, LCT_NORMAL, VOIDmode, 6, args);
    5698         1576 : }
    5699              : 
    5700              : void
    5701          147 : expand_DIVMODBITINT (internal_fn, gcall *stmt)
    5702              : {
    5703          147 :   rtx_mode_t args[8];
    5704         1323 :   for (int i = 0; i < 8; i++)
    5705         1176 :     args[i] = rtx_mode_t (expand_normal (gimple_call_arg (stmt, i)),
    5706         2352 :                           (i & 1) ? SImode : ptr_mode);
    5707          147 :   rtx fun = init_one_libfunc ("__divmodbitint4");
    5708          147 :   emit_library_call_value_1 (0, fun, NULL_RTX, LCT_NORMAL, VOIDmode, 8, args);
    5709          147 : }
    5710              : 
    5711              : void
    5712          179 : expand_FLOATTOBITINT (internal_fn, gcall *stmt)
    5713              : {
    5714          179 :   machine_mode mode = TYPE_MODE (TREE_TYPE (gimple_call_arg (stmt, 2)));
    5715          179 :   rtx arg0 = expand_normal (gimple_call_arg (stmt, 0));
    5716          179 :   rtx arg1 = expand_normal (gimple_call_arg (stmt, 1));
    5717          179 :   rtx arg2 = expand_normal (gimple_call_arg (stmt, 2));
    5718          179 :   const char *mname = GET_MODE_NAME (mode);
    5719          179 :   unsigned mname_len = strlen (mname);
    5720          179 :   int len = 12 + mname_len;
    5721          179 :   if (DECIMAL_FLOAT_MODE_P (mode))
    5722           57 :     len += 4;
    5723          179 :   char *libfunc_name = XALLOCAVEC (char, len);
    5724          179 :   char *p = libfunc_name;
    5725          179 :   const char *q;
    5726          179 :   if (DECIMAL_FLOAT_MODE_P (mode))
    5727              :     {
    5728              : #if ENABLE_DECIMAL_BID_FORMAT
    5729           57 :       memcpy (p, "__bid_fix", 9);
    5730              : #else
    5731              :       memcpy (p, "__dpd_fix", 9);
    5732              : #endif
    5733           57 :       p += 9;
    5734              :     }
    5735              :   else
    5736              :     {
    5737          122 :       memcpy (p, "__fix", 5);
    5738          122 :       p += 5;
    5739              :     }
    5740          537 :   for (q = mname; *q; q++)
    5741          358 :     *p++ = TOLOWER (*q);
    5742          179 :   memcpy (p, "bitint", 7);
    5743          179 :   rtx fun = init_one_libfunc (libfunc_name);
    5744          179 :   emit_library_call (fun, LCT_NORMAL, VOIDmode, arg0, ptr_mode, arg1,
    5745              :                      SImode, arg2, mode);
    5746          179 : }
    5747              : 
    5748              : void
    5749          138 : expand_BITINTTOFLOAT (internal_fn, gcall *stmt)
    5750              : {
    5751          138 :   tree lhs = gimple_call_lhs (stmt);
    5752          138 :   if (!lhs)
    5753              :     return;
    5754          138 :   machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
    5755          138 :   rtx arg0 = expand_normal (gimple_call_arg (stmt, 0));
    5756          138 :   rtx arg1 = expand_normal (gimple_call_arg (stmt, 1));
    5757          138 :   const char *mname = GET_MODE_NAME (mode);
    5758          138 :   unsigned mname_len = strlen (mname);
    5759          138 :   int len = 14 + mname_len;
    5760          138 :   if (DECIMAL_FLOAT_MODE_P (mode))
    5761           17 :     len += 4;
    5762          138 :   char *libfunc_name = XALLOCAVEC (char, len);
    5763          138 :   char *p = libfunc_name;
    5764          138 :   const char *q;
    5765          138 :   if (DECIMAL_FLOAT_MODE_P (mode))
    5766              :     {
    5767              : #if ENABLE_DECIMAL_BID_FORMAT
    5768           17 :       memcpy (p, "__bid_floatbitint", 17);
    5769              : #else
    5770              :       memcpy (p, "__dpd_floatbitint", 17);
    5771              : #endif
    5772           17 :       p += 17;
    5773              :     }
    5774              :   else
    5775              :     {
    5776          121 :       memcpy (p, "__floatbitint", 13);
    5777          121 :       p += 13;
    5778              :     }
    5779          414 :   for (q = mname; *q; q++)
    5780          276 :     *p++ = TOLOWER (*q);
    5781          138 :   *p = '\0';
    5782          138 :   rtx fun = init_one_libfunc (libfunc_name);
    5783          138 :   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    5784          138 :   rtx val = emit_library_call_value (fun, target, LCT_PURE, mode,
    5785              :                                      arg0, ptr_mode, arg1, SImode);
    5786          138 :   if (val != target)
    5787            0 :     emit_move_insn (target, val);
    5788              : }
    5789              : 
    5790              : static bool
    5791         5735 : expand_bitquery (internal_fn fn, gcall *stmt)
    5792              : {
    5793         5735 :   tree lhs = gimple_call_lhs (stmt);
    5794         5735 :   if (lhs == NULL_TREE)
    5795              :     return false;
    5796         5729 :   tree arg = gimple_call_arg (stmt, 0);
    5797         5729 :   if (TREE_CODE (arg) == INTEGER_CST)
    5798              :     {
    5799            0 :       tree ret = fold_const_call (as_combined_fn (fn), TREE_TYPE (arg), arg);
    5800            0 :       gcc_checking_assert (ret && TREE_CODE (ret) == INTEGER_CST);
    5801            0 :       expand_assignment (lhs, ret, false);
    5802            0 :       return false;
    5803              :     }
    5804              :   return true;
    5805              : }
    5806              : 
    5807              : void
    5808            1 : expand_CLRSB (internal_fn fn, gcall *stmt)
    5809              : {
    5810            1 :   if (expand_bitquery (fn, stmt))
    5811            0 :     expand_unary_optab_fn (fn, stmt, clrsb_optab);
    5812            1 : }
    5813              : 
    5814              : void
    5815         4246 : expand_CLZ (internal_fn fn, gcall *stmt)
    5816              : {
    5817         4246 :   if (expand_bitquery (fn, stmt))
    5818         4245 :     expand_unary_optab_fn (fn, stmt, clz_optab);
    5819         4246 : }
    5820              : 
    5821              : void
    5822          974 : expand_CTZ (internal_fn fn, gcall *stmt)
    5823              : {
    5824          974 :   if (expand_bitquery (fn, stmt))
    5825          973 :     expand_unary_optab_fn (fn, stmt, ctz_optab);
    5826          974 : }
    5827              : 
    5828              : void
    5829          132 : expand_FFS (internal_fn fn, gcall *stmt)
    5830              : {
    5831          132 :   if (expand_bitquery (fn, stmt))
    5832          131 :     expand_unary_optab_fn (fn, stmt, ffs_optab);
    5833          132 : }
    5834              : 
    5835              : void
    5836          165 : expand_PARITY (internal_fn fn, gcall *stmt)
    5837              : {
    5838          165 :   if (expand_bitquery (fn, stmt))
    5839          164 :     expand_unary_optab_fn (fn, stmt, parity_optab);
    5840          165 : }
    5841              : 
    5842              : void
    5843          217 : expand_POPCOUNT (internal_fn fn, gcall *stmt)
    5844              : {
    5845          217 :   if (!expand_bitquery (fn, stmt))
    5846              :     return;
    5847          216 :   if (gimple_call_num_args (stmt) == 1)
    5848              :     {
    5849          208 :       expand_unary_optab_fn (fn, stmt, popcount_optab);
    5850          208 :       return;
    5851              :     }
    5852              :   /* If .POPCOUNT call has 2 arguments, match_single_bit_test marked it
    5853              :      because the result is only used in an equality comparison against 1.
    5854              :      Use rtx costs in that case to determine if .POPCOUNT (arg) == 1
    5855              :      or (arg ^ (arg - 1)) > arg - 1 is cheaper.
    5856              :      If .POPCOUNT second argument is 0, we additionally know that arg
    5857              :      is non-zero, so use arg & (arg - 1) == 0 instead.
    5858              :      If .POPCOUNT second argument is -1, the comparison was either `<= 1`
    5859              :      or `> 1`.  */
    5860            8 :   bool speed_p = optimize_insn_for_speed_p ();
    5861            8 :   tree lhs = gimple_call_lhs (stmt);
    5862            8 :   tree arg = gimple_call_arg (stmt, 0);
    5863            8 :   bool nonzero_arg = integer_zerop (gimple_call_arg (stmt, 1));
    5864            8 :   bool was_le = integer_minus_onep (gimple_call_arg (stmt, 1));
    5865            8 :   if (was_le)
    5866            0 :     nonzero_arg = true;
    5867            8 :   tree type = TREE_TYPE (arg);
    5868            8 :   machine_mode mode = TYPE_MODE (type);
    5869            8 :   machine_mode lhsmode = TYPE_MODE (TREE_TYPE (lhs));
    5870            8 :   do_pending_stack_adjust ();
    5871            8 :   start_sequence ();
    5872            8 :   expand_unary_optab_fn (fn, stmt, popcount_optab);
    5873            8 :   rtx_insn *popcount_insns = end_sequence ();
    5874            8 :   start_sequence ();
    5875            8 :   rtx plhs = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    5876            8 :   rtx pcmp = emit_store_flag (NULL_RTX, EQ, plhs, const1_rtx, lhsmode, 0, 0);
    5877            8 :   if (pcmp == NULL_RTX)
    5878              :     {
    5879            0 :     fail:
    5880            0 :       end_sequence ();
    5881            0 :       emit_insn (popcount_insns);
    5882            0 :       return;
    5883              :     }
    5884            8 :   rtx_insn *popcount_cmp_insns = end_sequence ();
    5885            8 :   start_sequence ();
    5886            8 :   rtx op0 = expand_normal (arg);
    5887            8 :   rtx argm1 = expand_simple_binop (mode, PLUS, op0, constm1_rtx, NULL_RTX,
    5888              :                                    1, OPTAB_WIDEN);
    5889            8 :   if (argm1 == NULL_RTX)
    5890            0 :     goto fail;
    5891           12 :   rtx argxorargm1 = expand_simple_binop (mode, nonzero_arg ? AND : XOR, op0,
    5892              :                                          argm1, NULL_RTX, 1, OPTAB_WIDEN);
    5893            8 :   if (argxorargm1 == NULL_RTX)
    5894            0 :     goto fail;
    5895            8 :   rtx cmp;
    5896            8 :   if (nonzero_arg)
    5897            4 :     cmp = emit_store_flag (NULL_RTX, EQ, argxorargm1, const0_rtx, mode, 1, 1);
    5898              :   else
    5899            4 :     cmp = emit_store_flag (NULL_RTX, GTU, argxorargm1, argm1, mode, 1, 1);
    5900            8 :   if (cmp == NULL_RTX)
    5901            0 :     goto fail;
    5902            8 :   rtx_insn *cmp_insns = end_sequence ();
    5903            8 :   unsigned popcount_cost = (seq_cost (popcount_insns, speed_p)
    5904            8 :                             + seq_cost (popcount_cmp_insns, speed_p));
    5905            8 :   unsigned cmp_cost = seq_cost (cmp_insns, speed_p);
    5906              : 
    5907            8 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5908            0 :     fprintf(dump_file, "popcount == 1: popcount cost: %u; cmp cost: %u\n",
    5909              :             popcount_cost, cmp_cost);
    5910              : 
    5911            8 :   if (popcount_cost <= cmp_cost)
    5912            8 :     emit_insn (popcount_insns);
    5913              :   else
    5914              :     {
    5915            0 :       start_sequence ();
    5916            0 :       emit_insn (cmp_insns);
    5917            0 :       plhs = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
    5918            0 :       if (GET_MODE (cmp) != GET_MODE (plhs))
    5919            0 :         cmp = convert_to_mode (GET_MODE (plhs), cmp, 1);
    5920              :       /* For `<= 1`, we need to produce `2 - cmp` or `cmp ? 1 : 2` as that
    5921              :          then gets compared against 1 and we need the false case to be 2.  */
    5922            0 :       if (was_le)
    5923              :         {
    5924            0 :           cmp = expand_simple_binop (GET_MODE (cmp), MINUS, const2_rtx,
    5925              :                                      cmp, NULL_RTX, 1, OPTAB_WIDEN);
    5926            0 :           if (!cmp)
    5927            0 :             goto fail;
    5928              :         }
    5929            0 :       emit_move_insn (plhs, cmp);
    5930            0 :       rtx_insn *all_insns = end_sequence ();
    5931            0 :       emit_insn (all_insns);
    5932              :     }
    5933              : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.