LCOV - code coverage report
Current view: top level - gcc - tree-ssa-forwprop.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 94.4 % 3086 2914
Test Date: 2026-02-28 14:20:25 Functions: 100.0 % 63 63
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Forward propagation of expressions for single use variables.
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "rtl.h"
      25              : #include "tree.h"
      26              : #include "gimple.h"
      27              : #include "cfghooks.h"
      28              : #include "tree-pass.h"
      29              : #include "ssa.h"
      30              : #include "expmed.h"
      31              : #include "optabs-query.h"
      32              : #include "gimple-pretty-print.h"
      33              : #include "fold-const.h"
      34              : #include "stor-layout.h"
      35              : #include "gimple-iterator.h"
      36              : #include "gimple-fold.h"
      37              : #include "tree-eh.h"
      38              : #include "gimplify.h"
      39              : #include "gimplify-me.h"
      40              : #include "tree-cfg.h"
      41              : #include "expr.h"
      42              : #include "tree-dfa.h"
      43              : #include "tree-ssa-propagate.h"
      44              : #include "tree-ssa-dom.h"
      45              : #include "tree-ssa-strlen.h"
      46              : #include "builtins.h"
      47              : #include "tree-cfgcleanup.h"
      48              : #include "cfganal.h"
      49              : #include "optabs-tree.h"
      50              : #include "insn-config.h"
      51              : #include "recog.h"
      52              : #include "cfgloop.h"
      53              : #include "tree-vectorizer.h"
      54              : #include "tree-vector-builder.h"
      55              : #include "vec-perm-indices.h"
      56              : #include "internal-fn.h"
      57              : #include "cgraph.h"
      58              : #include "tree-ssa.h"
      59              : #include "gimple-range.h"
      60              : #include "tree-ssa-dce.h"
      61              : 
      62              : /* This pass propagates the RHS of assignment statements into use
      63              :    sites of the LHS of the assignment.  It's basically a specialized
      64              :    form of tree combination.   It is hoped all of this can disappear
      65              :    when we have a generalized tree combiner.
      66              : 
      67              :    One class of common cases we handle is forward propagating a single use
      68              :    variable into a COND_EXPR.
      69              : 
      70              :      bb0:
      71              :        x = a COND b;
      72              :        if (x) goto ... else goto ...
      73              : 
      74              :    Will be transformed into:
      75              : 
      76              :      bb0:
      77              :        if (a COND b) goto ... else goto ...
      78              : 
      79              :    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
      80              : 
      81              :    Or (assuming c1 and c2 are constants):
      82              : 
      83              :      bb0:
      84              :        x = a + c1;
      85              :        if (x EQ/NEQ c2) goto ... else goto ...
      86              : 
      87              :    Will be transformed into:
      88              : 
      89              :      bb0:
      90              :         if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
      91              : 
      92              :    Similarly for x = a - c1.
      93              : 
      94              :    Or
      95              : 
      96              :      bb0:
      97              :        x = !a
      98              :        if (x) goto ... else goto ...
      99              : 
     100              :    Will be transformed into:
     101              : 
     102              :      bb0:
     103              :         if (a == 0) goto ... else goto ...
     104              : 
     105              :    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
     106              :    For these cases, we propagate A into all, possibly more than one,
     107              :    COND_EXPRs that use X.
     108              : 
     109              :    Or
     110              : 
     111              :      bb0:
     112              :        x = (typecast) a
     113              :        if (x) goto ... else goto ...
     114              : 
     115              :    Will be transformed into:
     116              : 
     117              :      bb0:
     118              :         if (a != 0) goto ... else goto ...
     119              : 
     120              :    (Assuming a is an integral type and x is a boolean or x is an
     121              :     integral and a is a boolean.)
     122              : 
     123              :    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
     124              :    For these cases, we propagate A into all, possibly more than one,
     125              :    COND_EXPRs that use X.
     126              : 
     127              :    In addition to eliminating the variable and the statement which assigns
     128              :    a value to the variable, we may be able to later thread the jump without
     129              :    adding insane complexity in the dominator optimizer.
     130              : 
     131              :    Also note these transformations can cascade.  We handle this by having
     132              :    a worklist of COND_EXPR statements to examine.  As we make a change to
     133              :    a statement, we put it back on the worklist to examine on the next
     134              :    iteration of the main loop.
     135              : 
     136              :    A second class of propagation opportunities arises for ADDR_EXPR
     137              :    nodes.
     138              : 
     139              :      ptr = &x->y->z;
     140              :      res = *ptr;
     141              : 
     142              :    Will get turned into
     143              : 
     144              :      res = x->y->z;
     145              : 
     146              :    Or
     147              :      ptr = (type1*)&type2var;
     148              :      res = *ptr
     149              : 
     150              :    Will get turned into (if type1 and type2 are the same size
     151              :    and neither have volatile on them):
     152              :      res = VIEW_CONVERT_EXPR<type1>(type2var)
     153              : 
     154              :    Or
     155              : 
     156              :      ptr = &x[0];
     157              :      ptr2 = ptr + <constant>;
     158              : 
     159              :    Will get turned into
     160              : 
     161              :      ptr2 = &x[constant/elementsize];
     162              : 
     163              :   Or
     164              : 
     165              :      ptr = &x[0];
     166              :      offset = index * element_size;
     167              :      offset_p = (pointer) offset;
     168              :      ptr2 = ptr + offset_p
     169              : 
     170              :   Will get turned into:
     171              : 
     172              :      ptr2 = &x[index];
     173              : 
     174              :   Or
     175              :     ssa = (int) decl
     176              :     res = ssa & 1
     177              : 
     178              :   Provided that decl has known alignment >= 2, will get turned into
     179              : 
     180              :     res = 0
     181              : 
     182              :   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
     183              :   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
     184              :   {NOT_EXPR,NEG_EXPR}.
     185              : 
     186              :    This will (of course) be extended as other needs arise.  */
     187              : 
     188              : /* Data structure that contains simplifiable vectorized permute sequences.
     189              :    See recognise_vec_perm_simplify_seq () for a description of the sequence.  */
     190              : 
     191              : struct _vec_perm_simplify_seq
     192              : {
     193              :   /* Defining stmts of vectors in the sequence.  */
     194              :   gassign *v_1_stmt;
     195              :   gassign *v_2_stmt;
     196              :   gassign *v_x_stmt;
     197              :   gassign *v_y_stmt;
     198              :   /* Final permute statement.  */
     199              :   gassign *stmt;
     200              :   /* New selector indices for stmt.  */
     201              :   tree new_sel;
     202              :   /* Elements of each vector and selector.  */
     203              :   unsigned int nelts;
     204              : };
     205              : typedef struct _vec_perm_simplify_seq *vec_perm_simplify_seq;
     206              : 
     207              : static bool forward_propagate_addr_expr (tree, tree, bool);
     208              : 
     209              : /* Set to true if we delete dead edges during the optimization.  */
     210              : static bool cfg_changed;
     211              : 
     212              : static tree rhs_to_tree (tree type, gimple *stmt);
     213              : 
     214              : static bitmap to_purge;
     215              : 
     216              : /* Const-and-copy lattice.  */
     217              : static vec<tree> lattice;
     218              : 
     219              : /* Set the lattice entry for NAME to VAL.  */
     220              : static void
     221     32089623 : fwprop_set_lattice_val (tree name, tree val)
     222              : {
     223     32089623 :   if (TREE_CODE (name) == SSA_NAME)
     224              :     {
     225     32089623 :       if (SSA_NAME_VERSION (name) >= lattice.length ())
     226              :         {
     227        32193 :           lattice.reserve (num_ssa_names - lattice.length ());
     228        21462 :           lattice.quick_grow_cleared (num_ssa_names);
     229              :         }
     230     32089623 :       lattice[SSA_NAME_VERSION (name)] = val;
     231              :       /* As this now constitutes a copy duplicate points-to
     232              :          and range info appropriately.  */
     233     32089623 :       if (TREE_CODE (val) == SSA_NAME)
     234     31647279 :         maybe_duplicate_ssa_info_at_copy (name, val);
     235              :     }
     236     32089623 : }
     237              : 
     238              : /* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
     239              : static void
     240       903681 : fwprop_invalidate_lattice (tree name)
     241              : {
     242       903681 :   if (name
     243       901337 :       && TREE_CODE (name) == SSA_NAME
     244      1804891 :       && SSA_NAME_VERSION (name) < lattice.length ())
     245       901181 :     lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
     246       903681 : }
     247              : 
     248              : /* Get the statement we can propagate from into NAME skipping
     249              :    trivial copies.  Returns the statement which defines the
     250              :    propagation source or NULL_TREE if there is no such one.
     251              :    If SINGLE_USE_ONLY is set considers only sources which have
     252              :    a single use chain up to NAME.  If SINGLE_USE_P is non-null,
     253              :    it is set to whether the chain to NAME is a single use chain
     254              :    or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
     255              : 
     256              : static gimple *
     257     27713632 : get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
     258              : {
     259     27713632 :   bool single_use = true;
     260              : 
     261     27714616 :   do {
     262     27714124 :     gimple *def_stmt = SSA_NAME_DEF_STMT (name);
     263              : 
     264     27714124 :     if (!has_single_use (name))
     265              :       {
     266     15128337 :         single_use = false;
     267     15128337 :         if (single_use_only)
     268              :           return NULL;
     269              :       }
     270              : 
     271              :     /* If name is defined by a PHI node or is the default def, bail out.  */
     272     27712713 :     if (!is_gimple_assign (def_stmt))
     273              :       return NULL;
     274              : 
     275              :     /* If def_stmt is a simple copy, continue looking.  */
     276     19530931 :     if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
     277          492 :       name = gimple_assign_rhs1 (def_stmt);
     278              :     else
     279              :       {
     280     19530439 :         if (!single_use_only && single_use_p)
     281     19237818 :           *single_use_p = single_use;
     282              : 
     283     19530439 :         return def_stmt;
     284              :       }
     285          492 :   } while (1);
     286              : }
     287              : 
     288              : /* Checks if the destination ssa name in DEF_STMT can be used as
     289              :    propagation source.  Returns true if so, otherwise false.  */
     290              : 
     291              : static bool
     292     27454814 : can_propagate_from (gimple *def_stmt)
     293              : {
     294     27454814 :   gcc_assert (is_gimple_assign (def_stmt));
     295              : 
     296              :   /* If the rhs has side-effects we cannot propagate from it.  */
     297     27454814 :   if (gimple_has_volatile_ops (def_stmt))
     298              :     return false;
     299              : 
     300              :   /* If the rhs is a load we cannot propagate from it.  */
     301     26863964 :   if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
     302     26863964 :       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
     303              :     return false;
     304              : 
     305              :   /* Constants can be always propagated.  */
     306     13369300 :   if (gimple_assign_single_p (def_stmt)
     307     13369300 :       && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
     308              :     return true;
     309              : 
     310              :   /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
     311     13369300 :   if (stmt_references_abnormal_ssa_name (def_stmt))
     312              :     return false;
     313              : 
     314              :   /* If the definition is a conversion of a pointer to a function type,
     315              :      then we cannot apply optimizations as some targets require
     316              :      function pointers to be canonicalized and in this case this
     317              :      optimization could eliminate a necessary canonicalization.  */
     318     13368615 :   if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
     319              :     {
     320      3224591 :       tree rhs = gimple_assign_rhs1 (def_stmt);
     321      3224591 :       if (FUNCTION_POINTER_TYPE_P (TREE_TYPE (rhs)))
     322              :         return false;
     323              :     }
     324              : 
     325              :   return true;
     326              : }
     327              : 
     328              : /* Remove a chain of dead statements starting at the definition of
     329              :    NAME.  The chain is linked via the first operand of the defining statements.
     330              :    If NAME was replaced in its only use then this function can be used
     331              :    to clean up dead stmts.  The function handles already released SSA
     332              :    names gracefully.  */
     333              : 
     334              : static void
     335       235363 : remove_prop_source_from_use (tree name)
     336              : {
     337       296105 :   gimple_stmt_iterator gsi;
     338       296105 :   gimple *stmt;
     339              : 
     340       296105 :   do {
     341       296105 :     basic_block bb;
     342              : 
     343       296105 :     if (SSA_NAME_IN_FREE_LIST (name)
     344       296062 :         || SSA_NAME_IS_DEFAULT_DEF (name)
     345       588559 :         || !has_zero_uses (name))
     346              :       break;
     347              : 
     348        61199 :     stmt = SSA_NAME_DEF_STMT (name);
     349        61199 :     if (gimple_code (stmt) == GIMPLE_PHI
     350        61199 :         || gimple_has_side_effects (stmt))
     351              :       break;
     352              : 
     353        61199 :     bb = gimple_bb (stmt);
     354        61199 :     gsi = gsi_for_stmt (stmt);
     355        61199 :     unlink_stmt_vdef (stmt);
     356        61199 :     if (gsi_remove (&gsi, true))
     357            6 :       bitmap_set_bit (to_purge, bb->index);
     358        61199 :     fwprop_invalidate_lattice (gimple_get_lhs (stmt));
     359        61199 :     release_defs (stmt);
     360              : 
     361        61199 :     name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
     362        61199 :   } while (name && TREE_CODE (name) == SSA_NAME);
     363              : 
     364       235363 : }
     365              : 
     366              : /* Return the rhs of a gassign *STMT in a form of a single tree,
     367              :    converted to type TYPE.
     368              : 
     369              :    This should disappear, but is needed so we can combine expressions and use
     370              :    the fold() interfaces. Long term, we need to develop folding and combine
     371              :    routines that deal with gimple exclusively . */
     372              : 
     373              : static tree
     374      7319329 : rhs_to_tree (tree type, gimple *stmt)
     375              : {
     376      7319329 :   location_t loc = gimple_location (stmt);
     377      7319329 :   enum tree_code code = gimple_assign_rhs_code (stmt);
     378      7319329 :   switch (get_gimple_rhs_class (code))
     379              :     {
     380        11980 :     case GIMPLE_TERNARY_RHS:
     381        11980 :       return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
     382              :                               gimple_assign_rhs2 (stmt),
     383        11980 :                               gimple_assign_rhs3 (stmt));
     384      5021015 :     case GIMPLE_BINARY_RHS:
     385      5021015 :       return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
     386      5021015 :                               gimple_assign_rhs2 (stmt));
     387      2026741 :     case GIMPLE_UNARY_RHS:
     388      2026741 :       return build1 (code, type, gimple_assign_rhs1 (stmt));
     389       259593 :     case GIMPLE_SINGLE_RHS:
     390       259593 :       return gimple_assign_rhs1 (stmt);
     391            0 :     default:
     392            0 :       gcc_unreachable ();
     393              :     }
     394              : }
     395              : 
     396              : /* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
     397              :    the folded result in a form suitable for COND_EXPR_COND or
     398              :    NULL_TREE, if there is no suitable simplified form.  If
     399              :    INVARIANT_ONLY is true only gimple_min_invariant results are
     400              :    considered simplified.  */
     401              : 
     402              : static tree
     403      8239151 : combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
     404              :                         tree op0, tree op1, bool invariant_only)
     405              : {
     406      8239151 :   tree t;
     407              : 
     408      8239151 :   gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
     409              : 
     410      8239151 :   fold_defer_overflow_warnings ();
     411      8239151 :   t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
     412      8239151 :   if (!t)
     413              :     {
     414      4680362 :       fold_undefer_overflow_warnings (false, NULL, 0);
     415      4680362 :       return NULL_TREE;
     416              :     }
     417              : 
     418              :   /* Require that we got a boolean type out if we put one in.  */
     419      3558789 :   gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
     420              : 
     421              :   /* Canonicalize the combined condition for use in a COND_EXPR.  */
     422      3558789 :   t = canonicalize_cond_expr_cond (t);
     423              : 
     424              :   /* Bail out if we required an invariant but didn't get one.  */
     425      3558789 :   if (!t || (invariant_only && !is_gimple_min_invariant (t)))
     426              :     {
     427      3325526 :       fold_undefer_overflow_warnings (false, NULL, 0);
     428      3325526 :       return NULL_TREE;
     429              :     }
     430              : 
     431       233263 :   bool nowarn = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
     432       233263 :   fold_undefer_overflow_warnings (!nowarn, stmt, 0);
     433              : 
     434       233263 :   return t;
     435              : }
     436              : 
     437              : /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
     438              :    of its operand.  Return a new comparison tree or NULL_TREE if there
     439              :    were no simplifying combines.  */
     440              : 
     441              : static tree
     442     21773655 : forward_propagate_into_comparison_1 (gimple *stmt,
     443              :                                      enum tree_code code, tree type,
     444              :                                      tree op0, tree op1)
     445              : {
     446     21773655 :   tree tmp = NULL_TREE;
     447     21773655 :   tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
     448     21773655 :   bool single_use0_p = false, single_use1_p = false;
     449              : 
     450              :   /* For comparisons use the first operand, that is likely to
     451              :      simplify comparisons against constants.  */
     452     21773655 :   if (TREE_CODE (op0) == SSA_NAME)
     453              :     {
     454     21734408 :       gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
     455     21734408 :       if (def_stmt && can_propagate_from (def_stmt))
     456              :         {
     457      5553976 :           enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
     458      5553976 :           bool invariant_only_p = !single_use0_p;
     459              : 
     460      5553976 :           rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
     461              : 
     462              :           /* Always combine comparisons or conversions from booleans.  */
     463      5553976 :           if (TREE_CODE (op1) == INTEGER_CST
     464      5553976 :               && ((CONVERT_EXPR_CODE_P (def_code)
     465       881500 :                    && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
     466              :                       == BOOLEAN_TYPE)
     467      3576283 :                   || TREE_CODE_CLASS (def_code) == tcc_comparison))
     468              :             invariant_only_p = false;
     469              : 
     470      5553976 :           tmp = combine_cond_expr_cond (stmt, code, type,
     471              :                                         rhs0, op1, invariant_only_p);
     472      5553976 :           if (tmp)
     473              :             return tmp;
     474              :         }
     475              :     }
     476              : 
     477              :   /* If that wasn't successful, try the second operand.  */
     478     21548566 :   if (TREE_CODE (op1) == SSA_NAME)
     479              :     {
     480      5434936 :       gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
     481      5434936 :       if (def_stmt && can_propagate_from (def_stmt))
     482              :         {
     483      1765353 :           rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
     484      3530706 :           tmp = combine_cond_expr_cond (stmt, code, type,
     485      1765353 :                                         op0, rhs1, !single_use1_p);
     486      1765353 :           if (tmp)
     487              :             return tmp;
     488              :         }
     489              :     }
     490              : 
     491              :   /* If that wasn't successful either, try both operands.  */
     492     21542332 :   if (rhs0 != NULL_TREE
     493     21542332 :       && rhs1 != NULL_TREE)
     494       919822 :     tmp = combine_cond_expr_cond (stmt, code, type,
     495              :                                   rhs0, rhs1,
     496       919822 :                                   !(single_use0_p && single_use1_p));
     497              : 
     498              :   return tmp;
     499              : }
     500              : 
     501              : /* Propagate from the ssa name definition statements of the assignment
     502              :    from a comparison at *GSI into the conditional if that simplifies it.
     503              :    Returns true if the stmt was modified.  */
     504              : 
     505              : static bool
     506      2515964 : forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
     507              : {
     508      2515964 :   gimple *stmt = gsi_stmt (*gsi);
     509      2515964 :   tree tmp;
     510      2515964 :   tree type = TREE_TYPE (gimple_assign_lhs (stmt));
     511      2515964 :   tree rhs1 = gimple_assign_rhs1 (stmt);
     512      2515964 :   tree rhs2 = gimple_assign_rhs2 (stmt);
     513              : 
     514              :   /* Combine the comparison with defining statements.  */
     515      2515964 :   tmp = forward_propagate_into_comparison_1 (stmt,
     516              :                                              gimple_assign_rhs_code (stmt),
     517              :                                              type, rhs1, rhs2);
     518      2515964 :   if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
     519              :     {
     520         7043 :       if (dump_file)
     521              :         {
     522            0 :           fprintf (dump_file, "  Replaced '");
     523            0 :           print_gimple_expr (dump_file, stmt, 0);
     524            0 :           fprintf (dump_file, "' with '");
     525            0 :           print_generic_expr (dump_file, tmp);
     526            0 :           fprintf (dump_file, "'\n");
     527              :         }
     528         7043 :       gimple_assign_set_rhs_from_tree (gsi, tmp);
     529         7043 :       fold_stmt (gsi);
     530         7043 :       update_stmt (gsi_stmt (*gsi));
     531              : 
     532         7043 :       if (TREE_CODE (rhs1) == SSA_NAME)
     533         7043 :         remove_prop_source_from_use (rhs1);
     534         7043 :       if (TREE_CODE (rhs2) == SSA_NAME)
     535         2882 :         remove_prop_source_from_use (rhs2);
     536         7043 :       return true;
     537              :     }
     538              : 
     539              :   return false;
     540              : }
     541              : 
     542              : /* Propagate from the ssa name definition statements of COND_EXPR
     543              :    in GIMPLE_COND statement STMT into the conditional if that simplifies it.
     544              :    Returns zero if no statement was changed, one if there were
     545              :    changes and two if cfg_cleanup needs to run.  */
     546              : 
     547              : static int
     548     19257691 : forward_propagate_into_gimple_cond (gcond *stmt)
     549              : {
     550     19257691 :   tree tmp;
     551     19257691 :   enum tree_code code = gimple_cond_code (stmt);
     552     19257691 :   tree rhs1 = gimple_cond_lhs (stmt);
     553     19257691 :   tree rhs2 = gimple_cond_rhs (stmt);
     554              : 
     555              :   /* GIMPLE_COND will always be a comparison.  */
     556     19257691 :   gcc_assert (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison);
     557              : 
     558     19257691 :   tmp = forward_propagate_into_comparison_1 (stmt, code,
     559              :                                              boolean_type_node,
     560              :                                              rhs1, rhs2);
     561     19257691 :   if (tmp
     562     19257691 :       && is_gimple_condexpr_for_cond (tmp))
     563              :     {
     564       219885 :       if (dump_file)
     565              :         {
     566            9 :           fprintf (dump_file, "  Replaced '");
     567            9 :           print_gimple_expr (dump_file, stmt, 0);
     568            9 :           fprintf (dump_file, "' with '");
     569            9 :           print_generic_expr (dump_file, tmp);
     570            9 :           fprintf (dump_file, "'\n");
     571              :         }
     572              : 
     573       219885 :       gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
     574       219885 :       update_stmt (stmt);
     575              : 
     576       219885 :       if (TREE_CODE (rhs1) == SSA_NAME)
     577       219885 :         remove_prop_source_from_use (rhs1);
     578       219885 :       if (TREE_CODE (rhs2) == SSA_NAME)
     579         5552 :         remove_prop_source_from_use (rhs2);
     580       219885 :       return is_gimple_min_invariant (tmp) ? 2 : 1;
     581              :     }
     582              : 
     583     19037806 :   if (canonicalize_bool_cond (stmt, gimple_bb (stmt)))
     584              :     return 1;
     585              : 
     586              :   return 0;
     587              : }
     588              : 
     589              : /* We've just substituted an ADDR_EXPR into stmt.  Update all the
     590              :    relevant data structures to match.  */
     591              : 
     592              : static void
     593      1925713 : tidy_after_forward_propagate_addr (gimple *stmt)
     594              : {
     595              :   /* We may have turned a trapping insn into a non-trapping insn.  */
     596      1925713 :   if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
     597          131 :     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
     598              : 
     599      1925713 :   if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
     600       247702 :      recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
     601      1925713 : }
     602              : 
     603              : /* NAME is a SSA_NAME representing DEF_RHS which is of the form
     604              :    ADDR_EXPR <whatever>.
     605              : 
     606              :    Try to forward propagate the ADDR_EXPR into the use USE_STMT.
     607              :    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
     608              :    node or for recovery of array indexing from pointer arithmetic.
     609              : 
     610              :    Return true if the propagation was successful (the propagation can
     611              :    be not totally successful, yet things may have been changed).  */
     612              : 
     613              : static bool
     614      2746468 : forward_propagate_addr_expr_1 (tree name, tree def_rhs,
     615              :                                gimple_stmt_iterator *use_stmt_gsi,
     616              :                                bool single_use_p)
     617              : {
     618      2746468 :   tree lhs, rhs, rhs2, array_ref;
     619      2746468 :   gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
     620      2746468 :   enum tree_code rhs_code;
     621      2746468 :   bool res = true;
     622              : 
     623      2746468 :   gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
     624              : 
     625      2746468 :   lhs = gimple_assign_lhs (use_stmt);
     626      2746468 :   rhs_code = gimple_assign_rhs_code (use_stmt);
     627      2746468 :   rhs = gimple_assign_rhs1 (use_stmt);
     628              : 
     629              :   /* Do not perform copy-propagation but recurse through copy chains.  */
     630      2746468 :   if (TREE_CODE (lhs) == SSA_NAME
     631      1358613 :       && rhs_code == SSA_NAME)
     632         6888 :     return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
     633              : 
     634              :   /* The use statement could be a conversion.  Recurse to the uses of the
     635              :      lhs as copyprop does not copy through pointer to integer to pointer
     636              :      conversions and FRE does not catch all cases either.
     637              :      Treat the case of a single-use name and
     638              :      a conversion to def_rhs type separate, though.  */
     639      2739580 :   if (TREE_CODE (lhs) == SSA_NAME
     640      1351725 :       && CONVERT_EXPR_CODE_P (rhs_code))
     641              :     {
     642              :       /* If there is a point in a conversion chain where the types match
     643              :          so we can remove a conversion re-materialize the address here
     644              :          and stop.  */
     645        23978 :       if (single_use_p
     646        23978 :           && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
     647              :         {
     648            1 :           gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
     649            1 :           gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
     650            1 :           return true;
     651              :         }
     652              : 
     653              :       /* Else recurse if the conversion preserves the address value.  */
     654        47954 :       if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
     655            2 :            || POINTER_TYPE_P (TREE_TYPE (lhs)))
     656        47954 :           && (TYPE_PRECISION (TREE_TYPE (lhs))
     657        23977 :               >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
     658        23910 :         return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
     659              : 
     660              :       return false;
     661              :     }
     662              : 
     663              :   /* If this isn't a conversion chain from this on we only can propagate
     664              :      into compatible pointer contexts.  */
     665      2715602 :   if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
     666              :     return false;
     667              : 
     668              :   /* Propagate through constant pointer adjustments.  */
     669      2694944 :   if (TREE_CODE (lhs) == SSA_NAME
     670      1308262 :       && rhs_code == POINTER_PLUS_EXPR
     671      1308262 :       && rhs == name
     672      2858436 :       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
     673              :     {
     674       118042 :       tree new_def_rhs;
     675              :       /* As we come here with non-invariant addresses in def_rhs we need
     676              :          to make sure we can build a valid constant offsetted address
     677              :          for further propagation.  Simply rely on fold building that
     678              :          and check after the fact.  */
     679       118042 :       new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
     680              :                                  def_rhs,
     681              :                                  fold_convert (ptr_type_node,
     682              :                                                gimple_assign_rhs2 (use_stmt)));
     683       118042 :       if (TREE_CODE (new_def_rhs) == MEM_REF
     684       118042 :           && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
     685              :         return false;
     686       114094 :       new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);
     687              : 
     688              :       /* Recurse.  If we could propagate into all uses of lhs do not
     689              :          bother to replace into the current use but just pretend we did.  */
     690       114094 :       if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
     691              :         return true;
     692              : 
     693        38084 :       if (useless_type_conversion_p (TREE_TYPE (lhs),
     694        38084 :                                      TREE_TYPE (new_def_rhs)))
     695        38084 :         gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
     696              :                                         new_def_rhs);
     697            0 :       else if (is_gimple_min_invariant (new_def_rhs))
     698            0 :         gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
     699              :       else
     700              :         return false;
     701        38084 :       gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
     702        38084 :       update_stmt (use_stmt);
     703        38084 :       return true;
     704              :     }
     705              : 
     706              :   /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     707              :      ADDR_EXPR will not appear on the LHS.  */
     708      2576902 :   tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
     709      3893500 :   while (handled_component_p (*lhsp))
     710      1316598 :     lhsp = &TREE_OPERAND (*lhsp, 0);
     711      2576902 :   lhs = *lhsp;
     712              : 
     713              :   /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     714              :      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
     715      2576902 :   if (TREE_CODE (lhs) == MEM_REF
     716      2576902 :       && TREE_OPERAND (lhs, 0) == name)
     717              :     {
     718       870808 :       tree def_rhs_base;
     719       870808 :       poly_int64 def_rhs_offset;
     720              :       /* If the address is invariant we can always fold it.  */
     721       870808 :       if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
     722              :                                                          &def_rhs_offset)))
     723              :         {
     724       827868 :           poly_offset_int off = mem_ref_offset (lhs);
     725       827868 :           tree new_ptr;
     726       827868 :           off += def_rhs_offset;
     727       827868 :           if (TREE_CODE (def_rhs_base) == MEM_REF)
     728              :             {
     729       808493 :               off += mem_ref_offset (def_rhs_base);
     730       808493 :               new_ptr = TREE_OPERAND (def_rhs_base, 0);
     731              :             }
     732              :           else
     733        19375 :             new_ptr = build_fold_addr_expr (def_rhs_base);
     734       827868 :           TREE_OPERAND (lhs, 0) = new_ptr;
     735       827868 :           TREE_OPERAND (lhs, 1)
     736       827868 :             = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
     737       827868 :           tidy_after_forward_propagate_addr (use_stmt);
     738              :           /* Continue propagating into the RHS if this was not the only use.  */
     739       827868 :           if (single_use_p)
     740       223145 :             return true;
     741              :         }
     742              :       /* If the LHS is a plain dereference and the value type is the same as
     743              :          that of the pointed-to type of the address we can put the
     744              :          dereferenced address on the LHS preserving the original alias-type.  */
     745        42940 :       else if (integer_zerop (TREE_OPERAND (lhs, 1))
     746        17321 :                && ((gimple_assign_lhs (use_stmt) == lhs
     747        13967 :                     && useless_type_conversion_p
     748        13967 :                          (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
     749        13967 :                           TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
     750        13097 :                    || types_compatible_p (TREE_TYPE (lhs),
     751        13097 :                                           TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
     752              :                /* Don't forward anything into clobber stmts if it would result
     753              :                   in the lhs no longer being a MEM_REF.  */
     754        50171 :                && (!gimple_clobber_p (use_stmt)
     755          161 :                    || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
     756              :         {
     757         7070 :           tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
     758         7070 :           tree new_offset, new_base, saved, new_lhs;
     759        25480 :           while (handled_component_p (*def_rhs_basep))
     760        11340 :             def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
     761         7070 :           saved = *def_rhs_basep;
     762         7070 :           if (TREE_CODE (*def_rhs_basep) == MEM_REF)
     763              :             {
     764         3712 :               new_base = TREE_OPERAND (*def_rhs_basep, 0);
     765         3712 :               new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
     766              :                                          TREE_OPERAND (*def_rhs_basep, 1));
     767              :             }
     768              :           else
     769              :             {
     770         3358 :               new_base = build_fold_addr_expr (*def_rhs_basep);
     771         3358 :               new_offset = TREE_OPERAND (lhs, 1);
     772              :             }
     773         7070 :           *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
     774              :                                    new_base, new_offset);
     775         7070 :           TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
     776         7070 :           TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
     777         7070 :           TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
     778         7070 :           new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
     779         7070 :           *lhsp = new_lhs;
     780         7070 :           TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
     781         7070 :           TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
     782         7070 :           *def_rhs_basep = saved;
     783         7070 :           tidy_after_forward_propagate_addr (use_stmt);
     784              :           /* Continue propagating into the RHS if this was not the
     785              :              only use.  */
     786         7070 :           if (single_use_p)
     787              :             return true;
     788              :         }
     789              :       else
     790              :         /* We can have a struct assignment dereferencing our name twice.
     791              :            Note that we didn't propagate into the lhs to not falsely
     792              :            claim we did when propagating into the rhs.  */
     793              :         res = false;
     794              :     }
     795              : 
     796              :   /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     797              :      nodes from the RHS.  */
     798      2350446 :   tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
     799      2350446 :   if (TREE_CODE (*rhsp) == ADDR_EXPR)
     800       236164 :     rhsp = &TREE_OPERAND (*rhsp, 0);
     801      3325284 :   while (handled_component_p (*rhsp))
     802       974838 :     rhsp = &TREE_OPERAND (*rhsp, 0);
     803      2350446 :   rhs = *rhsp;
     804              : 
     805              :   /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     806              :      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
     807      2350446 :   if (TREE_CODE (rhs) == MEM_REF
     808      2350446 :       && TREE_OPERAND (rhs, 0) == name)
     809              :     {
     810      1109609 :       tree def_rhs_base;
     811      1109609 :       poly_int64 def_rhs_offset;
     812      1109609 :       if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
     813              :                                                          &def_rhs_offset)))
     814              :         {
     815      1076314 :           poly_offset_int off = mem_ref_offset (rhs);
     816      1076314 :           tree new_ptr;
     817      1076314 :           off += def_rhs_offset;
     818      1076314 :           if (TREE_CODE (def_rhs_base) == MEM_REF)
     819              :             {
     820      1050208 :               off += mem_ref_offset (def_rhs_base);
     821      1050208 :               new_ptr = TREE_OPERAND (def_rhs_base, 0);
     822              :             }
     823              :           else
     824        26106 :             new_ptr = build_fold_addr_expr (def_rhs_base);
     825      1076314 :           TREE_OPERAND (rhs, 0) = new_ptr;
     826      1076314 :           TREE_OPERAND (rhs, 1)
     827      1076314 :             = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
     828      1076314 :           fold_stmt_inplace (use_stmt_gsi);
     829      1076314 :           tidy_after_forward_propagate_addr (use_stmt);
     830      1076314 :           return res;
     831              :         }
     832              :       /* If the RHS is a plain dereference and the value type is the same as
     833              :          that of the pointed-to type of the address we can put the
     834              :          dereferenced address on the RHS preserving the original alias-type.  */
     835        33295 :       else if (integer_zerop (TREE_OPERAND (rhs, 1))
     836        33295 :                && ((gimple_assign_rhs1 (use_stmt) == rhs
     837        19321 :                     && useless_type_conversion_p
     838        19321 :                          (TREE_TYPE (gimple_assign_lhs (use_stmt)),
     839        19321 :                           TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
     840        22065 :                    || types_compatible_p (TREE_TYPE (rhs),
     841        22065 :                                           TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
     842              :         {
     843        14461 :           tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
     844        14461 :           tree new_offset, new_base, saved, new_rhs;
     845        51172 :           while (handled_component_p (*def_rhs_basep))
     846        22250 :             def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
     847        14461 :           saved = *def_rhs_basep;
     848        14461 :           if (TREE_CODE (*def_rhs_basep) == MEM_REF)
     849              :             {
     850         7025 :               new_base = TREE_OPERAND (*def_rhs_basep, 0);
     851         7025 :               new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
     852              :                                          TREE_OPERAND (*def_rhs_basep, 1));
     853              :             }
     854              :           else
     855              :             {
     856         7436 :               new_base = build_fold_addr_expr (*def_rhs_basep);
     857         7436 :               new_offset = TREE_OPERAND (rhs, 1);
     858              :             }
     859        14461 :           *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
     860              :                                    new_base, new_offset);
     861        14461 :           TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
     862        14461 :           TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
     863        14461 :           TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
     864        14461 :           new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
     865        14461 :           *rhsp = new_rhs;
     866        14461 :           TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
     867        14461 :           TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
     868        14461 :           *def_rhs_basep = saved;
     869        14461 :           fold_stmt_inplace (use_stmt_gsi);
     870        14461 :           tidy_after_forward_propagate_addr (use_stmt);
     871        14461 :           return res;
     872              :         }
     873              :     }
     874              : 
     875              :   /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     876              :      is nothing to do. */
     877      1259671 :   if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
     878      1259671 :       || gimple_assign_rhs1 (use_stmt) != name)
     879              :     return false;
     880              : 
     881              :   /* The remaining cases are all for turning pointer arithmetic into
     882              :      array indexing.  They only apply when we have the address of
     883              :      element zero in an array.  If that is not the case then there
     884              :      is nothing to do.  */
     885        45450 :   array_ref = TREE_OPERAND (def_rhs, 0);
     886        45450 :   if ((TREE_CODE (array_ref) != ARRAY_REF
     887         4543 :        || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
     888         4543 :        || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
     889        46928 :       && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
     890              :     return false;
     891              : 
     892        23006 :   rhs2 = gimple_assign_rhs2 (use_stmt);
     893              :   /* Optimize &x[C1] p+ C2 to  &x p+ C3 with C3 = C1 * element_size + C2.  */
     894        23006 :   if (TREE_CODE (rhs2) == INTEGER_CST)
     895              :     {
     896            0 :       tree new_rhs = build1_loc (gimple_location (use_stmt),
     897            0 :                                  ADDR_EXPR, TREE_TYPE (def_rhs),
     898            0 :                                  fold_build2 (MEM_REF,
     899              :                                               TREE_TYPE (TREE_TYPE (def_rhs)),
     900              :                                               unshare_expr (def_rhs),
     901              :                                               fold_convert (ptr_type_node,
     902              :                                                             rhs2)));
     903            0 :       gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
     904            0 :       use_stmt = gsi_stmt (*use_stmt_gsi);
     905            0 :       update_stmt (use_stmt);
     906            0 :       tidy_after_forward_propagate_addr (use_stmt);
     907            0 :       return true;
     908              :     }
     909              : 
     910              :   return false;
     911              : }
     912              : 
     913              : /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
     914              : 
     915              :    Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
     916              :    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
     917              :    node or for recovery of array indexing from pointer arithmetic.
     918              : 
     919              :    PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
     920              :    the single use in the previous invocation.  Pass true when calling
     921              :    this as toplevel.
     922              : 
     923              :    Returns true, if all uses have been propagated into.  */
     924              : 
     925              : static bool
     926      3217223 : forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
     927              : {
     928      3217223 :   bool all = true;
     929      3217223 :   bool single_use_p = parent_single_use_p && has_single_use (name);
     930              : 
     931     16919958 :   for (gimple *use_stmt : gather_imm_use_stmts (name))
     932              :     {
     933      7268289 :       bool result;
     934      7268289 :       tree use_rhs;
     935              : 
     936              :       /* If the use is not in a simple assignment statement, then
     937              :          there is nothing we can do.  */
     938      7268289 :       if (!is_gimple_assign (use_stmt))
     939              :         {
     940      4521821 :           if (!is_gimple_debug (use_stmt))
     941      1889326 :             all = false;
     942      4521821 :           continue;
     943              :         }
     944              : 
     945      2746468 :       gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
     946      2746468 :       result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
     947              :                                               single_use_p);
     948              :       /* If the use has moved to a different statement adjust
     949              :          the update machinery for the old statement too.  */
     950      2746468 :       if (use_stmt != gsi_stmt (gsi))
     951              :         {
     952            0 :           update_stmt (use_stmt);
     953            0 :           use_stmt = gsi_stmt (gsi);
     954              :         }
     955      2746468 :       update_stmt (use_stmt);
     956      2746468 :       all &= result;
     957              : 
     958              :       /* Remove intermediate now unused copy and conversion chains.  */
     959      2746468 :       use_rhs = gimple_assign_rhs1 (use_stmt);
     960      2746468 :       if (result
     961      1434941 :           && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
     962      1196476 :           && TREE_CODE (use_rhs) == SSA_NAME
     963      2826093 :           && has_zero_uses (gimple_assign_lhs (use_stmt)))
     964              :         {
     965        79625 :           gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
     966        79625 :           fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
     967        79625 :           release_defs (use_stmt);
     968        79625 :           gsi_remove (&gsi, true);
     969              :         }
     970      3217223 :     }
     971              : 
     972      3217223 :   return all && has_zero_uses (name);
     973              : }
     974              : 
     975              : 
     976              : /* Helper function for simplify_gimple_switch.  Remove case labels that
     977              :    have values outside the range of the new type.  */
     978              : 
     979              : static void
     980        11784 : simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type,
     981              :                                   vec<std::pair<int, int> > &edges_to_remove)
     982              : {
     983        11784 :   unsigned int branch_num = gimple_switch_num_labels (stmt);
     984        11784 :   auto_vec<tree> labels (branch_num);
     985        11784 :   unsigned int i, len;
     986              : 
     987              :   /* Collect the existing case labels in a VEC, and preprocess it as if
     988              :      we are gimplifying a GENERIC SWITCH_EXPR.  */
     989        74031 :   for (i = 1; i < branch_num; i++)
     990        50463 :     labels.quick_push (gimple_switch_label (stmt, i));
     991        11784 :   preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
     992              : 
     993              :   /* If any labels were removed, replace the existing case labels
     994              :      in the GIMPLE_SWITCH statement with the correct ones.
     995              :      Note that the type updates were done in-place on the case labels,
     996              :      so we only have to replace the case labels in the GIMPLE_SWITCH
     997              :      if the number of labels changed.  */
     998        11784 :   len = labels.length ();
     999        11784 :   if (len < branch_num - 1)
    1000              :     {
    1001            0 :       bitmap target_blocks;
    1002            0 :       edge_iterator ei;
    1003            0 :       edge e;
    1004              : 
    1005              :       /* Corner case: *all* case labels have been removed as being
    1006              :          out-of-range for INDEX_TYPE.  Push one label and let the
    1007              :          CFG cleanups deal with this further.  */
    1008            0 :       if (len == 0)
    1009              :         {
    1010            0 :           tree label, elt;
    1011              : 
    1012            0 :           label = CASE_LABEL (gimple_switch_default_label (stmt));
    1013            0 :           elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
    1014            0 :           labels.quick_push (elt);
    1015            0 :           len = 1;
    1016              :         }
    1017              : 
    1018            0 :       for (i = 0; i < labels.length (); i++)
    1019            0 :         gimple_switch_set_label (stmt, i + 1, labels[i]);
    1020            0 :       for (i++ ; i < branch_num; i++)
    1021            0 :         gimple_switch_set_label (stmt, i, NULL_TREE);
    1022            0 :       gimple_switch_set_num_labels (stmt, len + 1);
    1023              : 
    1024              :       /* Cleanup any edges that are now dead.  */
    1025            0 :       target_blocks = BITMAP_ALLOC (NULL);
    1026            0 :       for (i = 0; i < gimple_switch_num_labels (stmt); i++)
    1027              :         {
    1028            0 :           tree elt = gimple_switch_label (stmt, i);
    1029            0 :           basic_block target = label_to_block (cfun, CASE_LABEL (elt));
    1030            0 :           bitmap_set_bit (target_blocks, target->index);
    1031              :         }
    1032            0 :       for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
    1033              :         {
    1034            0 :           if (! bitmap_bit_p (target_blocks, e->dest->index))
    1035            0 :             edges_to_remove.safe_push (std::make_pair (e->src->index,
    1036            0 :                                                        e->dest->index));
    1037              :           else
    1038            0 :             ei_next (&ei);
    1039              :         }
    1040            0 :       BITMAP_FREE (target_blocks);
    1041              :     }
    1042        11784 : }
    1043              : 
    1044              : /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
    1045              :    the condition which we may be able to optimize better.  */
    1046              : 
    1047              : static bool
    1048       110086 : simplify_gimple_switch (gswitch *stmt,
    1049              :                         vec<std::pair<int, int> > &edges_to_remove,
    1050              :                         bitmap simple_dce_worklist)
    1051              : {
    1052              :   /* The optimization that we really care about is removing unnecessary
    1053              :      casts.  That will let us do much better in propagating the inferred
    1054              :      constant at the switch target.  */
    1055       110086 :   tree cond = gimple_switch_index (stmt);
    1056       110086 :   if (TREE_CODE (cond) == SSA_NAME)
    1057              :     {
    1058       110085 :       gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
    1059       110085 :       if (gimple_assign_cast_p (def_stmt))
    1060              :         {
    1061        12266 :           tree def = gimple_assign_rhs1 (def_stmt);
    1062        12266 :           if (TREE_CODE (def) != SSA_NAME)
    1063              :             return false;
    1064              : 
    1065              :           /* If we have an extension or sign-change that preserves the
    1066              :              values we check against then we can copy the source value into
    1067              :              the switch.  */
    1068        12266 :           tree ti = TREE_TYPE (def);
    1069        12266 :           if (INTEGRAL_TYPE_P (ti)
    1070        12266 :               && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
    1071              :             {
    1072        12021 :               size_t n = gimple_switch_num_labels (stmt);
    1073        12021 :               tree min = NULL_TREE, max = NULL_TREE;
    1074        12021 :               if (n > 1)
    1075              :                 {
    1076        12021 :                   min = CASE_LOW (gimple_switch_label (stmt, 1));
    1077        12021 :                   if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
    1078          154 :                     max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
    1079              :                   else
    1080        11867 :                     max = CASE_LOW (gimple_switch_label (stmt, n - 1));
    1081              :                 }
    1082        12021 :               if ((!min || int_fits_type_p (min, ti))
    1083        12017 :                   && (!max || int_fits_type_p (max, ti)))
    1084              :                 {
    1085        11784 :                   bitmap_set_bit (simple_dce_worklist,
    1086        11784 :                                   SSA_NAME_VERSION (cond));
    1087        11784 :                   gimple_switch_set_index (stmt, def);
    1088        11784 :                   simplify_gimple_switch_label_vec (stmt, ti,
    1089              :                                                     edges_to_remove);
    1090        11784 :                   update_stmt (stmt);
    1091        11784 :                   return true;
    1092              :                 }
    1093              :             }
    1094              :         }
    1095              :     }
    1096              : 
    1097              :   return false;
    1098              : }
    1099              : 
    1100              : /* For pointers p2 and p1 return p2 - p1 if the
    1101              :    difference is known and constant, otherwise return NULL.  */
    1102              : 
    1103              : static tree
    1104         5234 : constant_pointer_difference (tree p1, tree p2)
    1105              : {
    1106         5234 :   int i, j;
    1107              : #define CPD_ITERATIONS 5
    1108         5234 :   tree exps[2][CPD_ITERATIONS];
    1109         5234 :   tree offs[2][CPD_ITERATIONS];
    1110         5234 :   int cnt[2];
    1111              : 
    1112        15702 :   for (i = 0; i < 2; i++)
    1113              :     {
    1114        10468 :       tree p = i ? p1 : p2;
    1115        10468 :       tree off = size_zero_node;
    1116        10468 :       gimple *stmt;
    1117        10468 :       enum tree_code code;
    1118              : 
    1119              :       /* For each of p1 and p2 we need to iterate at least
    1120              :          twice, to handle ADDR_EXPR directly in p1/p2,
    1121              :          SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
    1122              :          on definition's stmt RHS.  Iterate a few extra times.  */
    1123        10468 :       j = 0;
    1124        12412 :       do
    1125              :         {
    1126        12412 :           if (!POINTER_TYPE_P (TREE_TYPE (p)))
    1127              :             break;
    1128        12406 :           if (TREE_CODE (p) == ADDR_EXPR)
    1129              :             {
    1130         8988 :               tree q = TREE_OPERAND (p, 0);
    1131         8988 :               poly_int64 offset;
    1132         8988 :               tree base = get_addr_base_and_unit_offset (q, &offset);
    1133         8988 :               if (base)
    1134              :                 {
    1135         8168 :                   q = base;
    1136         8168 :                   if (maybe_ne (offset, 0))
    1137         3407 :                     off = size_binop (PLUS_EXPR, off, size_int (offset));
    1138              :                 }
    1139         8988 :               if (TREE_CODE (q) == MEM_REF
    1140         8988 :                   && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
    1141              :                 {
    1142          213 :                   p = TREE_OPERAND (q, 0);
    1143          213 :                   off = size_binop (PLUS_EXPR, off,
    1144              :                                     wide_int_to_tree (sizetype,
    1145              :                                                       mem_ref_offset (q)));
    1146              :                 }
    1147              :               else
    1148              :                 {
    1149         8775 :                   exps[i][j] = q;
    1150         8775 :                   offs[i][j++] = off;
    1151         8775 :                   break;
    1152              :                 }
    1153              :             }
    1154         3631 :           if (TREE_CODE (p) != SSA_NAME)
    1155              :             break;
    1156         3631 :           exps[i][j] = p;
    1157         3631 :           offs[i][j++] = off;
    1158         3631 :           if (j == CPD_ITERATIONS)
    1159              :             break;
    1160         3631 :           stmt = SSA_NAME_DEF_STMT (p);
    1161         3631 :           if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
    1162              :             break;
    1163         2852 :           code = gimple_assign_rhs_code (stmt);
    1164         2852 :           if (code == POINTER_PLUS_EXPR)
    1165              :             {
    1166         1486 :               if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
    1167              :                 break;
    1168          955 :               off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
    1169          955 :               p = gimple_assign_rhs1 (stmt);
    1170              :             }
    1171         1366 :           else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
    1172          989 :             p = gimple_assign_rhs1 (stmt);
    1173              :           else
    1174              :             break;
    1175              :         }
    1176              :       while (1);
    1177        10468 :       cnt[i] = j;
    1178              :     }
    1179              : 
    1180         7316 :   for (i = 0; i < cnt[0]; i++)
    1181         9755 :     for (j = 0; j < cnt[1]; j++)
    1182         7673 :       if (exps[0][i] == exps[1][j])
    1183         4344 :         return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
    1184              : 
    1185              :   return NULL_TREE;
    1186              : }
    1187              : 
    1188              : /* Helper function for optimize_aggr_zeroprop.
    1189              :    Props the zeroing (memset, VAL) that was done in DEST+OFFSET:LEN
    1190              :    (DEFSTMT) into the STMT.  Returns true if the STMT was updated.  */
    1191              : static void
    1192     20440779 : optimize_aggr_zeroprop_1 (gimple *defstmt, gimple *stmt,
    1193              :                           tree dest, poly_int64 offset, tree val,
    1194              :                           poly_offset_int len)
    1195              : {
    1196     20440779 :   tree src2;
    1197     20440779 :   tree len2 = NULL_TREE;
    1198     20440779 :   poly_int64 offset2;
    1199              : 
    1200     20440779 :   if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY)
    1201        18085 :       && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
    1202     20454122 :       && poly_int_tree_p (gimple_call_arg (stmt, 2)))
    1203              :     {
    1204        12323 :       src2 = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
    1205        12323 :       len2 = gimple_call_arg (stmt, 2);
    1206              :     }
    1207     20428456 :    else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
    1208              :      {
    1209      1846567 :         src2 = gimple_assign_rhs1 (stmt);
    1210      1846567 :         len2 = (TREE_CODE (src2) == COMPONENT_REF
    1211      1846567 :                 ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
    1212      1679326 :                 : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
    1213              :         /* Can only handle zero memsets. */
    1214      1846567 :         if (!integer_zerop (val))
    1215     20419414 :           return;
    1216              :      }
    1217              :    else
    1218     18581889 :      return;
    1219              : 
    1220      1857928 :   if (len2 == NULL_TREE
    1221      1857928 :       || !poly_int_tree_p (len2))
    1222              :     return;
    1223              : 
    1224      1857928 :   src2 = get_addr_base_and_unit_offset (src2, &offset2);
    1225      1857928 :   if (src2 == NULL_TREE
    1226      1857928 :       || maybe_lt (offset2, offset))
    1227              :     return;
    1228              : 
    1229       857655 :   if (!operand_equal_p (dest, src2, 0))
    1230              :     return;
    1231              : 
    1232              :   /* [ dest + offset, dest + offset + len - 1 ] is set to val.
    1233              :      Make sure that
    1234              :      [ dest + offset2, dest + offset2 + len2 - 1 ] is a subset of that.  */
    1235       130606 :   if (maybe_gt (wi::to_poly_offset (len2) + (offset2 - offset),
    1236              :                 len))
    1237              :     return;
    1238              : 
    1239        21365 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1240              :     {
    1241           32 :       fprintf (dump_file, "Simplified\n  ");
    1242           32 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1243           32 :       fprintf (dump_file, "after previous\n  ");
    1244           32 :       print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    1245              :     }
    1246        21365 :   gimple *orig_stmt = stmt;
    1247              :   /* For simplicity, don't change the kind of the stmt,
    1248              :      turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
    1249              :      into memset (&dest, val, len);
    1250              :      In theory we could change dest = src into memset if dest
    1251              :      is addressable (maybe beneficial if val is not 0), or
    1252              :      memcpy (&dest, &src, len) into dest = {} if len is the size
    1253              :      of dest, dest isn't volatile.  */
    1254        21365 :   if (is_gimple_assign (stmt))
    1255              :     {
    1256        21360 :       tree ctor_type = TREE_TYPE (gimple_assign_lhs (stmt));
    1257        21360 :       tree ctor = build_constructor (ctor_type, NULL);
    1258        21360 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    1259        21360 :       gimple_assign_set_rhs_from_tree (&gsi, ctor);
    1260        21360 :       update_stmt (stmt);
    1261        21360 :       statistics_counter_event (cfun, "copy zeroing propagation of aggregate", 1);
    1262              :     }
    1263              :   else /* If stmt is memcpy, transform it into memset.  */
    1264              :     {
    1265            5 :       gcall *call = as_a <gcall *> (stmt);
    1266            5 :       tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
    1267            5 :       gimple_call_set_fndecl (call, fndecl);
    1268            5 :       gimple_call_set_fntype (call, TREE_TYPE (fndecl));
    1269            5 :       gimple_call_set_arg (call, 1, val);
    1270            5 :       update_stmt (stmt);
    1271            5 :       statistics_counter_event (cfun, "memcpy to memset changed", 1);
    1272              :     }
    1273              : 
    1274        21365 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1275              :     {
    1276           32 :       fprintf (dump_file, "into\n  ");
    1277           32 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1278              :     }
    1279              : 
    1280              :   /* Mark the bb for eh cleanup if needed.  */
    1281        21365 :   if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
    1282            6 :     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
    1283              : }
    1284              : 
    1285              : /* Optimize
    1286              :    a = {}; // DEST = value ;; LEN(nullptr)
    1287              :    b = a;
    1288              :    into
    1289              :    a = {};
    1290              :    b = {};
    1291              :    Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
    1292              :    and/or memcpy (&b, &a, sizeof (a)); instead of b = a;  */
    1293              : 
    1294              : static void
    1295     30337969 : optimize_aggr_zeroprop (gimple *stmt, bool full_walk)
    1296              : {
    1297     30337969 :   ao_ref read;
    1298     60675938 :   if (gimple_has_volatile_ops (stmt))
    1299     26374970 :     return;
    1300              : 
    1301     29413053 :   tree dest = NULL_TREE;
    1302     29413053 :   tree val = integer_zero_node;
    1303     29413053 :   tree len = NULL_TREE;
    1304     29413053 :   bool can_use_tbba = true;
    1305              : 
    1306     29413053 :   if (gimple_call_builtin_p (stmt, BUILT_IN_MEMSET)
    1307       110687 :       && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
    1308        54846 :       && TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
    1309     29465620 :       && poly_int_tree_p (gimple_call_arg (stmt, 2)))
    1310              :     {
    1311        49791 :       dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
    1312        49791 :       len = gimple_call_arg (stmt, 2);
    1313        49791 :       val = gimple_call_arg (stmt, 1);
    1314        49791 :       ao_ref_init_from_ptr_and_size (&read, gimple_call_arg (stmt, 0), len);
    1315        49791 :       can_use_tbba = false;
    1316              :     }
    1317     29363262 :   else if (gimple_store_p (stmt)
    1318     29302206 :            && gimple_assign_single_p (stmt)
    1319     58665468 :            && TREE_CODE (gimple_assign_rhs1 (stmt)) == STRING_CST)
    1320              :     {
    1321        36400 :       tree str = gimple_assign_rhs1 (stmt);
    1322        36400 :       dest = gimple_assign_lhs (stmt);
    1323        36400 :       ao_ref_init (&read, dest);
    1324              :       /* The string must contain all null char's for now.  */
    1325        41834 :       for (int i = 0; i < TREE_STRING_LENGTH (str); i++)
    1326              :         {
    1327        39151 :           if (TREE_STRING_POINTER (str)[i] != 0)
    1328              :             {
    1329              :               dest = NULL_TREE;
    1330              :               break;
    1331              :             }
    1332              :         }
    1333              :     }
    1334              :   /* A store of integer (scalar, vector or complex) zeros is
    1335              :      a zero store. */
    1336     29326862 :   else if (gimple_store_p (stmt)
    1337     29265806 :            && gimple_assign_single_p (stmt)
    1338     58592668 :            && integer_zerop (gimple_assign_rhs1 (stmt)))
    1339              :     {
    1340      3508116 :       tree rhs = gimple_assign_rhs1 (stmt);
    1341      3508116 :       tree type = TREE_TYPE (rhs);
    1342      3508116 :       dest = gimple_assign_lhs (stmt);
    1343      3508116 :       ao_ref_init (&read, dest);
    1344              :       /* For integral types, the type precision needs to be a multiply of BITS_PER_UNIT. */
    1345      3508116 :       if (INTEGRAL_TYPE_P (type)
    1346      3508116 :           && (TYPE_PRECISION (type) % BITS_PER_UNIT) != 0)
    1347              :         dest = NULL_TREE;
    1348              :     }
    1349     25818746 :   else if (gimple_store_p (stmt)
    1350     25757690 :            && gimple_assign_single_p (stmt)
    1351     25757690 :            && TREE_CODE (gimple_assign_rhs1 (stmt)) == CONSTRUCTOR
    1352     26515445 :            && !gimple_clobber_p (stmt))
    1353              :     {
    1354       696699 :       dest = gimple_assign_lhs (stmt);
    1355       696699 :       ao_ref_init (&read, dest);
    1356              :     }
    1357              : 
    1358      4082107 :   if (dest == NULL_TREE)
    1359     25364663 :     return;
    1360              : 
    1361      4048390 :   if (len == NULL_TREE)
    1362      3998599 :     len = (TREE_CODE (dest) == COMPONENT_REF
    1363      3998599 :            ? DECL_SIZE_UNIT (TREE_OPERAND (dest, 1))
    1364      1743150 :            : TYPE_SIZE_UNIT (TREE_TYPE (dest)));
    1365      3998599 :   if (len == NULL_TREE
    1366      4048390 :       || !poly_int_tree_p (len))
    1367              :     return;
    1368              : 
    1369              :   /* This store needs to be on the byte boundary and pointing to an object.  */
    1370      4048390 :   poly_int64 offset;
    1371      4048390 :   tree dest_base = get_addr_base_and_unit_offset (dest, &offset);
    1372      4048390 :   if (dest_base == NULL_TREE)
    1373              :     return;
    1374              : 
    1375              :   /* Setup the worklist.  */
    1376      3962999 :   auto_vec<std::pair<tree, unsigned>> worklist;
    1377      3962999 :   unsigned limit = full_walk ? param_sccvn_max_alias_queries_per_access : 0;
    1378      7925998 :   worklist.safe_push (std::make_pair (gimple_vdef (stmt), limit));
    1379              : 
    1380     26008848 :   while (!worklist.is_empty ())
    1381              :     {
    1382     18082850 :       std::pair<tree, unsigned> top = worklist.pop ();
    1383     18082850 :       tree vdef = top.first;
    1384     18082850 :       limit = top.second;
    1385     18082850 :       gimple *use_stmt;
    1386     18082850 :       imm_use_iterator iter;
    1387     58532664 :       FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    1388              :         {
    1389              :           /* Handling PHI nodes might not be worth it so don't.  */
    1390     22366964 :           if (is_a <gphi*> (use_stmt))
    1391      1926185 :             continue;
    1392              : 
    1393              :           /* If this statement does not clobber add the vdef stmt to the
    1394              :              worklist.
    1395              :              After hitting the limit, allow clobbers to able to pass through.  */
    1396      2001838 :           if ((limit != 0 || gimple_clobber_p (use_stmt))
    1397     18474618 :               && gimple_vdef (use_stmt)
    1398     35968674 :               && !stmt_may_clobber_ref_p_1 (use_stmt, &read,
    1399              :                                            /* tbaa_p = */ can_use_tbba))
    1400              :               {
    1401     14119851 :                 unsigned new_limit = limit == 0 ? 0 : limit - 1;
    1402     28239702 :                 worklist.safe_push (std::make_pair (gimple_vdef (use_stmt),
    1403              :                                                     new_limit));
    1404              :               }
    1405              : 
    1406     20440779 :            optimize_aggr_zeroprop_1 (stmt, use_stmt, dest_base, offset,
    1407     20440779 :                                      val, wi::to_poly_offset (len));
    1408     18082850 :         }
    1409              :     }
    1410              : 
    1411      3962999 : }
    1412              : 
    1413              : /* Returns the pointer to the base of the object of the
    1414              :    reference EXPR and extracts the information about
    1415              :    the offset of the access, storing it to PBYTESIZE,
    1416              :    PBYTEPOS and PREVERSEP.
    1417              :    If the access is not a byte sized or position is not
    1418              :    on the byte, return NULL.  */
    1419              : static tree
    1420      5178988 : split_core_and_offset_size (tree expr,
    1421              :                             poly_int64 *pbytesize, poly_int64 *pbytepos,
    1422              :                             tree *poffset, int *preversep)
    1423              : {
    1424      5178988 :   tree core;
    1425      5178988 :   machine_mode mode;
    1426      5178988 :   int unsignedp, volatilep;
    1427      5178988 :   poly_int64 bitsize;
    1428      5178988 :   poly_int64 bitpos;
    1429      5178988 :   location_t loc = EXPR_LOCATION (expr);
    1430              : 
    1431      5178988 :   core = get_inner_reference (expr, &bitsize, &bitpos,
    1432              :                               poffset, &mode, &unsignedp, preversep,
    1433              :                               &volatilep);
    1434     10357976 :   if (!multiple_p (bitsize, BITS_PER_UNIT, pbytesize))
    1435              :     return NULL_TREE;
    1436      5178988 :   if (!multiple_p (bitpos, BITS_PER_UNIT, pbytepos))
    1437              :     return NULL_TREE;
    1438              :   /* If we are left with MEM[a + CST] strip that and add it to the
    1439              :      pbytepos and return a. */
    1440      5178988 :   if (TREE_CODE (core) == MEM_REF)
    1441              :     {
    1442      1213911 :       poly_offset_int tem;
    1443      1213911 :       tem = wi::to_poly_offset (TREE_OPERAND (core, 1));
    1444      1213911 :       tem += *pbytepos;
    1445      1213911 :       if (tem.to_shwi (pbytepos))
    1446      1211988 :         return TREE_OPERAND (core, 0);
    1447              :     }
    1448      3967000 :   core = build_fold_addr_expr_loc (loc, core);
    1449      3967000 :   STRIP_NOPS (core);
    1450      3967000 :   return core;
    1451              : }
    1452              : 
    1453              : /* Returns a new src based on the
    1454              :    copy `DEST = SRC` and for the old SRC2.
    1455              :    Returns null if SRC2 is not related to DEST.  */
    1456              : 
    1457              : static tree
    1458      1198196 : new_src_based_on_copy (tree src2, tree dest, tree src)
    1459              : {
    1460              :   /* If the second src is not exactly the same as dest,
    1461              :      try to handle it seperately; see it is address/size equivalent.
    1462              :      Handles `a` and `a.b` and `MEM<char[N]>(&a)` which all have
    1463              :      the same size and offsets as address/size equivalent.
    1464              :      This allows copying over a memcpy and also one for copying
    1465              :      where one field is the same size as the whole struct.  */
    1466      1198196 :   if (operand_equal_p (dest, src2))
    1467              :     return src;
    1468              :   /* if both dest and src2 are decls, then we know these 2
    1469              :      accesses can't be the same.  */
    1470       698104 :   if (DECL_P (dest) && DECL_P (src2))
    1471              :     return NULL_TREE;
    1472              :   /* A VCE can't be used with imag/real or BFR so reject them early. */
    1473       365657 :   if (TREE_CODE (src) == IMAGPART_EXPR
    1474       365657 :       || TREE_CODE (src) == REALPART_EXPR
    1475       365657 :       || TREE_CODE (src) == BIT_FIELD_REF)
    1476              :     return NULL_TREE;
    1477       365657 :   tree core1, core2;
    1478       365657 :   poly_int64 bytepos1, bytepos2;
    1479       365657 :   poly_int64 bytesize1, bytesize2;
    1480       365657 :   tree toffset1, toffset2;
    1481       365657 :   int reversep1 = 0;
    1482       365657 :   int reversep2 = 0;
    1483       365657 :   poly_int64 diff = 0;
    1484       365657 :   core1 = split_core_and_offset_size (dest, &bytesize1, &bytepos1,
    1485              :                                           &toffset1, &reversep1);
    1486       365657 :   core2 = split_core_and_offset_size (src2, &bytesize2, &bytepos2,
    1487              :                                           &toffset2, &reversep2);
    1488       365657 :   if (!core1 || !core2)
    1489              :     return NULL_TREE;
    1490       365657 :   if (reversep1 != reversep2)
    1491              :     return NULL_TREE;
    1492              :   /* The sizes of the 2 accesses need to be the same. */
    1493       365657 :   if (!known_eq (bytesize1, bytesize2))
    1494              :     return NULL_TREE;
    1495       158203 :   if (!operand_equal_p (core1, core2, 0))
    1496              :     return NULL_TREE;
    1497              : 
    1498        22157 :   if (toffset1 && toffset2)
    1499              :     {
    1500            2 :       tree type = TREE_TYPE (toffset1);
    1501            2 :       if (type != TREE_TYPE (toffset2))
    1502            0 :         toffset2 = fold_convert (type, toffset2);
    1503              : 
    1504            2 :       tree tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
    1505            2 :       if (!cst_and_fits_in_hwi (tdiff))
    1506              :         return NULL_TREE;
    1507              : 
    1508            0 :       diff = int_cst_value (tdiff);
    1509            0 :     }
    1510        22155 :   else if (toffset1 || toffset2)
    1511              :     {
    1512              :       /* If only one of the offsets is non-constant, the difference cannot
    1513              :          be a constant.  */
    1514              :       return NULL_TREE;
    1515              :     }
    1516        22123 :   diff += bytepos1 - bytepos2;
    1517              :   /* The offset between the 2 need to be 0. */
    1518        22123 :   if (!known_eq (diff, 0))
    1519              :     return NULL_TREE;
    1520        21402 :   return fold_build1 (VIEW_CONVERT_EXPR,TREE_TYPE (src2), src);
    1521              : }
    1522              : 
    1523              : /* Returns true if SRC and DEST are the same address such that
    1524              :    `SRC == DEST;` is considered a nop. This is more than an
    1525              :    operand_equal_p check as it needs to be similar to
    1526              :    new_src_based_on_copy.  */
    1527              : 
    1528              : static bool
    1529      4269459 : same_for_assignment (tree src, tree dest)
    1530              : {
    1531      4269459 :   if (operand_equal_p (dest, src, 0))
    1532              :     return true;
    1533              :   /* if both dest and src2 are decls, then we know these 2
    1534              :      accesses can't be the same.  */
    1535      4266550 :   if (DECL_P (dest) && DECL_P (src))
    1536              :     return false;
    1537              : 
    1538      2223837 :   tree core1, core2;
    1539      2223837 :   poly_int64 bytepos1, bytepos2;
    1540      2223837 :   poly_int64 bytesize1, bytesize2;
    1541      2223837 :   tree toffset1, toffset2;
    1542      2223837 :   int reversep1 = 0;
    1543      2223837 :   int reversep2 = 0;
    1544      2223837 :   poly_int64 diff = 0;
    1545      2223837 :   core1 = split_core_and_offset_size (dest, &bytesize1, &bytepos1,
    1546              :                                       &toffset1, &reversep1);
    1547      2223837 :   core2 = split_core_and_offset_size (src, &bytesize2, &bytepos2,
    1548              :                                       &toffset2, &reversep2);
    1549      2223837 :   if (!core1 || !core2)
    1550              :     return false;
    1551      2223837 :   if (reversep1 != reversep2)
    1552              :     return false;
    1553              :   /* The sizes of the 2 accesses need to be the same. */
    1554      2223837 :   if (!known_eq (bytesize1, bytesize2))
    1555              :     return false;
    1556      2222923 :   if (!operand_equal_p (core1, core2, 0))
    1557              :     return false;
    1558         6091 :   if (toffset1 && toffset2)
    1559              :     {
    1560          343 :       tree type = TREE_TYPE (toffset1);
    1561          343 :       if (type != TREE_TYPE (toffset2))
    1562            0 :         toffset2 = fold_convert (type, toffset2);
    1563              : 
    1564          343 :       tree tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
    1565          343 :       if (!cst_and_fits_in_hwi (tdiff))
    1566              :         return false;
    1567              : 
    1568            0 :       diff = int_cst_value (tdiff);
    1569            0 :     }
    1570         5748 :   else if (toffset1 || toffset2)
    1571              :     {
    1572              :       /* If only one of the offsets is non-constant, the difference cannot
    1573              :          be a constant.  */
    1574              :       return false;
    1575              :     }
    1576         5748 :   diff += bytepos1 - bytepos2;
    1577              :   /* The offset between the 2 need to be 0. */
    1578         5748 :   if (!known_eq (diff, 0))
    1579              :     return false;
    1580              :   return true;
    1581              : }
    1582              : 
    1583              : /* Helper function for optimize_agr_copyprop.
    1584              :    For aggregate copies in USE_STMT, see if DEST
    1585              :    is on the lhs of USE_STMT and replace it with SRC. */
    1586              : static void
    1587       966785 : optimize_agr_copyprop_1 (gimple *stmt, gimple *use_stmt,
    1588              :                          tree dest, tree src)
    1589              : {
    1590       966785 :   gcc_assert (gimple_assign_load_p (use_stmt)
    1591              :               && gimple_store_p (use_stmt));
    1592      1933570 :   if (gimple_has_volatile_ops (use_stmt))
    1593       589270 :     return;
    1594       966784 :   tree dest2 = gimple_assign_lhs (use_stmt);
    1595       966784 :   tree src2 = gimple_assign_rhs1 (use_stmt);
    1596              :   /* If the new store is `src2 = src2;` skip over it. */
    1597       966784 :   if (same_for_assignment (src2, dest2))
    1598              :     return;
    1599       966221 :   src = new_src_based_on_copy (src2, dest, src);
    1600       966221 :   if (!src)
    1601              :     return;
    1602              :   /* For 2 memory refences and using a temporary to do the copy,
    1603              :      don't remove the temporary as the 2 memory references might overlap.
    1604              :      Note t does not need to be decl as it could be field.
    1605              :      See PR 22237 for full details.
    1606              :      E.g.
    1607              :      t = *a; #DEST = SRC;
    1608              :      *b = t; #DEST2 = SRC2;
    1609              :      Cannot be convert into
    1610              :      t = *a;
    1611              :      *b = *a;
    1612              :      Though the following is allowed to be done:
    1613              :      t = *a;
    1614              :      *a = t;
    1615              :      And convert it into:
    1616              :      t = *a;
    1617              :      *a = *a;
    1618              :      */
    1619       402272 :   if (!operand_equal_p (dest2, src, 0)
    1620       402272 :       && !DECL_P (dest2) && !DECL_P (src))
    1621              :     {
    1622              :       /* If *a and *b have the same base see if
    1623              :          the offset between the two is greater than
    1624              :          or equal to the size of the type. */
    1625        27692 :       poly_int64 offset1, offset2;
    1626        27692 :       tree len = TYPE_SIZE_UNIT (TREE_TYPE (src));
    1627        27692 :       if (len == NULL_TREE
    1628        27692 :           || !tree_fits_poly_int64_p (len))
    1629        24757 :         return;
    1630        27692 :       tree base1 = get_addr_base_and_unit_offset (dest2, &offset1);
    1631        27692 :       tree base2 = get_addr_base_and_unit_offset (src, &offset2);
    1632        27692 :       poly_int64 size = tree_to_poly_int64 (len);
    1633              :       /* If the bases are 2 different decls,
    1634              :          then there can be no overlapping.  */
    1635        27692 :       if (base1 && base2
    1636        27642 :           && DECL_P (base1) && DECL_P (base2)
    1637         1669 :           && base1 != base2)
    1638              :         ;
    1639              :       /* If we can't figure out the base or the bases are
    1640              :          not equal then fall back to an alignment check.  */
    1641        26237 :       else if (!base1
    1642        26237 :                || !base2
    1643        26237 :                || !operand_equal_p (base1, base2))
    1644              :         {
    1645        25880 :           unsigned int align1 = get_object_alignment (src);
    1646        25880 :           unsigned int align2 = get_object_alignment (dest2);
    1647        25880 :           align1 /= BITS_PER_UNIT;
    1648        25880 :           align2 /= BITS_PER_UNIT;
    1649              :           /* If the alignment of either object is less
    1650              :              than the size then there is a possibility
    1651              :              of overlapping.  */
    1652        25880 :           if (maybe_lt (align1, size)
    1653        25880 :               || maybe_lt (align2, size))
    1654        24757 :             return;
    1655              :         }
    1656              :       /* Make sure [offset1, offset1 + len - 1] does
    1657              :          not overlap with [offset2, offset2 + len - 1],
    1658              :          it is ok if they are at the same location though.  */
    1659          357 :       else if (ranges_maybe_overlap_p (offset1, size, offset2, size)
    1660          357 :           && !known_eq (offset2, offset1))
    1661              :         return;
    1662              :     }
    1663              : 
    1664       377515 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1665              :     {
    1666           11 :       fprintf (dump_file, "Simplified\n  ");
    1667           11 :       print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
    1668           11 :       fprintf (dump_file, "after previous\n  ");
    1669           11 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1670              :     }
    1671       377515 :   gimple *orig_stmt = use_stmt;
    1672       377515 :   gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
    1673       377515 :   gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (src));
    1674       377515 :   update_stmt (use_stmt);
    1675              : 
    1676       377515 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1677              :     {
    1678           11 :       fprintf (dump_file, "into\n  ");
    1679           11 :       print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
    1680              :     }
    1681       377515 :   if (maybe_clean_or_replace_eh_stmt (orig_stmt, use_stmt))
    1682            0 :     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
    1683       377515 :   statistics_counter_event (cfun, "copy prop for aggregate", 1);
    1684              : }
    1685              : 
    1686              : /* Helper function for optimize_agr_copyprop_1, propagate aggregates
    1687              :    into the arguments of USE_STMT if the argument matches with DEST;
    1688              :    replacing it with SRC.  */
    1689              : static void
    1690       685516 : optimize_agr_copyprop_arg (gimple *defstmt, gcall *call,
    1691              :                            tree dest, tree src)
    1692              : {
    1693       685516 :   bool changed = false;
    1694      2271149 :   for (unsigned arg = 0; arg < gimple_call_num_args (call); arg++)
    1695              :     {
    1696      1585633 :       tree *argptr = gimple_call_arg_ptr (call, arg);
    1697      2984446 :       if (TREE_CODE (*argptr) == SSA_NAME
    1698       908815 :           || is_gimple_min_invariant (*argptr)
    1699      1772453 :           || TYPE_VOLATILE (TREE_TYPE (*argptr)))
    1700      1398813 :         continue;
    1701       186820 :       tree newsrc = new_src_based_on_copy (*argptr, dest, src);
    1702       186820 :       if (!newsrc)
    1703       112540 :         continue;
    1704              : 
    1705        74280 :       if (dump_file && (dump_flags & TDF_DETAILS))
    1706              :         {
    1707            9 :           fprintf (dump_file, "Simplified\n  ");
    1708            9 :           print_gimple_stmt (dump_file, call, 0, dump_flags);
    1709            9 :           fprintf (dump_file, "after previous\n  ");
    1710            9 :           print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    1711              :         }
    1712        74280 :       *argptr = unshare_expr (newsrc);
    1713        74280 :       changed = true;
    1714        74280 :       if (dump_file && (dump_flags & TDF_DETAILS))
    1715              :         {
    1716            9 :           fprintf (dump_file, "into\n  ");
    1717            9 :           print_gimple_stmt (dump_file, call, 0, dump_flags);
    1718              :         }
    1719              :     }
    1720       685516 :   if (changed)
    1721        74103 :     update_stmt (call);
    1722       685516 : }
    1723              : 
    1724              : /* Helper function for optimize_agr_copyprop, propagate aggregates
    1725              :    into the return stmt USE if the operand of the return matches DEST;
    1726              :    replacing it with SRC.  */
    1727              : static void
    1728       116888 : optimize_agr_copyprop_return (gimple *defstmt, greturn *use,
    1729              :                               tree dest, tree src)
    1730              : {
    1731       116888 :   tree rvalue = gimple_return_retval (use);
    1732       116888 :   if (!rvalue
    1733        74674 :       || TREE_CODE (rvalue) == SSA_NAME
    1734        66440 :       || is_gimple_min_invariant (rvalue)
    1735       182933 :       || TYPE_VOLATILE (TREE_TYPE (rvalue)))
    1736        50844 :     return;
    1737              : 
    1738              :   /* `return <retval>;` is already the best it could be.
    1739              :      Likewise `return *<retval>_N(D)`.  */
    1740        66044 :   if (TREE_CODE (rvalue) == RESULT_DECL
    1741        66044 :       || (TREE_CODE (rvalue) == MEM_REF
    1742            0 :           && TREE_CODE (TREE_OPERAND (rvalue, 0)) == SSA_NAME
    1743            0 :           && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (rvalue, 0)))
    1744              :                == RESULT_DECL))
    1745              :     return;
    1746        45155 :   tree newsrc = new_src_based_on_copy (rvalue, dest, src);
    1747        45155 :   if (!newsrc)
    1748              :     return;
    1749              :   /* Currently only support non-global vars.
    1750              :      See PR 124099 on enumtls not supporting expanding for GIMPLE_RETURN.
    1751              :      FIXME: could support VCEs too?  */
    1752        44942 :   if (!VAR_P (newsrc) || is_global_var (newsrc))
    1753              :     return;
    1754        20308 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1755              :     {
    1756            1 :       fprintf (dump_file, "Simplified\n  ");
    1757            1 :       print_gimple_stmt (dump_file, use, 0, dump_flags);
    1758            1 :       fprintf (dump_file, "after previous\n  ");
    1759            1 :       print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    1760              :     }
    1761        20308 :   gimple_return_set_retval (use, newsrc);
    1762        20308 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1763              :     {
    1764            1 :       fprintf (dump_file, "into\n  ");
    1765            1 :       print_gimple_stmt (dump_file, use, 0, dump_flags);
    1766              :     }
    1767        20308 :   update_stmt (use);
    1768              : }
    1769              : 
    1770              : /* Optimizes
    1771              :    DEST = SRC;
    1772              :    DEST2 = DEST; # DEST2 = SRC2;
    1773              :    into
    1774              :    DEST = SRC;
    1775              :    DEST2 = SRC;
    1776              :    STMT is the first statement and SRC is the common
    1777              :    between the statements.
    1778              : 
    1779              :    Also optimizes:
    1780              :    DEST = SRC;
    1781              :    call_func(..., DEST, ...);
    1782              :    into:
    1783              :    DEST = SRC;
    1784              :    call_func(..., SRC, ...);
    1785              : 
    1786              : */
    1787              : static void
    1788      3711510 : optimize_agr_copyprop (gimple *stmt)
    1789              : {
    1790      7423020 :   if (gimple_has_volatile_ops (stmt))
    1791       411424 :     return;
    1792              : 
    1793              :   /* Can't prop if the statement could throw.  */
    1794      3710371 :   if (stmt_could_throw_p (cfun, stmt))
    1795              :     return;
    1796              : 
    1797      3302675 :   tree dest = gimple_assign_lhs (stmt);
    1798      3302675 :   tree src = gimple_assign_rhs1 (stmt);
    1799              :   /* If the statement is `src = src;` then ignore it. */
    1800      3302675 :   if (same_for_assignment (dest, src))
    1801              :     return;
    1802              : 
    1803      3300086 :   tree vdef = gimple_vdef (stmt);
    1804      3300086 :   imm_use_iterator iter;
    1805      3300086 :   gimple *use_stmt;
    1806     12870295 :   FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    1807              :     {
    1808      6270123 :       if (gimple_assign_load_p (use_stmt)
    1809      6270123 :           && gimple_store_p (use_stmt))
    1810       966785 :         optimize_agr_copyprop_1 (stmt, use_stmt, dest, src);
    1811      5303338 :       else if (is_gimple_call (use_stmt))
    1812       685516 :         optimize_agr_copyprop_arg (stmt, as_a<gcall*>(use_stmt), dest, src);
    1813      4617822 :       else if (is_a<greturn*> (use_stmt))
    1814       116888 :         optimize_agr_copyprop_return (stmt, as_a<greturn*>(use_stmt), dest, src);
    1815      3300086 :     }
    1816              : }
    1817              : 
    1818              : /* Simple DSE of the lhs from a clobber STMT.
    1819              :    This is used mostly to clean up from optimize_agr_copyprop and
    1820              :    to remove (exactly one) extra copy that might later on confuse SRA.
    1821              :    An example is:
    1822              :    ;; write to a and such.
    1823              :    b = a; // This statement is to be removed
    1824              :    b = {CLOBBER};
    1825              :    SRA will totally scalarize b (which means also a) here for the extra copy
    1826              :    which is not something welcomed. So removing the copy will
    1827              :    allow SRA to move the scalarization of a further down or not at all.
    1828              :    */
    1829              : static void
    1830      6767334 : do_simple_agr_dse (gassign *stmt, bool full_walk)
    1831              : {
    1832              :   /* Don't do this while in -Og as we want to keep around the copy
    1833              :      for debuggability.  */
    1834      6767334 :   if (optimize_debug)
    1835      4703042 :     return;
    1836      6763913 :   ao_ref read;
    1837      6763913 :   basic_block bb = gimple_bb (stmt);
    1838      6763913 :   tree lhs = gimple_assign_lhs (stmt);
    1839              :   /* Only handle clobbers of a full decl.  */
    1840      6763913 :   if (!DECL_P (lhs))
    1841              :     return;
    1842      6062310 :   ao_ref_init (&read, lhs);
    1843      6062310 :   tree vuse = gimple_vuse (stmt);
    1844      6062310 :   unsigned limit = full_walk ? param_sccvn_max_alias_queries_per_access : 4;
    1845     15451746 :   while (limit)
    1846              :     {
    1847     15441289 :       gimple *ostmt = SSA_NAME_DEF_STMT (vuse);
    1848              :       /* Don't handle phis, just declare to be done. */
    1849     15441289 :       if (is_a<gphi*>(ostmt) || gimple_nop_p (ostmt))
    1850              :         break;
    1851     13387454 :       basic_block obb = gimple_bb (ostmt);
    1852              :       /* If the clobber is not fully dominating the statement define,
    1853              :          then it is not "simple" to detect if the define is fully clobbered.  */
    1854     13387454 :       if (obb != bb && !dominated_by_p (CDI_DOMINATORS, bb, obb))
    1855      3998018 :         return;
    1856     13387454 :       gimple *use_stmt;
    1857     13387454 :       imm_use_iterator iter;
    1858     54137867 :       FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (ostmt))
    1859              :         {
    1860     15749638 :           basic_block ubb = gimple_bb (use_stmt);
    1861     15749638 :           if (stmt == use_stmt)
    1862      4607193 :             continue;
    1863              :           /* If the use is a clobber for lhs,
    1864              :              then it can be safely skipped; this happens with eh
    1865              :              and sometimes jump threading.  */
    1866     11142445 :           if (gimple_clobber_p (use_stmt)
    1867     11142445 :               && lhs == gimple_assign_lhs (use_stmt))
    1868       158540 :             continue;
    1869              :           /* If the use is a phi and it is single use then check if that single use
    1870              :              is a clobber and lhs is the same.  */
    1871     10983905 :           if (gphi *use_phi = dyn_cast<gphi*>(use_stmt))
    1872              :             {
    1873       321133 :               use_operand_p ou;
    1874       321133 :               gimple *ostmt;
    1875       321133 :               if (single_imm_use (gimple_phi_result (use_phi), &ou, &ostmt)
    1876       273256 :                   && gimple_clobber_p (ostmt)
    1877       536359 :                   && lhs == gimple_assign_lhs (ostmt))
    1878        64940 :                 continue;
    1879              :               /* A phi node will never be dominating the clobber.  */
    1880       256193 :               return;
    1881              :             }
    1882              :           /* The use needs to be dominating the clobber. */
    1883      1331476 :           if ((ubb != bb && !dominated_by_p (CDI_DOMINATORS, bb, ubb))
    1884     11333618 :               || ref_maybe_used_by_stmt_p (use_stmt, &read, false))
    1885      1082304 :             return;
    1886              :           /* Count the above alias lookup towards the limit. */
    1887      9580468 :           limit--;
    1888      9580468 :           if (limit == 0)
    1889              :             return;
    1890      1774133 :         }
    1891     11613321 :       vuse = gimple_vuse (ostmt);
    1892              :       /* This is a call with an assignment to the clobber decl,
    1893              :          remove the lhs or the whole stmt if it was pure/const. */
    1894     11613321 :       if (is_a <gcall*>(ostmt)
    1895     11613321 :           && lhs == gimple_call_lhs (ostmt))
    1896              :         {
    1897              :           /* Don't remove stores/statements that are needed for non-call
    1898              :               eh to work.  */
    1899         3191 :           if (stmt_unremovable_because_of_non_call_eh_p (cfun, ostmt))
    1900              :             return;
    1901              :           /* If we delete a stmt that could throw, mark the block
    1902              :              in to_purge to cleanup afterwards.  */
    1903         3185 :           if (stmt_could_throw_p (cfun, ostmt))
    1904          907 :             bitmap_set_bit (to_purge, obb->index);
    1905         3185 :           int flags = gimple_call_flags (ostmt);
    1906         3185 :           if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
    1907          225 :               && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
    1908              :             {
    1909          141 :                gimple_stmt_iterator gsi = gsi_for_stmt (ostmt);
    1910          141 :                if (dump_file && (dump_flags & TDF_DETAILS))
    1911              :                 {
    1912           14 :                   fprintf (dump_file, "Removing dead call store stmt ");
    1913           14 :                   print_gimple_stmt (dump_file, ostmt, 0);
    1914           14 :                   fprintf (dump_file, "\n");
    1915              :                 }
    1916          141 :               unlink_stmt_vdef (ostmt);
    1917          141 :               release_defs (ostmt);
    1918          141 :               gsi_remove (&gsi, true);
    1919          141 :               statistics_counter_event (cfun, "delete call dead store", 1);
    1920              :               /* Only remove the first store previous statement. */
    1921          141 :               return;
    1922              :             }
    1923              :           /* Make sure we do not remove a return slot we cannot reconstruct
    1924              :              later.  */
    1925         3044 :           if (gimple_call_return_slot_opt_p (as_a <gcall *>(ostmt))
    1926         3044 :               && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (ostmt)))
    1927          440 :                   || !poly_int_tree_p
    1928          440 :                       (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (ostmt))))))
    1929              :             return;
    1930          545 :           if (dump_file && (dump_flags & TDF_DETAILS))
    1931              :             {
    1932            6 :               fprintf (dump_file, "Removing lhs of call stmt ");
    1933            6 :               print_gimple_stmt (dump_file, ostmt, 0);
    1934            6 :               fprintf (dump_file, "\n");
    1935              :             }
    1936          545 :           gimple_call_set_lhs (ostmt, NULL_TREE);
    1937          545 :           update_stmt (ostmt);
    1938          545 :           statistics_counter_event (cfun, "removed lhs call", 1);
    1939          545 :           return;
    1940              :         }
    1941              :       /* This an assignment store to the clobbered decl,
    1942              :          then maybe remove it. */
    1943     11610130 :       if (is_a <gassign*>(ostmt)
    1944      9788962 :           && gimple_store_p (ostmt)
    1945      9788962 :           && !gimple_clobber_p (ostmt)
    1946     14497999 :           && lhs == gimple_assign_lhs (ostmt))
    1947              :         {
    1948              :           /* Don't remove stores/statements that are needed for non-call
    1949              :               eh to work.  */
    1950       151609 :           if (stmt_unremovable_because_of_non_call_eh_p (cfun, ostmt))
    1951              :             return;
    1952              :           /* If we delete a stmt that could throw, mark the block
    1953              :              in to_purge to cleanup afterwards.  */
    1954       146525 :           if (stmt_could_throw_p (cfun, ostmt))
    1955            0 :             bitmap_set_bit (to_purge, obb->index);
    1956       146525 :           gimple_stmt_iterator gsi = gsi_for_stmt (ostmt);
    1957       146525 :           if (dump_file && (dump_flags & TDF_DETAILS))
    1958              :             {
    1959           12 :               fprintf (dump_file, "Removing dead store stmt ");
    1960           12 :               print_gimple_stmt (dump_file, ostmt, 0);
    1961           12 :               fprintf (dump_file, "\n");
    1962              :             }
    1963       146525 :           unlink_stmt_vdef (ostmt);
    1964       146525 :           release_defs (ostmt);
    1965       146525 :           gsi_remove (&gsi, true);
    1966       146525 :           statistics_counter_event (cfun, "delete dead store", 1);
    1967              :           /* Only remove the first store previous statement. */
    1968       146525 :           return;
    1969              :         }
    1970              :       /* If the statement uses or maybe writes to the decl,
    1971              :          then nothing is to be removed. Don't know if the write
    1972              :          to the decl is partial write or a full one so the need
    1973              :          to stop.
    1974              :          e.g.
    1975              :          b.c = a;
    1976              :            Easier to stop here rather than do a full partial
    1977              :            dse of this statement.
    1978              :          b = {CLOBBER}; */
    1979     11458521 :       if (stmt_may_clobber_ref_p_1 (ostmt, &read, false)
    1980     11458521 :           || ref_maybe_used_by_stmt_p (ostmt, &read, false))
    1981      2069085 :         return;
    1982      9389436 :       limit--;
    1983              :     }
    1984              : }
    1985              : 
    1986              : /* Optimizes builtin memcmps for small constant sizes.
    1987              :    GSI_P is the GSI for the call. STMT is the call itself.
    1988              :    */
    1989              : 
    1990              : static bool
    1991       462827 : simplify_builtin_memcmp (gimple_stmt_iterator *gsi_p, gcall *stmt)
    1992              : {
    1993              :   /* Make sure memcmp arguments are the correct type.  */
    1994       462827 :   if (gimple_call_num_args (stmt) != 3)
    1995              :     return false;
    1996       462827 :   tree arg1 = gimple_call_arg (stmt, 0);
    1997       462827 :   tree arg2 = gimple_call_arg (stmt, 1);
    1998       462827 :   tree len = gimple_call_arg (stmt, 2);
    1999              : 
    2000       462827 :   if (!POINTER_TYPE_P (TREE_TYPE (arg1)))
    2001              :     return false;
    2002       462827 :   if (!POINTER_TYPE_P (TREE_TYPE (arg2)))
    2003              :     return false;
    2004       462827 :   if (!INTEGRAL_TYPE_P (TREE_TYPE (len)))
    2005              :     return false;
    2006              : 
    2007              :   /* The return value of the memcmp has to be used
    2008              :      equality comparison to zero. */
    2009       462827 :   tree res = gimple_call_lhs (stmt);
    2010              : 
    2011       462827 :   if (!res || !use_in_zero_equality (res))
    2012        13932 :     return false;
    2013              : 
    2014       448895 :   unsigned HOST_WIDE_INT leni;
    2015              : 
    2016       448895 :   if (tree_fits_uhwi_p (len)
    2017       625396 :       && (leni = tree_to_uhwi (len)) <= GET_MODE_SIZE (word_mode)
    2018       529079 :       && pow2p_hwi (leni))
    2019              :     {
    2020        19045 :       leni *= CHAR_TYPE_SIZE;
    2021        19045 :       unsigned align1 = get_pointer_alignment (arg1);
    2022        19045 :       unsigned align2 = get_pointer_alignment (arg2);
    2023        19045 :       unsigned align = MIN (align1, align2);
    2024        19045 :       scalar_int_mode mode;
    2025        19045 :       if (int_mode_for_size (leni, 1).exists (&mode)
    2026        19045 :           && (align >= leni || !targetm.slow_unaligned_access (mode, align)))
    2027              :         {
    2028        19045 :           location_t loc = gimple_location (stmt);
    2029        19045 :           tree type, off;
    2030        19045 :           type = build_nonstandard_integer_type (leni, 1);
    2031        38090 :           gcc_assert (known_eq (GET_MODE_BITSIZE (TYPE_MODE (type)), leni));
    2032        19045 :           tree ptrtype = build_pointer_type_for_mode (char_type_node,
    2033              :                                                       ptr_mode, true);
    2034        19045 :           off = build_int_cst (ptrtype, 0);
    2035              : 
    2036              :           /* Create unaligned types if needed. */
    2037        19045 :           tree type1 = type, type2 = type;
    2038        19045 :           if (TYPE_ALIGN (type1) > align1)
    2039         7721 :             type1 = build_aligned_type (type1, align1);
    2040        19045 :           if (TYPE_ALIGN (type2) > align2)
    2041         8221 :             type2 = build_aligned_type (type2, align2);
    2042              : 
    2043        19045 :           arg1 = build2_loc (loc, MEM_REF, type1, arg1, off);
    2044        19045 :           arg2 = build2_loc (loc, MEM_REF, type2, arg2, off);
    2045        19045 :           tree tem1 = fold_const_aggregate_ref (arg1);
    2046        19045 :           if (tem1)
    2047          219 :             arg1 = tem1;
    2048        19045 :           tree tem2 = fold_const_aggregate_ref (arg2);
    2049        19045 :           if (tem2)
    2050         7412 :             arg2 = tem2;
    2051        19045 :           res = fold_convert_loc (loc, TREE_TYPE (res),
    2052              :                                   fold_build2_loc (loc, NE_EXPR,
    2053              :                                                    boolean_type_node,
    2054              :                                                    arg1, arg2));
    2055        19045 :           gimplify_and_update_call_from_tree (gsi_p, res);
    2056        19045 :           return true;
    2057              :         }
    2058              :     }
    2059              : 
    2060              :   /* Replace memcmp with memcmp_eq if the above fails. */
    2061       429850 :   if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) == BUILT_IN_MEMCMP_EQ)
    2062              :     return false;
    2063       340554 :   if (!fold_before_rtl_expansion_p ())
    2064              :     return false;
    2065        89296 :   gimple_call_set_fndecl (stmt, builtin_decl_explicit (BUILT_IN_MEMCMP_EQ));
    2066        89296 :   update_stmt (stmt);
    2067        89296 :   return true;
    2068              : }
    2069              : 
    2070              : /* Optimizes builtin memchrs for small constant sizes with a const string.
    2071              :    GSI_P is the GSI for the call. STMT is the call itself.
    2072              :    */
    2073              : 
    2074              : static bool
    2075        15253 : simplify_builtin_memchr (gimple_stmt_iterator *gsi_p, gcall *stmt)
    2076              : {
    2077        15253 :   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    2078              :     return false;
    2079              : 
    2080        15253 :   if (gimple_call_num_args (stmt) != 3)
    2081              :     return false;
    2082              : 
    2083        15253 :   tree res = gimple_call_lhs (stmt);
    2084        15253 :   if (!res || !use_in_zero_equality (res))
    2085        13828 :     return false;
    2086              : 
    2087         1425 :   tree ptr = gimple_call_arg (stmt, 0);
    2088         1425 :   if (TREE_CODE (ptr) != ADDR_EXPR
    2089         1425 :       || TREE_CODE (TREE_OPERAND (ptr, 0)) != STRING_CST)
    2090              :     return false;
    2091              : 
    2092          422 :   unsigned HOST_WIDE_INT slen
    2093          422 :     = TREE_STRING_LENGTH (TREE_OPERAND (ptr, 0));
    2094              :   /* It must be a non-empty string constant.  */
    2095          422 :   if (slen < 2)
    2096              :     return false;
    2097              : 
    2098              :   /* For -Os, only simplify strings with a single character.  */
    2099          418 :   if (!optimize_bb_for_speed_p (gimple_bb (stmt))
    2100          418 :       && slen > 2)
    2101              :     return false;
    2102              : 
    2103          402 :   tree size = gimple_call_arg (stmt, 2);
    2104              :   /* Size must be a constant which is <= UNITS_PER_WORD and
    2105              :      <= the string length.  */
    2106          402 :   if (!tree_fits_uhwi_p (size))
    2107              :     return false;
    2108              : 
    2109          402 :   unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
    2110          403 :   if (sz == 0 || sz > UNITS_PER_WORD || sz >= slen)
    2111              :     return false;
    2112              : 
    2113          350 :   tree ch = gimple_call_arg (stmt, 1);
    2114          350 :   location_t loc = gimple_location (stmt);
    2115          350 :   if (!useless_type_conversion_p (char_type_node,
    2116          350 :                                   TREE_TYPE (ch)))
    2117          350 :     ch = fold_convert_loc (loc, char_type_node, ch);
    2118          350 :   const char *p = TREE_STRING_POINTER (TREE_OPERAND (ptr, 0));
    2119          350 :   unsigned int isize = sz;
    2120          350 :   tree *op = XALLOCAVEC (tree, isize);
    2121         1261 :   for (unsigned int i = 0; i < isize; i++)
    2122              :     {
    2123          911 :       op[i] = build_int_cst (char_type_node, p[i]);
    2124          911 :       op[i] = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
    2125              :                                op[i], ch);
    2126              :     }
    2127          911 :   for (unsigned int i = isize - 1; i >= 1; i--)
    2128          561 :     op[i - 1] = fold_convert_loc (loc, boolean_type_node,
    2129              :                                   fold_build2_loc (loc,
    2130              :                                                    BIT_IOR_EXPR,
    2131              :                                                    boolean_type_node,
    2132          561 :                                                    op[i - 1],
    2133          561 :                                                    op[i]));
    2134          350 :   res = fold_convert_loc (loc, TREE_TYPE (res), op[0]);
    2135          350 :   gimplify_and_update_call_from_tree (gsi_p, res);
    2136          350 :   return true;
    2137              : }
    2138              : 
    2139              : /* *GSI_P is a GIMPLE_CALL to a builtin function.
    2140              :    Optimize
    2141              :    memcpy (p, "abcd", 4); // STMT1
    2142              :    memset (p + 4, ' ', 3); // STMT2
    2143              :    into
    2144              :    memcpy (p, "abcd   ", 7);
    2145              :    call if the latter can be stored by pieces during expansion.
    2146              : */
    2147              : 
    2148              : static bool
    2149       110847 : simplify_builtin_memcpy_memset (gimple_stmt_iterator *gsi_p, gcall *stmt2)
    2150              : {
    2151       110847 :   if (gimple_call_num_args (stmt2) != 3
    2152       110847 :       || gimple_call_lhs (stmt2)
    2153              :       || CHAR_BIT != 8
    2154       110847 :       || BITS_PER_UNIT != 8)
    2155              :     return false;
    2156              : 
    2157       211873 :   tree vuse = gimple_vuse (stmt2);
    2158       103370 :   if (vuse == NULL)
    2159              :     return false;
    2160       103370 :   gimple *stmt1 = SSA_NAME_DEF_STMT (vuse);
    2161              : 
    2162       103370 :   tree callee1;
    2163       103370 :   tree ptr1, src1, str1, off1, len1, lhs1;
    2164       103370 :   tree ptr2 = gimple_call_arg (stmt2, 0);
    2165       103370 :   tree val2 = gimple_call_arg (stmt2, 1);
    2166       103370 :   tree len2 = gimple_call_arg (stmt2, 2);
    2167       103370 :   tree diff, vdef, new_str_cst;
    2168       103370 :   gimple *use_stmt;
    2169       103370 :   unsigned int ptr1_align;
    2170       103370 :   unsigned HOST_WIDE_INT src_len;
    2171       103370 :   char *src_buf;
    2172       103370 :   use_operand_p use_p;
    2173              : 
    2174       103370 :   if (!tree_fits_shwi_p (val2)
    2175        99147 :       || !tree_fits_uhwi_p (len2)
    2176       165363 :       || compare_tree_int (len2, 1024) == 1)
    2177        46460 :     return false;
    2178              : 
    2179        56910 :   if (is_gimple_call (stmt1))
    2180              :     {
    2181              :       /* If first stmt is a call, it needs to be memcpy
    2182              :          or mempcpy, with string literal as second argument and
    2183              :          constant length.  */
    2184        29451 :       callee1 = gimple_call_fndecl (stmt1);
    2185        29451 :       if (callee1 == NULL_TREE
    2186        29335 :           || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
    2187        55345 :           || gimple_call_num_args (stmt1) != 3)
    2188              :         return false;
    2189        24608 :       if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
    2190        24608 :           && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
    2191              :         return false;
    2192        10867 :       ptr1 = gimple_call_arg (stmt1, 0);
    2193        10867 :       src1 = gimple_call_arg (stmt1, 1);
    2194        10867 :       len1 = gimple_call_arg (stmt1, 2);
    2195        10867 :       lhs1 = gimple_call_lhs (stmt1);
    2196        10867 :       if (!tree_fits_uhwi_p (len1))
    2197              :         return false;
    2198        10780 :       str1 = string_constant (src1, &off1, NULL, NULL);
    2199        10780 :       if (str1 == NULL_TREE)
    2200              :         return false;
    2201         4875 :       if (!tree_fits_uhwi_p (off1)
    2202         4875 :           || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
    2203         4875 :           || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
    2204         4875 :                                      - tree_to_uhwi (off1)) > 0
    2205         4875 :           || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
    2206        14625 :           || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
    2207         4875 :              != TYPE_MODE (char_type_node))
    2208            0 :         return false;
    2209              :     }
    2210        27459 :   else if (gimple_assign_single_p (stmt1))
    2211              :     {
    2212              :       /* Otherwise look for length 1 memcpy optimized into
    2213              :          assignment.  */
    2214        16753 :       ptr1 = gimple_assign_lhs (stmt1);
    2215        16753 :       src1 = gimple_assign_rhs1 (stmt1);
    2216        16753 :       if (TREE_CODE (ptr1) != MEM_REF
    2217         3512 :           || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
    2218        17787 :           || !tree_fits_shwi_p (src1))
    2219        16401 :         return false;
    2220          352 :       ptr1 = build_fold_addr_expr (ptr1);
    2221          352 :       STRIP_USELESS_TYPE_CONVERSION (ptr1);
    2222          352 :       callee1 = NULL_TREE;
    2223          352 :       len1 = size_one_node;
    2224          352 :       lhs1 = NULL_TREE;
    2225          352 :       off1 = size_zero_node;
    2226          352 :       str1 = NULL_TREE;
    2227              :     }
    2228              :   else
    2229              :     return false;
    2230              : 
    2231         5227 :   diff = constant_pointer_difference (ptr1, ptr2);
    2232         5227 :   if (diff == NULL && lhs1 != NULL)
    2233              :     {
    2234            7 :       diff = constant_pointer_difference (lhs1, ptr2);
    2235            7 :       if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
    2236            7 :           && diff != NULL)
    2237            7 :         diff = size_binop (PLUS_EXPR, diff,
    2238              :                            fold_convert (sizetype, len1));
    2239              :     }
    2240              :   /* If the difference between the second and first destination pointer
    2241              :      is not constant, or is bigger than memcpy length, bail out.  */
    2242         5227 :   if (diff == NULL
    2243         4344 :       || !tree_fits_uhwi_p (diff)
    2244         4344 :       || tree_int_cst_lt (len1, diff)
    2245         9327 :       || compare_tree_int (diff, 1024) == 1)
    2246         1127 :     return false;
    2247              : 
    2248              :   /* Use maximum of difference plus memset length and memcpy length
    2249              :      as the new memcpy length, if it is too big, bail out.  */
    2250         4100 :   src_len = tree_to_uhwi (diff);
    2251         4100 :   src_len += tree_to_uhwi (len2);
    2252         4100 :   if (src_len < tree_to_uhwi (len1))
    2253              :     src_len = tree_to_uhwi (len1);
    2254         4100 :   if (src_len > 1024)
    2255              :     return false;
    2256              : 
    2257              :   /* If mempcpy value is used elsewhere, bail out, as mempcpy
    2258              :      with bigger length will return different result.  */
    2259         4100 :   if (lhs1 != NULL_TREE
    2260          193 :       && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
    2261         4107 :       && (TREE_CODE (lhs1) != SSA_NAME
    2262            7 :           || !single_imm_use (lhs1, &use_p, &use_stmt)
    2263            7 :           || use_stmt != stmt2))
    2264            0 :     return false;
    2265              : 
    2266              :   /* If anything reads memory in between memcpy and memset
    2267              :      call, the modified memcpy call might change it.  */
    2268         4100 :   vdef = gimple_vdef (stmt1);
    2269         4100 :   if (vdef != NULL
    2270         4100 :       && (!single_imm_use (vdef, &use_p, &use_stmt)
    2271         3345 :           || use_stmt != stmt2))
    2272              :     return false;
    2273              : 
    2274         3345 :   ptr1_align = get_pointer_alignment (ptr1);
    2275              :   /* Construct the new source string literal.  */
    2276         3345 :   src_buf = XALLOCAVEC (char, src_len + 1);
    2277         3345 :   if (callee1)
    2278         3184 :     memcpy (src_buf,
    2279         3184 :             TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
    2280              :             tree_to_uhwi (len1));
    2281              :   else
    2282          161 :     src_buf[0] = tree_to_shwi (src1);
    2283         3345 :   memset (src_buf + tree_to_uhwi (diff),
    2284         3345 :           tree_to_shwi (val2), tree_to_uhwi (len2));
    2285         3345 :   src_buf[src_len] = '\0';
    2286              :   /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
    2287              :      handle embedded '\0's.  */
    2288         3345 :   if (strlen (src_buf) != src_len)
    2289              :     return false;
    2290         3255 :   rtl_profile_for_bb (gimple_bb (stmt2));
    2291              :   /* If the new memcpy wouldn't be emitted by storing the literal
    2292              :      by pieces, this optimization might enlarge .rodata too much,
    2293              :      as commonly used string literals couldn't be shared any
    2294              :      longer.  */
    2295         3255 :   if (!can_store_by_pieces (src_len,
    2296              :                             builtin_strncpy_read_str,
    2297              :                             src_buf, ptr1_align, false))
    2298              :     return false;
    2299              : 
    2300         2471 :   new_str_cst = build_string_literal (src_len, src_buf);
    2301         2471 :   if (callee1)
    2302              :     {
    2303              :       /* If STMT1 is a mem{,p}cpy call, adjust it and remove
    2304              :          memset call.  */
    2305         2344 :       if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
    2306            7 :         gimple_call_set_lhs (stmt1, NULL_TREE);
    2307         2344 :       gimple_call_set_arg (stmt1, 1, new_str_cst);
    2308         2344 :       gimple_call_set_arg (stmt1, 2,
    2309         2344 :                            build_int_cst (TREE_TYPE (len1), src_len));
    2310         2344 :       update_stmt (stmt1);
    2311         2344 :       unlink_stmt_vdef (stmt2);
    2312         2344 :       gsi_replace (gsi_p, gimple_build_nop (), false);
    2313         2344 :       fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
    2314         2344 :       release_defs (stmt2);
    2315         2344 :       if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
    2316              :         {
    2317            7 :           fwprop_invalidate_lattice (lhs1);
    2318            7 :           release_ssa_name (lhs1);
    2319              :         }
    2320         2344 :       return true;
    2321              :     }
    2322              :   else
    2323              :     {
    2324              :       /* Otherwise, if STMT1 is length 1 memcpy optimized into
    2325              :          assignment, remove STMT1 and change memset call into
    2326              :          memcpy call.  */
    2327          127 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
    2328              : 
    2329          127 :       if (!is_gimple_val (ptr1))
    2330           12 :         ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
    2331              :                                          true, GSI_SAME_STMT);
    2332          127 :       tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
    2333          127 :       gimple_call_set_fndecl (stmt2, fndecl);
    2334          127 :       gimple_call_set_fntype (stmt2,
    2335          127 :                               TREE_TYPE (fndecl));
    2336          127 :       gimple_call_set_arg (stmt2, 0, ptr1);
    2337          127 :       gimple_call_set_arg (stmt2, 1, new_str_cst);
    2338          127 :       gimple_call_set_arg (stmt2, 2,
    2339          127 :                            build_int_cst (TREE_TYPE (len2), src_len));
    2340          127 :       unlink_stmt_vdef (stmt1);
    2341          127 :       gsi_remove (&gsi, true);
    2342          127 :       fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
    2343          127 :       release_defs (stmt1);
    2344          127 :       update_stmt (stmt2);
    2345          127 :       return false;
    2346              :     }
    2347              : }
    2348              : 
    2349              : 
    2350              : /* Try to optimize out __builtin_stack_restore.  Optimize it out
    2351              :    if there is another __builtin_stack_restore in the same basic
    2352              :    block and no calls or ASM_EXPRs are in between, or if this block's
    2353              :    only outgoing edge is to EXIT_BLOCK and there are no calls or
    2354              :    ASM_EXPRs after this __builtin_stack_restore.
    2355              :    Note restore right before a noreturn function is not needed.
    2356              :    And skip some cheap calls that will most likely become an instruction.
    2357              :    Restoring the stack before a call is important to be able to keep
    2358              :    stack usage down so that call does not run out of stack.  */
    2359              : 
    2360              : 
    2361              : static bool
    2362        10378 : optimize_stack_restore (gimple_stmt_iterator *gsi, gimple *call)
    2363              : {
    2364        10378 :   if (!fold_before_rtl_expansion_p ())
    2365              :     return false;
    2366         2530 :   tree callee;
    2367         2530 :   gimple *stmt;
    2368              : 
    2369         2530 :   basic_block bb = gsi_bb (*gsi);
    2370              : 
    2371         2530 :   if (gimple_call_num_args (call) != 1
    2372         2530 :       || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
    2373         5060 :       || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    2374              :     return false;
    2375              : 
    2376         2530 :   gimple_stmt_iterator i = *gsi;
    2377         6341 :   for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    2378              :     {
    2379         4272 :       stmt = gsi_stmt (i);
    2380         4272 :       if (is_a<gasm*> (stmt))
    2381              :         return false;
    2382         4271 :       gcall *call = dyn_cast<gcall*>(stmt);
    2383         4271 :       if (!call)
    2384         3602 :         continue;
    2385              : 
    2386              :       /* We can remove the restore in front of noreturn
    2387              :          calls.  Since the restore will happen either
    2388              :          via an unwind/longjmp or not at all. */
    2389          669 :       if (gimple_call_noreturn_p (call))
    2390              :         break;
    2391              : 
    2392              :       /* Internal calls are ok, to bypass
    2393              :          check first since fndecl will be null. */
    2394          653 :       if (gimple_call_internal_p (call))
    2395            1 :         continue;
    2396              : 
    2397          652 :       callee = gimple_call_fndecl (call);
    2398              :       /* Non-builtin calls are not ok. */
    2399          652 :       if (!callee
    2400          652 :           || !fndecl_built_in_p (callee))
    2401              :         return false;
    2402              : 
    2403              :       /* Do not remove stack updates before strub leave.  */
    2404          576 :       if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
    2405              :           /* Alloca calls are not ok either. */
    2406          576 :           || fndecl_builtin_alloc_p (callee))
    2407              :         return false;
    2408              : 
    2409          364 :       if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
    2410           52 :         goto second_stack_restore;
    2411              : 
    2412              :       /* If not a simple or inexpensive builtin, then it is not ok either. */
    2413          312 :       if (!is_simple_builtin (callee)
    2414          312 :           && !is_inexpensive_builtin (callee))
    2415              :         return false;
    2416              :     }
    2417              : 
    2418              :   /* Allow one successor of the exit block, or zero successors.  */
    2419         2085 :   switch (EDGE_COUNT (bb->succs))
    2420              :     {
    2421              :     case 0:
    2422              :       break;
    2423         1998 :     case 1:
    2424         1998 :       if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    2425              :         return false;
    2426              :       break;
    2427              :     default:
    2428              :       return false;
    2429              :     }
    2430         1728 :  second_stack_restore:
    2431              : 
    2432              :   /* If there's exactly one use, then zap the call to __builtin_stack_save.
    2433              :      If there are multiple uses, then the last one should remove the call.
    2434              :      In any case, whether the call to __builtin_stack_save can be removed
    2435              :      or not is irrelevant to removing the call to __builtin_stack_restore.  */
    2436         1728 :   if (has_single_use (gimple_call_arg (call, 0)))
    2437              :     {
    2438         1551 :       gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
    2439         1551 :       if (is_gimple_call (stack_save))
    2440              :         {
    2441         1549 :           callee = gimple_call_fndecl (stack_save);
    2442         1549 :           if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
    2443              :             {
    2444         1549 :               gimple_stmt_iterator stack_save_gsi;
    2445         1549 :               tree rhs;
    2446              : 
    2447         1549 :               stack_save_gsi = gsi_for_stmt (stack_save);
    2448         1549 :               rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
    2449         1549 :               replace_call_with_value (&stack_save_gsi, rhs);
    2450              :             }
    2451              :         }
    2452              :     }
    2453              : 
    2454              :   /* No effect, so the statement will be deleted.  */
    2455         1728 :   replace_call_with_value (gsi, NULL_TREE);
    2456         1728 :   return true;
    2457              : }
    2458              : 
    2459              : /* *GSI_P is a GIMPLE_CALL to a builtin function.
    2460              :    Optimize
    2461              :    memcpy (p, "abcd", 4);
    2462              :    memset (p + 4, ' ', 3);
    2463              :    into
    2464              :    memcpy (p, "abcd   ", 7);
    2465              :    call if the latter can be stored by pieces during expansion.
    2466              : 
    2467              :    Optimize
    2468              :    memchr ("abcd", a, 4) == 0;
    2469              :    or
    2470              :    memchr ("abcd", a, 4) != 0;
    2471              :    to
    2472              :    (a == 'a' || a == 'b' || a == 'c' || a == 'd') == 0
    2473              :    or
    2474              :    (a == 'a' || a == 'b' || a == 'c' || a == 'd') != 0
    2475              : 
    2476              :    Also canonicalize __atomic_fetch_op (p, x, y) op x
    2477              :    to __atomic_op_fetch (p, x, y) or
    2478              :    __atomic_op_fetch (p, x, y) iop x
    2479              :    to __atomic_fetch_op (p, x, y) when possible (also __sync).  */
    2480              : 
    2481              : static bool
    2482      6128230 : simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2, bool full_walk)
    2483              : {
    2484      6128230 :   gimple *stmt2 = gsi_stmt (*gsi_p);
    2485      6128230 :   enum built_in_function other_atomic = END_BUILTINS;
    2486      6128230 :   enum tree_code atomic_op = ERROR_MARK;
    2487              : 
    2488      6128230 :   switch (DECL_FUNCTION_CODE (callee2))
    2489              :     {
    2490        10378 :     case BUILT_IN_STACK_RESTORE:
    2491        10378 :       return optimize_stack_restore (gsi_p, as_a<gcall*>(stmt2));
    2492       462827 :     case BUILT_IN_MEMCMP:
    2493       462827 :     case BUILT_IN_MEMCMP_EQ:
    2494       462827 :       return simplify_builtin_memcmp (gsi_p, as_a<gcall*>(stmt2));
    2495        15253 :     case BUILT_IN_MEMCHR:
    2496        15253 :       return simplify_builtin_memchr (gsi_p, as_a<gcall*>(stmt2));
    2497              : 
    2498       110847 :     case BUILT_IN_MEMSET:
    2499       110847 :       if (gimple_call_num_args (stmt2) == 3)
    2500              :         {
    2501              :           /* Try to prop the zeroing/value of the memset to memcpy
    2502              :              if the dest is an address and the value is a constant. */
    2503       110847 :           optimize_aggr_zeroprop (stmt2, full_walk);
    2504              :         }
    2505       110847 :       return simplify_builtin_memcpy_memset (gsi_p, as_a<gcall*>(stmt2));
    2506              : 
    2507              :  #define CASE_ATOMIC(NAME, OTHER, OP) \
    2508              :     case BUILT_IN_##NAME##_1:                                           \
    2509              :     case BUILT_IN_##NAME##_2:                                           \
    2510              :     case BUILT_IN_##NAME##_4:                                           \
    2511              :     case BUILT_IN_##NAME##_8:                                           \
    2512              :     case BUILT_IN_##NAME##_16:                                          \
    2513              :       atomic_op = OP;                                                   \
    2514              :       other_atomic                                                      \
    2515              :         = (enum built_in_function) (BUILT_IN_##OTHER##_1                \
    2516              :                                     + (DECL_FUNCTION_CODE (callee2)     \
    2517              :                                        - BUILT_IN_##NAME##_1));         \
    2518              :       goto handle_atomic_fetch_op;
    2519              : 
    2520        48981 :     CASE_ATOMIC (ATOMIC_FETCH_ADD, ATOMIC_ADD_FETCH, PLUS_EXPR)
    2521         7125 :     CASE_ATOMIC (ATOMIC_FETCH_SUB, ATOMIC_SUB_FETCH, MINUS_EXPR)
    2522         2876 :     CASE_ATOMIC (ATOMIC_FETCH_AND, ATOMIC_AND_FETCH, BIT_AND_EXPR)
    2523         2895 :     CASE_ATOMIC (ATOMIC_FETCH_XOR, ATOMIC_XOR_FETCH, BIT_XOR_EXPR)
    2524         3823 :     CASE_ATOMIC (ATOMIC_FETCH_OR, ATOMIC_OR_FETCH, BIT_IOR_EXPR)
    2525              : 
    2526         2363 :     CASE_ATOMIC (SYNC_FETCH_AND_ADD, SYNC_ADD_AND_FETCH, PLUS_EXPR)
    2527         2004 :     CASE_ATOMIC (SYNC_FETCH_AND_SUB, SYNC_SUB_AND_FETCH, MINUS_EXPR)
    2528         1876 :     CASE_ATOMIC (SYNC_FETCH_AND_AND, SYNC_AND_AND_FETCH, BIT_AND_EXPR)
    2529         2144 :     CASE_ATOMIC (SYNC_FETCH_AND_XOR, SYNC_XOR_AND_FETCH, BIT_XOR_EXPR)
    2530         1987 :     CASE_ATOMIC (SYNC_FETCH_AND_OR, SYNC_OR_AND_FETCH, BIT_IOR_EXPR)
    2531              : 
    2532        14324 :     CASE_ATOMIC (ATOMIC_ADD_FETCH, ATOMIC_FETCH_ADD, MINUS_EXPR)
    2533         8528 :     CASE_ATOMIC (ATOMIC_SUB_FETCH, ATOMIC_FETCH_SUB, PLUS_EXPR)
    2534         2380 :     CASE_ATOMIC (ATOMIC_XOR_FETCH, ATOMIC_FETCH_XOR, BIT_XOR_EXPR)
    2535              : 
    2536          821 :     CASE_ATOMIC (SYNC_ADD_AND_FETCH, SYNC_FETCH_AND_ADD, MINUS_EXPR)
    2537          732 :     CASE_ATOMIC (SYNC_SUB_AND_FETCH, SYNC_FETCH_AND_SUB, PLUS_EXPR)
    2538          800 :     CASE_ATOMIC (SYNC_XOR_AND_FETCH, SYNC_FETCH_AND_XOR, BIT_XOR_EXPR)
    2539              : 
    2540              : #undef CASE_ATOMIC
    2541              : 
    2542       103659 :     handle_atomic_fetch_op:
    2543       103659 :       if (gimple_call_num_args (stmt2) >= 2 && gimple_call_lhs (stmt2))
    2544              :         {
    2545        60238 :           tree lhs2 = gimple_call_lhs (stmt2), lhsc = lhs2;
    2546        60238 :           tree arg = gimple_call_arg (stmt2, 1);
    2547        60238 :           gimple *use_stmt, *cast_stmt = NULL;
    2548        60238 :           use_operand_p use_p;
    2549        60238 :           tree ndecl = builtin_decl_explicit (other_atomic);
    2550              : 
    2551        60238 :           if (ndecl == NULL_TREE || !single_imm_use (lhs2, &use_p, &use_stmt))
    2552              :             break;
    2553              : 
    2554        59109 :           if (gimple_assign_cast_p (use_stmt))
    2555              :             {
    2556        31652 :               cast_stmt = use_stmt;
    2557        31652 :               lhsc = gimple_assign_lhs (cast_stmt);
    2558        31652 :               if (lhsc == NULL_TREE
    2559        31652 :                   || !INTEGRAL_TYPE_P (TREE_TYPE (lhsc))
    2560        31101 :                   || (TYPE_PRECISION (TREE_TYPE (lhsc))
    2561        31101 :                       != TYPE_PRECISION (TREE_TYPE (lhs2)))
    2562        61227 :                   || !single_imm_use (lhsc, &use_p, &use_stmt))
    2563              :                 {
    2564         2605 :                   use_stmt = cast_stmt;
    2565         2605 :                   cast_stmt = NULL;
    2566         2605 :                   lhsc = lhs2;
    2567              :                 }
    2568              :             }
    2569              : 
    2570        59109 :           bool ok = false;
    2571        59109 :           tree oarg = NULL_TREE;
    2572        59109 :           enum tree_code ccode = ERROR_MARK;
    2573        59109 :           tree crhs1 = NULL_TREE, crhs2 = NULL_TREE;
    2574        59109 :           if (is_gimple_assign (use_stmt)
    2575        59109 :               && gimple_assign_rhs_code (use_stmt) == atomic_op)
    2576              :             {
    2577         1416 :               if (gimple_assign_rhs1 (use_stmt) == lhsc)
    2578         1016 :                 oarg = gimple_assign_rhs2 (use_stmt);
    2579          400 :               else if (atomic_op != MINUS_EXPR)
    2580              :                 oarg = gimple_assign_rhs1 (use_stmt);
    2581              :             }
    2582        57693 :           else if (atomic_op == MINUS_EXPR
    2583        13160 :                    && is_gimple_assign (use_stmt)
    2584         3609 :                    && gimple_assign_rhs_code (use_stmt) == PLUS_EXPR
    2585          199 :                    && TREE_CODE (arg) == INTEGER_CST
    2586        57892 :                    && (TREE_CODE (gimple_assign_rhs2 (use_stmt))
    2587              :                        == INTEGER_CST))
    2588              :             {
    2589          183 :               tree a = fold_convert (TREE_TYPE (lhs2), arg);
    2590          183 :               tree o = fold_convert (TREE_TYPE (lhs2),
    2591              :                                      gimple_assign_rhs2 (use_stmt));
    2592          183 :               if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
    2593              :                 ok = true;
    2594              :             }
    2595        57510 :           else if (atomic_op == BIT_AND_EXPR || atomic_op == BIT_IOR_EXPR)
    2596              :             ;
    2597        52268 :           else if (gimple_code (use_stmt) == GIMPLE_COND)
    2598              :             {
    2599        19429 :               ccode = gimple_cond_code (use_stmt);
    2600        19429 :               crhs1 = gimple_cond_lhs (use_stmt);
    2601        19429 :               crhs2 = gimple_cond_rhs (use_stmt);
    2602              :             }
    2603        32839 :           else if (is_gimple_assign (use_stmt))
    2604              :             {
    2605         9522 :               if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
    2606              :                 {
    2607         3941 :                   ccode = gimple_assign_rhs_code (use_stmt);
    2608         3941 :                   crhs1 = gimple_assign_rhs1 (use_stmt);
    2609         3941 :                   crhs2 = gimple_assign_rhs2 (use_stmt);
    2610              :                 }
    2611         5581 :               else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
    2612              :                 {
    2613            0 :                   tree cond = gimple_assign_rhs1 (use_stmt);
    2614            0 :                   if (COMPARISON_CLASS_P (cond))
    2615              :                     {
    2616            0 :                       ccode = TREE_CODE (cond);
    2617            0 :                       crhs1 = TREE_OPERAND (cond, 0);
    2618            0 :                       crhs2 = TREE_OPERAND (cond, 1);
    2619              :                     }
    2620              :                 }
    2621              :             }
    2622        24386 :           if (ccode == EQ_EXPR || ccode == NE_EXPR)
    2623              :             {
    2624              :               /* Deal with x - y == 0 or x ^ y == 0
    2625              :                  being optimized into x == y and x + cst == 0
    2626              :                  into x == -cst.  */
    2627        22186 :               tree o = NULL_TREE;
    2628        22186 :               if (crhs1 == lhsc)
    2629              :                 o = crhs2;
    2630          133 :               else if (crhs2 == lhsc)
    2631          133 :                 o = crhs1;
    2632        22186 :               if (o && atomic_op != PLUS_EXPR)
    2633              :                 oarg = o;
    2634        10054 :               else if (o
    2635        10054 :                        && TREE_CODE (o) == INTEGER_CST
    2636        10054 :                        && TREE_CODE (arg) == INTEGER_CST)
    2637              :                 {
    2638         9344 :                   tree a = fold_convert (TREE_TYPE (lhs2), arg);
    2639         9344 :                   o = fold_convert (TREE_TYPE (lhs2), o);
    2640         9344 :                   if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
    2641        59109 :                     ok = true;
    2642              :                 }
    2643              :             }
    2644        59109 :           if (oarg && !ok)
    2645              :             {
    2646        13548 :               if (operand_equal_p (arg, oarg, 0))
    2647              :                 ok = true;
    2648        12220 :               else if (TREE_CODE (arg) == SSA_NAME
    2649         2179 :                        && TREE_CODE (oarg) == SSA_NAME)
    2650              :                 {
    2651          745 :                   tree oarg2 = oarg;
    2652          745 :                   if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (oarg)))
    2653              :                     {
    2654          104 :                       gimple *g = SSA_NAME_DEF_STMT (oarg);
    2655          104 :                       oarg2 = gimple_assign_rhs1 (g);
    2656          104 :                       if (TREE_CODE (oarg2) != SSA_NAME
    2657          104 :                           || !INTEGRAL_TYPE_P (TREE_TYPE (oarg2))
    2658          208 :                           || (TYPE_PRECISION (TREE_TYPE (oarg2))
    2659          104 :                               != TYPE_PRECISION (TREE_TYPE (oarg))))
    2660              :                         oarg2 = oarg;
    2661              :                     }
    2662          745 :                   if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)))
    2663              :                     {
    2664          544 :                       gimple *g = SSA_NAME_DEF_STMT (arg);
    2665          544 :                       tree rhs1 = gimple_assign_rhs1 (g);
    2666              :                       /* Handle e.g.
    2667              :                          x.0_1 = (long unsigned int) x_4(D);
    2668              :                          _2 = __atomic_fetch_add_8 (&vlong, x.0_1, 0);
    2669              :                          _3 = (long int) _2;
    2670              :                          _7 = x_4(D) + _3;  */
    2671          544 :                       if (rhs1 == oarg || rhs1 == oarg2)
    2672              :                         ok = true;
    2673              :                       /* Handle e.g.
    2674              :                          x.18_1 = (short unsigned int) x_5(D);
    2675              :                          _2 = (int) x.18_1;
    2676              :                          _3 = __atomic_fetch_xor_2 (&vshort, _2, 0);
    2677              :                          _4 = (short int) _3;
    2678              :                          _8 = x_5(D) ^ _4;
    2679              :                          This happens only for char/short.  */
    2680          160 :                       else if (TREE_CODE (rhs1) == SSA_NAME
    2681          160 :                                && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
    2682          320 :                                && (TYPE_PRECISION (TREE_TYPE (rhs1))
    2683          160 :                                    == TYPE_PRECISION (TREE_TYPE (lhs2))))
    2684              :                         {
    2685          160 :                           g = SSA_NAME_DEF_STMT (rhs1);
    2686          160 :                           if (gimple_assign_cast_p (g)
    2687          160 :                               && (gimple_assign_rhs1 (g) == oarg
    2688            0 :                                   || gimple_assign_rhs1 (g) == oarg2))
    2689              :                             ok = true;
    2690              :                         }
    2691              :                     }
    2692          745 :                   if (!ok && arg == oarg2)
    2693              :                     /* Handle e.g.
    2694              :                        _1 = __sync_fetch_and_add_4 (&v, x_5(D));
    2695              :                        _2 = (int) _1;
    2696              :                        x.0_3 = (int) x_5(D);
    2697              :                        _7 = _2 + x.0_3;  */
    2698              :                     ok = true;
    2699              :                 }
    2700              :             }
    2701              : 
    2702        57781 :           if (ok)
    2703              :             {
    2704         2544 :               tree new_lhs = make_ssa_name (TREE_TYPE (lhs2));
    2705         2544 :               gimple_call_set_lhs (stmt2, new_lhs);
    2706         2544 :               gimple_call_set_fndecl (stmt2, ndecl);
    2707         2544 :               gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
    2708         2544 :               if (ccode == ERROR_MARK)
    2709         2000 :                 gimple_assign_set_rhs_with_ops (&gsi, cast_stmt
    2710              :                                                 ? NOP_EXPR : SSA_NAME,
    2711              :                                                 new_lhs);
    2712              :               else
    2713              :                 {
    2714         1321 :                   crhs1 = new_lhs;
    2715         1321 :                   crhs2 = build_zero_cst (TREE_TYPE (lhs2));
    2716         1321 :                   if (gimple_code (use_stmt) == GIMPLE_COND)
    2717              :                     {
    2718          982 :                       gcond *cond_stmt = as_a <gcond *> (use_stmt);
    2719          982 :                       gimple_cond_set_lhs (cond_stmt, crhs1);
    2720          982 :                       gimple_cond_set_rhs (cond_stmt, crhs2);
    2721              :                     }
    2722          339 :                   else if (gimple_assign_rhs_class (use_stmt)
    2723              :                            == GIMPLE_BINARY_RHS)
    2724              :                     {
    2725          339 :                       gimple_assign_set_rhs1 (use_stmt, crhs1);
    2726          339 :                       gimple_assign_set_rhs2 (use_stmt, crhs2);
    2727              :                     }
    2728              :                   else
    2729              :                     {
    2730            0 :                       gcc_checking_assert (gimple_assign_rhs_code (use_stmt)
    2731              :                                            == COND_EXPR);
    2732            0 :                       tree cond = build2 (ccode, boolean_type_node,
    2733              :                                           crhs1, crhs2);
    2734            0 :                       gimple_assign_set_rhs1 (use_stmt, cond);
    2735              :                     }
    2736              :                 }
    2737         2544 :               update_stmt (use_stmt);
    2738         2544 :               if (atomic_op != BIT_AND_EXPR
    2739         2544 :                   && atomic_op != BIT_IOR_EXPR
    2740         2544 :                   && !stmt_ends_bb_p (stmt2))
    2741              :                 {
    2742              :                   /* For the benefit of debug stmts, emit stmt(s) to set
    2743              :                      lhs2 to the value it had from the new builtin.
    2744              :                      E.g. if it was previously:
    2745              :                      lhs2 = __atomic_fetch_add_8 (ptr, arg, 0);
    2746              :                      emit:
    2747              :                      new_lhs = __atomic_add_fetch_8 (ptr, arg, 0);
    2748              :                      lhs2 = new_lhs - arg;
    2749              :                      We also keep cast_stmt if any in the IL for
    2750              :                      the same reasons.
    2751              :                      These stmts will be DCEd later and proper debug info
    2752              :                      will be emitted.
    2753              :                      This is only possible for reversible operations
    2754              :                      (+/-/^) and without -fnon-call-exceptions.  */
    2755         2203 :                   gsi = gsi_for_stmt (stmt2);
    2756         2203 :                   tree type = TREE_TYPE (lhs2);
    2757         2203 :                   if (TREE_CODE (arg) == INTEGER_CST)
    2758         1621 :                     arg = fold_convert (type, arg);
    2759          582 :                   else if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
    2760              :                     {
    2761            0 :                       tree narg = make_ssa_name (type);
    2762            0 :                       gimple *g = gimple_build_assign (narg, NOP_EXPR, arg);
    2763            0 :                       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    2764            0 :                       arg = narg;
    2765              :                     }
    2766         2203 :                   enum tree_code rcode;
    2767         2203 :                   switch (atomic_op)
    2768              :                     {
    2769              :                     case PLUS_EXPR: rcode = MINUS_EXPR; break;
    2770          726 :                     case MINUS_EXPR: rcode = PLUS_EXPR; break;
    2771          492 :                     case BIT_XOR_EXPR: rcode = atomic_op; break;
    2772            0 :                     default: gcc_unreachable ();
    2773              :                     }
    2774         2203 :                   gimple *g = gimple_build_assign (lhs2, rcode, new_lhs, arg);
    2775         2203 :                   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    2776         2203 :                   update_stmt (stmt2);
    2777              :                 }
    2778              :               else
    2779              :                 {
    2780              :                   /* For e.g.
    2781              :                      lhs2 = __atomic_fetch_or_8 (ptr, arg, 0);
    2782              :                      after we change it to
    2783              :                      new_lhs = __atomic_or_fetch_8 (ptr, arg, 0);
    2784              :                      there is no way to find out the lhs2 value (i.e.
    2785              :                      what the atomic memory contained before the operation),
    2786              :                      values of some bits are lost.  We have checked earlier
    2787              :                      that we don't have any non-debug users except for what
    2788              :                      we are already changing, so we need to reset the
    2789              :                      debug stmts and remove the cast_stmt if any.  */
    2790          341 :                   imm_use_iterator iter;
    2791          676 :                   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs2)
    2792          335 :                     if (use_stmt != cast_stmt)
    2793              :                       {
    2794          168 :                         gcc_assert (is_gimple_debug (use_stmt));
    2795          168 :                         gimple_debug_bind_reset_value (use_stmt);
    2796          168 :                         update_stmt (use_stmt);
    2797          341 :                       }
    2798          341 :                   if (cast_stmt)
    2799              :                     {
    2800          167 :                       gsi = gsi_for_stmt (cast_stmt);
    2801          167 :                       gsi_remove (&gsi, true);
    2802              :                     }
    2803          341 :                   update_stmt (stmt2);
    2804          341 :                   release_ssa_name (lhs2);
    2805              :                 }
    2806              :             }
    2807              :         }
    2808              :       break;
    2809              : 
    2810              :     default:
    2811              :       break;
    2812              :     }
    2813              :   return false;
    2814              : }
    2815              : 
    2816              : /* Given a ssa_name in NAME see if it was defined by an assignment and
    2817              :    set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
    2818              :    to the second operand on the rhs. */
    2819              : 
    2820              : static inline void
    2821     17234633 : defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
    2822              : {
    2823     17234633 :   gimple *def;
    2824     17234633 :   enum tree_code code1;
    2825     17234633 :   tree arg11;
    2826     17234633 :   tree arg21;
    2827     17234633 :   tree arg31;
    2828     17234633 :   enum gimple_rhs_class grhs_class;
    2829              : 
    2830     17234633 :   code1 = TREE_CODE (name);
    2831     17234633 :   arg11 = name;
    2832     17234633 :   arg21 = NULL_TREE;
    2833     17234633 :   arg31 = NULL_TREE;
    2834     17234633 :   grhs_class = get_gimple_rhs_class (code1);
    2835              : 
    2836     17234633 :   if (code1 == SSA_NAME)
    2837              :     {
    2838     11484783 :       def = SSA_NAME_DEF_STMT (name);
    2839              : 
    2840     11484783 :       if (def && is_gimple_assign (def)
    2841     18583310 :           && can_propagate_from (def))
    2842              :         {
    2843      4909519 :           code1 = gimple_assign_rhs_code (def);
    2844      4909519 :           arg11 = gimple_assign_rhs1 (def);
    2845      4909519 :           arg21 = gimple_assign_rhs2 (def);
    2846      4909519 :           arg31 = gimple_assign_rhs3 (def);
    2847              :         }
    2848              :     }
    2849      5749850 :   else if (grhs_class != GIMPLE_SINGLE_RHS)
    2850            0 :     code1 = ERROR_MARK;
    2851              : 
    2852     17234633 :   *code = code1;
    2853     17234633 :   *arg1 = arg11;
    2854     17234633 :   if (arg2)
    2855     17217397 :     *arg2 = arg21;
    2856     17234633 :   if (arg31)
    2857         2206 :     *code = ERROR_MARK;
    2858     17234633 : }
    2859              : 
    2860              : 
    2861              : /* Recognize rotation patterns.  Return true if a transformation
    2862              :    applied, otherwise return false.
    2863              : 
    2864              :    We are looking for X with unsigned type T with bitsize B, OP being
    2865              :    +, | or ^, some type T2 wider than T.  For:
    2866              :    (X << CNT1) OP (X >> CNT2)                               iff CNT1 + CNT2 == B
    2867              :    ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
    2868              : 
    2869              :    transform these into:
    2870              :    X r<< CNT1
    2871              : 
    2872              :    Or for:
    2873              :    (X << Y) OP (X >> (B - Y))
    2874              :    (X << (int) Y) OP (X >> (int) (B - Y))
    2875              :    ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
    2876              :    ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
    2877              :    (X << Y) | (X >> ((-Y) & (B - 1)))
    2878              :    (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
    2879              :    ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
    2880              :    ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
    2881              : 
    2882              :    transform these into (last 2 only if ranger can prove Y < B
    2883              :    or Y = N * B):
    2884              :    X r<< Y
    2885              :    or
    2886              :    X r<< (& & (B - 1))
    2887              :    The latter for the forms with T2 wider than T if ranger can't prove Y < B.
    2888              : 
    2889              :    Or for:
    2890              :    (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
    2891              :    (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
    2892              :    ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
    2893              :    ((T) ((T2) X << (int) (Y & (B - 1)))) \
    2894              :      | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
    2895              : 
    2896              :    transform these into:
    2897              :    X r<< (Y & (B - 1))
    2898              : 
    2899              :    Note, in the patterns with T2 type, the type of OP operands
    2900              :    might be even a signed type, but should have precision B.
    2901              :    Expressions with & (B - 1) should be recognized only if B is
    2902              :    a power of 2.  */
    2903              : 
    2904              : static bool
    2905     10121909 : simplify_rotate (gimple_stmt_iterator *gsi)
    2906              : {
    2907     10121909 :   gimple *stmt = gsi_stmt (*gsi);
    2908     10121909 :   tree arg[2], rtype, rotcnt = NULL_TREE;
    2909     10121909 :   tree def_arg1[2], def_arg2[2];
    2910     10121909 :   enum tree_code def_code[2];
    2911     10121909 :   tree lhs;
    2912     10121909 :   int i;
    2913     10121909 :   bool swapped_p = false;
    2914     10121909 :   gimple *g;
    2915     10121909 :   gimple *def_arg_stmt[2] = { NULL, NULL };
    2916     10121909 :   int wider_prec = 0;
    2917     10121909 :   bool add_masking = false;
    2918              : 
    2919     10121909 :   arg[0] = gimple_assign_rhs1 (stmt);
    2920     10121909 :   arg[1] = gimple_assign_rhs2 (stmt);
    2921     10121909 :   rtype = TREE_TYPE (arg[0]);
    2922              : 
    2923              :   /* Only create rotates in complete modes.  Other cases are not
    2924              :      expanded properly.  */
    2925     10121909 :   if (!INTEGRAL_TYPE_P (rtype)
    2926     10121909 :       || !type_has_mode_precision_p (rtype))
    2927      1554305 :     return false;
    2928              : 
    2929     25702812 :   for (i = 0; i < 2; i++)
    2930              :     {
    2931     17135208 :       defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
    2932     17135208 :       if (TREE_CODE (arg[i]) == SSA_NAME)
    2933     11385358 :         def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
    2934              :     }
    2935              : 
    2936              :   /* Look through narrowing (or same precision) conversions.  */
    2937      7606332 :   if (CONVERT_EXPR_CODE_P (def_code[0])
    2938       961272 :       && CONVERT_EXPR_CODE_P (def_code[1])
    2939       139870 :       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
    2940       116861 :       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
    2941       109262 :       && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
    2942       109262 :          == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
    2943        63577 :       && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
    2944        44040 :       && has_single_use (arg[0])
    2945      8600406 :       && has_single_use (arg[1]))
    2946              :     {
    2947        28479 :       wider_prec = TYPE_PRECISION (TREE_TYPE (def_arg1[0]));
    2948        85437 :       for (i = 0; i < 2; i++)
    2949              :         {
    2950        56958 :           arg[i] = def_arg1[i];
    2951        56958 :           defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
    2952        56958 :           if (TREE_CODE (arg[i]) == SSA_NAME)
    2953        56958 :             def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
    2954              :         }
    2955              :     }
    2956              :   else
    2957              :     {
    2958              :       /* Handle signed rotate; the RSHIFT_EXPR has to be done
    2959              :          in unsigned type but LSHIFT_EXPR could be signed.  */
    2960      8539125 :       i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
    2961      7589468 :       if (CONVERT_EXPR_CODE_P (def_code[i])
    2962       949657 :           && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
    2963        28168 :           && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
    2964        27084 :           && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
    2965      8542465 :           && has_single_use (arg[i]))
    2966              :         {
    2967         1995 :           arg[i] = def_arg1[i];
    2968         1995 :           defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
    2969         1995 :           if (TREE_CODE (arg[i]) == SSA_NAME)
    2970         1995 :             def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
    2971              :         }
    2972              :     }
    2973              : 
    2974              :   /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
    2975      8765389 :   for (i = 0; i < 2; i++)
    2976      8740851 :     if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
    2977              :       return false;
    2978       239667 :     else if (!has_single_use (arg[i]))
    2979              :       return false;
    2980        24538 :   if (def_code[0] == def_code[1])
    2981              :     return false;
    2982              : 
    2983              :   /* If we've looked through narrowing conversions before, look through
    2984              :      widening conversions from unsigned type with the same precision
    2985              :      as rtype here.  */
    2986        20241 :   if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    2987        19348 :     for (i = 0; i < 2; i++)
    2988              :       {
    2989        12900 :         tree tem;
    2990        12900 :         enum tree_code code;
    2991        12900 :         defcodefor_name (def_arg1[i], &code, &tem, NULL);
    2992            4 :         if (!CONVERT_EXPR_CODE_P (code)
    2993        12896 :             || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
    2994        25796 :             || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
    2995            4 :           return false;
    2996        12896 :         def_arg1[i] = tem;
    2997              :       }
    2998              :   /* Both shifts have to use the same first operand.  */
    2999        20237 :   if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
    3000        32166 :       || !types_compatible_p (TREE_TYPE (def_arg1[0]),
    3001        11929 :                               TREE_TYPE (def_arg1[1])))
    3002              :     {
    3003         8308 :       if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
    3004         8308 :            != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
    3005         8308 :           || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
    3006         8308 :               == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
    3007         8284 :         return false;
    3008              : 
    3009              :       /* Handle signed rotate; the RSHIFT_EXPR has to be done
    3010              :          in unsigned type but LSHIFT_EXPR could be signed.  */
    3011          540 :       i = def_code[0] != RSHIFT_EXPR;
    3012          540 :       if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
    3013              :         return false;
    3014              : 
    3015          507 :       tree tem;
    3016          507 :       enum tree_code code;
    3017          507 :       defcodefor_name (def_arg1[i], &code, &tem, NULL);
    3018          304 :       if (!CONVERT_EXPR_CODE_P (code)
    3019          203 :           || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
    3020          710 :           || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
    3021              :         return false;
    3022          194 :       def_arg1[i] = tem;
    3023          194 :       if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
    3024          218 :           || !types_compatible_p (TREE_TYPE (def_arg1[0]),
    3025           24 :                                   TREE_TYPE (def_arg1[1])))
    3026          170 :         return false;
    3027              :     }
    3028        11929 :   else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    3029              :     return false;
    3030              : 
    3031              :   /* CNT1 + CNT2 == B case above.  */
    3032        10698 :   if (tree_fits_uhwi_p (def_arg2[0])
    3033         1210 :       && tree_fits_uhwi_p (def_arg2[1])
    3034        10698 :       && tree_to_uhwi (def_arg2[0])
    3035         1210 :          + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    3036              :     rotcnt = def_arg2[0];
    3037         9768 :   else if (TREE_CODE (def_arg2[0]) != SSA_NAME
    3038         9488 :            || TREE_CODE (def_arg2[1]) != SSA_NAME)
    3039              :     return false;
    3040              :   else
    3041              :     {
    3042         9488 :       tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
    3043         9488 :       enum tree_code cdef_code[2];
    3044         9488 :       gimple *def_arg_alt_stmt[2] = { NULL, NULL };
    3045         9488 :       int check_range = 0;
    3046         9488 :       gimple *check_range_stmt = NULL;
    3047              :       /* Look through conversion of the shift count argument.
    3048              :          The C/C++ FE cast any shift count argument to integer_type_node.
    3049              :          The only problem might be if the shift count type maximum value
    3050              :          is equal or smaller than number of bits in rtype.  */
    3051        28464 :       for (i = 0; i < 2; i++)
    3052              :         {
    3053        18976 :           def_arg2_alt[i] = def_arg2[i];
    3054        18976 :           defcodefor_name (def_arg2[i], &cdef_code[i],
    3055              :                            &cdef_arg1[i], &cdef_arg2[i]);
    3056        14716 :           if (CONVERT_EXPR_CODE_P (cdef_code[i])
    3057         4260 :               && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
    3058         4260 :               && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
    3059         8520 :                  > floor_log2 (TYPE_PRECISION (rtype))
    3060        23236 :               && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
    3061              :             {
    3062         4260 :               def_arg2_alt[i] = cdef_arg1[i];
    3063         4260 :               if (TREE_CODE (def_arg2[i]) == SSA_NAME)
    3064         4260 :                 def_arg_alt_stmt[i] = SSA_NAME_DEF_STMT (def_arg2[i]);
    3065         4260 :               defcodefor_name (def_arg2_alt[i], &cdef_code[i],
    3066              :                                &cdef_arg1[i], &cdef_arg2[i]);
    3067              :             }
    3068              :           else
    3069        14716 :             def_arg_alt_stmt[i] = def_arg_stmt[i];
    3070              :         }
    3071        25796 :       for (i = 0; i < 2; i++)
    3072              :         /* Check for one shift count being Y and the other B - Y,
    3073              :            with optional casts.  */
    3074        18625 :         if (cdef_code[i] == MINUS_EXPR
    3075          862 :             && tree_fits_shwi_p (cdef_arg1[i])
    3076          862 :             && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
    3077        19447 :             && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
    3078              :           {
    3079          822 :             tree tem;
    3080          822 :             enum tree_code code;
    3081              : 
    3082          822 :             if (cdef_arg2[i] == def_arg2[1 - i]
    3083          472 :                 || cdef_arg2[i] == def_arg2_alt[1 - i])
    3084              :               {
    3085          350 :                 rotcnt = cdef_arg2[i];
    3086          350 :                 check_range = -1;
    3087          350 :                 if (cdef_arg2[i] == def_arg2[1 - i])
    3088          350 :                   check_range_stmt = def_arg_stmt[1 - i];
    3089              :                 else
    3090            0 :                   check_range_stmt = def_arg_alt_stmt[1 - i];
    3091          806 :                 break;
    3092              :               }
    3093          472 :             defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
    3094           16 :             if (CONVERT_EXPR_CODE_P (code)
    3095          456 :                 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
    3096          456 :                 && TYPE_PRECISION (TREE_TYPE (tem))
    3097          912 :                    > floor_log2 (TYPE_PRECISION (rtype))
    3098          456 :                 && type_has_mode_precision_p (TREE_TYPE (tem))
    3099          928 :                 && (tem == def_arg2[1 - i]
    3100          288 :                     || tem == def_arg2_alt[1 - i]))
    3101              :               {
    3102          456 :                 rotcnt = tem;
    3103          456 :                 check_range = -1;
    3104          456 :                 if (tem == def_arg2[1 - i])
    3105          168 :                   check_range_stmt = def_arg_stmt[1 - i];
    3106              :                 else
    3107          288 :                   check_range_stmt = def_arg_alt_stmt[1 - i];
    3108              :                 break;
    3109              :               }
    3110              :           }
    3111              :         /* The above sequence isn't safe for Y being 0,
    3112              :            because then one of the shifts triggers undefined behavior.
    3113              :            This alternative is safe even for rotation count of 0.
    3114              :            One shift count is Y and the other (-Y) & (B - 1).
    3115              :            Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
    3116        17803 :         else if (cdef_code[i] == BIT_AND_EXPR
    3117        28724 :                  && pow2p_hwi (TYPE_PRECISION (rtype))
    3118        12416 :                  && tree_fits_shwi_p (cdef_arg2[i])
    3119        24832 :                  && tree_to_shwi (cdef_arg2[i])
    3120        12416 :                     == TYPE_PRECISION (rtype) - 1
    3121        12356 :                  && TREE_CODE (cdef_arg1[i]) == SSA_NAME
    3122        30159 :                  && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
    3123              :           {
    3124         2296 :             tree tem;
    3125         2296 :             enum tree_code code;
    3126              : 
    3127         2296 :             defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
    3128         2099 :             if (CONVERT_EXPR_CODE_P (code)
    3129          197 :                 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
    3130          197 :                 && TYPE_PRECISION (TREE_TYPE (tem))
    3131          394 :                    > floor_log2 (TYPE_PRECISION (rtype))
    3132         2493 :                 && type_has_mode_precision_p (TREE_TYPE (tem)))
    3133          197 :               defcodefor_name (tem, &code, &tem, NULL);
    3134              : 
    3135         2296 :             if (code == NEGATE_EXPR)
    3136              :               {
    3137         1525 :                 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
    3138              :                   {
    3139          854 :                     rotcnt = tem;
    3140          854 :                     check_range = 1;
    3141          854 :                     if (tem == def_arg2[1 - i])
    3142          846 :                       check_range_stmt = def_arg_stmt[1 - i];
    3143              :                     else
    3144            8 :                       check_range_stmt = def_arg_alt_stmt[1 - i];
    3145         1511 :                     break;
    3146              :                   }
    3147          671 :                 tree tem2;
    3148          671 :                 defcodefor_name (tem, &code, &tem2, NULL);
    3149          237 :                 if (CONVERT_EXPR_CODE_P (code)
    3150          434 :                     && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
    3151          434 :                     && TYPE_PRECISION (TREE_TYPE (tem2))
    3152          868 :                        > floor_log2 (TYPE_PRECISION (rtype))
    3153         1105 :                     && type_has_mode_precision_p (TREE_TYPE (tem2)))
    3154              :                   {
    3155          434 :                     if (tem2 == def_arg2[1 - i]
    3156          434 :                         || tem2 == def_arg2_alt[1 - i])
    3157              :                       {
    3158          228 :                         rotcnt = tem2;
    3159          228 :                         check_range = 1;
    3160          228 :                         if (tem2 == def_arg2[1 - i])
    3161            0 :                           check_range_stmt = def_arg_stmt[1 - i];
    3162              :                         else
    3163          228 :                           check_range_stmt = def_arg_alt_stmt[1 - i];
    3164              :                         break;
    3165              :                       }
    3166              :                   }
    3167              :                 else
    3168          237 :                   tem2 = NULL_TREE;
    3169              : 
    3170          443 :                 if (cdef_code[1 - i] == BIT_AND_EXPR
    3171          430 :                     && tree_fits_shwi_p (cdef_arg2[1 - i])
    3172          860 :                     && tree_to_shwi (cdef_arg2[1 - i])
    3173          430 :                        == TYPE_PRECISION (rtype) - 1
    3174          873 :                     && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
    3175              :                   {
    3176          430 :                     if (tem == cdef_arg1[1 - i]
    3177          205 :                         || tem2 == cdef_arg1[1 - i])
    3178              :                       {
    3179              :                         rotcnt = def_arg2[1 - i];
    3180          429 :                         break;
    3181              :                       }
    3182          193 :                     tree tem3;
    3183          193 :                     defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
    3184            0 :                     if (CONVERT_EXPR_CODE_P (code)
    3185          193 :                         && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
    3186          193 :                         && TYPE_PRECISION (TREE_TYPE (tem3))
    3187          386 :                            > floor_log2 (TYPE_PRECISION (rtype))
    3188          386 :                         && type_has_mode_precision_p (TREE_TYPE (tem3)))
    3189              :                       {
    3190          193 :                         if (tem == tem3 || tem2 == tem3)
    3191              :                           {
    3192              :                             rotcnt = def_arg2[1 - i];
    3193              :                             break;
    3194              :                           }
    3195              :                       }
    3196              :                   }
    3197              :               }
    3198              :           }
    3199         2317 :       if (check_range && wider_prec > TYPE_PRECISION (rtype))
    3200              :         {
    3201         1533 :           if (TREE_CODE (rotcnt) != SSA_NAME)
    3202          573 :             return false;
    3203         1533 :           int_range_max r;
    3204         1533 :           range_query *q = get_range_query (cfun);
    3205         1533 :           if (q == get_global_range_query ())
    3206         1522 :             q = enable_ranger (cfun);
    3207         1533 :           if (!q->range_of_expr (r, rotcnt, check_range_stmt))
    3208              :             {
    3209            0 :               if (check_range > 0)
    3210              :                 return false;
    3211            0 :               r.set_varying (TREE_TYPE (rotcnt));
    3212              :             }
    3213         1533 :           int prec = TYPE_PRECISION (TREE_TYPE (rotcnt));
    3214         1533 :           signop sign = TYPE_SIGN (TREE_TYPE (rotcnt));
    3215         1533 :           wide_int min = wide_int::from (TYPE_PRECISION (rtype), prec, sign);
    3216         1533 :           wide_int max = wide_int::from (wider_prec - 1, prec, sign);
    3217         1533 :           if (check_range < 0)
    3218          616 :             max = min;
    3219         1533 :           int_range<1> r2 (TREE_TYPE (rotcnt), min, max);
    3220         1533 :           r.intersect (r2);
    3221         1533 :           if (!r.undefined_p ())
    3222              :             {
    3223         1181 :               if (check_range > 0)
    3224              :                 {
    3225          589 :                   int_range_max r3;
    3226         1844 :                   for (int i = TYPE_PRECISION (rtype) + 1; i < wider_prec;
    3227         1255 :                        i += TYPE_PRECISION (rtype))
    3228              :                     {
    3229         1255 :                       int j = i + TYPE_PRECISION (rtype) - 2;
    3230         1255 :                       min = wide_int::from (i, prec, sign);
    3231         1255 :                       max = wide_int::from (MIN (j, wider_prec - 1),
    3232         1255 :                                             prec, sign);
    3233         1255 :                       int_range<1> r4 (TREE_TYPE (rotcnt), min, max);
    3234         1255 :                       r3.union_ (r4);
    3235         1255 :                     }
    3236          589 :                   r.intersect (r3);
    3237          589 :                   if (!r.undefined_p ())
    3238          573 :                     return false;
    3239          589 :                 }
    3240              :               add_masking = true;
    3241              :             }
    3242         1533 :         }
    3243         8915 :       if (rotcnt == NULL_TREE)
    3244              :         return false;
    3245         1744 :       swapped_p = i != 1;
    3246              :     }
    3247              : 
    3248         2674 :   if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
    3249         2674 :                                   TREE_TYPE (rotcnt)))
    3250              :     {
    3251          496 :       g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
    3252              :                                NOP_EXPR, rotcnt);
    3253          496 :       gsi_insert_before (gsi, g, GSI_SAME_STMT);
    3254          496 :       rotcnt = gimple_assign_lhs (g);
    3255              :     }
    3256         2674 :   if (add_masking)
    3257              :     {
    3258          608 :       g = gimple_build_assign (make_ssa_name (TREE_TYPE (rotcnt)),
    3259              :                                BIT_AND_EXPR, rotcnt,
    3260          608 :                                build_int_cst (TREE_TYPE (rotcnt),
    3261          608 :                                               TYPE_PRECISION (rtype) - 1));
    3262          608 :       gsi_insert_before (gsi, g, GSI_SAME_STMT);
    3263          608 :       rotcnt = gimple_assign_lhs (g);
    3264              :     }
    3265         2674 :   lhs = gimple_assign_lhs (stmt);
    3266         2674 :   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    3267         1010 :     lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
    3268         2674 :   g = gimple_build_assign (lhs,
    3269         2674 :                            ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
    3270              :                            ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
    3271         2674 :   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    3272              :     {
    3273         1010 :       gsi_insert_before (gsi, g, GSI_SAME_STMT);
    3274         1010 :       g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    3275              :     }
    3276         2674 :   gsi_replace (gsi, g, false);
    3277         2674 :   return true;
    3278              : }
    3279              : 
    3280              : 
    3281              : /* Check whether an array contains a valid table according to VALIDATE_FN.  */
    3282              : template<typename ValidateFn>
    3283              : static bool
    3284           14 : check_table_array (tree ctor, HOST_WIDE_INT &zero_val, unsigned bits,
    3285              :                   ValidateFn validate_fn)
    3286              : {
    3287              :   tree elt, idx;
    3288           14 :   unsigned HOST_WIDE_INT i, raw_idx = 0;
    3289           14 :   unsigned matched = 0;
    3290              : 
    3291           14 :   zero_val = 0;
    3292              : 
    3293          542 :   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
    3294              :     {
    3295          542 :       if (!tree_fits_shwi_p (idx))
    3296              :         return false;
    3297          542 :       if (!tree_fits_shwi_p (elt) && TREE_CODE (elt) != RAW_DATA_CST)
    3298              :         return false;
    3299              : 
    3300          542 :       unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
    3301              :       HOST_WIDE_INT val;
    3302              : 
    3303          542 :       if (TREE_CODE (elt) == INTEGER_CST)
    3304          478 :         val = tree_to_shwi (elt);
    3305              :       else
    3306              :         {
    3307           64 :           if (raw_idx == (unsigned) RAW_DATA_LENGTH (elt))
    3308              :             {
    3309            0 :               raw_idx = 0;
    3310            0 :               continue;
    3311              :             }
    3312           64 :           if (TYPE_UNSIGNED (TREE_TYPE (elt)))
    3313            0 :             val = RAW_DATA_UCHAR_ELT (elt, raw_idx);
    3314              :           else
    3315           64 :             val = RAW_DATA_SCHAR_ELT (elt, raw_idx);
    3316           64 :           index += raw_idx;
    3317           64 :           raw_idx++;
    3318           64 :           i--;
    3319              :         }
    3320              : 
    3321          542 :       if (index > bits * 2)
    3322              :         return false;
    3323              : 
    3324          542 :       if (index == 0)
    3325              :         {
    3326           14 :           zero_val = val;
    3327           14 :           matched++;
    3328              :         }
    3329              : 
    3330          542 :       if (val >= 0 && val < bits && validate_fn (val, index))
    3331          480 :         matched++;
    3332              : 
    3333          542 :       if (matched > bits)
    3334              :         return true;
    3335              :     }
    3336              : 
    3337              :   return false;
    3338              : }
    3339              : 
    3340              : /* Check whether a string contains a valid table according to VALIDATE_FN.  */
    3341              : template<typename ValidateFn>
    3342              : static bool
    3343            4 : check_table_string (tree string, HOST_WIDE_INT &zero_val,unsigned bits,
    3344              :                     ValidateFn validate_fn)
    3345              : {
    3346            4 :   unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
    3347            4 :   unsigned matched = 0;
    3348            4 :   const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);
    3349              : 
    3350            4 :   if (len < bits || len > bits * 2)
    3351              :     return false;
    3352              : 
    3353            4 :   zero_val = p[0];
    3354              : 
    3355          164 :   for (unsigned i = 0; i < len; i++)
    3356          160 :     if (p[i] < bits && validate_fn (p[i], i))
    3357          160 :       matched++;
    3358              : 
    3359            4 :   return matched == bits;
    3360              : }
    3361              : 
    3362              : /* Check whether CTOR contains a valid table according to VALIDATE_FN.  */
    3363              : template<typename ValidateFn>
    3364              : static bool
    3365           26 : check_table (tree ctor, tree type, HOST_WIDE_INT &zero_val, unsigned bits,
    3366              :              ValidateFn validate_fn)
    3367              : {
    3368           26 :   if (TREE_CODE (ctor) == CONSTRUCTOR)
    3369           14 :     return check_table_array (ctor, zero_val, bits, validate_fn);
    3370              :   else if (TREE_CODE (ctor) == STRING_CST
    3371           12 :            && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
    3372            4 :     return check_table_string (ctor, zero_val, bits, validate_fn);
    3373              :   return false;
    3374              : }
    3375              : 
    3376              : /* Match.pd function to match the ctz expression.  */
    3377              : extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));
    3378              : extern bool gimple_clz_table_index (tree, tree *, tree (*)(tree));
    3379              : 
    3380              : /* Recognize count leading and trailing zeroes idioms.
    3381              :    The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
    3382              :    constant which when multiplied by a power of 2 creates a unique value
    3383              :    in the top 5 or 6 bits.  This is then indexed into a table which maps it
    3384              :    to the number of trailing zeroes.  Array[0] is returned so the caller can
    3385              :    emit an appropriate sequence depending on whether ctz (0) is defined on
    3386              :    the target.  */
    3387              : 
    3388              : static bool
    3389      1962912 : simplify_count_zeroes (gimple_stmt_iterator *gsi)
    3390              : {
    3391      1962912 :   gimple *stmt = gsi_stmt (*gsi);
    3392      1962912 :   tree array_ref = gimple_assign_rhs1 (stmt);
    3393      1962912 :   tree res_ops[3];
    3394              : 
    3395      1962912 :   gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);
    3396              : 
    3397      1962912 :   internal_fn fn = IFN_LAST;
    3398              :   /* For CTZ we recognize ((x & -x) * C) >> SHIFT where the array data
    3399              :      represents the number of trailing zeros.  */
    3400      1962912 :   if (gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
    3401              :     fn = IFN_CTZ;
    3402              :   /* For CLZ we recognize
    3403              :        x |= x >> 1;
    3404              :        x |= x >> 2;
    3405              :        x |= x >> 4;
    3406              :        x |= x >> 8;
    3407              :        x |= x >> 16;
    3408              :        (x * C) >> SHIFT
    3409              :      where 31 minus the array data represents the number of leading zeros.  */
    3410      1962890 :   else if (gimple_clz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0],
    3411              :                                    NULL))
    3412              :     fn = IFN_CLZ;
    3413              :   else
    3414              :     return false;
    3415              : 
    3416           31 :   HOST_WIDE_INT zero_val;
    3417           31 :   tree type = TREE_TYPE (array_ref);
    3418           31 :   tree array = TREE_OPERAND (array_ref, 0);
    3419           31 :   tree input_type = TREE_TYPE (res_ops[0]);
    3420           31 :   unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));
    3421              : 
    3422              :   /* Check the array element type is not wider than 32 bits and the input is
    3423              :      an unsigned 32-bit or 64-bit type.  */
    3424           31 :   if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
    3425              :     return false;
    3426           27 :   if (input_bits != 32 && input_bits != 64)
    3427              :     return false;
    3428              : 
    3429           27 :   if (!direct_internal_fn_supported_p (fn, input_type, OPTIMIZE_FOR_BOTH))
    3430              :     return false;
    3431              : 
    3432              :   /* Check the lower bound of the array is zero.  */
    3433           27 :   tree low = array_ref_low_bound (array_ref);
    3434           27 :   if (!low || !integer_zerop (low))
    3435            0 :     return false;
    3436              : 
    3437              :   /* Check the shift extracts the top 5..7 bits.  */
    3438           27 :   unsigned shiftval = tree_to_shwi (res_ops[2]);
    3439           27 :   if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
    3440              :     return false;
    3441              : 
    3442           26 :   tree ctor = ctor_for_folding (array);
    3443           26 :   if (!ctor)
    3444              :     return false;
    3445           26 :   unsigned HOST_WIDE_INT mulval = tree_to_uhwi (res_ops[1]);
    3446           26 :   if (fn == IFN_CTZ)
    3447              :     {
    3448          429 :       auto checkfn = [&](unsigned data, unsigned i) -> bool
    3449              :         {
    3450          412 :           unsigned HOST_WIDE_INT mask
    3451          412 :             = ((HOST_WIDE_INT_1U << (input_bits - shiftval)) - 1) << shiftval;
    3452          412 :           return (((mulval << data) & mask) >> shiftval) == i;
    3453           17 :         };
    3454           17 :       if (!check_table (ctor, type, zero_val, input_bits, checkfn))
    3455            8 :         return false;
    3456              :     }
    3457            9 :   else if (fn == IFN_CLZ)
    3458              :     {
    3459          297 :       auto checkfn = [&](unsigned data, unsigned i) -> bool
    3460              :         {
    3461          288 :           unsigned HOST_WIDE_INT mask
    3462          288 :             = ((HOST_WIDE_INT_1U << (input_bits - shiftval)) - 1) << shiftval;
    3463          288 :           return (((((HOST_WIDE_INT_1U << (data + 1)) - 1) * mulval) & mask)
    3464          288 :                   >> shiftval) == i;
    3465            9 :         };
    3466            9 :     if (!check_table (ctor, type, zero_val, input_bits, checkfn))
    3467            0 :       return false;
    3468              :     }
    3469              : 
    3470           18 :   HOST_WIDE_INT ctz_val = -1;
    3471           18 :   bool zero_ok;
    3472           18 :   if (fn == IFN_CTZ)
    3473              :     {
    3474            9 :       ctz_val = 0;
    3475           18 :       zero_ok = CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (input_type),
    3476              :                                            ctz_val) == 2;
    3477              :     }
    3478            9 :   else if (fn == IFN_CLZ)
    3479              :     {
    3480            9 :       ctz_val = 32;
    3481            9 :       zero_ok = CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (input_type),
    3482              :                                            ctz_val) == 2;
    3483            9 :       zero_val = input_bits - 1 - zero_val;
    3484              :     }
    3485           18 :   int nargs = 2;
    3486              : 
    3487              :   /* If the input value can't be zero, don't special case ctz (0).  */
    3488           18 :   range_query *q = get_range_query (cfun);
    3489           18 :   if (q == get_global_range_query ())
    3490           18 :     q = enable_ranger (cfun);
    3491           18 :   int_range_max vr;
    3492           18 :   if (q->range_of_expr (vr, res_ops[0], stmt)
    3493           18 :       && !range_includes_zero_p (vr))
    3494              :     {
    3495            4 :       zero_ok = true;
    3496            4 :       zero_val = 0;
    3497            4 :       ctz_val = 0;
    3498            4 :       nargs = 1;
    3499              :     }
    3500              : 
    3501           18 :   gimple_seq seq = NULL;
    3502           18 :   gimple *g;
    3503           18 :   gcall *call = gimple_build_call_internal (fn, nargs, res_ops[0],
    3504              :                                             nargs == 1 ? NULL_TREE
    3505           32 :                                             : build_int_cst (integer_type_node,
    3506           14 :                                                              ctz_val));
    3507           18 :   gimple_set_location (call, gimple_location (stmt));
    3508           18 :   gimple_set_lhs (call, make_ssa_name (integer_type_node));
    3509           18 :   gimple_seq_add_stmt (&seq, call);
    3510              : 
    3511           18 :   tree prev_lhs = gimple_call_lhs (call);
    3512              : 
    3513           18 :   if (zero_ok && zero_val == ctz_val)
    3514              :     ;
    3515              :   /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0.  */
    3516            6 :   else if (zero_ok && zero_val == 0 && ctz_val == input_bits)
    3517              :     {
    3518            5 :       g = gimple_build_assign (make_ssa_name (integer_type_node),
    3519              :                                BIT_AND_EXPR, prev_lhs,
    3520              :                                build_int_cst (integer_type_node,
    3521            5 :                                               input_bits - 1));
    3522            5 :       gimple_set_location (g, gimple_location (stmt));
    3523            5 :       gimple_seq_add_stmt (&seq, g);
    3524            5 :       prev_lhs = gimple_assign_lhs (g);
    3525              :     }
    3526              :   /* As fallback emit a conditional move.  */
    3527              :   else
    3528              :     {
    3529            7 :       g = gimple_build_assign (make_ssa_name (boolean_type_node), EQ_EXPR,
    3530              :                                res_ops[0], build_zero_cst (input_type));
    3531            7 :       gimple_set_location (g, gimple_location (stmt));
    3532            7 :       gimple_seq_add_stmt (&seq, g);
    3533            7 :       tree cond = gimple_assign_lhs (g);
    3534            7 :       g = gimple_build_assign (make_ssa_name (integer_type_node),
    3535              :                                COND_EXPR, cond,
    3536            7 :                                build_int_cst (integer_type_node, zero_val),
    3537              :                                prev_lhs);
    3538            7 :       gimple_set_location (g, gimple_location (stmt));
    3539            7 :       gimple_seq_add_stmt (&seq, g);
    3540            7 :       prev_lhs = gimple_assign_lhs (g);
    3541              :     }
    3542              : 
    3543           18 :   if (fn == IFN_CLZ)
    3544              :     {
    3545            9 :       g = gimple_build_assign (make_ssa_name (integer_type_node),
    3546              :                                MINUS_EXPR,
    3547              :                                build_int_cst (integer_type_node,
    3548            9 :                                               input_bits - 1),
    3549              :                                prev_lhs);
    3550            9 :       gimple_set_location (g, gimple_location (stmt));
    3551            9 :       gimple_seq_add_stmt (&seq, g);
    3552            9 :       prev_lhs = gimple_assign_lhs (g);
    3553              :     }
    3554              : 
    3555           18 :   g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
    3556           18 :   gimple_seq_add_stmt (&seq, g);
    3557           18 :   gsi_replace_with_seq (gsi, seq, true);
    3558           18 :   return true;
    3559           18 : }
    3560              : 
    3561              : 
    3562              : /* Determine whether applying the 2 permutations (mask1 then mask2)
    3563              :    gives back one of the input.  */
    3564              : 
    3565              : static int
    3566           34 : is_combined_permutation_identity (tree mask1, tree mask2)
    3567              : {
    3568           34 :   tree mask;
    3569           34 :   unsigned HOST_WIDE_INT nelts, i, j;
    3570           34 :   bool maybe_identity1 = true;
    3571           34 :   bool maybe_identity2 = true;
    3572              : 
    3573           34 :   gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
    3574              :                        && TREE_CODE (mask2) == VECTOR_CST);
    3575              : 
    3576              :   /* For VLA masks, check for the following pattern:
    3577              :      v1 = VEC_PERM_EXPR (v0, ..., mask1)
    3578              :      v2 = VEC_PERM_EXPR (v1, ..., mask2)
    3579              :      -->
    3580              :      v2 = v0
    3581              :      if mask1 == mask2 == {nelts - 1, nelts - 2, ...}.  */
    3582              : 
    3583           34 :   if (operand_equal_p (mask1, mask2, 0)
    3584           34 :       && !VECTOR_CST_NELTS (mask1).is_constant ())
    3585              :     {
    3586              :       vec_perm_builder builder;
    3587              :       if (tree_to_vec_perm_builder (&builder, mask1))
    3588              :         {
    3589              :           poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask1));
    3590              :           vec_perm_indices sel (builder, 1, nelts);
    3591              :           if (sel.series_p (0, 1, nelts - 1, -1))
    3592              :             return 1;
    3593              :         }
    3594              :     }
    3595              : 
    3596           34 :   mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
    3597           34 :   if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    3598              :     return 0;
    3599              : 
    3600           34 :   if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    3601              :     return 0;
    3602           60 :   for (i = 0; i < nelts; i++)
    3603              :     {
    3604           60 :       tree val = VECTOR_CST_ELT (mask, i);
    3605           60 :       gcc_assert (TREE_CODE (val) == INTEGER_CST);
    3606           60 :       j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
    3607           60 :       if (j == i)
    3608              :         maybe_identity2 = false;
    3609           47 :       else if (j == i + nelts)
    3610              :         maybe_identity1 = false;
    3611              :       else
    3612              :         return 0;
    3613              :     }
    3614            0 :   return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
    3615              : }
    3616              : 
    3617              : /* Combine a shuffle with its arguments.  Returns true if there were any
    3618              :    changes made.  */
    3619              : 
    3620              : static bool
    3621       181766 : simplify_permutation (gimple_stmt_iterator *gsi)
    3622              : {
    3623       181766 :   gimple *stmt = gsi_stmt (*gsi);
    3624       181766 :   gimple *def_stmt = NULL;
    3625       181766 :   tree op0, op1, op2, op3, arg0, arg1;
    3626       181766 :   enum tree_code code, code2 = ERROR_MARK;
    3627       181766 :   bool single_use_op0 = false;
    3628              : 
    3629       181766 :   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
    3630              : 
    3631       181766 :   op0 = gimple_assign_rhs1 (stmt);
    3632       181766 :   op1 = gimple_assign_rhs2 (stmt);
    3633       181766 :   op2 = gimple_assign_rhs3 (stmt);
    3634              : 
    3635       181766 :   if (TREE_CODE (op2) != VECTOR_CST)
    3636              :     return false;
    3637              : 
    3638       179029 :   if (TREE_CODE (op0) == VECTOR_CST)
    3639              :     {
    3640              :       code = VECTOR_CST;
    3641              :       arg0 = op0;
    3642              :     }
    3643       177167 :   else if (TREE_CODE (op0) == SSA_NAME)
    3644              :     {
    3645       177167 :       def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
    3646       177167 :       if (!def_stmt)
    3647              :         return false;
    3648       169148 :       code = gimple_assign_rhs_code (def_stmt);
    3649       169148 :       if (code == VIEW_CONVERT_EXPR)
    3650              :         {
    3651         1385 :           tree rhs = gimple_assign_rhs1 (def_stmt);
    3652         1385 :           tree name = TREE_OPERAND (rhs, 0);
    3653         1385 :           if (TREE_CODE (name) != SSA_NAME)
    3654              :             return false;
    3655         1385 :           if (!has_single_use (name))
    3656          218 :             single_use_op0 = false;
    3657              :           /* Here we update the def_stmt through this VIEW_CONVERT_EXPR,
    3658              :              but still keep the code to indicate it comes from
    3659              :              VIEW_CONVERT_EXPR.  */
    3660         1385 :           def_stmt = SSA_NAME_DEF_STMT (name);
    3661         1385 :           if (!def_stmt || !is_gimple_assign (def_stmt))
    3662              :             return false;
    3663          598 :           if (gimple_assign_rhs_code (def_stmt) != CONSTRUCTOR)
    3664              :             return false;
    3665              :         }
    3666       167862 :       if (!can_propagate_from (def_stmt))
    3667              :         return false;
    3668        19542 :       arg0 = gimple_assign_rhs1 (def_stmt);
    3669              :     }
    3670              :   else
    3671              :     return false;
    3672              : 
    3673              :   /* Two consecutive shuffles.  */
    3674        19542 :   if (code == VEC_PERM_EXPR)
    3675              :     {
    3676         6287 :       tree orig;
    3677         6287 :       int ident;
    3678              : 
    3679         6287 :       if (op0 != op1)
    3680              :         return false;
    3681           34 :       op3 = gimple_assign_rhs3 (def_stmt);
    3682           34 :       if (TREE_CODE (op3) != VECTOR_CST)
    3683              :         return false;
    3684           34 :       ident = is_combined_permutation_identity (op3, op2);
    3685           34 :       if (!ident)
    3686              :         return false;
    3687            0 :       orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
    3688            0 :                           : gimple_assign_rhs2 (def_stmt);
    3689            0 :       gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
    3690            0 :       gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
    3691            0 :       gimple_set_num_ops (stmt, 2);
    3692            0 :       update_stmt (stmt);
    3693            0 :       remove_prop_source_from_use (op0);
    3694            0 :       return true;
    3695              :     }
    3696        15117 :   else if (code == CONSTRUCTOR
    3697        15117 :            || code == VECTOR_CST
    3698              :            || code == VIEW_CONVERT_EXPR)
    3699              :     {
    3700         2622 :       if (op0 != op1)
    3701              :         {
    3702         2445 :           if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
    3703              :             return false;
    3704              : 
    3705         2090 :           if (TREE_CODE (op1) == VECTOR_CST)
    3706              :             arg1 = op1;
    3707         1601 :           else if (TREE_CODE (op1) == SSA_NAME)
    3708              :             {
    3709         1601 :               gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
    3710         1601 :               if (!def_stmt2)
    3711              :                 return false;
    3712          165 :               code2 = gimple_assign_rhs_code (def_stmt2);
    3713          165 :               if (code2 == VIEW_CONVERT_EXPR)
    3714              :                 {
    3715            4 :                   tree rhs = gimple_assign_rhs1 (def_stmt2);
    3716            4 :                   tree name = TREE_OPERAND (rhs, 0);
    3717            4 :                   if (TREE_CODE (name) != SSA_NAME)
    3718              :                     return false;
    3719            4 :                   if (!has_single_use (name))
    3720              :                     return false;
    3721            3 :                   def_stmt2 = SSA_NAME_DEF_STMT (name);
    3722            3 :                   if (!def_stmt2 || !is_gimple_assign (def_stmt2))
    3723              :                     return false;
    3724            0 :                   if (gimple_assign_rhs_code (def_stmt2) != CONSTRUCTOR)
    3725              :                     return false;
    3726              :                 }
    3727          161 :               else if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
    3728              :                 return false;
    3729           47 :               if (!can_propagate_from (def_stmt2))
    3730              :                 return false;
    3731           47 :               arg1 = gimple_assign_rhs1 (def_stmt2);
    3732              :             }
    3733              :           else
    3734              :             return false;
    3735              :         }
    3736              :       else
    3737              :         {
    3738              :           /* Already used twice in this statement.  */
    3739          177 :           if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
    3740              :             return false;
    3741              :           arg1 = arg0;
    3742              :         }
    3743              : 
    3744              :       /* If there are any VIEW_CONVERT_EXPRs found when finding permutation
    3745              :          operands source, check whether it's valid to transform and prepare
    3746              :          the required new operands.  */
    3747          616 :       if (code == VIEW_CONVERT_EXPR || code2 == VIEW_CONVERT_EXPR)
    3748              :         {
    3749              :           /* Figure out the target vector type to which operands should be
    3750              :              converted.  If both are CONSTRUCTOR, the types should be the
    3751              :              same, otherwise, use the one of CONSTRUCTOR.  */
    3752           18 :           tree tgt_type = NULL_TREE;
    3753           18 :           if (code == VIEW_CONVERT_EXPR)
    3754              :             {
    3755           18 :               gcc_assert (gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR);
    3756           18 :               code = CONSTRUCTOR;
    3757           18 :               tgt_type = TREE_TYPE (arg0);
    3758              :             }
    3759           18 :           if (code2 == VIEW_CONVERT_EXPR)
    3760              :             {
    3761            0 :               tree arg1_type = TREE_TYPE (arg1);
    3762            0 :               if (tgt_type == NULL_TREE)
    3763              :                 tgt_type = arg1_type;
    3764            0 :               else if (tgt_type != arg1_type)
    3765           17 :                 return false;
    3766              :             }
    3767              : 
    3768           18 :           if (!VECTOR_TYPE_P (tgt_type))
    3769              :             return false;
    3770           18 :           tree op2_type = TREE_TYPE (op2);
    3771              : 
    3772              :           /* Figure out the shrunk factor.  */
    3773           18 :           poly_uint64 tgt_units = TYPE_VECTOR_SUBPARTS (tgt_type);
    3774           18 :           poly_uint64 op2_units = TYPE_VECTOR_SUBPARTS (op2_type);
    3775           18 :           if (maybe_gt (tgt_units, op2_units))
    3776              :             return false;
    3777           18 :           unsigned int factor;
    3778           35 :           if (!constant_multiple_p (op2_units, tgt_units, &factor))
    3779              :             return false;
    3780              : 
    3781              :           /* Build the new permutation control vector as target vector.  */
    3782           18 :           vec_perm_builder builder;
    3783           18 :           if (!tree_to_vec_perm_builder (&builder, op2))
    3784              :             return false;
    3785           18 :           vec_perm_indices indices (builder, 2, op2_units);
    3786           18 :           vec_perm_indices new_indices;
    3787           18 :           if (new_indices.new_shrunk_vector (indices, factor))
    3788              :             {
    3789            1 :               tree mask_type = tgt_type;
    3790            1 :               if (!VECTOR_INTEGER_TYPE_P (mask_type))
    3791              :                 {
    3792            0 :                   tree elem_type = TREE_TYPE (mask_type);
    3793            0 :                   unsigned elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
    3794            0 :                   tree int_type = build_nonstandard_integer_type (elem_size, 0);
    3795            0 :                   mask_type = build_vector_type (int_type, tgt_units);
    3796              :                 }
    3797            1 :               op2 = vec_perm_indices_to_tree (mask_type, new_indices);
    3798              :             }
    3799              :           else
    3800           17 :             return false;
    3801              : 
    3802              :           /* Convert the VECTOR_CST to the appropriate vector type.  */
    3803            1 :           if (tgt_type != TREE_TYPE (arg0))
    3804            0 :             arg0 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg0);
    3805            1 :           else if (tgt_type != TREE_TYPE (arg1))
    3806            0 :             arg1 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg1);
    3807           35 :         }
    3808              : 
    3809              :       /* VIEW_CONVERT_EXPR should be updated to CONSTRUCTOR before.  */
    3810          599 :       gcc_assert (code == CONSTRUCTOR || code == VECTOR_CST);
    3811              : 
    3812              :       /* Shuffle of a constructor.  */
    3813          599 :       tree res_type
    3814          599 :         = build_vector_type (TREE_TYPE (TREE_TYPE (arg0)),
    3815          599 :                              TYPE_VECTOR_SUBPARTS (TREE_TYPE (op2)));
    3816          599 :       tree opt = fold_ternary (VEC_PERM_EXPR, res_type, arg0, arg1, op2);
    3817          599 :       if (!opt
    3818          280 :           || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
    3819              :         return false;
    3820              :       /* Found VIEW_CONVERT_EXPR before, need one explicit conversion.  */
    3821          280 :       if (res_type != TREE_TYPE (op0))
    3822              :         {
    3823            1 :           tree name = make_ssa_name (TREE_TYPE (opt));
    3824            1 :           gimple *ass_stmt = gimple_build_assign (name, opt);
    3825            1 :           gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
    3826            1 :           opt = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), name);
    3827              :         }
    3828          280 :       gimple_assign_set_rhs_from_tree (gsi, opt);
    3829          280 :       update_stmt (gsi_stmt (*gsi));
    3830          280 :       if (TREE_CODE (op0) == SSA_NAME)
    3831            1 :         remove_prop_source_from_use (op0);
    3832          280 :       if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
    3833            0 :         remove_prop_source_from_use (op1);
    3834          280 :       return true;
    3835              :     }
    3836              : 
    3837              :   return false;
    3838              : }
    3839              : 
    3840              : /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
    3841              :    conversions with code CONV_CODE or update it if still ERROR_MARK.
    3842              :    Return NULL_TREE if no such matching def was found.  */
    3843              : 
    3844              : static tree
    3845       391344 : get_bit_field_ref_def (tree val, enum tree_code &conv_code)
    3846              : {
    3847       391344 :   if (TREE_CODE (val) != SSA_NAME)
    3848              :     return NULL_TREE ;
    3849       365520 :   gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
    3850       365520 :   if (!def_stmt)
    3851              :     return NULL_TREE;
    3852       292456 :   enum tree_code code = gimple_assign_rhs_code (def_stmt);
    3853       292456 :   if (code == FLOAT_EXPR
    3854       292456 :       || code == FIX_TRUNC_EXPR
    3855              :       || CONVERT_EXPR_CODE_P (code))
    3856              :     {
    3857       178852 :       tree op1 = gimple_assign_rhs1 (def_stmt);
    3858       178852 :       if (conv_code == ERROR_MARK)
    3859        86117 :         conv_code = code;
    3860        92735 :       else if (conv_code != code)
    3861              :         return NULL_TREE;
    3862       178827 :       if (TREE_CODE (op1) != SSA_NAME)
    3863              :         return NULL_TREE;
    3864        71971 :       def_stmt = SSA_NAME_DEF_STMT (op1);
    3865        71971 :       if (! is_gimple_assign (def_stmt))
    3866              :         return NULL_TREE;
    3867        57228 :       code = gimple_assign_rhs_code (def_stmt);
    3868              :     }
    3869       170832 :   if (code != BIT_FIELD_REF)
    3870              :     return NULL_TREE;
    3871        22859 :   return gimple_assign_rhs1 (def_stmt);
    3872              : }
    3873              : 
    3874              : /* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
    3875              : 
    3876              : static bool
    3877       147562 : simplify_vector_constructor (gimple_stmt_iterator *gsi)
    3878              : {
    3879       147562 :   gimple *stmt = gsi_stmt (*gsi);
    3880       147562 :   tree op, orig[2], type;
    3881       147562 :   unsigned i;
    3882       147562 :   unsigned HOST_WIDE_INT nelts;
    3883       147562 :   unsigned HOST_WIDE_INT refnelts;
    3884       147562 :   enum tree_code conv_code;
    3885       147562 :   constructor_elt *elt;
    3886              : 
    3887       147562 :   op = gimple_assign_rhs1 (stmt);
    3888       147562 :   type = TREE_TYPE (op);
    3889       147562 :   gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
    3890              :                        && TREE_CODE (type) == VECTOR_TYPE);
    3891              : 
    3892       147562 :   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    3893              :     return false;
    3894              : 
    3895       147562 :   orig[0] = NULL;
    3896       147562 :   orig[1] = NULL;
    3897       147562 :   tree orig_elem_type[2] = {};
    3898       147562 :   conv_code = ERROR_MARK;
    3899       147562 :   bool maybe_ident = true;
    3900       147562 :   bool maybe_blend[2] = { true, true };
    3901       147562 :   tree one_constant = NULL_TREE;
    3902       147562 :   tree one_nonconstant = NULL_TREE;
    3903       147562 :   tree subelt;
    3904       147562 :   auto_vec<tree> constants;
    3905       147562 :   constants.safe_grow_cleared (nelts, true);
    3906       147562 :   auto_vec<std::pair<unsigned, unsigned>, 64> elts;
    3907       147562 :   unsigned int tsubelts = 0;
    3908       422149 :   FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    3909              :     {
    3910       391344 :       tree ref, op1;
    3911       391344 :       unsigned int elem, src_elem_size;
    3912       391344 :       unsigned HOST_WIDE_INT nsubelts = 1;
    3913              : 
    3914       391344 :       if (i >= nelts)
    3915       147562 :         return false;
    3916              : 
    3917              :       /* Look for elements extracted and possibly converted from
    3918              :          another vector.  */
    3919       391344 :       op1 = get_bit_field_ref_def (elt->value, conv_code);
    3920       391344 :       if (op1
    3921        22859 :           && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
    3922         4944 :           && VECTOR_TYPE_P (TREE_TYPE (ref))
    3923         4941 :           && (tree_nop_conversion_p (TREE_TYPE (op1),
    3924         4941 :                                      TREE_TYPE (TREE_TYPE (ref)))
    3925          673 :               || (VECTOR_TYPE_P (TREE_TYPE (op1))
    3926           29 :                   && tree_nop_conversion_p (TREE_TYPE (TREE_TYPE (op1)),
    3927           29 :                                             TREE_TYPE (TREE_TYPE (ref)))
    3928           29 :                   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (op1))
    3929           29 :                         .is_constant (&nsubelts)))
    3930         4297 :           && constant_multiple_p (bit_field_size (op1), nsubelts,
    3931              :                                   &src_elem_size)
    3932       395641 :           && constant_multiple_p (bit_field_offset (op1), src_elem_size, &elem)
    3933       395641 :           && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
    3934              :         {
    3935              :           unsigned int j;
    3936         4558 :           for (j = 0; j < 2; ++j)
    3937              :             {
    3938         4541 :               if (!orig[j])
    3939              :                 {
    3940         2112 :                   if (j == 0
    3941         2302 :                       || useless_type_conversion_p (TREE_TYPE (orig[0]),
    3942          190 :                                                     TREE_TYPE (ref)))
    3943              :                     break;
    3944              :                 }
    3945         2429 :               else if (ref == orig[j])
    3946              :                 break;
    3947              :             }
    3948              :           /* Found a suitable vector element.  */
    3949         4297 :           if (j < 2)
    3950              :             {
    3951         4280 :               orig[j] = ref;
    3952              :               /* Track what element type was actually extracted (which may
    3953              :                  differ in signedness from the vector's element type due to
    3954              :                  tree_nop_conversion_p).  */
    3955         4280 :               if (!orig_elem_type[j])
    3956         2106 :                 orig_elem_type[j] = TREE_TYPE (op1);
    3957         4280 :               if (elem != i || j != 0)
    3958         2088 :                 maybe_ident = false;
    3959         4280 :               if (elem != i)
    3960         2019 :                 maybe_blend[j] = false;
    3961         8581 :               for (unsigned int k = 0; k < nsubelts; ++k)
    3962         4301 :                 elts.safe_push (std::make_pair (j, elem + k));
    3963         4280 :               tsubelts += nsubelts;
    3964         4280 :               continue;
    3965         4280 :             }
    3966              :           /* Else fallthru.  */
    3967              :         }
    3968              :       /* Handle elements not extracted from a vector.
    3969              :           1. constants by permuting with constant vector
    3970              :           2. a unique non-constant element by permuting with a splat vector  */
    3971       387064 :       if (orig[1]
    3972       240551 :           && orig[1] != error_mark_node)
    3973              :         return false;
    3974       387048 :       orig[1] = error_mark_node;
    3975       387048 :       if (VECTOR_TYPE_P (TREE_TYPE (elt->value))
    3976       387048 :           && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (elt->value))
    3977         1761 :                         .is_constant (&nsubelts))
    3978              :         return false;
    3979       387048 :       if (CONSTANT_CLASS_P (elt->value))
    3980              :         {
    3981        25820 :           if (one_nonconstant)
    3982              :             return false;
    3983        17389 :           if (!one_constant)
    3984         8167 :             one_constant = TREE_CODE (elt->value) == VECTOR_CST
    3985         8167 :                            ? VECTOR_CST_ELT (elt->value, 0)
    3986              :                            : elt->value;
    3987        17389 :           if (TREE_CODE (elt->value) == VECTOR_CST)
    3988              :             {
    3989           37 :               for (unsigned int k = 0; k < nsubelts; k++)
    3990           23 :                 constants[tsubelts + k] = VECTOR_CST_ELT (elt->value, k);
    3991              :             }
    3992              :           else
    3993        17375 :             constants[tsubelts] = elt->value;
    3994              :         }
    3995              :       else
    3996              :         {
    3997       361228 :           if (one_constant)
    3998              :             return false;
    3999       353340 :           subelt = VECTOR_TYPE_P (TREE_TYPE (elt->value))
    4000       353340 :                    ? ssa_uniform_vector_p (elt->value)
    4001              :                    : elt->value;
    4002       353340 :           if (!subelt)
    4003              :             return false;
    4004       351603 :           if (!one_nonconstant)
    4005              :             one_nonconstant = subelt;
    4006       214994 :           else if (!operand_equal_p (one_nonconstant, subelt, 0))
    4007              :             return false;
    4008              :         }
    4009       540625 :       for (unsigned int k = 0; k < nsubelts; ++k)
    4010       270318 :         elts.safe_push (std::make_pair (1, tsubelts + k));
    4011       270307 :       tsubelts += nsubelts;
    4012       270307 :       maybe_ident = false;
    4013              :     }
    4014              : 
    4015        61610 :   if (elts.length () < nelts)
    4016              :     return false;
    4017              : 
    4018        30020 :   if (! orig[0]
    4019        30020 :       || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
    4020              :     return false;
    4021         1484 :   refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
    4022              :   /* We currently do not handle larger destination vectors.  */
    4023         1484 :   if (refnelts < nelts)
    4024              :     return false;
    4025              : 
    4026              :   /* Determine the element type for the conversion source.
    4027              :      As orig_elem_type keeps track of the original type, check
    4028              :      if we need to perform a sign swap after permuting.
    4029              :      We need to be able to construct a vector type from the element
    4030              :      type which is not possible for e.g. BitInt or pointers
    4031              :      so pun with an integer type if needed.   */
    4032         1343 :   tree perm_eltype = TREE_TYPE (TREE_TYPE (orig[0]));
    4033         1343 :   bool sign_change_p = false;
    4034         1343 :   if (conv_code != ERROR_MARK
    4035          322 :       && orig_elem_type[0]
    4036         1665 :       && TYPE_SIGN (orig_elem_type[0]) != TYPE_SIGN (perm_eltype))
    4037              :     {
    4038           34 :       perm_eltype = signed_or_unsigned_type_for
    4039           34 :         (TYPE_UNSIGNED (orig_elem_type[0]), perm_eltype);
    4040           34 :       sign_change_p = true;
    4041              :     }
    4042         1343 :   tree conv_src_type = build_vector_type (perm_eltype, nelts);
    4043              : 
    4044         1343 :   if (maybe_ident)
    4045              :     {
    4046              :       /* When there is no conversion, use the target type directly.  */
    4047          376 :       if (conv_code == ERROR_MARK && nelts != refnelts)
    4048          376 :         conv_src_type = type;
    4049          376 :       if (conv_code != ERROR_MARK
    4050          376 :           && !supportable_convert_operation (conv_code, type, conv_src_type,
    4051              :                                              &conv_code))
    4052              :         {
    4053              :           /* Only few targets implement direct conversion patterns so try
    4054              :              some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR.  */
    4055            5 :           optab optab;
    4056            5 :           insn_code icode;
    4057            5 :           tree halfvectype, dblvectype;
    4058            5 :           enum tree_code unpack_op;
    4059              : 
    4060            5 :           if (!BYTES_BIG_ENDIAN)
    4061            5 :             unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
    4062            5 :                          ? VEC_UNPACK_FLOAT_LO_EXPR
    4063              :                          : VEC_UNPACK_LO_EXPR);
    4064              :           else
    4065              :             unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
    4066              :                          ? VEC_UNPACK_FLOAT_HI_EXPR
    4067              :                          : VEC_UNPACK_HI_EXPR);
    4068              : 
    4069              :           /* Conversions between DFP and FP have no special tree code
    4070              :              but we cannot handle those since all relevant vector conversion
    4071              :              optabs only have a single mode.  */
    4072            3 :           if (CONVERT_EXPR_CODE_P (conv_code)
    4073            2 :               && FLOAT_TYPE_P (TREE_TYPE (type))
    4074            9 :               && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
    4075            2 :                   != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
    4076              :             return false;
    4077              : 
    4078            3 :           if (CONVERT_EXPR_CODE_P (conv_code)
    4079            1 :               && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
    4080            1 :                   == TYPE_PRECISION (TREE_TYPE (type)))
    4081            0 :               && orig_elem_type[0]
    4082            0 :               && useless_type_conversion_p (orig_elem_type[0],
    4083            0 :                                             TREE_TYPE (TREE_TYPE (orig[0])))
    4084            0 :               && mode_for_vector (as_a <scalar_mode>
    4085            0 :                                   (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
    4086            0 :                                   nelts * 2).exists ()
    4087            0 :               && (dblvectype
    4088            0 :                   = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
    4089            0 :                                        nelts * 2))
    4090              :               /* Only use it for vector modes or for vector booleans
    4091              :                  represented as scalar bitmasks.  See PR95528.  */
    4092            0 :               && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
    4093            0 :                   || VECTOR_BOOLEAN_TYPE_P (dblvectype))
    4094            0 :               && (optab = optab_for_tree_code (unpack_op,
    4095              :                                                dblvectype,
    4096              :                                                optab_default))
    4097            0 :               && ((icode = optab_handler (optab, TYPE_MODE (dblvectype)))
    4098              :                   != CODE_FOR_nothing)
    4099            4 :               && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
    4100              :             {
    4101            0 :               gimple_seq stmts = NULL;
    4102            0 :               tree dbl;
    4103            0 :               if (refnelts == nelts)
    4104              :                 {
    4105              :                   /* ???  Paradoxical subregs don't exist, so insert into
    4106              :                      the lower half of a wider zero vector.  */
    4107            0 :                   dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
    4108              :                                       build_zero_cst (dblvectype), orig[0],
    4109            0 :                                       bitsize_zero_node);
    4110              :                 }
    4111            0 :               else if (refnelts == 2 * nelts)
    4112              :                 dbl = orig[0];
    4113              :               else
    4114            0 :                 dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
    4115            0 :                                     orig[0], TYPE_SIZE (dblvectype),
    4116            0 :                                     bitsize_zero_node);
    4117            0 :               gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    4118            0 :               gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
    4119              :             }
    4120            3 :           else if (CONVERT_EXPR_CODE_P (conv_code)
    4121            1 :                    && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
    4122            1 :                        == 2 * TYPE_PRECISION (TREE_TYPE (type)))
    4123            1 :                    && orig_elem_type[0]
    4124            1 :                    && useless_type_conversion_p (orig_elem_type[0],
    4125            1 :                                                  TREE_TYPE (TREE_TYPE (orig[0])))
    4126            1 :                    && mode_for_vector (as_a <scalar_mode>
    4127            1 :                                          (TYPE_MODE
    4128              :                                            (TREE_TYPE (TREE_TYPE (orig[0])))),
    4129            2 :                                        nelts / 2).exists ()
    4130            1 :                    && (halfvectype
    4131            1 :                          = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
    4132            1 :                                               nelts / 2))
    4133              :                    /* Only use it for vector modes or for vector booleans
    4134              :                       represented as scalar bitmasks.  See PR95528.  */
    4135            1 :                    && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
    4136            0 :                        || VECTOR_BOOLEAN_TYPE_P (halfvectype))
    4137            1 :                    && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
    4138              :                                                     halfvectype,
    4139              :                                                     optab_default))
    4140            1 :                    && ((icode = optab_handler (optab, TYPE_MODE (halfvectype)))
    4141              :                        != CODE_FOR_nothing)
    4142            5 :                    && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
    4143              :             {
    4144            0 :               gimple_seq stmts = NULL;
    4145            0 :               tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
    4146            0 :                                        orig[0], TYPE_SIZE (halfvectype),
    4147            0 :                                        bitsize_zero_node);
    4148            0 :               tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
    4149            0 :                                        orig[0], TYPE_SIZE (halfvectype),
    4150            0 :                                        TYPE_SIZE (halfvectype));
    4151            0 :               gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    4152            0 :               gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
    4153              :                                               low, hig);
    4154              :             }
    4155              :           else
    4156            4 :             return false;
    4157            0 :           update_stmt (gsi_stmt (*gsi));
    4158            0 :           return true;
    4159              :         }
    4160          371 :       if (nelts != refnelts)
    4161              :         {
    4162           20 :           gassign *lowpart
    4163           20 :             = gimple_build_assign (make_ssa_name (conv_src_type),
    4164              :                                    build3 (BIT_FIELD_REF, conv_src_type,
    4165           20 :                                            orig[0], TYPE_SIZE (conv_src_type),
    4166              :                                            bitsize_zero_node));
    4167           20 :           gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
    4168           20 :           orig[0] = gimple_assign_lhs (lowpart);
    4169              :         }
    4170          351 :       else if (sign_change_p)
    4171              :         {
    4172            0 :           gassign *conv
    4173            0 :             = gimple_build_assign (make_ssa_name (conv_src_type),
    4174              :                                    build1 (VIEW_CONVERT_EXPR, conv_src_type,
    4175              :                                            orig[0]));
    4176            0 :           gsi_insert_before (gsi, conv, GSI_SAME_STMT);
    4177            0 :           orig[0] = gimple_assign_lhs (conv);
    4178              :         }
    4179          371 :       if (conv_code == ERROR_MARK)
    4180              :         {
    4181          354 :           tree src_type = TREE_TYPE (orig[0]);
    4182          354 :           if (!useless_type_conversion_p (type, src_type))
    4183              :             {
    4184            0 :               gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
    4185              :                                     TYPE_VECTOR_SUBPARTS (src_type))
    4186              :                           && tree_nop_conversion_p (TREE_TYPE (type),
    4187              :                                                     TREE_TYPE (src_type)));
    4188            0 :               tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
    4189            0 :               orig[0] = make_ssa_name (type);
    4190            0 :               gassign *assign = gimple_build_assign (orig[0], rhs);
    4191            0 :               gsi_insert_before (gsi, assign, GSI_SAME_STMT);
    4192              :             }
    4193          354 :           gimple_assign_set_rhs_from_tree (gsi, orig[0]);
    4194              :         }
    4195              :       else
    4196           17 :         gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
    4197              :                                         NULL_TREE, NULL_TREE);
    4198              :     }
    4199              :   else
    4200              :     {
    4201              :       /* If we combine a vector with a non-vector avoid cases where
    4202              :          we'll obviously end up with more GIMPLE stmts which is when
    4203              :          we'll later not fold this to a single insert into the vector
    4204              :          and we had a single extract originally.  See PR92819.  */
    4205          967 :       if (nelts == 2
    4206          758 :           && refnelts > 2
    4207          168 :           && orig[1] == error_mark_node
    4208           33 :           && !maybe_blend[0])
    4209          379 :         return false;
    4210          940 :       tree mask_type, perm_type;
    4211          940 :       perm_type = TREE_TYPE (orig[0]);
    4212          940 :       if (conv_code != ERROR_MARK
    4213          940 :           && !supportable_convert_operation (conv_code, type, conv_src_type,
    4214              :                                              &conv_code))
    4215              :         return false;
    4216              : 
    4217              :       /* Now that we know the number of elements of the source build the
    4218              :          permute vector.
    4219              :          ???  When the second vector has constant values we can shuffle
    4220              :          it and its source indexes to make the permutation supported.
    4221              :          For now it mimics a blend.  */
    4222          705 :       vec_perm_builder sel (refnelts, refnelts, 1);
    4223          705 :       bool all_same_p = true;
    4224         6050 :       for (i = 0; i < elts.length (); ++i)
    4225              :         {
    4226         2320 :           sel.quick_push (elts[i].second + elts[i].first * refnelts);
    4227         2320 :           all_same_p &= known_eq (sel[i], sel[0]);
    4228              :         }
    4229              :       /* And fill the tail with "something".  It's really don't care,
    4230              :          and ideally we'd allow VEC_PERM to have a smaller destination
    4231              :          vector.  As a heuristic:
    4232              : 
    4233              :          (a) if what we have so far duplicates a single element, make the
    4234              :              tail do the same
    4235              : 
    4236              :          (b) otherwise preserve a uniform orig[0].  This facilitates
    4237              :              later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR.  */
    4238         1471 :       for (; i < refnelts; ++i)
    4239         1532 :         sel.quick_push (all_same_p
    4240         2298 :                         ? sel[0]
    4241           48 :                         : (elts[0].second == 0 && elts[0].first == 0
    4242         1012 :                            ? 0 : refnelts) + i);
    4243          945 :       vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
    4244          705 :       machine_mode vmode = TYPE_MODE (perm_type);
    4245          705 :       if (!can_vec_perm_const_p (vmode, vmode, indices))
    4246              :         return false;
    4247          588 :       mask_type = build_vector_type (ssizetype, refnelts);
    4248          588 :       tree op2 = vec_perm_indices_to_tree (mask_type, indices);
    4249          588 :       bool converted_orig1 = false;
    4250          588 :       gimple_seq stmts = NULL;
    4251          588 :       if (!orig[1])
    4252          197 :         orig[1] = orig[0];
    4253          391 :       else if (orig[1] == error_mark_node
    4254          242 :                && one_nonconstant)
    4255              :         {
    4256              :           /* ???  We can see if we can safely convert to the original
    4257              :              element type.  */
    4258          155 :           converted_orig1 = conv_code != ERROR_MARK;
    4259          155 :           tree target_type = converted_orig1 ? type : perm_type;
    4260          155 :           tree nonconstant_for_splat = one_nonconstant;
    4261              :           /* If there's a nop conversion between the target element type and
    4262              :              the nonconstant's type, convert it.  */
    4263          155 :           if (!useless_type_conversion_p (TREE_TYPE (target_type),
    4264          155 :                                           TREE_TYPE (one_nonconstant)))
    4265            0 :             nonconstant_for_splat
    4266            0 :               = gimple_build (&stmts, NOP_EXPR, TREE_TYPE (target_type),
    4267              :                               one_nonconstant);
    4268          155 :           orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
    4269              :                                                   target_type,
    4270              :                                                   nonconstant_for_splat);
    4271          155 :         }
    4272          236 :       else if (orig[1] == error_mark_node)
    4273              :         {
    4274              :           /* ???  See if we can convert the vector to the original type.  */
    4275           87 :           converted_orig1 = conv_code != ERROR_MARK;
    4276           87 :           unsigned n = converted_orig1 ? nelts : refnelts;
    4277           70 :           tree target_type = converted_orig1 ? type : perm_type;
    4278           87 :           tree_vector_builder vec (target_type, n, 1);
    4279          533 :           for (unsigned i = 0; i < n; ++i)
    4280          864 :             if (i < nelts && constants[i])
    4281              :               {
    4282          225 :                 tree constant = constants[i];
    4283              :                 /* If there's a nop conversion, convert the constant.  */
    4284          225 :                 if (!useless_type_conversion_p (TREE_TYPE (target_type),
    4285          225 :                                                 TREE_TYPE (constant)))
    4286            0 :                   constant = fold_convert (TREE_TYPE (target_type), constant);
    4287          225 :                 vec.quick_push (constant);
    4288              :               }
    4289              :             else
    4290              :               {
    4291              :                 /* ??? Push a don't-care value.  */
    4292          221 :                 tree constant = one_constant;
    4293          221 :                 if (!useless_type_conversion_p (TREE_TYPE (target_type),
    4294          221 :                                                 TREE_TYPE (constant)))
    4295            0 :                   constant = fold_convert (TREE_TYPE (target_type), constant);
    4296          221 :                 vec.quick_push (constant);
    4297              :               }
    4298           87 :           orig[1] = vec.build ();
    4299           87 :         }
    4300          439 :       tree blend_op2 = NULL_TREE;
    4301          439 :       if (converted_orig1)
    4302              :         {
    4303              :           /* Make sure we can do a blend in the target type.  */
    4304           19 :           vec_perm_builder sel (nelts, nelts, 1);
    4305           87 :           for (i = 0; i < elts.length (); ++i)
    4306           68 :             sel.quick_push (elts[i].first
    4307           68 :                             ? elts[i].second + nelts : i);
    4308           19 :           vec_perm_indices indices (sel, 2, nelts);
    4309           19 :           machine_mode vmode = TYPE_MODE (type);
    4310           19 :           if (!can_vec_perm_const_p (vmode, vmode, indices))
    4311            0 :             return false;
    4312           19 :           mask_type = build_vector_type (ssizetype, nelts);
    4313           19 :           blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
    4314           19 :         }
    4315              : 
    4316              :       /* For a real orig[1] (no splat, constant etc.) we might need to
    4317              :          nop-convert it.  Do so here.  */
    4318          588 :       if (orig[1] && orig[1] != error_mark_node
    4319          588 :           && !useless_type_conversion_p (perm_type, TREE_TYPE (orig[1]))
    4320          607 :           && tree_nop_conversion_p (TREE_TYPE (perm_type),
    4321           19 :                                     TREE_TYPE (TREE_TYPE (orig[1]))))
    4322            0 :         orig[1] = gimple_build (&stmts, VIEW_CONVERT_EXPR, perm_type,
    4323              :                                 orig[1]);
    4324              : 
    4325          588 :       tree orig1_for_perm
    4326          588 :         = converted_orig1 ? build_zero_cst (perm_type) : orig[1];
    4327          588 :       tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
    4328              :                                orig[0], orig1_for_perm, op2);
    4329              :       /* If we're building a smaller vector, extract the element
    4330              :          with the proper type.  */
    4331          588 :       if (nelts != refnelts)
    4332          294 :         res = gimple_build (&stmts, BIT_FIELD_REF,
    4333              :                             conv_code != ERROR_MARK ? conv_src_type : type,
    4334              :                             res,
    4335          147 :                             TYPE_SIZE (conv_code != ERROR_MARK ? conv_src_type
    4336              :                                                                : type),
    4337          147 :                             bitsize_zero_node);
    4338              :       /* Otherwise, we can still have an intermediate sign change.
    4339              :          ??? In that case we have two subsequent conversions.
    4340              :          We should be able to merge them.  */
    4341          441 :       else if (sign_change_p)
    4342           14 :         res = gimple_build (&stmts, VIEW_CONVERT_EXPR, conv_src_type, res);
    4343              :       /* Finally, apply the conversion.  */
    4344          588 :       if (conv_code != ERROR_MARK)
    4345           52 :         res = gimple_build (&stmts, conv_code, type, res);
    4346          536 :       else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
    4347              :         {
    4348            1 :           gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
    4349              :                                 TYPE_VECTOR_SUBPARTS (perm_type))
    4350              :                       && tree_nop_conversion_p (TREE_TYPE (type),
    4351              :                                                 TREE_TYPE (perm_type)));
    4352            1 :           res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
    4353              :         }
    4354              :       /* Blend in the actual constant.  */
    4355          588 :       if (converted_orig1)
    4356           19 :         res = gimple_build (&stmts, VEC_PERM_EXPR, type,
    4357           19 :                             res, orig[1], blend_op2);
    4358          588 :       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    4359          588 :       gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
    4360          705 :     }
    4361          959 :   update_stmt (gsi_stmt (*gsi));
    4362          959 :   return true;
    4363       147562 : }
    4364              : 
    4365              : /* Prepare a TARGET_MEM_REF ref so that it can be subsetted as
    4366              :    lvalue.  This splits out an address computation stmt before *GSI
    4367              :    and returns a MEM_REF wrapping the address.  */
    4368              : 
    4369              : static tree
    4370         1096 : prepare_target_mem_ref_lvalue (tree ref, gimple_stmt_iterator *gsi)
    4371              : {
    4372         1096 :   if (TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    4373          215 :     mark_addressable (TREE_OPERAND (TREE_OPERAND (ref, 0), 0));
    4374         1096 :   tree ptrtype = build_pointer_type (TREE_TYPE (ref));
    4375         1096 :   tree tem = make_ssa_name (ptrtype);
    4376         1096 :   gimple *new_stmt
    4377         1096 :     = gimple_build_assign (tem, build1 (ADDR_EXPR, TREE_TYPE (tem),
    4378              :                                         unshare_expr (ref)));
    4379         1096 :   gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    4380         2192 :   ref = build2_loc (EXPR_LOCATION (ref),
    4381         1096 :                     MEM_REF, TREE_TYPE (ref), tem,
    4382         1096 :                     build_int_cst (TREE_TYPE (TREE_OPERAND (ref, 1)), 0));
    4383         1096 :   return ref;
    4384              : }
    4385              : 
    4386              : /* Rewrite the vector load at *GSI to component-wise loads if the load
    4387              :    is only used in BIT_FIELD_REF extractions with eventual intermediate
    4388              :    widening.  */
    4389              : 
    4390              : static void
    4391       283431 : optimize_vector_load (gimple_stmt_iterator *gsi)
    4392              : {
    4393       283431 :   gimple *stmt = gsi_stmt (*gsi);
    4394       283431 :   tree lhs = gimple_assign_lhs (stmt);
    4395       283431 :   tree rhs = gimple_assign_rhs1 (stmt);
    4396       283431 :   tree vuse = gimple_vuse (stmt);
    4397              : 
    4398              :   /* Gather BIT_FIELD_REFs to rewrite, looking through
    4399              :      VEC_UNPACK_{LO,HI}_EXPR.  */
    4400       283431 :   use_operand_p use_p;
    4401       283431 :   imm_use_iterator iter;
    4402       283431 :   bool rewrite = true;
    4403       283431 :   bool scalar_use = false;
    4404       283431 :   bool unpack_use = false;
    4405       283431 :   auto_vec<gimple *, 8> bf_stmts;
    4406       283431 :   auto_vec<tree, 8> worklist;
    4407       283431 :   worklist.quick_push (lhs);
    4408       285185 :   do
    4409              :     {
    4410       285185 :       tree def = worklist.pop ();
    4411       285185 :       unsigned HOST_WIDE_INT def_eltsize
    4412       285185 :         = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (def))));
    4413       644874 :       FOR_EACH_IMM_USE_FAST (use_p, iter, def)
    4414              :         {
    4415       339502 :           gimple *use_stmt = USE_STMT (use_p);
    4416       339502 :           if (is_gimple_debug (use_stmt))
    4417        74504 :             continue;
    4418       339439 :           if (!is_gimple_assign (use_stmt))
    4419              :             {
    4420              :               rewrite = false;
    4421       264998 :               break;
    4422              :             }
    4423       304177 :           enum tree_code use_code = gimple_assign_rhs_code (use_stmt);
    4424       304177 :           tree use_rhs = gimple_assign_rhs1 (use_stmt);
    4425       375099 :           if (use_code == BIT_FIELD_REF
    4426        70923 :               && TREE_OPERAND (use_rhs, 0) == def
    4427              :               /* If its on the VEC_UNPACK_{HI,LO}_EXPR
    4428              :                  def need to verify it is element aligned.  */
    4429       375100 :               && (def == lhs
    4430           85 :                   || (known_eq (bit_field_size (use_rhs), def_eltsize)
    4431           85 :                       && constant_multiple_p (bit_field_offset (use_rhs),
    4432              :                                               def_eltsize)
    4433              :                       /* We can simulate the VEC_UNPACK_{HI,LO}_EXPR
    4434              :                          via a NOP_EXPR only for integral types.
    4435              :                          ???  Support VEC_UNPACK_FLOAT_{HI,LO}_EXPR.  */
    4436           85 :                       && INTEGRAL_TYPE_P (TREE_TYPE (use_rhs)))))
    4437              :             {
    4438        70922 :               if (!VECTOR_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    4439        68854 :                 scalar_use = true;
    4440        70922 :               bf_stmts.safe_push (use_stmt);
    4441        70922 :               continue;
    4442              :             }
    4443              :           /* Walk through one level of VEC_UNPACK_{LO,HI}_EXPR.  */
    4444       233255 :           if (def == lhs
    4445       231548 :               && (use_code == VEC_UNPACK_HI_EXPR
    4446       231548 :                   || use_code == VEC_UNPACK_LO_EXPR)
    4447         3519 :               && use_rhs == lhs)
    4448              :             {
    4449         3519 :               unpack_use = true;
    4450         3519 :               worklist.safe_push (gimple_assign_lhs (use_stmt));
    4451         3519 :               continue;
    4452              :             }
    4453              :           rewrite = false;
    4454              :           break;
    4455       285185 :         }
    4456       285185 :       if (!rewrite)
    4457              :         break;
    4458              :     }
    4459        40374 :   while (!worklist.is_empty ());
    4460              : 
    4461       283431 :   rewrite = rewrite && (scalar_use
    4462        18433 :                         || unpack_use
    4463          575 :                         || !can_implement_p (mov_optab,
    4464          575 :                                              TYPE_MODE (TREE_TYPE (lhs))));
    4465       283431 :   if (!rewrite)
    4466              :     {
    4467       265173 :       gsi_next (gsi);
    4468       265173 :       return;
    4469              :     }
    4470              :   /* We now have all ultimate uses of the load to rewrite in bf_stmts.  */
    4471              : 
    4472              :   /* Prepare the original ref to be wrapped in adjusted BIT_FIELD_REFs.
    4473              :      For TARGET_MEM_REFs we have to separate the LEA from the reference.  */
    4474        18258 :   tree load_rhs = rhs;
    4475        18258 :   if (TREE_CODE (load_rhs) == TARGET_MEM_REF)
    4476         1095 :     load_rhs = prepare_target_mem_ref_lvalue (load_rhs, gsi);
    4477              : 
    4478              :   /* Rewrite the BIT_FIELD_REFs to be actual loads, re-emitting them at
    4479              :      the place of the original load.  */
    4480       120133 :   for (gimple *use_stmt : bf_stmts)
    4481              :     {
    4482        65359 :       tree bfr = gimple_assign_rhs1 (use_stmt);
    4483        65359 :       tree new_rhs = unshare_expr (load_rhs);
    4484        65359 :       if (TREE_OPERAND (bfr, 0) != lhs)
    4485              :         {
    4486              :           /* When the BIT_FIELD_REF is on the promoted vector we have to
    4487              :              adjust it and emit a conversion afterwards.  */
    4488           84 :           gimple *def_stmt
    4489           84 :               = SSA_NAME_DEF_STMT (TREE_OPERAND (bfr, 0));
    4490           84 :           enum tree_code def_code
    4491           84 :               = gimple_assign_rhs_code (def_stmt);
    4492              : 
    4493              :           /* The adjusted BIT_FIELD_REF is of the promotion source
    4494              :              vector size and at half of the offset...  */
    4495           84 :           new_rhs = fold_build3 (BIT_FIELD_REF,
    4496              :                                  TREE_TYPE (TREE_TYPE (lhs)),
    4497              :                                  new_rhs,
    4498              :                                  TYPE_SIZE (TREE_TYPE (TREE_TYPE (lhs))),
    4499              :                                  size_binop (EXACT_DIV_EXPR,
    4500              :                                              TREE_OPERAND (bfr, 2),
    4501              :                                              bitsize_int (2)));
    4502              :           /* ... and offsetted by half of the vector if VEC_UNPACK_HI_EXPR.  */
    4503           84 :           if (def_code == (!BYTES_BIG_ENDIAN
    4504              :                            ? VEC_UNPACK_HI_EXPR : VEC_UNPACK_LO_EXPR))
    4505           42 :             TREE_OPERAND (new_rhs, 2)
    4506           84 :               = size_binop (PLUS_EXPR, TREE_OPERAND (new_rhs, 2),
    4507              :                             size_binop (EXACT_DIV_EXPR,
    4508              :                                         TYPE_SIZE (TREE_TYPE (lhs)),
    4509              :                                         bitsize_int (2)));
    4510           84 :           tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (lhs)));
    4511           84 :           gimple *new_stmt = gimple_build_assign (tem, new_rhs);
    4512           84 :           location_t loc = gimple_location (use_stmt);
    4513           84 :           gimple_set_location (new_stmt, loc);
    4514           84 :           gimple_set_vuse (new_stmt, vuse);
    4515           84 :           gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    4516              :           /* Perform scalar promotion.  */
    4517           84 :           new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
    4518              :                                           NOP_EXPR, tem);
    4519           84 :           gimple_set_location (new_stmt, loc);
    4520           84 :           gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    4521              :         }
    4522              :       else
    4523              :         {
    4524              :           /* When the BIT_FIELD_REF is on the original load result
    4525              :              we can just wrap that.  */
    4526        65275 :           tree new_rhs = fold_build3 (BIT_FIELD_REF, TREE_TYPE (bfr),
    4527              :                                       unshare_expr (load_rhs),
    4528              :                                       TREE_OPERAND (bfr, 1),
    4529              :                                       TREE_OPERAND (bfr, 2));
    4530        65275 :           gimple *new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
    4531              :                                                   new_rhs);
    4532        65275 :           location_t loc = gimple_location (use_stmt);
    4533        65275 :           gimple_set_location (new_stmt, loc);
    4534        65275 :           gimple_set_vuse (new_stmt, vuse);
    4535        65275 :           gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    4536              :         }
    4537        65359 :       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    4538        65359 :       unlink_stmt_vdef (use_stmt);
    4539        65359 :       gsi_remove (&gsi2, true);
    4540              :     }
    4541              : 
    4542              :   /* Finally get rid of the intermediate stmts.  */
    4543        18258 :   gimple *use_stmt;
    4544        36636 :   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    4545              :     {
    4546          120 :       if (is_gimple_debug (use_stmt))
    4547              :         {
    4548           92 :           if (gimple_debug_bind_p (use_stmt))
    4549              :             {
    4550           92 :               gimple_debug_bind_reset_value (use_stmt);
    4551           92 :               update_stmt (use_stmt);
    4552              :             }
    4553           92 :           continue;
    4554              :         }
    4555           28 :       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    4556           28 :       unlink_stmt_vdef (use_stmt);
    4557           28 :       release_defs (use_stmt);
    4558           28 :       gsi_remove (&gsi2, true);
    4559        18258 :     }
    4560              :   /* And the original load.  */
    4561        18258 :   release_defs (stmt);
    4562        18258 :   gsi_remove (gsi, true);
    4563       283431 : }
    4564              : 
    4565              : 
    4566              : /* Primitive "lattice" function for gimple_simplify.  */
    4567              : 
    4568              : static tree
    4569   1590646283 : fwprop_ssa_val (tree name)
    4570              : {
    4571              :   /* First valueize NAME.  */
    4572   1590646283 :   if (TREE_CODE (name) == SSA_NAME
    4573   1590646283 :       && SSA_NAME_VERSION (name) < lattice.length ())
    4574              :     {
    4575   1589804864 :       tree val = lattice[SSA_NAME_VERSION (name)];
    4576   1589804864 :       if (val)
    4577   1590646283 :         name = val;
    4578              :     }
    4579              :   /* We continue matching along SSA use-def edges for SSA names
    4580              :      that are not single-use.  Currently there are no patterns
    4581              :      that would cause any issues with that.  */
    4582   1590646283 :   return name;
    4583              : }
    4584              : 
    4585              : /* Search for opportunities to free half of the lanes in the following pattern:
    4586              : 
    4587              :      v_in = {e0, e1, e2, e3}
    4588              :      v_1 = VEC_PERM <v_in, v_in, {0, 2, 0, 2}>
    4589              :      // v_1 = {e0, e2, e0, e2}
    4590              :      v_2 = VEC_PERM <v_in, v_in, {1, 3, 1, 3}>
    4591              :      // v_2 = {e1, e3, e1, e3}
    4592              : 
    4593              :      v_x = v_1 + v_2
    4594              :      // v_x = {e0+e1, e2+e3, e0+e1, e2+e3}
    4595              :      v_y = v_1 - v_2
    4596              :      // v_y = {e0-e1, e2-e3, e0-e1, e2-e3}
    4597              : 
    4598              :      v_out = VEC_PERM <v_x, v_y, {0, 1, 6, 7}>
    4599              :      // v_out = {e0+e1, e2+e3, e0-e1, e2-e3}
    4600              : 
    4601              :    The last statement could be simplified to:
    4602              :      v_out' = VEC_PERM <v_x, v_y, {0, 1, 4, 5}>
    4603              :      // v_out' = {e0+e1, e2+e3, e0-e1, e2-e3}
    4604              : 
    4605              :    Characteristic properties:
    4606              :    - v_1 and v_2 are created from the same input vector v_in and introduce the
    4607              :      lane duplication (in the selection operand) that we can eliminate.
    4608              :    - v_x and v_y are results from lane-preserving operations that use v_1 and
    4609              :      v_2 as inputs.
    4610              :    - v_out is created by selecting from duplicated lanes.  */
    4611              : 
    4612              : static bool
    4613       179602 : recognise_vec_perm_simplify_seq (gassign *stmt, vec_perm_simplify_seq *seq)
    4614              : {
    4615       179602 :   unsigned HOST_WIDE_INT nelts;
    4616              : 
    4617       179602 :   gcc_checking_assert (stmt);
    4618       179602 :   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
    4619       179602 :   basic_block bb = gimple_bb (stmt);
    4620              : 
    4621              :   /* Decompose the final vec permute statement.  */
    4622       179602 :   tree v_x = gimple_assign_rhs1 (stmt);
    4623       179602 :   tree v_y = gimple_assign_rhs2 (stmt);
    4624       179602 :   tree sel = gimple_assign_rhs3 (stmt);
    4625              : 
    4626       179602 :   if (TREE_CODE (sel) != VECTOR_CST
    4627       176865 :       || !VECTOR_CST_NELTS (sel).is_constant (&nelts)
    4628       176865 :       || TREE_CODE (v_x) != SSA_NAME
    4629       175013 :       || TREE_CODE (v_y) != SSA_NAME
    4630       172470 :       || !has_single_use (v_x)
    4631       288189 :       || !has_single_use (v_y))
    4632        72721 :     return false;
    4633              : 
    4634              :   /* Don't analyse sequences with many lanes.  */
    4635       106881 :   if (nelts > 4)
    4636              :     return false;
    4637              : 
    4638              :   /* Lookup the definition of v_x and v_y.  */
    4639       104895 :   gassign *v_x_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x));
    4640       104895 :   gassign *v_y_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_y));
    4641       104518 :   if (!v_x_stmt || gimple_bb (v_x_stmt) != bb
    4642       209413 :       || !v_y_stmt || gimple_bb (v_y_stmt) != bb)
    4643              :     return false;
    4644              : 
    4645              :   /* Check the operations that define v_x and v_y.  */
    4646       104511 :   if (TREE_CODE_CLASS (gimple_assign_rhs_code (v_x_stmt)) != tcc_binary
    4647       106544 :       || TREE_CODE_CLASS (gimple_assign_rhs_code (v_y_stmt)) != tcc_binary)
    4648              :     return false;
    4649              : 
    4650         2033 :   tree v_x_1 = gimple_assign_rhs1 (v_x_stmt);
    4651         2033 :   tree v_x_2 = gimple_assign_rhs2 (v_x_stmt);
    4652         2033 :   tree v_y_1 = gimple_assign_rhs1 (v_y_stmt);
    4653         2033 :   tree v_y_2 = gimple_assign_rhs2 (v_y_stmt);
    4654              : 
    4655         2033 :   if (v_x_stmt == v_y_stmt
    4656         2033 :       || TREE_CODE (v_x_1) != SSA_NAME
    4657         2030 :       || TREE_CODE (v_x_2) != SSA_NAME
    4658         2028 :       || num_imm_uses (v_x_1) != 2
    4659         3903 :       || num_imm_uses (v_x_2) != 2)
    4660              :     return false;
    4661              : 
    4662         1834 :   if (v_x_1 != v_y_1 || v_x_2 != v_y_2)
    4663              :     {
    4664              :       /* Allow operands of commutative operators to swap.  */
    4665          646 :       if (commutative_tree_code (gimple_assign_rhs_code (v_x_stmt)))
    4666              :         {
    4667              :           /* Keep v_x_1 the first operand for non-commutative operators.  */
    4668          266 :           std::swap (v_x_1, v_x_2);
    4669          266 :           if (v_x_1 != v_y_1 || v_x_2 != v_y_2)
    4670              :             return false;
    4671              :         }
    4672          380 :       else if (commutative_tree_code (gimple_assign_rhs_code (v_y_stmt)))
    4673              :         {
    4674          380 :           if (v_x_1 != v_y_2 || v_x_2 != v_y_1)
    4675              :             return false;
    4676              :         }
    4677              :       else
    4678              :         return false;
    4679              :     }
    4680         1834 :   gassign *v_1_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x_1));
    4681         1834 :   gassign *v_2_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x_2));
    4682         1770 :   if (!v_1_stmt || gimple_bb (v_1_stmt) != bb
    4683         3604 :       || !v_2_stmt || gimple_bb (v_2_stmt) != bb)
    4684              :     return false;
    4685              : 
    4686         1766 :   if (gimple_assign_rhs_code (v_1_stmt) != VEC_PERM_EXPR
    4687         1888 :       || gimple_assign_rhs_code (v_2_stmt) != VEC_PERM_EXPR)
    4688              :     return false;
    4689              : 
    4690              :   /* Decompose initial VEC_PERM_EXPRs.  */
    4691          108 :   tree v_in = gimple_assign_rhs1 (v_1_stmt);
    4692          108 :   tree v_1_sel = gimple_assign_rhs3 (v_1_stmt);
    4693          108 :   tree v_2_sel = gimple_assign_rhs3 (v_2_stmt);
    4694          108 :   if (v_in != gimple_assign_rhs2 (v_1_stmt)
    4695          103 :       || v_in != gimple_assign_rhs1 (v_2_stmt)
    4696          209 :       || v_in != gimple_assign_rhs2 (v_2_stmt))
    4697              :     return false;
    4698              : 
    4699          101 :   unsigned HOST_WIDE_INT v_1_nelts, v_2_nelts;
    4700          101 :   if (TREE_CODE (v_1_sel) != VECTOR_CST
    4701          101 :       || !VECTOR_CST_NELTS (v_1_sel).is_constant (&v_1_nelts)
    4702          101 :       || TREE_CODE (v_2_sel) != VECTOR_CST
    4703          202 :       || !VECTOR_CST_NELTS (v_2_sel).is_constant (&v_2_nelts))
    4704            0 :     return false;
    4705              : 
    4706          101 :   if (nelts != v_1_nelts || nelts != v_2_nelts)
    4707              :     return false;
    4708              : 
    4709              :   /* Create the new selector.  */
    4710          101 :   vec_perm_builder new_sel_perm (nelts, nelts, 1);
    4711          101 :   auto_vec<bool> lanes (nelts);
    4712          101 :   lanes.quick_grow_cleared (nelts);
    4713          505 :   for (unsigned int i = 0; i < nelts; i++)
    4714              :     {
    4715              :       /* Extract the i-th value from the selector.  */
    4716          404 :       unsigned int sel_cst = TREE_INT_CST_LOW (VECTOR_CST_ELT (sel, i));
    4717          404 :       unsigned int lane = sel_cst % nelts;
    4718          404 :       unsigned int offs = sel_cst / nelts;
    4719              : 
    4720              :       /* Check what's in the lane.  */
    4721          404 :       unsigned int e_1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (v_1_sel, lane));
    4722          404 :       unsigned int e_2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (v_2_sel, lane));
    4723              : 
    4724              :       /* Reuse previous lane (if any).  */
    4725          404 :       unsigned int l = 0;
    4726          687 :       for (; l < lane; l++)
    4727              :         {
    4728          481 :           if ((TREE_INT_CST_LOW (VECTOR_CST_ELT (v_1_sel, l)) == e_1)
    4729          481 :               && (TREE_INT_CST_LOW (VECTOR_CST_ELT (v_2_sel, l)) == e_2))
    4730              :             break;
    4731              :         }
    4732              : 
    4733              :       /* Add to narrowed selector.  */
    4734          404 :       new_sel_perm.quick_push (l + offs * nelts);
    4735              : 
    4736              :       /* Mark lane as used.  */
    4737          404 :       lanes[l] = true;
    4738              :     }
    4739              : 
    4740              :   /* Count how many lanes are need.  */
    4741              :   unsigned int cnt = 0;
    4742          505 :   for (unsigned int i = 0; i < nelts; i++)
    4743          404 :     cnt += lanes[i];
    4744              : 
    4745              :   /* If more than (nelts/2) lanes are needed, skip the sequence.  */
    4746          101 :   if (cnt > nelts / 2)
    4747              :     return false;
    4748              : 
    4749              :   /* Check if the resulting permutation is cheap.  */
    4750          101 :   vec_perm_indices new_indices (new_sel_perm, 2, nelts);
    4751          101 :   tree vectype = TREE_TYPE (gimple_assign_lhs (stmt));
    4752          101 :   machine_mode vmode = TYPE_MODE (vectype);
    4753          101 :   if (!can_vec_perm_const_p (vmode, vmode, new_indices, false))
    4754              :     return false;
    4755              : 
    4756          101 :   *seq = XNEW (struct _vec_perm_simplify_seq);
    4757          101 :   (*seq)->stmt = stmt;
    4758          101 :   (*seq)->v_1_stmt = v_1_stmt;
    4759          101 :   (*seq)->v_2_stmt = v_2_stmt;
    4760          101 :   (*seq)->v_x_stmt = v_x_stmt;
    4761          101 :   (*seq)->v_y_stmt = v_y_stmt;
    4762          101 :   (*seq)->nelts = nelts;
    4763          101 :   (*seq)->new_sel = vect_gen_perm_mask_checked (vectype, new_indices);
    4764              : 
    4765          101 :   if (dump_file)
    4766              :     {
    4767           28 :       fprintf (dump_file, "Found vec perm simplify sequence ending with:\n\t");
    4768           28 :       print_gimple_stmt (dump_file, stmt, 0);
    4769              : 
    4770           28 :       if (dump_flags & TDF_DETAILS)
    4771              :         {
    4772           28 :           fprintf (dump_file, "\tNarrowed vec_perm selector: ");
    4773           28 :           print_generic_expr (dump_file, (*seq)->new_sel);
    4774           28 :           fprintf (dump_file, "\n");
    4775              :         }
    4776              :     }
    4777              : 
    4778              :   return true;
    4779          202 : }
    4780              : 
    4781              : /* Reduce the lane consumption of a simplifiable vec perm sequence.  */
    4782              : 
    4783              : static void
    4784           74 : narrow_vec_perm_simplify_seq (const vec_perm_simplify_seq &seq)
    4785              : {
    4786           74 :   gassign *stmt = seq->stmt;
    4787           74 :   if (dump_file && (dump_flags & TDF_DETAILS))
    4788              :     {
    4789           22 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    4790           22 :       fprintf (dump_file, "Old stmt: ");
    4791           22 :       print_gimple_stmt (dump_file, stmt, 0);
    4792              :     }
    4793              : 
    4794              :   /* Update the last VEC_PERM statement.  */
    4795           74 :   gimple_assign_set_rhs3 (stmt, seq->new_sel);
    4796           74 :   update_stmt (stmt);
    4797              : 
    4798           74 :   if (dump_file && (dump_flags & TDF_DETAILS))
    4799              :     {
    4800           22 :       fprintf (dump_file, "New stmt: ");
    4801           22 :       print_gimple_stmt (dump_file, stmt, 0);
    4802              :     }
    4803           74 : }
    4804              : 
    4805              : /* Test if we can blend two simplifiable vec permute sequences.
    4806              :    NEED_SWAP will be set, if sequences must be swapped for blending.  */
    4807              : 
    4808              : static bool
    4809           47 : can_blend_vec_perm_simplify_seqs_p (vec_perm_simplify_seq seq1,
    4810              :                                     vec_perm_simplify_seq seq2,
    4811              :                                     bool *need_swap)
    4812              : {
    4813           47 :   unsigned int nelts = seq1->nelts;
    4814           47 :   basic_block bb = gimple_bb (seq1->stmt);
    4815              : 
    4816           47 :   gcc_assert (gimple_bb (seq2->stmt) == bb);
    4817              : 
    4818              :   /* BBs and number of elements must be equal.  */
    4819           47 :   if (gimple_bb (seq2->stmt) != bb || seq2->nelts != nelts)
    4820              :     return false;
    4821              : 
    4822              :   /* We need vectors of the same type.  */
    4823           47 :   if (TREE_TYPE (gimple_assign_lhs (seq1->stmt))
    4824           47 :       != TREE_TYPE (gimple_assign_lhs (seq2->stmt)))
    4825              :     return false;
    4826              : 
    4827              :   /* We require isomorphic operators.  */
    4828           41 :   if (((gimple_assign_rhs_code (seq1->v_x_stmt)
    4829           41 :         != gimple_assign_rhs_code (seq2->v_x_stmt))
    4830           41 :        || (gimple_assign_rhs_code (seq1->v_y_stmt)
    4831           41 :            != gimple_assign_rhs_code (seq2->v_y_stmt))))
    4832              :     return false;
    4833              : 
    4834              :   /* We cannot have any dependencies between the sequences.
    4835              : 
    4836              :      For merging, we will reuse seq1->v_1_stmt and seq1->v_2_stmt.
    4837              :      seq1's v_in is defined before these statements, but we need
    4838              :      to check if seq2's v_in is defined before them as well.
    4839              : 
    4840              :      Further, we will reuse seq2->stmt.  We need to ensure that
    4841              :      seq1->v_x_stmt and seq1->v_y_stmt are before it.
    4842              : 
    4843              :      Note, that we don't need to check the BBs here, because all
    4844              :      statements of both sequences have to be in the same BB.  */
    4845              : 
    4846           41 :   tree seq2_v_in = gimple_assign_rhs1 (seq2->v_1_stmt);
    4847           41 :   if (TREE_CODE (seq2_v_in) != SSA_NAME)
    4848              :     return false;
    4849              : 
    4850           41 :   gassign *seq2_v_in_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (seq2_v_in));
    4851           41 :   if (!seq2_v_in_stmt || gimple_bb (seq2_v_in_stmt) != bb
    4852           41 :       || (gimple_uid (seq2_v_in_stmt) > gimple_uid (seq1->v_1_stmt))
    4853           37 :       || (gimple_uid (seq1->v_x_stmt) > gimple_uid (seq2->stmt))
    4854           37 :       || (gimple_uid (seq1->v_y_stmt) > gimple_uid (seq2->stmt)))
    4855              :     {
    4856            4 :       tree seq1_v_in = gimple_assign_rhs1 (seq1->v_1_stmt);
    4857            4 :       if (TREE_CODE (seq1_v_in) != SSA_NAME)
    4858              :         return false;
    4859              : 
    4860            4 :       gassign *seq1_v_in_stmt
    4861            4 :         = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (seq1_v_in));
    4862              :       /* Let's try to see if we succeed when swapping the sequences.  */
    4863            4 :       if (!seq1_v_in_stmt || gimple_bb (seq1_v_in_stmt)
    4864            0 :           || (gimple_uid (seq1_v_in_stmt) > gimple_uid (seq2->v_1_stmt))
    4865            0 :           || (gimple_uid (seq2->v_x_stmt) > gimple_uid (seq1->stmt))
    4866            0 :           || (gimple_uid (seq2->v_y_stmt) > gimple_uid (seq1->stmt)))
    4867              :         return false;
    4868            0 :       *need_swap = true;
    4869              :     }
    4870              :   else
    4871           37 :     *need_swap = false;
    4872              : 
    4873           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    4874           11 :     fprintf (dump_file, "Found vec perm simplify sequence pair.\n");
    4875              : 
    4876              :   return true;
    4877              : }
    4878              : 
    4879              : /* Calculate the permutations for blending the two given vec permute
    4880              :    sequences.  This may fail if the resulting permutation is not
    4881              :    supported.  */
    4882              : 
    4883              : static bool
    4884           37 : calc_perm_vec_perm_simplify_seqs (vec_perm_simplify_seq seq1,
    4885              :                                   vec_perm_simplify_seq seq2,
    4886              :                                   vec_perm_indices *seq2_stmt_indices,
    4887              :                                   vec_perm_indices *seq1_v_1_stmt_indices,
    4888              :                                   vec_perm_indices *seq1_v_2_stmt_indices)
    4889              : {
    4890           37 :   unsigned int i;
    4891           37 :   unsigned int nelts = seq1->nelts;
    4892           37 :   auto_vec<unsigned int> lane_assignment;
    4893           37 :   lane_assignment.create (nelts);
    4894              : 
    4895              :   /* Mark all lanes as free.  */
    4896           37 :   lane_assignment.quick_grow_cleared (nelts);
    4897              : 
    4898              :   /* Allocate lanes for seq1.  */
    4899          185 :   for (i = 0; i < nelts; i++)
    4900              :     {
    4901          148 :       unsigned int l = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq1->new_sel, i));
    4902          148 :       l %= nelts;
    4903          148 :       lane_assignment[l] = 1;
    4904              :     }
    4905              : 
    4906              :   /* Allocate lanes for seq2 and calculate selector for seq2->stmt.  */
    4907           37 :   vec_perm_builder seq2_stmt_sel_perm (nelts, nelts, 1);
    4908          185 :   for (i = 0; i < nelts; i++)
    4909              :     {
    4910          148 :       unsigned int sel = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq2->new_sel, i));
    4911          148 :       unsigned int lane = sel % nelts;
    4912          148 :       unsigned int offs = sel / nelts;
    4913          148 :       unsigned int new_sel;
    4914              : 
    4915              :       /* Check if we already allocated the lane for seq2.  */
    4916          148 :       unsigned int j = 0;
    4917          263 :       for (; j < i; j++)
    4918              :         {
    4919          189 :           unsigned int sel_old;
    4920          189 :           sel_old = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq2->new_sel, j));
    4921          189 :           unsigned int lane_old = sel_old % nelts;
    4922          189 :           if (lane == lane_old)
    4923              :             {
    4924           74 :               new_sel = seq2_stmt_sel_perm[j].to_constant ();
    4925           74 :               new_sel = (new_sel % nelts) + offs * nelts;
    4926           74 :               break;
    4927              :             }
    4928              :         }
    4929              : 
    4930              :       /* If the lane is not allocated, we need to do that now.  */
    4931          148 :       if (j == i)
    4932              :         {
    4933              :           unsigned int l_orig = lane;
    4934          182 :           while (lane_assignment[lane] != 0)
    4935              :             {
    4936          108 :               lane = (lane + 1) % nelts;
    4937              : 
    4938              :               /* This should not happen if both sequences utilize no more than
    4939              :                  half of the lanes.  Test anyway to guarantee termination.  */
    4940          108 :               if (lane == l_orig)
    4941            0 :                 return false;
    4942              :             }
    4943              : 
    4944              :           /* Allocate lane.  */
    4945           74 :           lane_assignment[lane] = 2 + l_orig;
    4946           74 :           new_sel = lane + offs * nelts;
    4947              :         }
    4948              : 
    4949          148 :       seq2_stmt_sel_perm.quick_push (new_sel);
    4950              :     }
    4951              : 
    4952              :   /* Check if the resulting permutation is cheap.  */
    4953           37 :   seq2_stmt_indices->new_vector (seq2_stmt_sel_perm, 2, nelts);
    4954           37 :   tree vectype = TREE_TYPE (gimple_assign_lhs (seq2->stmt));
    4955           37 :   machine_mode vmode = TYPE_MODE (vectype);
    4956           37 :   if (!can_vec_perm_const_p (vmode, vmode, *seq2_stmt_indices, false))
    4957              :     return false;
    4958              : 
    4959              :   /* Calculate selectors for seq1->v_1_stmt and seq1->v_2_stmt.  */
    4960           37 :   vec_perm_builder seq1_v_1_stmt_sel_perm (nelts, nelts, 1);
    4961           37 :   vec_perm_builder seq1_v_2_stmt_sel_perm (nelts, nelts, 1);
    4962          185 :   for (i = 0; i < nelts; i++)
    4963              :     {
    4964          148 :       bool use_seq1 = lane_assignment[i] < 2;
    4965          148 :       unsigned int l1, l2;
    4966              : 
    4967          148 :       if (use_seq1)
    4968              :         {
    4969              :           /* Just reuse the selector indices.  */
    4970           74 :           tree s1 = gimple_assign_rhs3 (seq1->v_1_stmt);
    4971           74 :           tree s2 = gimple_assign_rhs3 (seq1->v_2_stmt);
    4972           74 :           l1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s1, i));
    4973           74 :           l2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s2, i));
    4974              :         }
    4975              :       else
    4976              :         {
    4977              :           /* We moved the lanes for seq2, so we need to adjust for that.  */
    4978           74 :           tree s1 = gimple_assign_rhs3 (seq2->v_1_stmt);
    4979           74 :           tree s2 = gimple_assign_rhs3 (seq2->v_2_stmt);
    4980           74 :           l1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s1, lane_assignment[i] - 2));
    4981           74 :           l2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s2, lane_assignment[i] - 2));
    4982              :         }
    4983              : 
    4984          148 :       l1 %= nelts;
    4985          148 :       l2 %= nelts;
    4986          222 :       seq1_v_1_stmt_sel_perm.quick_push (l1 + (use_seq1 ? 0 : nelts));
    4987          148 :       seq1_v_2_stmt_sel_perm.quick_push (l2 + (use_seq1 ? 0 : nelts));
    4988              :     }
    4989              : 
    4990           37 :   seq1_v_1_stmt_indices->new_vector (seq1_v_1_stmt_sel_perm, 2, nelts);
    4991           37 :   vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_1_stmt));
    4992           37 :   vmode = TYPE_MODE (vectype);
    4993           37 :   if (!can_vec_perm_const_p (vmode, vmode, *seq1_v_1_stmt_indices, false))
    4994              :     return false;
    4995              : 
    4996           37 :   seq1_v_2_stmt_indices->new_vector (seq1_v_2_stmt_sel_perm, 2, nelts);
    4997           37 :   vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_2_stmt));
    4998           37 :   vmode = TYPE_MODE (vectype);
    4999           37 :   if (!can_vec_perm_const_p (vmode, vmode, *seq1_v_2_stmt_indices, false))
    5000              :     return false;
    5001              : 
    5002              :   return true;
    5003           74 : }
    5004              : 
    5005              : /* Blend the two given simplifiable vec permute sequences using the
    5006              :    given permutations.  */
    5007              : 
    5008              : static void
    5009           37 : blend_vec_perm_simplify_seqs (vec_perm_simplify_seq seq1,
    5010              :                               vec_perm_simplify_seq seq2,
    5011              :                               const vec_perm_indices &seq2_stmt_indices,
    5012              :                               const vec_perm_indices &seq1_v_1_stmt_indices,
    5013              :                               const vec_perm_indices &seq1_v_2_stmt_indices)
    5014              : {
    5015              :   /* We don't need to adjust seq1->stmt because its lanes consumption
    5016              :      was already narrowed before entering this function.  */
    5017              : 
    5018              :   /* Adjust seq2->stmt: copy RHS1/RHS2 from seq1->stmt and set new sel.  */
    5019           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5020              :     {
    5021           11 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    5022           11 :       fprintf (dump_file, "Old stmt: ");
    5023           11 :       print_gimple_stmt (dump_file, seq2->stmt, 0);
    5024              :     }
    5025              : 
    5026           37 :   gimple_assign_set_rhs1 (seq2->stmt, gimple_assign_rhs1 (seq1->stmt));
    5027           74 :   gimple_assign_set_rhs2 (seq2->stmt, gimple_assign_rhs2 (seq1->stmt));
    5028           37 :   tree vectype = TREE_TYPE (gimple_assign_lhs (seq2->stmt));
    5029           37 :   tree sel = vect_gen_perm_mask_checked (vectype, seq2_stmt_indices);
    5030           37 :   gimple_assign_set_rhs3 (seq2->stmt, sel);
    5031           37 :   update_stmt (seq2->stmt);
    5032              : 
    5033           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5034              :     {
    5035           11 :       fprintf (dump_file, "New stmt: ");
    5036           11 :       print_gimple_stmt (dump_file, seq2->stmt, 0);
    5037              :     }
    5038              : 
    5039              :   /* Adjust seq1->v_1_stmt: copy RHS2 from seq2->v_1_stmt and set new sel.  */
    5040           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5041              :     {
    5042           11 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    5043           11 :       fprintf (dump_file, "Old stmt: ");
    5044           11 :       print_gimple_stmt (dump_file, seq1->v_1_stmt, 0);
    5045              :     }
    5046              : 
    5047           37 :   gimple_assign_set_rhs2 (seq1->v_1_stmt, gimple_assign_rhs1 (seq2->v_1_stmt));
    5048           37 :   vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_1_stmt));
    5049           37 :   sel = vect_gen_perm_mask_checked (vectype, seq1_v_1_stmt_indices);
    5050           37 :   gimple_assign_set_rhs3 (seq1->v_1_stmt, sel);
    5051           37 :   update_stmt (seq1->v_1_stmt);
    5052              : 
    5053           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5054              :     {
    5055           11 :       fprintf (dump_file, "New stmt: ");
    5056           11 :       print_gimple_stmt (dump_file, seq1->v_1_stmt, 0);
    5057              :     }
    5058              : 
    5059              :   /* Adjust seq1->v_2_stmt: copy RHS2 from seq2->v_2_stmt and set new sel.  */
    5060           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5061              :     {
    5062           11 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    5063           11 :       fprintf (dump_file, "Old stmt: ");
    5064           11 :       print_gimple_stmt (dump_file, seq1->v_2_stmt, 0);
    5065              :     }
    5066              : 
    5067           37 :   gimple_assign_set_rhs2 (seq1->v_2_stmt, gimple_assign_rhs1 (seq2->v_2_stmt));
    5068           37 :   vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_2_stmt));
    5069           37 :   sel = vect_gen_perm_mask_checked (vectype, seq1_v_2_stmt_indices);
    5070           37 :   gimple_assign_set_rhs3 (seq1->v_2_stmt, sel);
    5071           37 :   update_stmt (seq1->v_2_stmt);
    5072              : 
    5073           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5074              :     {
    5075           11 :       fprintf (dump_file, "New stmt: ");
    5076           11 :       print_gimple_stmt (dump_file, seq1->v_2_stmt, 0);
    5077              :     }
    5078              : 
    5079              :   /* At this point, we have four unmodified seq2 stmts, which will be
    5080              :      eliminated by DCE.  */
    5081              : 
    5082           37 :   if (dump_file)
    5083           11 :     fprintf (dump_file, "Vec perm simplify sequences have been blended.\n\n");
    5084           37 : }
    5085              : 
    5086              : /* Try to blend narrowed vec_perm_simplify_seqs pairwise.
    5087              :    The provided list will be empty after this call.  */
    5088              : 
    5089              : static void
    5090    311349188 : process_vec_perm_simplify_seq_list (vec<vec_perm_simplify_seq> *l)
    5091              : {
    5092    311349188 :   unsigned int i, j;
    5093    311349188 :   vec_perm_simplify_seq seq1, seq2;
    5094              : 
    5095    311349188 :   if (l->is_empty ())
    5096    311349143 :     return;
    5097              : 
    5098           45 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5099           13 :     fprintf (dump_file, "\nProcessing %u vec perm simplify sequences.\n",
    5100              :              l->length ());
    5101              : 
    5102          109 :   FOR_EACH_VEC_ELT (*l, i, seq1)
    5103              :     {
    5104           64 :       if (i + 1 < l->length ())
    5105              :         {
    5106           51 :           FOR_EACH_VEC_ELT_FROM (*l, j, seq2, i + 1)
    5107              :             {
    5108           47 :               bool swap = false;
    5109           47 :               if (can_blend_vec_perm_simplify_seqs_p (seq1, seq2, &swap))
    5110              :                 {
    5111           37 :                   vec_perm_indices seq2_stmt_indices;
    5112           37 :                   vec_perm_indices seq1_v_1_stmt_indices;
    5113           37 :                   vec_perm_indices seq1_v_2_stmt_indices;
    5114          111 :                   if (calc_perm_vec_perm_simplify_seqs (swap ? seq2 : seq1,
    5115              :                                                         swap ? seq1 : seq2,
    5116              :                                                         &seq2_stmt_indices,
    5117              :                                                         &seq1_v_1_stmt_indices,
    5118              :                                                         &seq1_v_2_stmt_indices))
    5119              :                     {
    5120              :                       /* Narrow lane usage.  */
    5121           37 :                       narrow_vec_perm_simplify_seq (seq1);
    5122           37 :                       narrow_vec_perm_simplify_seq (seq2);
    5123              : 
    5124              :                       /* Blend sequences.  */
    5125           37 :                       blend_vec_perm_simplify_seqs (swap ? seq2 : seq1,
    5126              :                                                     swap ? seq1 : seq2,
    5127              :                                                     seq2_stmt_indices,
    5128              :                                                     seq1_v_1_stmt_indices,
    5129              :                                                     seq1_v_2_stmt_indices);
    5130              : 
    5131              :                       /* We can use unordered_remove as we break the loop.  */
    5132           37 :                       l->unordered_remove (j);
    5133           37 :                       XDELETE (seq2);
    5134           37 :                       break;
    5135              :                     }
    5136           37 :                 }
    5137              :             }
    5138              :         }
    5139              : 
    5140              :       /* We don't need to call l->remove for seq1.  */
    5141           64 :       XDELETE (seq1);
    5142              :     }
    5143              : 
    5144           45 :   l->truncate (0);
    5145              : }
    5146              : 
    5147              : static void
    5148          101 : append_vec_perm_simplify_seq_list (vec<vec_perm_simplify_seq> *l,
    5149              :                                    const vec_perm_simplify_seq &seq)
    5150              : {
    5151              :   /* If no space on list left, then process the list.  */
    5152          101 :   if (!l->space (1))
    5153            0 :       process_vec_perm_simplify_seq_list (l);
    5154              : 
    5155          101 :   l->quick_push (seq);
    5156          101 : }
    5157              : 
    5158              : /* Main entry point for the forward propagation and statement combine
    5159              :    optimizer.  */
    5160              : 
    5161              : namespace {
    5162              : 
    5163              : const pass_data pass_data_forwprop =
    5164              : {
    5165              :   GIMPLE_PASS, /* type */
    5166              :   "forwprop", /* name */
    5167              :   OPTGROUP_NONE, /* optinfo_flags */
    5168              :   TV_TREE_FORWPROP, /* tv_id */
    5169              :   ( PROP_cfg | PROP_ssa ), /* properties_required */
    5170              :   0, /* properties_provided */
    5171              :   0, /* properties_destroyed */
    5172              :   0, /* todo_flags_start */
    5173              :   0, /* todo_flags_finish */
    5174              : };
    5175              : 
    5176              : class pass_forwprop : public gimple_opt_pass
    5177              : {
    5178              : public:
    5179      1428610 :   pass_forwprop (gcc::context *ctxt)
    5180      2857220 :     : gimple_opt_pass (pass_data_forwprop, ctxt), last_p (false)
    5181              :   {}
    5182              : 
    5183              :   /* opt_pass methods: */
    5184      1142888 :   opt_pass * clone () final override { return new pass_forwprop (m_ctxt); }
    5185      1714332 :   void set_pass_param (unsigned int n, bool param) final override
    5186              :     {
    5187      1714332 :       switch (n)
    5188              :         {
    5189      1142888 :           case 0:
    5190      1142888 :             m_full_walk = param;
    5191      1142888 :             break;
    5192       571444 :           case 1:
    5193       571444 :             last_p = param;
    5194       571444 :             break;
    5195            0 :           default:
    5196            0 :           gcc_unreachable();
    5197              :         }
    5198      1714332 :     }
    5199      5539535 :   bool gate (function *) final override { return flag_tree_forwprop; }
    5200              :   unsigned int execute (function *) final override;
    5201              : 
    5202              :  private:
    5203              :   /* Determines whether the pass instance should set PROP_last_full_fold.  */
    5204              :   bool last_p;
    5205              : 
    5206              :   /* True if the aggregate props are doing a full walk or not.  */
    5207              :   bool m_full_walk = false;
    5208              : }; // class pass_forwprop
    5209              : 
    5210              : /* Attemp to make the BB block of __builtin_unreachable unreachable by changing
    5211              :    the incoming jumps.  Return true if at least one jump was changed.  */
    5212              : 
    5213              : static bool
    5214         3013 : optimize_unreachable (basic_block bb)
    5215              : {
    5216         3013 :   gimple_stmt_iterator gsi;
    5217         3013 :   gimple *stmt;
    5218         3013 :   edge_iterator ei;
    5219         3013 :   edge e;
    5220         3013 :   bool ret;
    5221              : 
    5222         3013 :   ret = false;
    5223         7412 :   FOR_EACH_EDGE (e, ei, bb->preds)
    5224              :     {
    5225         4399 :       gsi = gsi_last_bb (e->src);
    5226         4399 :       if (gsi_end_p (gsi))
    5227          330 :         continue;
    5228              : 
    5229         4069 :       stmt = gsi_stmt (gsi);
    5230         4069 :       if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    5231              :         {
    5232              :           /* If the condition is already true/false
    5233              :              ignore it. This can happen during copy prop of forwprop. */
    5234          634 :           if (gimple_cond_true_p (cond_stmt)
    5235          626 :               || gimple_cond_false_p (cond_stmt))
    5236            8 :             continue;
    5237          618 :           else if (e->flags & EDGE_TRUE_VALUE)
    5238          537 :             gimple_cond_make_false (cond_stmt);
    5239           81 :           else if (e->flags & EDGE_FALSE_VALUE)
    5240           81 :             gimple_cond_make_true (cond_stmt);
    5241              :           else
    5242            0 :             gcc_unreachable ();
    5243          618 :           update_stmt (cond_stmt);
    5244              :         }
    5245              :       else
    5246              :         {
    5247              :           /* Todo: handle other cases.  Note that unreachable switch case
    5248              :              statements have already been removed.  */
    5249         3443 :           continue;
    5250              :         }
    5251              : 
    5252          618 :       ret = true;
    5253              :     }
    5254              : 
    5255         3013 :   return ret;
    5256              : }
    5257              : 
    5258              : unsigned int
    5259      5536978 : pass_forwprop::execute (function *fun)
    5260              : {
    5261      5536978 :   unsigned int todoflags = 0;
    5262              :   /* Handle a full walk only when expensive optimizations are on.  */
    5263      5536978 :   bool full_walk = m_full_walk && flag_expensive_optimizations;
    5264              : 
    5265      5536978 :   cfg_changed = false;
    5266      5536978 :   if (last_p)
    5267      1043629 :     fun->curr_properties |= PROP_last_full_fold;
    5268              : 
    5269      5536978 :   calculate_dominance_info (CDI_DOMINATORS);
    5270              : 
    5271              :   /* Combine stmts with the stmts defining their operands.  Do that
    5272              :      in an order that guarantees visiting SSA defs before SSA uses.  */
    5273     11073956 :   lattice.create (num_ssa_names);
    5274     11073956 :   lattice.quick_grow_cleared (num_ssa_names);
    5275      5536978 :   int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
    5276      5536978 :   int postorder_num = pre_and_rev_post_order_compute_fn (fun, NULL,
    5277              :                                                          postorder, false);
    5278      5536978 :   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fun));
    5279     50057573 :   for (int i = 0; i < postorder_num; ++i)
    5280              :     {
    5281     44520595 :       bb_to_rpo[postorder[i]] = i;
    5282     44520595 :       edge_iterator ei;
    5283     44520595 :       edge e;
    5284    107212567 :       FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fun, postorder[i])->succs)
    5285     62691972 :         e->flags &= ~EDGE_EXECUTABLE;
    5286              :     }
    5287      5536978 :   single_succ_edge (BASIC_BLOCK_FOR_FN (fun, ENTRY_BLOCK))->flags
    5288      5536978 :     |= EDGE_EXECUTABLE;
    5289      5536978 :   auto_vec<gimple *, 4> to_fixup;
    5290      5536978 :   auto_vec<gimple *, 32> to_remove;
    5291      5536978 :   auto_vec<unsigned, 32> to_remove_defs;
    5292      5536978 :   auto_vec<std::pair<int, int>, 10> edges_to_remove;
    5293      5536978 :   auto_bitmap simple_dce_worklist;
    5294      5536978 :   auto_bitmap need_ab_cleanup;
    5295      5536978 :   to_purge = BITMAP_ALLOC (NULL);
    5296      5536978 :   auto_vec<vec_perm_simplify_seq, 8> vec_perm_simplify_seq_list;
    5297     50057573 :   for (int i = 0; i < postorder_num; ++i)
    5298              :     {
    5299     44520595 :       gimple_stmt_iterator gsi;
    5300     44520595 :       basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
    5301     44520595 :       edge_iterator ei;
    5302     44520595 :       edge e;
    5303              : 
    5304              :       /* Skip processing not executable blocks.  We could improve
    5305              :          single_use tracking by at least unlinking uses from unreachable
    5306              :          blocks but since blocks with uses are not processed in a
    5307              :          meaningful order this is probably not worth it.  */
    5308     44520595 :       bool any = false;
    5309     45658696 :       FOR_EACH_EDGE (e, ei, bb->preds)
    5310              :         {
    5311     45644347 :           if ((e->flags & EDGE_EXECUTABLE)
    5312              :               /* We can handle backedges in natural loops correctly but
    5313              :                  for irreducible regions we have to take all backedges
    5314              :                  conservatively when we did not visit the source yet.  */
    5315     45644347 :               || (bb_to_rpo[e->src->index] > i
    5316       664043 :                   && !dominated_by_p (CDI_DOMINATORS, e->src, e->dest)))
    5317              :             {
    5318              :               any = true;
    5319              :               break;
    5320              :             }
    5321              :         }
    5322     44520595 :       if (!any)
    5323        14939 :         continue;
    5324              : 
    5325              :       /* Remove conditions that go directly to unreachable when this is the last forwprop.  */
    5326     44506246 :       if (last_p
    5327      9778706 :           && !(flag_sanitize & SANITIZE_UNREACHABLE))
    5328              :         {
    5329      9773736 :           gimple_stmt_iterator gsi;
    5330      9773736 :           gsi = gsi_start_nondebug_after_labels_bb (bb);
    5331      9774326 :           if (!gsi_end_p (gsi)
    5332      8951056 :               && gimple_call_builtin_p (*gsi, BUILT_IN_UNREACHABLE)
    5333      9776749 :               && optimize_unreachable (bb))
    5334              :             {
    5335          590 :               cfg_changed = true;
    5336          590 :               continue;
    5337              :             }
    5338              :         }
    5339              : 
    5340              :       /* Record degenerate PHIs in the lattice.  */
    5341     60432801 :       for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
    5342     15927145 :            gsi_next (&si))
    5343              :         {
    5344     15927145 :           gphi *phi = si.phi ();
    5345     15927145 :           tree res = gimple_phi_result (phi);
    5346     31854290 :           if (virtual_operand_p (res))
    5347      7273477 :             continue;
    5348              : 
    5349      8653668 :           tree first = NULL_TREE;
    5350      8653668 :           bool all_same = true;
    5351      8653668 :           edge_iterator ei;
    5352      8653668 :           edge e;
    5353     17797881 :           FOR_EACH_EDGE (e, ei, bb->preds)
    5354              :             {
    5355              :               /* Ignore not executable forward edges.  */
    5356     17577009 :               if (!(e->flags & EDGE_EXECUTABLE))
    5357              :                 {
    5358      4036298 :                   if (bb_to_rpo[e->src->index] < i)
    5359         5506 :                     continue;
    5360              :                   /* Avoid equivalences from backedges - while we might
    5361              :                      be able to make irreducible regions reducible and
    5362              :                      thus turning a back into a forward edge we do not
    5363              :                      want to deal with the intermediate SSA issues that
    5364              :                      exposes.  */
    5365              :                   all_same = false;
    5366              :                 }
    5367     17571503 :               tree use = PHI_ARG_DEF_FROM_EDGE (phi, e);
    5368     17571503 :               if (use == res)
    5369              :                 /* The PHI result can also appear on a backedge, if so
    5370              :                    we can ignore this case for the purpose of determining
    5371              :                    the singular value.  */
    5372              :                 ;
    5373     17558886 :               else if (! first)
    5374              :                 first = use;
    5375      8905218 :               else if (! operand_equal_p (first, use, 0))
    5376              :                 {
    5377              :                   all_same = false;
    5378              :                   break;
    5379              :                 }
    5380              :             }
    5381      8653668 :           if (all_same)
    5382              :             {
    5383       216148 :               if (may_propagate_copy (res, first))
    5384       215659 :                 to_remove_defs.safe_push (SSA_NAME_VERSION (res));
    5385       216148 :               fwprop_set_lattice_val (res, first);
    5386              :             }
    5387              :         }
    5388              : 
    5389              :       /* Apply forward propagation to all stmts in the basic-block.
    5390              :          Note we update GSI within the loop as necessary.  */
    5391     44505656 :       unsigned int uid = 1;
    5392    421628347 :       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    5393              :         {
    5394    332617035 :           gimple *stmt = gsi_stmt (gsi);
    5395    332617035 :           tree lhs, rhs;
    5396    332617035 :           enum tree_code code;
    5397              : 
    5398    332617035 :           gimple_set_uid (stmt, uid++);
    5399              : 
    5400    332617035 :           if (!is_gimple_assign (stmt))
    5401              :             {
    5402    229231274 :               process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
    5403    229231274 :               gsi_next (&gsi);
    5404    229231274 :               continue;
    5405              :             }
    5406              : 
    5407    103385761 :           lhs = gimple_assign_lhs (stmt);
    5408    103385761 :           rhs = gimple_assign_rhs1 (stmt);
    5409    103385761 :           code = gimple_assign_rhs_code (stmt);
    5410              : 
    5411    140998019 :           if (TREE_CODE (lhs) != SSA_NAME
    5412    103385761 :               || has_zero_uses (lhs))
    5413              :             {
    5414     37612258 :               process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
    5415     37612258 :               gsi_next (&gsi);
    5416     37612258 :               continue;
    5417              :             }
    5418              : 
    5419              :           /* If this statement sets an SSA_NAME to an address,
    5420              :              try to propagate the address into the uses of the SSA_NAME.  */
    5421     65773503 :           if ((code == ADDR_EXPR
    5422              :                /* Handle pointer conversions on invariant addresses
    5423              :                   as well, as this is valid gimple.  */
    5424     63528046 :                || (CONVERT_EXPR_CODE_P (code)
    5425      8783294 :                    && TREE_CODE (rhs) == ADDR_EXPR
    5426       350078 :                    && POINTER_TYPE_P (TREE_TYPE (lhs))))
    5427     65773727 :               && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
    5428              :             {
    5429      2245027 :               tree base = get_base_address (TREE_OPERAND (rhs, 0));
    5430      2245027 :               if ((!base
    5431      2245027 :                    || !DECL_P (base)
    5432       129739 :                    || decl_address_invariant_p (base))
    5433      2245027 :                   && !stmt_references_abnormal_ssa_name (stmt)
    5434      4490038 :                   && forward_propagate_addr_expr (lhs, rhs, true))
    5435              :                 {
    5436       454901 :                   fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    5437       454901 :                   release_defs (stmt);
    5438       454901 :                   gsi_remove (&gsi, true);
    5439              :                 }
    5440              :               else
    5441      1790126 :                 gsi_next (&gsi);
    5442              :             }
    5443     63528476 :           else if (code == POINTER_PLUS_EXPR)
    5444              :             {
    5445      3614823 :               tree off = gimple_assign_rhs2 (stmt);
    5446      3614823 :               if (TREE_CODE (off) == INTEGER_CST
    5447      1119708 :                   && can_propagate_from (stmt)
    5448      1119355 :                   && !simple_iv_increment_p (stmt)
    5449              :                   /* ???  Better adjust the interface to that function
    5450              :                      instead of building new trees here.  */
    5451      4442143 :                   && forward_propagate_addr_expr
    5452      2481960 :                        (lhs,
    5453              :                         build1_loc (gimple_location (stmt),
    5454       827320 :                                     ADDR_EXPR, TREE_TYPE (rhs),
    5455       827320 :                                     fold_build2 (MEM_REF,
    5456              :                                                  TREE_TYPE (TREE_TYPE (rhs)),
    5457              :                                                  rhs,
    5458              :                                                  fold_convert (ptr_type_node,
    5459              :                                                                off))), true))
    5460              :                 {
    5461       305478 :                   fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    5462       305478 :                   release_defs (stmt);
    5463       305478 :                   gsi_remove (&gsi, true);
    5464              :                 }
    5465      3309345 :               else if (is_gimple_min_invariant (rhs))
    5466              :                 {
    5467              :                   /* Make sure to fold &a[0] + off_1 here.  */
    5468       409139 :                   fold_stmt_inplace (&gsi);
    5469       409139 :                   update_stmt (stmt);
    5470       409139 :                   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    5471       409121 :                     gsi_next (&gsi);
    5472              :                 }
    5473              :               else
    5474      2900206 :                 gsi_next (&gsi);
    5475              :             }
    5476     59913653 :           else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
    5477       211847 :                    && gimple_assign_load_p (stmt)
    5478       134462 :                    && !gimple_has_volatile_ops (stmt)
    5479        40387 :                    && TREE_CODE (rhs) != TARGET_MEM_REF
    5480        40362 :                    && TREE_CODE (rhs) != BIT_FIELD_REF
    5481     59954011 :                    && !stmt_can_throw_internal (fun, stmt))
    5482              :             {
    5483              :               /* Rewrite loads used only in real/imagpart extractions to
    5484              :                  component-wise loads.  */
    5485        40233 :               use_operand_p use_p;
    5486        40233 :               imm_use_iterator iter;
    5487        40233 :               tree vuse = gimple_vuse (stmt);
    5488        40233 :               bool rewrite = true;
    5489        85223 :               FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
    5490              :                 {
    5491        42905 :                   gimple *use_stmt = USE_STMT (use_p);
    5492        42905 :                   if (is_gimple_debug (use_stmt))
    5493          691 :                     continue;
    5494        42214 :                   if (!is_gimple_assign (use_stmt)
    5495        27591 :                       || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
    5496        25543 :                           && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
    5497        46280 :                       || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
    5498              :                     {
    5499              :                       rewrite = false;
    5500              :                       break;
    5501              :                     }
    5502        40233 :                 }
    5503        40233 :               if (rewrite)
    5504              :                 {
    5505         2085 :                   gimple *use_stmt;
    5506         8661 :                   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    5507              :                     {
    5508         4491 :                       if (is_gimple_debug (use_stmt))
    5509              :                         {
    5510          454 :                           if (gimple_debug_bind_p (use_stmt))
    5511              :                             {
    5512          454 :                               gimple_debug_bind_reset_value (use_stmt);
    5513          454 :                               update_stmt (use_stmt);
    5514              :                             }
    5515          454 :                           continue;
    5516              :                         }
    5517              : 
    5518         8074 :                       tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
    5519         4037 :                                              TREE_TYPE (TREE_TYPE (rhs)),
    5520              :                                              unshare_expr (rhs));
    5521         4037 :                       gimple *new_stmt
    5522         4037 :                         = gimple_build_assign (gimple_assign_lhs (use_stmt),
    5523              :                                                new_rhs);
    5524              : 
    5525         4037 :                       location_t loc = gimple_location (use_stmt);
    5526         4037 :                       gimple_set_location (new_stmt, loc);
    5527         4037 :                       gimple_set_vuse (new_stmt, vuse);
    5528         4037 :                       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5529         4037 :                       unlink_stmt_vdef (use_stmt);
    5530         4037 :                       gsi_remove (&gsi2, true);
    5531              : 
    5532         4037 :                       gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
    5533         2085 :                     }
    5534              : 
    5535         2085 :                   release_defs (stmt);
    5536         2085 :                   gsi_remove (&gsi, true);
    5537              :                 }
    5538              :               else
    5539        38148 :                 gsi_next (&gsi);
    5540              :             }
    5541     59873420 :           else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
    5542      1615007 :                    && (TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
    5543              :                        /* After vector lowering rewrite all loads, but
    5544              :                           initially do not since this conflicts with
    5545              :                           vector CONSTRUCTOR to shuffle optimization.  */
    5546      1594025 :                        || (fun->curr_properties & PROP_gimple_lvec))
    5547       889665 :                    && gimple_assign_load_p (stmt)
    5548       297802 :                    && !gimple_has_volatile_ops (stmt)
    5549       283933 :                    && !stmt_can_throw_internal (fun, stmt)
    5550     60157353 :                    && (!VAR_P (rhs) || !DECL_HARD_REGISTER (rhs)))
    5551       283431 :             optimize_vector_load (&gsi);
    5552              : 
    5553     59589989 :           else if (code == COMPLEX_EXPR)
    5554              :             {
    5555              :               /* Rewrite stores of a single-use complex build expression
    5556              :                  to component-wise stores.  */
    5557        36579 :               use_operand_p use_p;
    5558        36579 :               gimple *use_stmt, *def1, *def2;
    5559        36579 :               tree rhs2;
    5560        36579 :               if (single_imm_use (lhs, &use_p, &use_stmt)
    5561        34419 :                   && gimple_store_p (use_stmt)
    5562        41038 :                   && !gimple_has_volatile_ops (use_stmt)
    5563         2603 :                   && is_gimple_assign (use_stmt)
    5564         2599 :                   && (TREE_CODE (TREE_TYPE (gimple_assign_lhs (use_stmt)))
    5565              :                       == COMPLEX_TYPE)
    5566        39173 :                   && (TREE_CODE (gimple_assign_lhs (use_stmt))
    5567              :                       != TARGET_MEM_REF))
    5568              :                 {
    5569         2590 :                   tree use_lhs = gimple_assign_lhs (use_stmt);
    5570         2590 :                   if (auto_var_p (use_lhs))
    5571          601 :                     DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
    5572         5180 :                   tree new_lhs = build1 (REALPART_EXPR,
    5573         2590 :                                          TREE_TYPE (TREE_TYPE (use_lhs)),
    5574              :                                          unshare_expr (use_lhs));
    5575         2590 :                   gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
    5576         2590 :                   location_t loc = gimple_location (use_stmt);
    5577         2590 :                   gimple_set_location (new_stmt, loc);
    5578         5180 :                   gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
    5579         2590 :                   gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (fun)));
    5580         5180 :                   SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
    5581         5180 :                   gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
    5582         2590 :                   gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5583         2590 :                   gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
    5584              : 
    5585         5180 :                   new_lhs = build1 (IMAGPART_EXPR,
    5586         2590 :                                     TREE_TYPE (TREE_TYPE (use_lhs)),
    5587              :                                     unshare_expr (use_lhs));
    5588         2590 :                   gimple_assign_set_lhs (use_stmt, new_lhs);
    5589         2590 :                   gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
    5590         2590 :                   update_stmt (use_stmt);
    5591              : 
    5592         2590 :                   release_defs (stmt);
    5593         2590 :                   gsi_remove (&gsi, true);
    5594              :                 }
    5595              :               /* Rewrite a component-wise load of a complex to a complex
    5596              :                  load if the components are not used separately.  */
    5597        33989 :               else if (TREE_CODE (rhs) == SSA_NAME
    5598        33548 :                        && has_single_use (rhs)
    5599        30060 :                        && ((rhs2 = gimple_assign_rhs2 (stmt)), true)
    5600        30060 :                        && TREE_CODE (rhs2) == SSA_NAME
    5601        28316 :                        && has_single_use (rhs2)
    5602        27895 :                        && (def1 = SSA_NAME_DEF_STMT (rhs),
    5603        27895 :                            gimple_assign_load_p (def1))
    5604         1097 :                        && (def2 = SSA_NAME_DEF_STMT (rhs2),
    5605         1097 :                            gimple_assign_load_p (def2))
    5606         1606 :                        && (gimple_vuse (def1) == gimple_vuse (def2))
    5607          800 :                        && !gimple_has_volatile_ops (def1)
    5608          800 :                        && !gimple_has_volatile_ops (def2)
    5609          800 :                        && !stmt_can_throw_internal (fun, def1)
    5610          800 :                        && !stmt_can_throw_internal (fun, def2)
    5611          800 :                        && gimple_assign_rhs_code (def1) == REALPART_EXPR
    5612          542 :                        && gimple_assign_rhs_code (def2) == IMAGPART_EXPR
    5613        34531 :                        && operand_equal_p (TREE_OPERAND (gimple_assign_rhs1
    5614              :                                                                  (def1), 0),
    5615          542 :                                            TREE_OPERAND (gimple_assign_rhs1
    5616              :                                                                  (def2), 0)))
    5617              :                 {
    5618          542 :                   tree cl = TREE_OPERAND (gimple_assign_rhs1 (def1), 0);
    5619          542 :                   gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (cl));
    5620          542 :                   gcc_assert (gsi_stmt (gsi) == stmt);
    5621         1084 :                   gimple_set_vuse (stmt, gimple_vuse (def1));
    5622          542 :                   gimple_set_modified (stmt, true);
    5623          542 :                   gimple_stmt_iterator gsi2 = gsi_for_stmt (def1);
    5624          542 :                   gsi_remove (&gsi, false);
    5625          542 :                   gsi_insert_after (&gsi2, stmt, GSI_SAME_STMT);
    5626              :                 }
    5627              :               else
    5628        33447 :                 gsi_next (&gsi);
    5629              :             }
    5630     59553410 :           else if (code == CONSTRUCTOR
    5631       149639 :                    && VECTOR_TYPE_P (TREE_TYPE (rhs))
    5632       149639 :                    && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
    5633         2906 :                    && CONSTRUCTOR_NELTS (rhs) > 0
    5634     59556316 :                    && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
    5635          545 :                        || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
    5636              :                            != BLKmode)))
    5637              :             {
    5638              :               /* Rewrite stores of a single-use vector constructors
    5639              :                  to component-wise stores if the mode isn't supported.  */
    5640         2905 :               use_operand_p use_p;
    5641         2905 :               gimple *use_stmt;
    5642         2905 :               if (single_imm_use (lhs, &use_p, &use_stmt)
    5643         2474 :                   && gimple_store_p (use_stmt)
    5644         2918 :                   && !gimple_has_volatile_ops (use_stmt)
    5645         1453 :                   && !stmt_can_throw_internal (fun, use_stmt)
    5646         4348 :                   && is_gimple_assign (use_stmt))
    5647              :                 {
    5648         1443 :                   tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
    5649         1443 :                   unsigned HOST_WIDE_INT elt_w
    5650         1443 :                     = tree_to_uhwi (TYPE_SIZE (elt_t));
    5651         1443 :                   unsigned HOST_WIDE_INT n
    5652         1443 :                     = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
    5653         1443 :                   tree use_lhs = gimple_assign_lhs (use_stmt);
    5654         1443 :                   if (auto_var_p (use_lhs))
    5655          540 :                     DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
    5656          903 :                   else if (TREE_CODE (use_lhs) == TARGET_MEM_REF)
    5657              :                     {
    5658            1 :                       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5659            1 :                       use_lhs = prepare_target_mem_ref_lvalue (use_lhs, &gsi2);
    5660              :                     }
    5661        32696 :                   for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
    5662              :                     {
    5663        31253 :                       unsigned HOST_WIDE_INT ci = bi / elt_w;
    5664        31253 :                       tree new_rhs;
    5665        31253 :                       if (ci < CONSTRUCTOR_NELTS (rhs))
    5666        30635 :                         new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
    5667              :                       else
    5668          618 :                         new_rhs = build_zero_cst (elt_t);
    5669        31253 :                       tree new_lhs = build3 (BIT_FIELD_REF,
    5670              :                                              elt_t,
    5671              :                                              unshare_expr (use_lhs),
    5672        31253 :                                              bitsize_int (elt_w),
    5673        31253 :                                              bitsize_int (bi));
    5674        31253 :                       gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
    5675        31253 :                       location_t loc = gimple_location (use_stmt);
    5676        31253 :                       gimple_set_location (new_stmt, loc);
    5677        62506 :                       gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
    5678        31253 :                       gimple_set_vdef (new_stmt,
    5679              :                                        make_ssa_name (gimple_vop (fun)));
    5680        62506 :                       SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
    5681        62506 :                       gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
    5682        31253 :                       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5683        31253 :                       gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
    5684              :                     }
    5685         1443 :                   gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5686         1443 :                   unlink_stmt_vdef (use_stmt);
    5687         1443 :                   release_defs (use_stmt);
    5688         1443 :                   gsi_remove (&gsi2, true);
    5689         1443 :                   release_defs (stmt);
    5690         1443 :                   gsi_remove (&gsi, true);
    5691              :                 }
    5692              :               else
    5693         1462 :                 gsi_next (&gsi);
    5694              :             }
    5695     59550505 :           else if (code == VEC_PERM_EXPR)
    5696              :             {
    5697              :               /* Find vectorized sequences, where we can reduce the lane
    5698              :                  utilization.  The narrowing will be donw later and only
    5699              :                  if we find a pair of sequences that can be blended.  */
    5700       179602 :               gassign *assign = dyn_cast <gassign *> (stmt);
    5701       179602 :               vec_perm_simplify_seq seq;
    5702       179602 :               if (recognise_vec_perm_simplify_seq (assign, &seq))
    5703          101 :                 append_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list,
    5704              :                                                    seq);
    5705              : 
    5706       179602 :               gsi_next (&gsi);
    5707              :           }
    5708              :           else
    5709     59370903 :             gsi_next (&gsi);
    5710              :         }
    5711              : 
    5712     44505656 :       process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
    5713              : 
    5714              :       /* Combine stmts with the stmts defining their operands.
    5715              :          Note we update GSI within the loop as necessary.  */
    5716    421287733 :       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    5717              :         {
    5718    332276421 :           gimple *stmt = gsi_stmt (gsi);
    5719              : 
    5720              :           /* Mark stmt as potentially needing revisiting.  */
    5721    332276421 :           gimple_set_plf (stmt, GF_PLF_1, false);
    5722              : 
    5723    332276421 :           bool can_make_abnormal_goto = (is_gimple_call (stmt)
    5724    332276421 :                                          && stmt_can_make_abnormal_goto (stmt));
    5725              : 
    5726              :           /* Substitute from our lattice.  We need to do so only once.  */
    5727    332276421 :           bool substituted_p = false;
    5728    332276421 :           use_operand_p usep;
    5729    332276421 :           ssa_op_iter iter;
    5730    492249980 :           FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
    5731              :             {
    5732    159973559 :               tree use = USE_FROM_PTR (usep);
    5733    159973559 :               tree val = fwprop_ssa_val (use);
    5734    159973559 :               if (val && val != use)
    5735              :                 {
    5736      1853289 :                   if (!is_gimple_debug (stmt))
    5737      1540427 :                     bitmap_set_bit (simple_dce_worklist, SSA_NAME_VERSION (use));
    5738      1853289 :                   if (may_propagate_copy (use, val))
    5739              :                     {
    5740      1850106 :                       propagate_value (usep, val);
    5741      1850106 :                       substituted_p = true;
    5742              :                     }
    5743              :                 }
    5744              :             }
    5745    332276421 :           if (substituted_p)
    5746      1797045 :             update_stmt (stmt);
    5747      1797045 :           if (substituted_p
    5748      1797045 :               && is_gimple_assign (stmt)
    5749      1083013 :               && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    5750        19858 :             recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
    5751    332276421 :           if (substituted_p
    5752    332276421 :               && can_make_abnormal_goto
    5753    332276421 :               && !stmt_can_make_abnormal_goto (stmt))
    5754            3 :             bitmap_set_bit (need_ab_cleanup, bb->index);
    5755              : 
    5756    335071446 :           bool changed;
    5757    670142892 :           do
    5758              :             {
    5759    335071446 :               gimple *orig_stmt = stmt = gsi_stmt (gsi);
    5760    335071446 :               bool was_call = is_gimple_call (stmt);
    5761    335071446 :               bool was_noreturn = (was_call
    5762    335071446 :                                    && gimple_call_noreturn_p (stmt));
    5763    335071446 :               changed = false;
    5764              : 
    5765    335071446 :               auto_vec<tree, 8> uses;
    5766    498038286 :               FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
    5767    162966840 :                 if (uses.space (1))
    5768    162581482 :                   uses.quick_push (USE_FROM_PTR (usep));
    5769              : 
    5770    335071446 :               if (fold_stmt (&gsi, fwprop_ssa_val, simple_dce_worklist))
    5771              :                 {
    5772      2480771 :                   changed = true;
    5773              :                   /* There is no updating of the address
    5774              :                      taken after the last forwprop so update
    5775              :                      the addresses when a folding happened to a call.
    5776              :                      The va_* builtins can remove taking of the address so
    5777              :                      can the sincos->cexpi transformation.  See PR 39643 and PR 20983. */
    5778      2480771 :                   if (was_call && last_p)
    5779      2480771 :                     todoflags |= TODO_update_address_taken;
    5780      2480771 :                   stmt = gsi_stmt (gsi);
    5781              :                   /* Cleanup the CFG if we simplified a condition to
    5782              :                      true or false.  */
    5783      2480771 :                   if (gcond *cond = dyn_cast <gcond *> (stmt))
    5784       976952 :                     if (gimple_cond_true_p (cond)
    5785       976952 :                         || gimple_cond_false_p (cond))
    5786        15436 :                       cfg_changed = true;
    5787              :                   /* Queue old uses for simple DCE if not debug statement.  */
    5788      2480771 :                   if (!is_gimple_debug (stmt))
    5789     10504199 :                     for (tree use : uses)
    5790      3081800 :                       if (TREE_CODE (use) == SSA_NAME
    5791      3081800 :                           && !SSA_NAME_IS_DEFAULT_DEF (use))
    5792      2885582 :                         bitmap_set_bit (simple_dce_worklist,
    5793      2885582 :                                         SSA_NAME_VERSION (use));
    5794      2480771 :                   update_stmt (stmt);
    5795              :                 }
    5796              : 
    5797    335071446 :               switch (gimple_code (stmt))
    5798              :                 {
    5799    104395493 :                 case GIMPLE_ASSIGN:
    5800    104395493 :                   {
    5801    104395493 :                     tree rhs1 = gimple_assign_rhs1 (stmt);
    5802    104395493 :                     enum tree_code code = gimple_assign_rhs_code (stmt);
    5803    104395493 :                     if (gimple_clobber_p (stmt))
    5804      6767334 :                       do_simple_agr_dse (as_a<gassign*>(stmt), full_walk);
    5805     97628159 :                     else if (gimple_store_p (stmt))
    5806              :                       {
    5807     30227122 :                         optimize_aggr_zeroprop (stmt, full_walk);
    5808     30227122 :                         if (gimple_assign_load_p (stmt))
    5809      3711510 :                           optimize_agr_copyprop (stmt);
    5810              :                       }
    5811     67401037 :                     else if (TREE_CODE_CLASS (code) == tcc_comparison)
    5812      2515964 :                       changed |= forward_propagate_into_comparison (&gsi);
    5813     64885073 :                     else if ((code == PLUS_EXPR
    5814     64885073 :                               || code == BIT_IOR_EXPR
    5815     54891757 :                               || code == BIT_XOR_EXPR)
    5816     65013666 :                              && simplify_rotate (&gsi))
    5817              :                       changed = true;
    5818     64882399 :                     else if (code == VEC_PERM_EXPR)
    5819       181766 :                       changed |= simplify_permutation (&gsi);
    5820     64700633 :                     else if (code == CONSTRUCTOR
    5821     64700633 :                              && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
    5822       147562 :                       changed |= simplify_vector_constructor (&gsi);
    5823     64553071 :                     else if (code == ARRAY_REF)
    5824      1962912 :                       changed |= simplify_count_zeroes (&gsi);
    5825              :                     break;
    5826              :                   }
    5827              : 
    5828       110086 :                 case GIMPLE_SWITCH:
    5829       110086 :                   changed |= simplify_gimple_switch (as_a <gswitch *> (stmt),
    5830              :                                                      edges_to_remove,
    5831              :                                                      simple_dce_worklist);
    5832       110086 :                   break;
    5833              : 
    5834     19257691 :                 case GIMPLE_COND:
    5835     19257691 :                   {
    5836     19257691 :                     int did_something = forward_propagate_into_gimple_cond
    5837     19257691 :                                                         (as_a <gcond *> (stmt));
    5838     19257691 :                     if (did_something == 2)
    5839         1700 :                       cfg_changed = true;
    5840     19257691 :                     changed |= did_something != 0;
    5841     19257691 :                     break;
    5842              :                   }
    5843              : 
    5844     23060349 :                 case GIMPLE_CALL:
    5845     23060349 :                   {
    5846     23060349 :                     tree callee = gimple_call_fndecl (stmt);
    5847     23060349 :                     if (callee != NULL_TREE
    5848     23060349 :                         && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
    5849      6128230 :                       changed |= simplify_builtin_call (&gsi, callee, full_walk);
    5850              :                     break;
    5851              :                   }
    5852              : 
    5853    335068772 :                 default:;
    5854              :                 }
    5855              : 
    5856    335068772 :               if (changed || substituted_p)
    5857              :                 {
    5858      4065547 :                   substituted_p = false;
    5859      4065547 :                   stmt = gsi_stmt (gsi);
    5860      4065547 :                   if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
    5861           70 :                     bitmap_set_bit (to_purge, bb->index);
    5862      4065547 :                   if (!was_noreturn
    5863      4065547 :                       && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
    5864           12 :                     to_fixup.safe_push (stmt);
    5865              :                 }
    5866      4065547 :               if (changed)
    5867              :                 {
    5868              :                   /* If the stmt changed then re-visit it and the statements
    5869              :                      inserted before it.  */
    5870      8748481 :                   for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    5871      5547820 :                     if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
    5872              :                       break;
    5873      2795025 :                   if (gsi_end_p (gsi))
    5874       447866 :                     gsi = gsi_start_bb (bb);
    5875              :                   else
    5876      2571092 :                     gsi_next (&gsi);
    5877              :                 }
    5878    335071446 :             }
    5879              :           while (changed);
    5880              : 
    5881              :           /* Stmt no longer needs to be revisited.  */
    5882    332276421 :           stmt = gsi_stmt (gsi);
    5883    332276421 :           gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
    5884    332276421 :           gimple_set_plf (stmt, GF_PLF_1, true);
    5885              : 
    5886              :           /* Fill up the lattice.  */
    5887    332276421 :           if (gimple_assign_single_p (stmt))
    5888              :             {
    5889     68853156 :               tree lhs = gimple_assign_lhs (stmt);
    5890     68853156 :               tree rhs = gimple_assign_rhs1 (stmt);
    5891     68853156 :               if (TREE_CODE (lhs) == SSA_NAME)
    5892              :                 {
    5893     31873475 :                   tree val = lhs;
    5894     31873475 :                   if (TREE_CODE (rhs) == SSA_NAME)
    5895       783917 :                     val = fwprop_ssa_val (rhs);
    5896     31089558 :                   else if (is_gimple_min_invariant (rhs))
    5897       419705 :                     val = rhs;
    5898              :                   /* If we can propagate the lattice-value mark the
    5899              :                      stmt for removal.  */
    5900     31873475 :                   if (val != lhs
    5901     31873475 :                       && may_propagate_copy (lhs, val))
    5902      1200254 :                     to_remove_defs.safe_push (SSA_NAME_VERSION (lhs));
    5903     31873475 :                   fwprop_set_lattice_val (lhs, val);
    5904              :                 }
    5905              :             }
    5906    263423265 :           else if (gimple_nop_p (stmt))
    5907        88049 :             to_remove.safe_push (stmt);
    5908              :         }
    5909              : 
    5910              :       /* Substitute in destination PHI arguments.  */
    5911    107186852 :       FOR_EACH_EDGE (e, ei, bb->succs)
    5912     62681196 :         for (gphi_iterator gsi = gsi_start_phis (e->dest);
    5913    104709957 :              !gsi_end_p (gsi); gsi_next (&gsi))
    5914              :           {
    5915     42028761 :             gphi *phi = gsi.phi ();
    5916     42028761 :             use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
    5917     42028761 :             tree arg = USE_FROM_PTR (use_p);
    5918     69248367 :             if (TREE_CODE (arg) != SSA_NAME
    5919     42028761 :                 || virtual_operand_p (arg))
    5920     27219606 :               continue;
    5921     14809155 :             tree val = fwprop_ssa_val (arg);
    5922     14809155 :             if (val != arg
    5923     14809155 :                 && may_propagate_copy (arg, val, !(e->flags & EDGE_ABNORMAL)))
    5924       233591 :               propagate_value (use_p, val);
    5925              :           }
    5926              : 
    5927              :       /* Mark outgoing exectuable edges.  */
    5928     44505656 :       if (edge e = find_taken_edge (bb, NULL))
    5929              :         {
    5930     18903290 :           e->flags |= EDGE_EXECUTABLE;
    5931     44527003 :           if (EDGE_COUNT (bb->succs) > 1)
    5932        21347 :             cfg_changed = true;
    5933              :         }
    5934              :       else
    5935              :         {
    5936     69358924 :           FOR_EACH_EDGE (e, ei, bb->succs)
    5937     43756558 :             e->flags |= EDGE_EXECUTABLE;
    5938              :         }
    5939              :     }
    5940      5536978 :   free (postorder);
    5941      5536978 :   free (bb_to_rpo);
    5942      5536978 :   lattice.release ();
    5943              : 
    5944              :   /* First remove chains of stmts where we check no uses remain.  */
    5945      5536978 :   simple_dce_from_worklist (simple_dce_worklist, to_purge);
    5946              : 
    5947      5872715 :   auto remove = [](gimple *stmt)
    5948              :     {
    5949       335737 :       if (dump_file && (dump_flags & TDF_DETAILS))
    5950              :         {
    5951            1 :           fprintf (dump_file, "Removing dead stmt ");
    5952            1 :           print_gimple_stmt (dump_file, stmt, 0);
    5953            1 :           fprintf (dump_file, "\n");
    5954              :         }
    5955       335737 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    5956       335737 :       if (gimple_code (stmt) == GIMPLE_PHI)
    5957        83200 :         remove_phi_node (&gsi, true);
    5958              :       else
    5959              :         {
    5960       252537 :           unlink_stmt_vdef (stmt);
    5961       252537 :           gsi_remove (&gsi, true);
    5962       252537 :           release_defs (stmt);
    5963              :         }
    5964       335737 :     };
    5965              : 
    5966              :   /* Then remove stmts we know we can remove even though we did not
    5967              :      substitute in dead code regions, so uses can remain.  Do so in reverse
    5968              :      order to make debug stmt creation possible.  */
    5969     12489869 :   while (!to_remove_defs.is_empty())
    5970              :     {
    5971      1415913 :       tree def = ssa_name (to_remove_defs.pop ());
    5972              :       /* For example remove_prop_source_from_use can remove stmts queued
    5973              :          for removal.  Deal with this gracefully.  */
    5974      1415913 :       if (!def)
    5975      1168225 :         continue;
    5976       247688 :       gimple *stmt = SSA_NAME_DEF_STMT (def);
    5977       247688 :       remove (stmt);
    5978              :     }
    5979              : 
    5980              :   /* Wipe other queued stmts that do not have SSA defs.  */
    5981      5625027 :   while (!to_remove.is_empty())
    5982              :     {
    5983        88049 :       gimple *stmt = to_remove.pop ();
    5984        88049 :       remove (stmt);
    5985              :     }
    5986              : 
    5987              :   /* Fixup stmts that became noreturn calls.  This may require splitting
    5988              :      blocks and thus isn't possible during the walk.  Do this
    5989              :      in reverse order so we don't inadvertedly remove a stmt we want to
    5990              :      fixup by visiting a dominating now noreturn call first.  */
    5991      5536990 :   while (!to_fixup.is_empty ())
    5992              :     {
    5993           12 :       gimple *stmt = to_fixup.pop ();
    5994           12 :       if (dump_file && dump_flags & TDF_DETAILS)
    5995              :         {
    5996            0 :           fprintf (dump_file, "Fixing up noreturn call ");
    5997            0 :           print_gimple_stmt (dump_file, stmt, 0);
    5998            0 :           fprintf (dump_file, "\n");
    5999              :         }
    6000           12 :       cfg_changed |= fixup_noreturn_call (stmt);
    6001              :     }
    6002              : 
    6003      5536978 :   cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
    6004      5536978 :   cfg_changed |= gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
    6005      5536978 :   BITMAP_FREE (to_purge);
    6006              : 
    6007              :   /* Remove edges queued from switch stmt simplification.  */
    6008     16610934 :   for (auto ep : edges_to_remove)
    6009              :     {
    6010            0 :       basic_block src = BASIC_BLOCK_FOR_FN (fun, ep.first);
    6011            0 :       basic_block dest = BASIC_BLOCK_FOR_FN (fun, ep.second);
    6012            0 :       edge e;
    6013            0 :       if (src && dest && (e = find_edge (src, dest)))
    6014              :         {
    6015            0 :           free_dominance_info (CDI_DOMINATORS);
    6016            0 :           remove_edge (e);
    6017            0 :           cfg_changed = true;
    6018              :         }
    6019              :     }
    6020              : 
    6021     11072416 :   if (get_range_query (fun) != get_global_range_query ())
    6022         1540 :     disable_ranger (fun);
    6023              : 
    6024      5536978 :   if (cfg_changed)
    6025         9897 :     todoflags |= TODO_cleanup_cfg;
    6026              : 
    6027      5536978 :   return todoflags;
    6028      5536978 : }
    6029              : 
    6030              : } // anon namespace
    6031              : 
    6032              : gimple_opt_pass *
    6033       285722 : make_pass_forwprop (gcc::context *ctxt)
    6034              : {
    6035       285722 :   return new pass_forwprop (ctxt);
    6036              : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.