LCOV - code coverage report
Current view: top level - gcc - tree-ssa-forwprop.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 94.4 % 3087 2915
Test Date: 2026-04-20 14:57:17 Functions: 100.0 % 63 63
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Forward propagation of expressions for single use variables.
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              : 
       4              : This file is part of GCC.
       5              : 
       6              : GCC is free software; you can redistribute it and/or modify
       7              : it under the terms of the GNU General Public License as published by
       8              : the Free Software Foundation; either version 3, or (at your option)
       9              : any later version.
      10              : 
      11              : GCC is distributed in the hope that it will be useful,
      12              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      13              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      14              : GNU General Public License for more details.
      15              : 
      16              : You should have received a copy of the GNU General Public License
      17              : along with GCC; see the file COPYING3.  If not see
      18              : <http://www.gnu.org/licenses/>.  */
      19              : 
      20              : #include "config.h"
      21              : #include "system.h"
      22              : #include "coretypes.h"
      23              : #include "backend.h"
      24              : #include "rtl.h"
      25              : #include "tree.h"
      26              : #include "gimple.h"
      27              : #include "cfghooks.h"
      28              : #include "tree-pass.h"
      29              : #include "ssa.h"
      30              : #include "expmed.h"
      31              : #include "optabs-query.h"
      32              : #include "gimple-pretty-print.h"
      33              : #include "fold-const.h"
      34              : #include "stor-layout.h"
      35              : #include "gimple-iterator.h"
      36              : #include "gimple-fold.h"
      37              : #include "tree-eh.h"
      38              : #include "gimplify.h"
      39              : #include "gimplify-me.h"
      40              : #include "tree-cfg.h"
      41              : #include "expr.h"
      42              : #include "tree-dfa.h"
      43              : #include "tree-ssa-propagate.h"
      44              : #include "tree-ssa-dom.h"
      45              : #include "tree-ssa-strlen.h"
      46              : #include "builtins.h"
      47              : #include "tree-cfgcleanup.h"
      48              : #include "cfganal.h"
      49              : #include "optabs-tree.h"
      50              : #include "insn-config.h"
      51              : #include "recog.h"
      52              : #include "cfgloop.h"
      53              : #include "tree-vectorizer.h"
      54              : #include "tree-vector-builder.h"
      55              : #include "vec-perm-indices.h"
      56              : #include "internal-fn.h"
      57              : #include "cgraph.h"
      58              : #include "tree-ssa.h"
      59              : #include "gimple-range.h"
      60              : #include "tree-ssa-dce.h"
      61              : 
      62              : /* This pass propagates the RHS of assignment statements into use
      63              :    sites of the LHS of the assignment.  It's basically a specialized
      64              :    form of tree combination.   It is hoped all of this can disappear
      65              :    when we have a generalized tree combiner.
      66              : 
      67              :    One class of common cases we handle is forward propagating a single use
      68              :    variable into a COND_EXPR.
      69              : 
      70              :      bb0:
      71              :        x = a COND b;
      72              :        if (x) goto ... else goto ...
      73              : 
      74              :    Will be transformed into:
      75              : 
      76              :      bb0:
      77              :        if (a COND b) goto ... else goto ...
      78              : 
      79              :    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
      80              : 
      81              :    Or (assuming c1 and c2 are constants):
      82              : 
      83              :      bb0:
      84              :        x = a + c1;
      85              :        if (x EQ/NEQ c2) goto ... else goto ...
      86              : 
      87              :    Will be transformed into:
      88              : 
      89              :      bb0:
      90              :         if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
      91              : 
      92              :    Similarly for x = a - c1.
      93              : 
      94              :    Or
      95              : 
      96              :      bb0:
      97              :        x = !a
      98              :        if (x) goto ... else goto ...
      99              : 
     100              :    Will be transformed into:
     101              : 
     102              :      bb0:
     103              :         if (a == 0) goto ... else goto ...
     104              : 
     105              :    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
     106              :    For these cases, we propagate A into all, possibly more than one,
     107              :    COND_EXPRs that use X.
     108              : 
     109              :    Or
     110              : 
     111              :      bb0:
     112              :        x = (typecast) a
     113              :        if (x) goto ... else goto ...
     114              : 
     115              :    Will be transformed into:
     116              : 
     117              :      bb0:
     118              :         if (a != 0) goto ... else goto ...
     119              : 
     120              :    (Assuming a is an integral type and x is a boolean or x is an
     121              :     integral and a is a boolean.)
     122              : 
     123              :    Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
     124              :    For these cases, we propagate A into all, possibly more than one,
     125              :    COND_EXPRs that use X.
     126              : 
     127              :    In addition to eliminating the variable and the statement which assigns
     128              :    a value to the variable, we may be able to later thread the jump without
     129              :    adding insane complexity in the dominator optimizer.
     130              : 
     131              :    Also note these transformations can cascade.  We handle this by having
     132              :    a worklist of COND_EXPR statements to examine.  As we make a change to
     133              :    a statement, we put it back on the worklist to examine on the next
     134              :    iteration of the main loop.
     135              : 
     136              :    A second class of propagation opportunities arises for ADDR_EXPR
     137              :    nodes.
     138              : 
     139              :      ptr = &x->y->z;
     140              :      res = *ptr;
     141              : 
     142              :    Will get turned into
     143              : 
     144              :      res = x->y->z;
     145              : 
     146              :    Or
     147              :      ptr = (type1*)&type2var;
     148              :      res = *ptr
     149              : 
     150              :    Will get turned into (if type1 and type2 are the same size
     151              :    and neither have volatile on them):
     152              :      res = VIEW_CONVERT_EXPR<type1>(type2var)
     153              : 
     154              :    Or
     155              : 
     156              :      ptr = &x[0];
     157              :      ptr2 = ptr + <constant>;
     158              : 
     159              :    Will get turned into
     160              : 
     161              :      ptr2 = &x[constant/elementsize];
     162              : 
     163              :   Or
     164              : 
     165              :      ptr = &x[0];
     166              :      offset = index * element_size;
     167              :      offset_p = (pointer) offset;
     168              :      ptr2 = ptr + offset_p
     169              : 
     170              :   Will get turned into:
     171              : 
     172              :      ptr2 = &x[index];
     173              : 
     174              :   Or
     175              :     ssa = (int) decl
     176              :     res = ssa & 1
     177              : 
     178              :   Provided that decl has known alignment >= 2, will get turned into
     179              : 
     180              :     res = 0
     181              : 
     182              :   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
     183              :   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
     184              :   {NOT_EXPR,NEG_EXPR}.
     185              : 
     186              :    This will (of course) be extended as other needs arise.  */
     187              : 
/* Data structure that contains simplifiable vectorized permute sequences.
   See recognise_vec_perm_simplify_seq () for a description of the sequence.  */

struct _vec_perm_simplify_seq
{
  /* Defining stmts of vectors in the sequence.  */
  gassign *v_1_stmt;
  gassign *v_2_stmt;
  gassign *v_x_stmt;
  gassign *v_y_stmt;
  /* Final permute statement.  */
  gassign *stmt;
  /* New selector indices for stmt.  */
  tree new_sel;
  /* Elements of each vector and selector.  */
  unsigned int nelts;
};
/* Sequences are passed around by pointer; the pointer typedef follows
   GCC's convention for such small descriptor structs.  */
typedef struct _vec_perm_simplify_seq *vec_perm_simplify_seq;
     206              : 
/* Forward declaration; defined later in this file.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

/* Forward declaration; defined later in this file.  */
static tree rhs_to_tree (tree type, gimple *stmt);

/* Indices of basic blocks whose outgoing (EH) edges may need purging
   after statements were removed; set from remove_prop_source_from_use
   and tidy_after_forward_propagate_addr.  NOTE(review): presumably
   allocated and consumed by the pass driver outside this chunk --
   confirm.  */
static bitmap to_purge;

/* Const-and-copy lattice, indexed by SSA_NAME_VERSION.  */
static vec<tree> lattice;
     218              : 
     219              : /* Set the lattice entry for NAME to VAL.  */
     220              : static void
     221     32075972 : fwprop_set_lattice_val (tree name, tree val)
     222              : {
     223     32075972 :   if (TREE_CODE (name) == SSA_NAME)
     224              :     {
     225     32075972 :       if (SSA_NAME_VERSION (name) >= lattice.length ())
     226              :         {
     227        32190 :           lattice.reserve (num_ssa_names - lattice.length ());
     228        21460 :           lattice.quick_grow_cleared (num_ssa_names);
     229              :         }
     230     32075972 :       lattice[SSA_NAME_VERSION (name)] = val;
     231              :       /* As this now constitutes a copy duplicate points-to
     232              :          and range info appropriately.  */
     233     32075972 :       if (TREE_CODE (val) == SSA_NAME)
     234     31628173 :         maybe_duplicate_ssa_info_at_copy (name, val);
     235              :     }
     236     32075972 : }
     237              : 
     238              : /* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
     239              : static void
     240       910915 : fwprop_invalidate_lattice (tree name)
     241              : {
     242       910915 :   if (name
     243       908571 :       && TREE_CODE (name) == SSA_NAME
     244      1819359 :       && SSA_NAME_VERSION (name) < lattice.length ())
     245       908415 :     lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
     246       910915 : }
     247              : 
     248              : /* Get the statement we can propagate from into NAME skipping
     249              :    trivial copies.  Returns the statement which defines the
     250              :    propagation source or NULL_TREE if there is no such one.
     251              :    If SINGLE_USE_ONLY is set considers only sources which have
     252              :    a single use chain up to NAME.  If SINGLE_USE_P is non-null,
     253              :    it is set to whether the chain to NAME is a single use chain
     254              :    or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
     255              : 
     256              : static gimple *
     257     27722925 : get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
     258              : {
     259     27722925 :   bool single_use = true;
     260              : 
     261     27723909 :   do {
     262     27723417 :     gimple *def_stmt = SSA_NAME_DEF_STMT (name);
     263              : 
     264     27723417 :     if (!has_single_use (name))
     265              :       {
     266     15105447 :         single_use = false;
     267     15105447 :         if (single_use_only)
     268              :           return NULL;
     269              :       }
     270              : 
     271              :     /* If name is defined by a PHI node or is the default def, bail out.  */
     272     27721973 :     if (!is_gimple_assign (def_stmt))
     273              :       return NULL;
     274              : 
     275              :     /* If def_stmt is a simple copy, continue looking.  */
     276     19580474 :     if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
     277          492 :       name = gimple_assign_rhs1 (def_stmt);
     278              :     else
     279              :       {
     280     19579982 :         if (!single_use_only && single_use_p)
     281     19276660 :           *single_use_p = single_use;
     282              : 
     283     19579982 :         return def_stmt;
     284              :       }
     285          492 :   } while (1);
     286              : }
     287              : 
     288              : /* Checks if the destination ssa name in DEF_STMT can be used as
     289              :    propagation source.  Returns true if so, otherwise false.  */
     290              : 
     291              : static bool
     292     27473072 : can_propagate_from (gimple *def_stmt)
     293              : {
     294     27473072 :   gcc_assert (is_gimple_assign (def_stmt));
     295              : 
     296              :   /* If the rhs has side-effects we cannot propagate from it.  */
     297     27473072 :   if (gimple_has_volatile_ops (def_stmt))
     298              :     return false;
     299              : 
     300              :   /* If the rhs is a load we cannot propagate from it.  */
     301     26881057 :   if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
     302     26881057 :       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
     303              :     return false;
     304              : 
     305              :   /* Constants can be always propagated.  */
     306     13306733 :   if (gimple_assign_single_p (def_stmt)
     307     13306733 :       && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
     308              :     return true;
     309              : 
     310              :   /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
     311     13306733 :   if (stmt_references_abnormal_ssa_name (def_stmt))
     312              :     return false;
     313              : 
     314              :   /* If the definition is a conversion of a pointer to a function type,
     315              :      then we cannot apply optimizations as some targets require
     316              :      function pointers to be canonicalized and in this case this
     317              :      optimization could eliminate a necessary canonicalization.  */
     318     13306048 :   if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
     319              :     {
     320      3221118 :       tree rhs = gimple_assign_rhs1 (def_stmt);
     321      3221118 :       if (FUNCTION_POINTER_TYPE_P (TREE_TYPE (rhs)))
     322              :         return false;
     323              :     }
     324              : 
     325              :   return true;
     326              : }
     327              : 
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.  */

static void
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;

  do {
    basic_block bb;

    /* Stop at names already released, default defs, or names that
       still have uses -- those are not dead.  */
    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      break;

    stmt = SSA_NAME_DEF_STMT (name);
    /* PHIs and statements with side-effects must stay.  */
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      break;

    /* Remember BB before removal; gsi_remove returning true means EH
       edges may have become dead, so queue the block for purging.  */
    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    /* Walk up the chain through the first RHS operand.  Reading STMT
       after release_defs is OK here -- the statement object itself is
       not reclaimed by releasing its defs.  */
    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

}
     365              : 
     366              : /* Return the rhs of a gassign *STMT in a form of a single tree,
     367              :    converted to type TYPE.
     368              : 
     369              :    This should disappear, but is needed so we can combine expressions and use
     370              :    the fold() interfaces. Long term, we need to develop folding and combine
     371              :    routines that deal with gimple exclusively . */
     372              : 
     373              : static tree
     374      7298640 : rhs_to_tree (tree type, gimple *stmt)
     375              : {
     376      7298640 :   location_t loc = gimple_location (stmt);
     377      7298640 :   enum tree_code code = gimple_assign_rhs_code (stmt);
     378      7298640 :   switch (get_gimple_rhs_class (code))
     379              :     {
     380        12476 :     case GIMPLE_TERNARY_RHS:
     381        12476 :       return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
     382              :                               gimple_assign_rhs2 (stmt),
     383        12476 :                               gimple_assign_rhs3 (stmt));
     384      4985353 :     case GIMPLE_BINARY_RHS:
     385      4985353 :       return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
     386      4985353 :                               gimple_assign_rhs2 (stmt));
     387      2030501 :     case GIMPLE_UNARY_RHS:
     388      2030501 :       return build1 (code, type, gimple_assign_rhs1 (stmt));
     389       270310 :     case GIMPLE_SINGLE_RHS:
     390       270310 :       return gimple_assign_rhs1 (stmt);
     391            0 :     default:
     392            0 :       gcc_unreachable ();
     393              :     }
     394              : }
     395              : 
     396              : /* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
     397              :    the folded result in a form suitable for COND_EXPR_COND or
     398              :    NULL_TREE, if there is no suitable simplified form.  If
     399              :    INVARIANT_ONLY is true only gimple_min_invariant results are
     400              :    considered simplified.  */
     401              : 
     402              : static tree
     403      8216575 : combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
     404              :                         tree op0, tree op1, bool invariant_only)
     405              : {
     406      8216575 :   tree t;
     407              : 
     408      8216575 :   gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
     409              : 
     410      8216575 :   fold_defer_overflow_warnings ();
     411      8216575 :   t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
     412      8216575 :   if (!t)
     413              :     {
     414      4660945 :       fold_undefer_overflow_warnings (false, NULL, 0);
     415      4660945 :       return NULL_TREE;
     416              :     }
     417              : 
     418              :   /* Require that we got a boolean type out if we put one in.  */
     419      3555630 :   gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
     420              : 
     421              :   /* Canonicalize the combined condition for use in a COND_EXPR.  */
     422      3555630 :   t = canonicalize_cond_expr_cond (t);
     423              : 
     424              :   /* Bail out if we required an invariant but didn't get one.  */
     425      3555630 :   if (!t || (invariant_only && !is_gimple_min_invariant (t)))
     426              :     {
     427      3320444 :       fold_undefer_overflow_warnings (false, NULL, 0);
     428      3320444 :       return NULL_TREE;
     429              :     }
     430              : 
     431       235186 :   bool nowarn = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
     432       235186 :   fold_undefer_overflow_warnings (!nowarn, stmt, 0);
     433              : 
     434       235186 :   return t;
     435              : }
     436              : 
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  Tries three combinations in order:
   OP0's definition against OP1, OP0 against OP1's definition, and
   finally both definitions against each other.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  /* rhs0/rhs1 cache the tree form of each operand's definition so the
     final both-operands attempt can reuse them.  */
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  /* Multi-use sources only pay off when the result folds to an
	     invariant; a single-use source may be consumed freely.  */
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  Invariant
     results are required unless both chains are single-use.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}
     500              : 
     501              : /* Propagate from the ssa name definition statements of the assignment
     502              :    from a comparison at *GSI into the conditional if that simplifies it.
     503              :    Returns true if the stmt was modified.  */
     504              : 
     505              : static bool
     506      2543129 : forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
     507              : {
     508      2543129 :   gimple *stmt = gsi_stmt (*gsi);
     509      2543129 :   tree tmp;
     510      2543129 :   tree type = TREE_TYPE (gimple_assign_lhs (stmt));
     511      2543129 :   tree rhs1 = gimple_assign_rhs1 (stmt);
     512      2543129 :   tree rhs2 = gimple_assign_rhs2 (stmt);
     513              : 
     514              :   /* Combine the comparison with defining statements.  */
     515      2543129 :   tmp = forward_propagate_into_comparison_1 (stmt,
     516              :                                              gimple_assign_rhs_code (stmt),
     517              :                                              type, rhs1, rhs2);
     518      2543129 :   if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
     519              :     {
     520         7104 :       if (dump_file)
     521              :         {
     522            0 :           fprintf (dump_file, "  Replaced '");
     523            0 :           print_gimple_expr (dump_file, stmt, 0);
     524            0 :           fprintf (dump_file, "' with '");
     525            0 :           print_generic_expr (dump_file, tmp);
     526            0 :           fprintf (dump_file, "'\n");
     527              :         }
     528         7104 :       gimple_assign_set_rhs_from_tree (gsi, tmp);
     529         7104 :       fold_stmt (gsi);
     530         7104 :       update_stmt (gsi_stmt (*gsi));
     531              : 
     532         7104 :       if (TREE_CODE (rhs1) == SSA_NAME)
     533         7104 :         remove_prop_source_from_use (rhs1);
     534         7104 :       if (TREE_CODE (rhs2) == SSA_NAME)
     535         2939 :         remove_prop_source_from_use (rhs2);
     536         7104 :       return true;
     537              :     }
     538              : 
     539              :   return false;
     540              : }
     541              : 
     542              : /* Propagate from the ssa name definition statements of COND_EXPR
     543              :    in GIMPLE_COND statement STMT into the conditional if that simplifies it.
     544              :    Returns zero if no statement was changed, one if there were
     545              :    changes and two if cfg_cleanup needs to run.  */
     546              : 
     547              : static int
     548     19254666 : forward_propagate_into_gimple_cond (gcond *stmt)
     549              : {
     550     19254666 :   tree tmp;
     551     19254666 :   enum tree_code code = gimple_cond_code (stmt);
     552     19254666 :   tree rhs1 = gimple_cond_lhs (stmt);
     553     19254666 :   tree rhs2 = gimple_cond_rhs (stmt);
     554              : 
     555              :   /* GIMPLE_COND will always be a comparison.  */
     556     19254666 :   gcc_assert (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison);
     557              : 
     558     19254666 :   tmp = forward_propagate_into_comparison_1 (stmt, code,
     559              :                                              boolean_type_node,
     560              :                                              rhs1, rhs2);
     561     19254666 :   if (tmp
     562     19254666 :       && is_gimple_condexpr_for_cond (tmp))
     563              :     {
     564       221750 :       if (dump_file)
     565              :         {
     566            9 :           fprintf (dump_file, "  Replaced '");
     567            9 :           print_gimple_expr (dump_file, stmt, 0);
     568            9 :           fprintf (dump_file, "' with '");
     569            9 :           print_generic_expr (dump_file, tmp);
     570            9 :           fprintf (dump_file, "'\n");
     571              :         }
     572              : 
     573       221750 :       gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
     574       221750 :       update_stmt (stmt);
     575              : 
     576       221750 :       if (TREE_CODE (rhs1) == SSA_NAME)
     577       221750 :         remove_prop_source_from_use (rhs1);
     578       221750 :       if (TREE_CODE (rhs2) == SSA_NAME)
     579         5567 :         remove_prop_source_from_use (rhs2);
     580       221750 :       return is_gimple_min_invariant (tmp) ? 2 : 1;
     581              :     }
     582              : 
     583     19032916 :   if (canonicalize_bool_cond (stmt, gimple_bb (stmt)))
     584              :     return 1;
     585              : 
     586              :   return 0;
     587              : }
     588              : 
      589              : /* We've just substituted an ADDR_EXPR into stmt.  Update all the
      590              :    relevant data structures to match.  */
      591              : 
      592              : static void
      593      1946523 : tidy_after_forward_propagate_addr (gimple *stmt)
      594              : {
      595              :   /* We may have turned a trapping insn into a non-trapping insn.
                       :      If so, queue STMT's block in TO_PURGE so its now-dead EH edges
                       :      are removed later.  */
      596      1946523 :   if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
      597          131 :     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
      598              : 
                       :   /* Substitution may have changed whether the address is invariant;
                       :      recompute that property on the resulting ADDR_EXPR.  */
      599      1946523 :   if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
      600       253577 :      recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      601      1946523 : }
     602              : 
      603              : /* NAME is a SSA_NAME representing DEF_RHS which is of the form
      604              :    ADDR_EXPR <whatever>.
      605              : 
      606              :    Try to forward propagate the ADDR_EXPR into the use USE_STMT.
      607              :    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
      608              :    node or for recovery of array indexing from pointer arithmetic.
      609              : 
      610              :    Return true if the propagation was successful (the propagation can
      611              :    be not totally successful, yet things may have been changed).  */
      612              : 
      613              : static bool
      614      2765545 : forward_propagate_addr_expr_1 (tree name, tree def_rhs,
      615              :                                gimple_stmt_iterator *use_stmt_gsi,
      616              :                                bool single_use_p)
      617              : {
      618      2765545 :   tree lhs, rhs, rhs2, array_ref;
      619      2765545 :   gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
      620      2765545 :   enum tree_code rhs_code;
                       :   /* Cleared below if the LHS uses NAME in a way we cannot rewrite
                       :      (the struct-assignment case), while still allowing the RHS to
                       :      be propagated into.  */
      621      2765545 :   bool res = true;
      622              : 
      623      2765545 :   gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
      624              : 
      625      2765545 :   lhs = gimple_assign_lhs (use_stmt);
      626      2765545 :   rhs_code = gimple_assign_rhs_code (use_stmt);
      627      2765545 :   rhs = gimple_assign_rhs1 (use_stmt);
      628              : 
      629              :   /* Do not perform copy-propagation but recurse through copy chains.  */
      630      2765545 :   if (TREE_CODE (lhs) == SSA_NAME
      631      1376275 :       && rhs_code == SSA_NAME)
      632         6817 :     return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
      633              : 
      634              :   /* The use statement could be a conversion.  Recurse to the uses of the
      635              :      lhs as copyprop does not copy through pointer to integer to pointer
      636              :      conversions and FRE does not catch all cases either.
      637              :      Treat the case of a single-use name and
      638              :      a conversion to def_rhs type separate, though.  */
      639      2758728 :   if (TREE_CODE (lhs) == SSA_NAME
      640      1369458 :       && CONVERT_EXPR_CODE_P (rhs_code))
      641              :     {
      642              :       /* If there is a point in a conversion chain where the types match
      643              :          so we can remove a conversion re-materialize the address here
      644              :          and stop.  */
      645        23879 :       if (single_use_p
      646        23879 :           && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
      647              :         {
      648            1 :           gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      649            1 :           gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      650            1 :           return true;
      651              :         }
      652              : 
      653              :       /* Else recurse if the conversion preserves the address value.  */
      654        47756 :       if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
      655            2 :            || POINTER_TYPE_P (TREE_TYPE (lhs)))
      656        47756 :           && (TYPE_PRECISION (TREE_TYPE (lhs))
      657        23878 :               >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
      658        23811 :         return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);
      659              : 
      660              :       return false;
      661              :     }
      662              : 
      663              :   /* If this isn't a conversion chain from this on we only can propagate
      664              :      into compatible pointer contexts.  */
      665      2734849 :   if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
      666              :     return false;
      667              : 
      668              :   /* Propagate through constant pointer adjustments.  */
      669      2714302 :   if (TREE_CODE (lhs) == SSA_NAME
      670      1326205 :       && rhs_code == POINTER_PLUS_EXPR
      671      1326205 :       && rhs == name
      672      2877235 :       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
      673              :     {
      674       117881 :       tree new_def_rhs;
      675              :       /* As we come here with non-invariant addresses in def_rhs we need
      676              :          to make sure we can build a valid constant offsetted address
      677              :          for further propagation.  Simply rely on fold building that
      678              :          and check after the fact.  */
      679       117881 :       new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
      680              :                                  def_rhs,
      681              :                                  fold_convert (ptr_type_node,
      682              :                                                gimple_assign_rhs2 (use_stmt)));
      683       117881 :       if (TREE_CODE (new_def_rhs) == MEM_REF
      684       117881 :           && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
      685              :         return false;
      686       113917 :       new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);
      687              : 
      688              :       /* Recurse.  If we could propagate into all uses of lhs do not
      689              :          bother to replace into the current use but just pretend we did.  */
      690       113917 :       if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
      691              :         return true;
      692              : 
      693        37957 :       if (useless_type_conversion_p (TREE_TYPE (lhs),
      694        37957 :                                      TREE_TYPE (new_def_rhs)))
      695        37957 :         gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
      696              :                                         new_def_rhs);
      697            0 :       else if (is_gimple_min_invariant (new_def_rhs))
      698            0 :         gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      699              :       else
      700              :         return false;
      701        37957 :       gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      702        37957 :       update_stmt (use_stmt);
      703        37957 :       return true;
      704              :     }
      705              : 
      706              :   /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
      707              :      ADDR_EXPR will not appear on the LHS.  */
      708      2596421 :   tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
      709      3912378 :   while (handled_component_p (*lhsp))
      710      1315957 :     lhsp = &TREE_OPERAND (*lhsp, 0);
      711      2596421 :   lhs = *lhsp;
      712              : 
      713              :   /* Now see if the LHS node is a MEM_REF using NAME.  If so,
      714              :      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
      715      2596421 :   if (TREE_CODE (lhs) == MEM_REF
      716      2596421 :       && TREE_OPERAND (lhs, 0) == name)
      717              :     {
      718       878392 :       tree def_rhs_base;
      719       878392 :       poly_int64 def_rhs_offset;
      720              :       /* If the address is invariant we can always fold it.  */
      721       878392 :       if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
      722              :                                                          &def_rhs_offset)))
      723              :         {
      724       832696 :           poly_offset_int off = mem_ref_offset (lhs);
      725       832696 :           tree new_ptr;
      726       832696 :           off += def_rhs_offset;
      727       832696 :           if (TREE_CODE (def_rhs_base) == MEM_REF)
      728              :             {
      729       811849 :               off += mem_ref_offset (def_rhs_base);
      730       811849 :               new_ptr = TREE_OPERAND (def_rhs_base, 0);
      731              :             }
      732              :           else
      733        20847 :             new_ptr = build_fold_addr_expr (def_rhs_base);
      734       832696 :           TREE_OPERAND (lhs, 0) = new_ptr;
      735       832696 :           TREE_OPERAND (lhs, 1)
      736       832696 :             = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
      737       832696 :           tidy_after_forward_propagate_addr (use_stmt);
      738              :           /* Continue propagating into the RHS if this was not the only use.  */
      739       832696 :           if (single_use_p)
      740       225746 :             return true;
      741              :         }
      742              :       /* If the LHS is a plain dereference and the value type is the same as
      743              :          that of the pointed-to type of the address we can put the
      744              :          dereferenced address on the LHS preserving the original alias-type.  */
      745        45696 :       else if (integer_zerop (TREE_OPERAND (lhs, 1))
      746        17795 :                && ((gimple_assign_lhs (use_stmt) == lhs
      747        14237 :                     && useless_type_conversion_p
      748        14237 :                          (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
      749        14237 :                           TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
      750        13558 :                    || types_compatible_p (TREE_TYPE (lhs),
      751        13558 :                                           TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
      752              :                /* Don't forward anything into clobber stmts if it would result
      753              :                   in the lhs no longer being a MEM_REF.  */
      754        53144 :                && (!gimple_clobber_p (use_stmt)
      755          161 :                    || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
      756              :         {
      757         7287 :           tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
      758         7287 :           tree new_offset, new_base, saved, new_lhs;
      759        26159 :           while (handled_component_p (*def_rhs_basep))
      760        11585 :             def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
                       :           /* Temporarily rewrite DEF_RHS's base in place so the unshared
                       :              copy below picks up the new MEM_REF base; the original is
                       :              restored from SAVED afterwards.  */
      761         7287 :           saved = *def_rhs_basep;
      762         7287 :           if (TREE_CODE (*def_rhs_basep) == MEM_REF)
      763              :             {
      764         3757 :               new_base = TREE_OPERAND (*def_rhs_basep, 0);
      765         3757 :               new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
      766              :                                          TREE_OPERAND (*def_rhs_basep, 1));
      767              :             }
      768              :           else
      769              :             {
      770         3530 :               new_base = build_fold_addr_expr (*def_rhs_basep);
      771         3530 :               new_offset = TREE_OPERAND (lhs, 1);
      772              :             }
      773         7287 :           *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
      774              :                                    new_base, new_offset);
      775         7287 :           TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
      776         7287 :           TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
      777         7287 :           TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
      778         7287 :           new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
      779         7287 :           *lhsp = new_lhs;
      780         7287 :           TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
      781         7287 :           TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
      782         7287 :           *def_rhs_basep = saved;
      783         7287 :           tidy_after_forward_propagate_addr (use_stmt);
      784              :           /* Continue propagating into the RHS if this was not the
      785              :              only use.  */
      786         7287 :           if (single_use_p)
      787              :             return true;
      788              :         }
      789              :       else
      790              :         /* We can have a struct assignment dereferencing our name twice.
      791              :            Note that we didn't propagate into the lhs to not falsely
      792              :            claim we did when propagating into the rhs.  */
      793              :         res = false;
      794              :     }
      795              : 
      796              :   /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
      797              :      nodes from the RHS.  This mirrors the LHS handling above.  */
      798      2367147 :   tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
      799      2367147 :   if (TREE_CODE (*rhsp) == ADDR_EXPR)
      800       241848 :     rhsp = &TREE_OPERAND (*rhsp, 0);
      801      3341114 :   while (handled_component_p (*rhsp))
      802       973967 :     rhsp = &TREE_OPERAND (*rhsp, 0);
      803      2367147 :   rhs = *rhsp;
      804              : 
      805              :   /* Now see if the RHS node is a MEM_REF using NAME.  If so,
      806              :      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
      807      2367147 :   if (TREE_CODE (rhs) == MEM_REF
      808      2367147 :       && TREE_OPERAND (rhs, 0) == name)
      809              :     {
      810      1127954 :       tree def_rhs_base;
      811      1127954 :       poly_int64 def_rhs_offset;
      812      1127954 :       if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
      813              :                                                          &def_rhs_offset)))
      814              :         {
      815      1091882 :           poly_offset_int off = mem_ref_offset (rhs);
      816      1091882 :           tree new_ptr;
      817      1091882 :           off += def_rhs_offset;
      818      1091882 :           if (TREE_CODE (def_rhs_base) == MEM_REF)
      819              :             {
      820      1066585 :               off += mem_ref_offset (def_rhs_base);
      821      1066585 :               new_ptr = TREE_OPERAND (def_rhs_base, 0);
      822              :             }
      823              :           else
      824        25297 :             new_ptr = build_fold_addr_expr (def_rhs_base);
      825      1091882 :           TREE_OPERAND (rhs, 0) = new_ptr;
      826      1091882 :           TREE_OPERAND (rhs, 1)
      827      1091882 :             = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
      828      1091882 :           fold_stmt_inplace (use_stmt_gsi);
      829      1091882 :           tidy_after_forward_propagate_addr (use_stmt);
      830      1091882 :           return res;
      831              :         }
      832              :       /* If the RHS is a plain dereference and the value type is the same as
      833              :          that of the pointed-to type of the address we can put the
      834              :          dereferenced address on the RHS preserving the original alias-type.  */
      835        36072 :       else if (integer_zerop (TREE_OPERAND (rhs, 1))
      836        36072 :                && ((gimple_assign_rhs1 (use_stmt) == rhs
      837        19674 :                     && useless_type_conversion_p
      838        19674 :                          (TREE_TYPE (gimple_assign_lhs (use_stmt)),
      839        19674 :                           TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
      840        22491 :                    || types_compatible_p (TREE_TYPE (rhs),
      841        22491 :                                           TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
      842              :         {
      843        14658 :           tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
      844        14658 :           tree new_offset, new_base, saved, new_rhs;
      845        51838 :           while (handled_component_p (*def_rhs_basep))
      846        22522 :             def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
                       :           /* As in the LHS case, rewrite DEF_RHS's base in place for the
                       :              unshare_expr copy and restore it from SAVED afterwards.  */
      847        14658 :           saved = *def_rhs_basep;
      848        14658 :           if (TREE_CODE (*def_rhs_basep) == MEM_REF)
      849              :             {
      850         7044 :               new_base = TREE_OPERAND (*def_rhs_basep, 0);
      851         7044 :               new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
      852              :                                          TREE_OPERAND (*def_rhs_basep, 1));
      853              :             }
      854              :           else
      855              :             {
      856         7614 :               new_base = build_fold_addr_expr (*def_rhs_basep);
      857         7614 :               new_offset = TREE_OPERAND (rhs, 1);
      858              :             }
      859        14658 :           *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
      860              :                                    new_base, new_offset);
      861        14658 :           TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
      862        14658 :           TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
      863        14658 :           TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
      864        14658 :           new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
      865        14658 :           *rhsp = new_rhs;
      866        14658 :           TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
      867        14658 :           TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
      868        14658 :           *def_rhs_basep = saved;
      869        14658 :           fold_stmt_inplace (use_stmt_gsi);
      870        14658 :           tidy_after_forward_propagate_addr (use_stmt);
      871        14658 :           return res;
      872              :         }
      873              :     }
      874              : 
      875              :   /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
      876              :      is nothing to do. */
      877      1260607 :   if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      878      1260607 :       || gimple_assign_rhs1 (use_stmt) != name)
      879              :     return false;
      880              : 
      881              :   /* The remaining cases are all for turning pointer arithmetic into
      882              :      array indexing.  They only apply when we have the address of
      883              :      element zero in an array.  If that is not the case then there
      884              :      is nothing to do.  */
      885        45052 :   array_ref = TREE_OPERAND (def_rhs, 0);
      886        45052 :   if ((TREE_CODE (array_ref) != ARRAY_REF
      887         4553 :        || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      888         4553 :        || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      889        46530 :       && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
      890              :     return false;
      891              : 
      892        22590 :   rhs2 = gimple_assign_rhs2 (use_stmt);
      893              :   /* Optimize &x[C1] p+ C2 to  &x p+ C3 with C3 = C1 * element_size + C2.  */
      894        22590 :   if (TREE_CODE (rhs2) == INTEGER_CST)
      895              :     {
      896            0 :       tree new_rhs = build1_loc (gimple_location (use_stmt),
      897            0 :                                  ADDR_EXPR, TREE_TYPE (def_rhs),
      898            0 :                                  fold_build2 (MEM_REF,
      899              :                                               TREE_TYPE (TREE_TYPE (def_rhs)),
      900              :                                               unshare_expr (def_rhs),
      901              :                                               fold_convert (ptr_type_node,
      902              :                                                             rhs2)));
      903            0 :       gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      904            0 :       use_stmt = gsi_stmt (*use_stmt_gsi);
      905            0 :       update_stmt (use_stmt);
      906            0 :       tidy_after_forward_propagate_addr (use_stmt);
      907            0 :       return true;
      908              :     }
      909              : 
      910              :   return false;
      911              : }
     912              : 
      913              : /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
      914              : 
      915              :    Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
      916              :    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
      917              :    node or for recovery of array indexing from pointer arithmetic.
      918              : 
      919              :    PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
      920              :    the single use in the previous invocation.  Pass true when calling
      921              :    this as toplevel.
      922              : 
      923              :    Returns true, if all uses have been propagated into.  */
      924              : 
      925              : static bool
      926      3205968 : forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
      927              : {
      928      3205968 :   bool all = true;
                       :   /* NAME only counts as single-use if every name on the chain of
                       :      recursive invocations leading here was single-use as well.  */
      929      3205968 :   bool single_use_p = parent_single_use_p && has_single_use (name);
      930              : 
      931     16903513 :   for (gimple *use_stmt : gather_imm_use_stmts (name))
      932              :     {
      933      7285609 :       bool result;
      934      7285609 :       tree use_rhs;
      935              : 
      936              :       /* If the use is not in a simple assignment statement, then
      937              :          there is nothing we can do.  Debug uses do not count as
      938              :          failures to propagate.  */
      939      7285609 :       if (!is_gimple_assign (use_stmt))
      940              :         {
      941      4520064 :           if (!is_gimple_debug (use_stmt))
      942      1869635 :             all = false;
      943      4520064 :           continue;
      944              :         }
      945              : 
      946      2765545 :       gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      947      2765545 :       result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
      948              :                                               single_use_p);
      949              :       /* If the use has moved to a different statement adjust
      950              :          the update machinery for the old statement too.  */
      951      2765545 :       if (use_stmt != gsi_stmt (gsi))
      952              :         {
      953            0 :           update_stmt (use_stmt);
      954            0 :           use_stmt = gsi_stmt (gsi);
      955              :         }
      956      2765545 :       update_stmt (use_stmt);
      957      2765545 :       all &= result;
      958              : 
      959              :       /* Remove intermediate now unused copy and conversion chains.  */
      960      2765545 :       use_rhs = gimple_assign_rhs1 (use_stmt);
      961      2765545 :       if (result
      962      1453421 :           && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
      963      1212101 :           && TREE_CODE (use_rhs) == SSA_NAME
      964      2845194 :           && has_zero_uses (gimple_assign_lhs (use_stmt)))
      965              :         {
      966        79649 :           gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      967        79649 :           fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
      968        79649 :           release_defs (use_stmt);
      969        79649 :           gsi_remove (&gsi, true);
      970              :         }
      971      3205968 :     }
      972              : 
                       :   /* Full success additionally requires that NAME itself became dead
                       :      (debug uses aside, all its uses were rewritten or removed).  */
      973      3205968 :   return all && has_zero_uses (name);
      974              : }
     974              : 
     975              : 
      976              : /* Helper function for simplify_gimple_switch.  Remove case labels that
      977              :    have values outside the range of the new type.  */
      978              : 
      979              : static void
      980        11633 : simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type,
      981              :                                   vec<std::pair<int, int> > &edges_to_remove)
      982              : {
      983        11633 :   unsigned int branch_num = gimple_switch_num_labels (stmt);
      984        11633 :   auto_vec<tree> labels (branch_num);
      985        11633 :   unsigned int i, len;
      986              : 
      987              :   /* Collect the existing case labels in a VEC, and preprocess it as if
      988              :      we are gimplifying a GENERIC SWITCH_EXPR.  Label 0 is the default
      989              :      label and is intentionally skipped here.  */
      990        72698 :   for (i = 1; i < branch_num; i++)
      991        49432 :     labels.quick_push (gimple_switch_label (stmt, i));
      992        11633 :   preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
      993              : 
      994              :   /* If any labels were removed, replace the existing case labels
      995              :      in the GIMPLE_SWITCH statement with the correct ones.
      996              :      Note that the type updates were done in-place on the case labels,
      997              :      so we only have to replace the case labels in the GIMPLE_SWITCH
      998              :      if the number of labels changed.  */
      999        11633 :   len = labels.length ();
     1000        11633 :   if (len < branch_num - 1)
     1001              :     {
     1002            0 :       bitmap target_blocks;
     1003            0 :       edge_iterator ei;
     1004            0 :       edge e;
     1005              : 
     1006              :       /* Corner case: *all* case labels have been removed as being
     1007              :          out-of-range for INDEX_TYPE.  Push one label and let the
     1008              :          CFG cleanups deal with this further.  */
     1009            0 :       if (len == 0)
     1010              :         {
     1011            0 :           tree label, elt;
     1012              : 
     1013            0 :           label = CASE_LABEL (gimple_switch_default_label (stmt));
     1014            0 :           elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
     1015            0 :           labels.quick_push (elt);
     1016            0 :           len = 1;
     1017              :         }
     1018              : 
     1019            0 :       for (i = 0; i < labels.length (); i++)
     1020            0 :         gimple_switch_set_label (stmt, i + 1, labels[i]);
     1021            0 :       for (i++ ; i < branch_num; i++)
     1022            0 :         gimple_switch_set_label (stmt, i, NULL_TREE);
     1023            0 :       gimple_switch_set_num_labels (stmt, len + 1);
     1024              : 
     1025              :       /* Cleanup any edges that are now dead.  Actual removal is
     1026              :          deferred to the caller through EDGES_TO_REMOVE, recorded as
     1027              :          (source, destination) basic-block index pairs.  */
     1028            0 :       target_blocks = BITMAP_ALLOC (NULL);
     1029            0 :       for (i = 0; i < gimple_switch_num_labels (stmt); i++)
     1030              :         {
     1031            0 :           tree elt = gimple_switch_label (stmt, i);
     1032            0 :           basic_block target = label_to_block (cfun, CASE_LABEL (elt));
     1033            0 :           bitmap_set_bit (target_blocks, target->index);
     1034              :         }
     1035            0 :       for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
     1036              :         {
     1037            0 :           if (! bitmap_bit_p (target_blocks, e->dest->index))
     1038            0 :             edges_to_remove.safe_push (std::make_pair (e->src->index,
     1039            0 :                                                        e->dest->index));
     1040              :           else
     1041            0 :             ei_next (&ei);
     1042              :         }
     1043            0 :       BITMAP_FREE (target_blocks);
     1044              :     }
     1045        11633 : }
    1043              : 
     1044              : /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
     1045              :    the condition which we may be able to optimize better.  */
     1046              : 
     1047              : static bool
     1048       103300 : simplify_gimple_switch (gswitch *stmt,
     1049              :                         vec<std::pair<int, int> > &edges_to_remove,
     1050              :                         bitmap simple_dce_worklist)
     1051              : {
     1052              :   /* The optimization that we really care about is removing unnecessary
     1053              :      casts.  That will let us do much better in propagating the inferred
     1054              :      constant at the switch target.  */
     1055       103300 :   tree cond = gimple_switch_index (stmt);
     1056       103300 :   if (TREE_CODE (cond) == SSA_NAME)
     1057              :     {
     1058       103299 :       gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
     1059       103299 :       if (gimple_assign_cast_p (def_stmt))
     1060              :         {
     1061        12115 :           tree def = gimple_assign_rhs1 (def_stmt);
     1062        12115 :           if (TREE_CODE (def) != SSA_NAME)
     1063              :             return false;
     1064              : 
     1065              :           /* If we have an extension or sign-change that preserves the
     1066              :              values we check against then we can copy the source value into
     1067              :              the switch.  */
     1068        12115 :           tree ti = TREE_TYPE (def);
     1069        12115 :           if (INTEGRAL_TYPE_P (ti)
     1070        12115 :               && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
     1071              :             {
                       :               /* Determine the extreme case values; label 1 holds the
                       :                  lowest and label n-1 the highest (case labels are kept
                       :                  sorted in a GIMPLE switch; label 0 is the default).  */
     1072        11870 :               size_t n = gimple_switch_num_labels (stmt);
     1073        11870 :               tree min = NULL_TREE, max = NULL_TREE;
     1074        11870 :               if (n > 1)
     1075              :                 {
     1076        11870 :                   min = CASE_LOW (gimple_switch_label (stmt, 1));
     1077        11870 :                   if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
     1078          154 :                     max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
     1079              :                   else
     1080        11716 :                     max = CASE_LOW (gimple_switch_label (stmt, n - 1));
     1081              :                 }
                       :               /* Only strip the cast if every checked value fits in the
                       :                  narrower type TI.  */
     1082        11870 :               if ((!min || int_fits_type_p (min, ti))
     1083        11866 :                   && (!max || int_fits_type_p (max, ti)))
     1084              :                 {
                       :                   /* The old index may now be dead; queue it for the
                       :                      simple DCE worklist to clean up.  */
     1085        11633 :                   bitmap_set_bit (simple_dce_worklist,
     1086        11633 :                                   SSA_NAME_VERSION (cond));
     1087        11633 :                   gimple_switch_set_index (stmt, def);
     1088        11633 :                   simplify_gimple_switch_label_vec (stmt, ti,
     1089              :                                                     edges_to_remove);
     1090        11633 :                   update_stmt (stmt);
     1091        11633 :                   return true;
     1092              :                 }
     1093              :             }
     1094              :         }
     1095              :     }
     1096              : 
     1097              :   return false;
     1098              : }
    1099              : 
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  /* For each pointer we record up to CPD_ITERATIONS candidate base
     expressions in EXPS together with the accumulated constant byte
     offset of the pointer from that base in OFFS; CNT is the number
     of entries recorded.  Index 0 walks p2, index 1 walks p1.  */
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              poly_int64 offset;
              /* Fold any constant component-path offset of Q into OFF.  */
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (maybe_ne (offset, 0))
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  /* &MEM[ssa + CST]: continue the walk from SSA with the
                     MEM_REF offset added into OFF.  */
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  /* A non-SSA base (e.g. a decl): record it and stop
                     walking this chain.  */
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          /* Record the SSA name itself as a candidate base.  */
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              /* Step through p = q + CST, accumulating CST into OFF.  */
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  /* If the two walks share a recorded base, the difference of the
     accumulated offsets is p2 - p1.  */
  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
    1187              : 
    1188              : /* Helper function for optimize_aggr_zeroprop.
    1189              :    Props the zeroing (memset, VAL) that was done in DEST+OFFSET:LEN
    1190              :    (DEFSTMT) into the STMT.  Returns true if the STMT was updated.  */
    1191              : static void
    1192     22022178 : optimize_aggr_zeroprop_1 (gimple *defstmt, gimple *stmt,
    1193              :                           tree dest, poly_int64 offset, tree val,
    1194              :                           poly_offset_int len)
    1195              : {
    1196     22022178 :   tree src2;
    1197     22022178 :   tree len2 = NULL_TREE;
    1198     22022178 :   poly_int64 offset2;
    1199              : 
    1200     22022178 :   if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY)
    1201        19141 :       && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
    1202     22035709 :       && poly_int_tree_p (gimple_call_arg (stmt, 2)))
    1203              :     {
    1204        12511 :       src2 = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
    1205        12511 :       len2 = gimple_call_arg (stmt, 2);
    1206              :     }
    1207     22009667 :    else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
    1208              :      {
    1209      1865134 :         src2 = gimple_assign_rhs1 (stmt);
    1210      1865134 :         len2 = (TREE_CODE (src2) == COMPONENT_REF
    1211      1865134 :                 ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
    1212      1697693 :                 : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
    1213              :         /* Can only handle zero memsets. */
    1214      1865134 :         if (!integer_zerop (val))
    1215     22000353 :           return;
    1216              :      }
    1217              :    else
    1218     20144533 :      return;
    1219              : 
    1220      1876683 :   if (len2 == NULL_TREE
    1221      1876683 :       || !poly_int_tree_p (len2))
    1222              :     return;
    1223              : 
    1224      1876683 :   src2 = get_addr_base_and_unit_offset (src2, &offset2);
    1225      1876683 :   if (src2 == NULL_TREE
    1226      1876683 :       || maybe_lt (offset2, offset))
    1227              :     return;
    1228              : 
    1229       858057 :   if (!operand_equal_p (dest, src2, 0))
    1230              :     return;
    1231              : 
    1232              :   /* [ dest + offset, dest + offset + len - 1 ] is set to val.
    1233              :      Make sure that
    1234              :      [ dest + offset2, dest + offset2 + len2 - 1 ] is a subset of that.  */
    1235       131051 :   if (maybe_gt (wi::to_poly_offset (len2) + (offset2 - offset),
    1236              :                 len))
    1237              :     return;
    1238              : 
    1239        21825 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1240              :     {
    1241           32 :       fprintf (dump_file, "Simplified\n  ");
    1242           32 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1243           32 :       fprintf (dump_file, "after previous\n  ");
    1244           32 :       print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    1245              :     }
    1246        21825 :   gimple *orig_stmt = stmt;
    1247              :   /* For simplicity, don't change the kind of the stmt,
    1248              :      turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
    1249              :      into memset (&dest, val, len);
    1250              :      In theory we could change dest = src into memset if dest
    1251              :      is addressable (maybe beneficial if val is not 0), or
    1252              :      memcpy (&dest, &src, len) into dest = {} if len is the size
    1253              :      of dest, dest isn't volatile.  */
    1254        21825 :   if (is_gimple_assign (stmt))
    1255              :     {
    1256        21820 :       tree ctor_type = TREE_TYPE (gimple_assign_lhs (stmt));
    1257        21820 :       tree ctor = build_constructor (ctor_type, NULL);
    1258        21820 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    1259        21820 :       gimple_assign_set_rhs_from_tree (&gsi, ctor);
    1260        21820 :       update_stmt (stmt);
    1261        21820 :       statistics_counter_event (cfun, "copy zeroing propagation of aggregate", 1);
    1262              :     }
    1263              :   else /* If stmt is memcpy, transform it into memset.  */
    1264              :     {
    1265            5 :       gcall *call = as_a <gcall *> (stmt);
    1266            5 :       tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
    1267            5 :       gimple_call_set_fndecl (call, fndecl);
    1268            5 :       gimple_call_set_fntype (call, TREE_TYPE (fndecl));
    1269            5 :       gimple_call_set_arg (call, 1, val);
    1270            5 :       update_stmt (stmt);
    1271            5 :       statistics_counter_event (cfun, "memcpy to memset changed", 1);
    1272              :     }
    1273              : 
    1274        21825 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1275              :     {
    1276           32 :       fprintf (dump_file, "into\n  ");
    1277           32 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1278              :     }
    1279              : 
    1280              :   /* Mark the bb for eh cleanup if needed.  */
    1281        21825 :   if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
    1282            6 :     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
    1283              : }
    1284              : 
/* Optimize
   a = {}; // DEST = value ;; LEN(nullptr)
   b = a;
   into
   a = {};
   b = {};
   Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
   and/or memcpy (&b, &a, sizeof (a)); instead of b = a;
   STMT is the candidate filling store; FULL_WALK selects whether the
   walk over the virtual-def chain may skip a (bounded) number of
   unrelated intervening stores.  */

static void
optimize_aggr_zeroprop (gimple *stmt, bool full_walk)
{
  ao_ref read;
  if (gimple_has_volatile_ops (stmt))
    return;

  /* DEST/VAL/LEN describe the region STMT fills: DEST[0..LEN-1] gets
     the byte value VAL.  DEST left as NULL_TREE means "not handled".  */
  tree dest = NULL_TREE;
  tree val = integer_zero_node;
  tree len = NULL_TREE;
  bool can_use_tbba = true;

  /* Case 1: memset (&dest, val, len) with constant val and len.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_MEMSET)
      && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
      && TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
      && poly_int_tree_p (gimple_call_arg (stmt, 2)))
    {
      dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
      len = gimple_call_arg (stmt, 2);
      val = gimple_call_arg (stmt, 1);
      ao_ref_init_from_ptr_and_size (&read, gimple_call_arg (stmt, 0), len);
      /* memset acts on raw bytes, so TBAA must not be used when
         checking later statements against this region.  */
      can_use_tbba = false;
    }
  /* Case 2: a store of a STRING_CST consisting only of NULs.  */
  else if (gimple_store_p (stmt)
           && gimple_assign_single_p (stmt)
           && TREE_CODE (gimple_assign_rhs1 (stmt)) == STRING_CST)
    {
      tree str = gimple_assign_rhs1 (stmt);
      dest = gimple_assign_lhs (stmt);
      ao_ref_init (&read, dest);
      /* The string must contain all null char's for now.  */
      for (int i = 0; i < TREE_STRING_LENGTH (str); i++)
        {
          if (TREE_STRING_POINTER (str)[i] != 0)
            {
              dest = NULL_TREE;
              break;
            }
        }
    }
  /* A store of integer (scalar, vector or complex) zeros is
     a zero store. */
  else if (gimple_store_p (stmt)
           && gimple_assign_single_p (stmt)
           && integer_zerop (gimple_assign_rhs1 (stmt)))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      tree type = TREE_TYPE (rhs);
      dest = gimple_assign_lhs (stmt);
      ao_ref_init (&read, dest);
      /* For integral types, the type precision needs to be a multiple
         of BITS_PER_UNIT, otherwise the store does not cover whole
         bytes.  */
      if (INTEGRAL_TYPE_P (type)
          && (TYPE_PRECISION (type) % BITS_PER_UNIT) != 0)
        dest = NULL_TREE;
    }
  /* Case 4: dest = {}; (an empty-CONSTRUCTOR store, not a clobber).  */
  else if (gimple_store_p (stmt)
           && gimple_assign_single_p (stmt)
           && TREE_CODE (gimple_assign_rhs1 (stmt)) == CONSTRUCTOR
           && !gimple_clobber_p (stmt))
    {
      dest = gimple_assign_lhs (stmt);
      ao_ref_init (&read, dest);
    }

  if (dest == NULL_TREE)
    return;

  /* For the store forms LEN was not set above; derive it from the
     stored object (field size for a COMPONENT_REF, else type size).  */
  if (len == NULL_TREE)
    len = (TREE_CODE (dest) == COMPONENT_REF
           ? DECL_SIZE_UNIT (TREE_OPERAND (dest, 1))
           : TYPE_SIZE_UNIT (TREE_TYPE (dest)));
  if (len == NULL_TREE
      || !poly_int_tree_p (len))
    return;

  /* Sometimes memset can have no vdef due to invalid declaration of memset (const, etc.).  */
  if (!gimple_vdef (stmt))
    return;

  /* This store needs to be on the byte boundary and pointing to an object.  */
  poly_int64 offset;
  tree dest_base = get_addr_base_and_unit_offset (dest, &offset);
  if (dest_base == NULL_TREE)
    return;

  /* Setup the worklist of virtual defs to follow; the unsigned limit
     bounds how many alias queries each chain may still spend.  */
  auto_vec<std::pair<tree, unsigned>> worklist;
  unsigned limit = full_walk ? param_sccvn_max_alias_queries_per_access : 0;
  worklist.safe_push (std::make_pair (gimple_vdef (stmt), limit));

  while (!worklist.is_empty ())
    {
      std::pair<tree, unsigned> top = worklist.pop ();
      tree vdef = top.first;
      limit = top.second;
      gimple *use_stmt;
      imm_use_iterator iter;
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
        {
          /* Handling PHI nodes might not be worth it so don't.  */
          if (is_a <gphi*> (use_stmt))
            continue;

          /* Keep walking past USE_STMT if it does not clobber the
             filled region.  Once the limit is exhausted only clobber
             statements are still allowed to pass through.  */
          if ((limit != 0 || gimple_clobber_p (use_stmt))
              && gimple_vdef (use_stmt)
              && !stmt_may_clobber_ref_p_1 (use_stmt, &read,
                                           /* tbaa_p = */ can_use_tbba))
              {
                unsigned new_limit = limit == 0 ? 0 : limit - 1;
                worklist.safe_push (std::make_pair (gimple_vdef (use_stmt),
                                                    new_limit));
              }

           /* Regardless, try to propagate the fill into USE_STMT.  */
           optimize_aggr_zeroprop_1 (stmt, use_stmt, dest_base, offset,
                                     val, wi::to_poly_offset (len));
        }
    }

}
    1416              : 
    1417              : /* Returns the pointer to the base of the object of the
    1418              :    reference EXPR and extracts the information about
    1419              :    the offset of the access, storing it to PBYTESIZE,
    1420              :    PBYTEPOS and PREVERSEP.
    1421              :    If the access is not a byte sized or position is not
    1422              :    on the byte, return NULL.  */
    1423              : static tree
    1424      5169550 : split_core_and_offset_size (tree expr,
    1425              :                             poly_int64 *pbytesize, poly_int64 *pbytepos,
    1426              :                             tree *poffset, int *preversep)
    1427              : {
    1428      5169550 :   tree core;
    1429      5169550 :   machine_mode mode;
    1430      5169550 :   int unsignedp, volatilep;
    1431      5169550 :   poly_int64 bitsize;
    1432      5169550 :   poly_int64 bitpos;
    1433      5169550 :   location_t loc = EXPR_LOCATION (expr);
    1434              : 
    1435      5169550 :   core = get_inner_reference (expr, &bitsize, &bitpos,
    1436              :                               poffset, &mode, &unsignedp, preversep,
    1437              :                               &volatilep);
    1438     10339100 :   if (!multiple_p (bitsize, BITS_PER_UNIT, pbytesize))
    1439              :     return NULL_TREE;
    1440      5169550 :   if (!multiple_p (bitpos, BITS_PER_UNIT, pbytepos))
    1441              :     return NULL_TREE;
    1442              :   /* If we are left with MEM[a + CST] strip that and add it to the
    1443              :      pbytepos and return a. */
    1444      5169550 :   if (TREE_CODE (core) == MEM_REF)
    1445              :     {
    1446      1192237 :       poly_offset_int tem;
    1447      1192237 :       tem = wi::to_poly_offset (TREE_OPERAND (core, 1));
    1448      1192237 :       tem += *pbytepos;
    1449      1192237 :       if (tem.to_shwi (pbytepos))
    1450      1190311 :         return TREE_OPERAND (core, 0);
    1451              :     }
    1452      3979239 :   core = build_fold_addr_expr_loc (loc, core);
    1453      3979239 :   STRIP_NOPS (core);
    1454      3979239 :   return core;
    1455              : }
    1456              : 
    1457              : /* Returns a new src based on the
    1458              :    copy `DEST = SRC` and for the old SRC2.
    1459              :    Returns null if SRC2 is not related to DEST.  */
    1460              : 
    1461              : static tree
    1462      1197698 : new_src_based_on_copy (tree src2, tree dest, tree src)
    1463              : {
    1464              :   /* If the second src is not exactly the same as dest,
    1465              :      try to handle it seperately; see it is address/size equivalent.
    1466              :      Handles `a` and `a.b` and `MEM<char[N]>(&a)` which all have
    1467              :      the same size and offsets as address/size equivalent.
    1468              :      This allows copying over a memcpy and also one for copying
    1469              :      where one field is the same size as the whole struct.  */
    1470      1197698 :   if (operand_equal_p (dest, src2))
    1471              :     return src;
    1472              :   /* if both dest and src2 are decls, then we know these 2
    1473              :      accesses can't be the same.  */
    1474       698257 :   if (DECL_P (dest) && DECL_P (src2))
    1475              :     return NULL_TREE;
    1476              :   /* A VCE can't be used with imag/real or BFR so reject them early. */
    1477       367080 :   if (TREE_CODE (src) == IMAGPART_EXPR
    1478       367080 :       || TREE_CODE (src) == REALPART_EXPR
    1479       367080 :       || TREE_CODE (src) == BIT_FIELD_REF)
    1480              :     return NULL_TREE;
    1481       367080 :   tree core1, core2;
    1482       367080 :   poly_int64 bytepos1, bytepos2;
    1483       367080 :   poly_int64 bytesize1, bytesize2;
    1484       367080 :   tree toffset1, toffset2;
    1485       367080 :   int reversep1 = 0;
    1486       367080 :   int reversep2 = 0;
    1487       367080 :   poly_int64 diff = 0;
    1488       367080 :   core1 = split_core_and_offset_size (dest, &bytesize1, &bytepos1,
    1489              :                                           &toffset1, &reversep1);
    1490       367080 :   core2 = split_core_and_offset_size (src2, &bytesize2, &bytepos2,
    1491              :                                           &toffset2, &reversep2);
    1492       367080 :   if (!core1 || !core2)
    1493              :     return NULL_TREE;
    1494       367080 :   if (reversep1 != reversep2)
    1495              :     return NULL_TREE;
    1496              :   /* The sizes of the 2 accesses need to be the same. */
    1497       367080 :   if (!known_eq (bytesize1, bytesize2))
    1498              :     return NULL_TREE;
    1499       159706 :   if (!operand_equal_p (core1, core2, 0))
    1500              :     return NULL_TREE;
    1501              : 
    1502        22265 :   if (toffset1 && toffset2)
    1503              :     {
    1504            2 :       tree type = TREE_TYPE (toffset1);
    1505            2 :       if (type != TREE_TYPE (toffset2))
    1506            0 :         toffset2 = fold_convert (type, toffset2);
    1507              : 
    1508            2 :       tree tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
    1509            2 :       if (!cst_and_fits_in_hwi (tdiff))
    1510              :         return NULL_TREE;
    1511              : 
    1512            0 :       diff = int_cst_value (tdiff);
    1513            0 :     }
    1514        22263 :   else if (toffset1 || toffset2)
    1515              :     {
    1516              :       /* If only one of the offsets is non-constant, the difference cannot
    1517              :          be a constant.  */
    1518              :       return NULL_TREE;
    1519              :     }
    1520        22231 :   diff += bytepos1 - bytepos2;
    1521              :   /* The offset between the 2 need to be 0. */
    1522        22231 :   if (!known_eq (diff, 0))
    1523              :     return NULL_TREE;
    1524        21520 :   return fold_build1 (VIEW_CONVERT_EXPR,TREE_TYPE (src2), src);
    1525              : }
    1526              : 
    1527              : /* Returns true if SRC and DEST are the same address such that
    1528              :    `SRC == DEST;` is considered a nop. This is more than an
    1529              :    operand_equal_p check as it needs to be similar to
    1530              :    new_src_based_on_copy.  */
    1531              : 
    1532              : static bool
    1533      4258782 : same_for_assignment (tree src, tree dest)
    1534              : {
    1535      4258782 :   if (operand_equal_p (dest, src, 0))
    1536              :     return true;
    1537              :   /* if both dest and src2 are decls, then we know these 2
    1538              :      accesses can't be the same.  */
    1539      4255866 :   if (DECL_P (dest) && DECL_P (src))
    1540              :     return false;
    1541              : 
    1542      2217695 :   tree core1, core2;
    1543      2217695 :   poly_int64 bytepos1, bytepos2;
    1544      2217695 :   poly_int64 bytesize1, bytesize2;
    1545      2217695 :   tree toffset1, toffset2;
    1546      2217695 :   int reversep1 = 0;
    1547      2217695 :   int reversep2 = 0;
    1548      2217695 :   poly_int64 diff = 0;
    1549      2217695 :   core1 = split_core_and_offset_size (dest, &bytesize1, &bytepos1,
    1550              :                                       &toffset1, &reversep1);
    1551      2217695 :   core2 = split_core_and_offset_size (src, &bytesize2, &bytepos2,
    1552              :                                       &toffset2, &reversep2);
    1553      2217695 :   if (!core1 || !core2)
    1554              :     return false;
    1555      2217695 :   if (reversep1 != reversep2)
    1556              :     return false;
    1557              :   /* The sizes of the 2 accesses need to be the same. */
    1558      2217695 :   if (!known_eq (bytesize1, bytesize2))
    1559              :     return false;
    1560      2216774 :   if (!operand_equal_p (core1, core2, 0))
    1561              :     return false;
    1562         6026 :   if (toffset1 && toffset2)
    1563              :     {
    1564          313 :       tree type = TREE_TYPE (toffset1);
    1565          313 :       if (type != TREE_TYPE (toffset2))
    1566            0 :         toffset2 = fold_convert (type, toffset2);
    1567              : 
    1568          313 :       tree tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
    1569          313 :       if (!cst_and_fits_in_hwi (tdiff))
    1570              :         return false;
    1571              : 
    1572            0 :       diff = int_cst_value (tdiff);
    1573            0 :     }
    1574         5713 :   else if (toffset1 || toffset2)
    1575              :     {
    1576              :       /* If only one of the offsets is non-constant, the difference cannot
    1577              :          be a constant.  */
    1578              :       return false;
    1579              :     }
    1580         5713 :   diff += bytepos1 - bytepos2;
    1581              :   /* The offset between the 2 need to be 0. */
    1582         5713 :   if (!known_eq (diff, 0))
    1583              :     return false;
    1584              :   return true;
    1585              : }
    1586              : 
    1587              : /* Helper function for optimize_agr_copyprop.
    1588              :    For aggregate copies in USE_STMT, see if DEST
    1589              :    is on the lhs of USE_STMT and replace it with SRC. */
    1590              : static void
    1591       968873 : optimize_agr_copyprop_1 (gimple *stmt, gimple *use_stmt,
    1592              :                          tree dest, tree src)
    1593              : {
    1594       968873 :   gcc_assert (gimple_assign_load_p (use_stmt)
    1595              :               && gimple_store_p (use_stmt));
    1596      1937746 :   if (gimple_has_volatile_ops (use_stmt))
    1597       594411 :     return;
    1598       968872 :   tree dest2 = gimple_assign_lhs (use_stmt);
    1599       968872 :   tree src2 = gimple_assign_rhs1 (use_stmt);
    1600              :   /* If the new store is `src2 = src2;` skip over it. */
    1601       968872 :   if (same_for_assignment (src2, dest2))
    1602              :     return;
    1603       968309 :   src = new_src_based_on_copy (src2, dest, src);
    1604       968309 :   if (!src)
    1605              :     return;
    1606              :   /* For 2 memory refences and using a temporary to do the copy,
    1607              :      don't remove the temporary as the 2 memory references might overlap.
    1608              :      Note t does not need to be decl as it could be field.
    1609              :      See PR 22237 for full details.
    1610              :      E.g.
    1611              :      t = *a; #DEST = SRC;
    1612              :      *b = t; #DEST2 = SRC2;
    1613              :      Cannot be convert into
    1614              :      t = *a;
    1615              :      *b = *a;
    1616              :      Though the following is allowed to be done:
    1617              :      t = *a;
    1618              :      *a = t;
    1619              :      And convert it into:
    1620              :      t = *a;
    1621              :      *a = *a;
    1622              :      */
    1623       402581 :   if (!operand_equal_p (dest2, src, 0)
    1624       402581 :       && !DECL_P (dest2) && !DECL_P (src))
    1625              :     {
    1626              :       /* If *a and *b have the same base see if
    1627              :          the offset between the two is greater than
    1628              :          or equal to the size of the type. */
    1629        31397 :       poly_int64 offset1, offset2;
    1630        31397 :       tree len = TYPE_SIZE_UNIT (TREE_TYPE (src));
    1631        31397 :       if (len == NULL_TREE
    1632        31397 :           || !tree_fits_poly_int64_p (len))
    1633        28119 :         return;
    1634        31397 :       tree base1 = get_addr_base_and_unit_offset (dest2, &offset1);
    1635        31397 :       tree base2 = get_addr_base_and_unit_offset (src, &offset2);
    1636        31397 :       poly_int64 size = tree_to_poly_int64 (len);
    1637              :       /* If the bases are 2 different decls,
    1638              :          then there can be no overlapping.  */
    1639        31397 :       if (base1 && base2
    1640        30592 :           && DECL_P (base1) && DECL_P (base2)
    1641         1806 :           && base1 != base2)
    1642              :         ;
    1643              :       /* If we can't figure out the base or the bases are
    1644              :          not equal then fall back to an alignment check.  */
    1645        29815 :       else if (!base1
    1646        29815 :                || !base2
    1647        29815 :                || !operand_equal_p (base1, base2))
    1648              :         {
    1649        29448 :           unsigned int align1 = get_object_alignment (src);
    1650        29448 :           unsigned int align2 = get_object_alignment (dest2);
    1651        29448 :           align1 /= BITS_PER_UNIT;
    1652        29448 :           align2 /= BITS_PER_UNIT;
    1653              :           /* If the alignment of either object is less
    1654              :              than the size then there is a possibility
    1655              :              of overlapping.  */
    1656        29448 :           if (maybe_lt (align1, size)
    1657        29448 :               || maybe_lt (align2, size))
    1658        28119 :             return;
    1659              :         }
    1660              :       /* Make sure [offset1, offset1 + len - 1] does
    1661              :          not overlap with [offset2, offset2 + len - 1],
    1662              :          it is ok if they are at the same location though.  */
    1663          367 :       else if (ranges_maybe_overlap_p (offset1, size, offset2, size)
    1664          367 :           && !known_eq (offset2, offset1))
    1665              :         return;
    1666              :     }
    1667              : 
    1668       374462 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1669              :     {
    1670           11 :       fprintf (dump_file, "Simplified\n  ");
    1671           11 :       print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
    1672           11 :       fprintf (dump_file, "after previous\n  ");
    1673           11 :       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    1674              :     }
    1675       374462 :   gimple *orig_stmt = use_stmt;
    1676       374462 :   gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
    1677       374462 :   gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (src));
    1678       374462 :   update_stmt (use_stmt);
    1679              : 
    1680       374462 :   if (dump_file && (dump_flags & TDF_DETAILS))
    1681              :     {
    1682           11 :       fprintf (dump_file, "into\n  ");
    1683           11 :       print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
    1684              :     }
    1685       374462 :   if (maybe_clean_or_replace_eh_stmt (orig_stmt, use_stmt))
    1686            0 :     bitmap_set_bit (to_purge, gimple_bb (stmt)->index);
    1687       374462 :   statistics_counter_event (cfun, "copy prop for aggregate", 1);
    1688              : }
    1689              : 
    1690              : /* Helper function for optimize_agr_copyprop_1, propagate aggregates
    1691              :    into the arguments of USE_STMT if the argument matches with DEST;
    1692              :    replacing it with SRC.  */
    1693              : static void
    1694       679528 : optimize_agr_copyprop_arg (gimple *defstmt, gcall *call,
    1695              :                            tree dest, tree src)
    1696              : {
    1697       679528 :   bool changed = false;
    1698      2258946 :   for (unsigned arg = 0; arg < gimple_call_num_args (call); arg++)
    1699              :     {
    1700      1579418 :       tree *argptr = gimple_call_arg_ptr (call, arg);
    1701      2975896 :       if (TREE_CODE (*argptr) == SSA_NAME
    1702       906419 :           || is_gimple_min_invariant (*argptr)
    1703      1762358 :           || TYPE_VOLATILE (TREE_TYPE (*argptr)))
    1704      1396478 :         continue;
    1705       182940 :       tree newsrc = new_src_based_on_copy (*argptr, dest, src);
    1706       182940 :       if (!newsrc)
    1707       110930 :         continue;
    1708              : 
    1709        72010 :       if (dump_file && (dump_flags & TDF_DETAILS))
    1710              :         {
    1711            9 :           fprintf (dump_file, "Simplified\n  ");
    1712            9 :           print_gimple_stmt (dump_file, call, 0, dump_flags);
    1713            9 :           fprintf (dump_file, "after previous\n  ");
    1714            9 :           print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    1715              :         }
    1716        72010 :       *argptr = unshare_expr (newsrc);
    1717        72010 :       changed = true;
    1718        72010 :       if (dump_file && (dump_flags & TDF_DETAILS))
    1719              :         {
    1720            9 :           fprintf (dump_file, "into\n  ");
    1721            9 :           print_gimple_stmt (dump_file, call, 0, dump_flags);
    1722              :         }
    1723              :     }
    1724       679528 :   if (changed)
    1725        71840 :     update_stmt (call);
    1726       679528 : }
    1727              : 
/* Helper function for optimize_agr_copyprop, propagate aggregates
   into the return stmt USE if the operand of the return matches DEST;
   replacing it with SRC.  DEFSTMT is the aggregate copy `DEST = SRC;`
   and is used only for dump output.  */
static void
optimize_agr_copyprop_return (gimple *defstmt, greturn *use,
                              tree dest, tree src)
{
  tree rvalue = gimple_return_retval (use);
  /* Nothing to do for a plain `return;`, a register value, a constant,
     or a volatile access which must not be rewritten.  */
  if (!rvalue
      || TREE_CODE (rvalue) == SSA_NAME
      || is_gimple_min_invariant (rvalue)
      || TYPE_VOLATILE (TREE_TYPE (rvalue)))
    return;

  /* `return <retval>;` is already the best it could be.
     Likewise `return *<retval>_N(D)`.  */
  if (TREE_CODE (rvalue) == RESULT_DECL
      || (TREE_CODE (rvalue) == MEM_REF
          && TREE_CODE (TREE_OPERAND (rvalue, 0)) == SSA_NAME
          && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (rvalue, 0)))
               == RESULT_DECL))
    return;
  tree newsrc = new_src_based_on_copy (rvalue, dest, src);
  if (!newsrc)
    return;
  /* Currently only support non-global vars.
     See PR 124099 on enumtls not supporting expanding for GIMPLE_RETURN.
     FIXME: could support VCEs too?  */
  if (!VAR_P (newsrc) || is_global_var (newsrc))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified\n  ");
      print_gimple_stmt (dump_file, use, 0, dump_flags);
      fprintf (dump_file, "after previous\n  ");
      print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
    }
  gimple_return_set_retval (use, newsrc);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "into\n  ");
      print_gimple_stmt (dump_file, use, 0, dump_flags);
    }
  update_stmt (use);
}
    1773              : 
    1774              : /* Optimizes
    1775              :    DEST = SRC;
    1776              :    DEST2 = DEST; # DEST2 = SRC2;
    1777              :    into
    1778              :    DEST = SRC;
    1779              :    DEST2 = SRC;
    1780              :    STMT is the first statement and SRC is the common
    1781              :    between the statements.
    1782              : 
    1783              :    Also optimizes:
    1784              :    DEST = SRC;
    1785              :    call_func(..., DEST, ...);
    1786              :    into:
    1787              :    DEST = SRC;
    1788              :    call_func(..., SRC, ...);
    1789              : 
    1790              : */
    1791              : static void
    1792      3698751 : optimize_agr_copyprop (gimple *stmt)
    1793              : {
    1794      7397502 :   if (gimple_has_volatile_ops (stmt))
    1795       411437 :     return;
    1796              : 
    1797              :   /* Can't prop if the statement could throw.  */
    1798      3697612 :   if (stmt_could_throw_p (cfun, stmt))
    1799              :     return;
    1800              : 
    1801      3289910 :   tree dest = gimple_assign_lhs (stmt);
    1802      3289910 :   tree src = gimple_assign_rhs1 (stmt);
    1803              :   /* If the statement is `src = src;` then ignore it. */
    1804      3289910 :   if (same_for_assignment (dest, src))
    1805              :     return;
    1806              : 
    1807      3287314 :   tree vdef = gimple_vdef (stmt);
    1808      3287314 :   imm_use_iterator iter;
    1809      3287314 :   gimple *use_stmt;
    1810     12790450 :   FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    1811              :     {
    1812      6215822 :       if (gimple_assign_load_p (use_stmt)
    1813      6215822 :           && gimple_store_p (use_stmt))
    1814       968873 :         optimize_agr_copyprop_1 (stmt, use_stmt, dest, src);
    1815      5246949 :       else if (is_gimple_call (use_stmt))
    1816       679528 :         optimize_agr_copyprop_arg (stmt, as_a<gcall*>(use_stmt), dest, src);
    1817      4567421 :       else if (is_a<greturn*> (use_stmt))
    1818       118763 :         optimize_agr_copyprop_return (stmt, as_a<greturn*>(use_stmt), dest, src);
    1819      3287314 :     }
    1820              : }
    1821              : 
/* Simple DSE of the lhs from a clobber STMT.
   This is used mostly to clean up from optimize_agr_copyprop and
   to remove (exactly one) extra copy that might later on confuse SRA.
   An example is:
   ;; write to a and such.
   b = a; // This statement is to be removed
   b = {CLOBBER};
   SRA will totally scalarize b (which means also a) here for the extra copy
   which is not something welcomed. So removing the copy will
   allow SRA to move the scalarization of a further down or not at all.

   STMT is the clobber; FULL_WALK selects the alias-walk budget:
   param_sccvn_max_alias_queries_per_access when true, otherwise 4.  */
static void
do_simple_agr_dse (gassign *stmt, bool full_walk)
{
  /* Don't do this while in -Og as we want to keep around the copy
     for debuggability.  */
  if (optimize_debug)
    return;
  ao_ref read;
  basic_block bb = gimple_bb (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  /* Only handle clobbers of a full decl.  */
  if (!DECL_P (lhs))
    return;
  ao_ref_init (&read, lhs);
  tree vuse = gimple_vuse (stmt);
  unsigned limit = full_walk ? param_sccvn_max_alias_queries_per_access : 4;
  /* Walk the virtual use-def chain backwards from the clobber, looking
     for a store to LHS that is made dead by the clobber.  */
  while (limit)
    {
      gimple *ostmt = SSA_NAME_DEF_STMT (vuse);
      /* Don't handle phis, just declare to be done. */
      if (is_a<gphi*>(ostmt) || gimple_nop_p (ostmt))
        break;
      basic_block obb = gimple_bb (ostmt);
      /* If the clobber is not fully dominating the statement define,
         then it is not "simple" to detect if the define is fully clobbered.  */
      if (obb != bb && !dominated_by_p (CDI_DOMINATORS, bb, obb))
        return;
      gimple *use_stmt;
      imm_use_iterator iter;
      /* Every consumer of OSTMT's memory state must be proven not to
         read LHS before the clobber; otherwise the store is live.  */
      FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (ostmt))
        {
          basic_block ubb = gimple_bb (use_stmt);
          if (stmt == use_stmt)
            continue;
          /* If the use is a clobber for lhs,
             then it can be safely skipped; this happens with eh
             and sometimes jump threading.  */
          if (gimple_clobber_p (use_stmt)
              && lhs == gimple_assign_lhs (use_stmt))
            continue;
          /* If the use is a phi and it is single use then check if that single use
             is a clobber and lhs is the same.  */
          if (gphi *use_phi = dyn_cast<gphi*>(use_stmt))
            {
              use_operand_p ou;
              gimple *ostmt;  /* NOTE: intentionally shadows the outer OSTMT;
                                 this one is the phi's single user.  */
              if (single_imm_use (gimple_phi_result (use_phi), &ou, &ostmt)
                  && gimple_clobber_p (ostmt)
                  && lhs == gimple_assign_lhs (ostmt))
                continue;
              /* A phi node will never be dominating the clobber.  */
              return;
            }
          /* The use needs to be dominating the clobber. */
          if ((ubb != bb && !dominated_by_p (CDI_DOMINATORS, bb, ubb))
              || ref_maybe_used_by_stmt_p (use_stmt, &read, false))
            return;
          /* Count the above alias lookup towards the limit. */
          limit--;
          if (limit == 0)
            return;
        }
      vuse = gimple_vuse (ostmt);
      /* This is a call with an assignment to the clobber decl,
         remove the lhs or the whole stmt if it was pure/const. */
      if (is_a <gcall*>(ostmt)
          && lhs == gimple_call_lhs (ostmt))
        {
          /* Don't remove stores/statements that are needed for non-call
              eh to work.  */
          if (stmt_unremovable_because_of_non_call_eh_p (cfun, ostmt))
            return;
          /* If we delete a stmt that could throw, mark the block
             in to_purge to cleanup afterwards.  */
          if (stmt_could_throw_p (cfun, ostmt))
            bitmap_set_bit (to_purge, obb->index);
          int flags = gimple_call_flags (ostmt);
          /* A pure/const call whose only effect was the now-dead store
             can be removed entirely.  */
          if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
              && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
            {
               gimple_stmt_iterator gsi = gsi_for_stmt (ostmt);
               if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Removing dead call store stmt ");
                  print_gimple_stmt (dump_file, ostmt, 0);
                  fprintf (dump_file, "\n");
                }
              unlink_stmt_vdef (ostmt);
              release_defs (ostmt);
              gsi_remove (&gsi, true);
              statistics_counter_event (cfun, "delete call dead store", 1);
              /* Only remove the first store previous statement. */
              return;
            }
          /* Make sure we do not remove a return slot we cannot reconstruct
             later.  */
          if (gimple_call_return_slot_opt_p (as_a <gcall *>(ostmt))
              && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (ostmt)))
                  || !poly_int_tree_p
                      (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (ostmt))))))
            return;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing lhs of call stmt ");
              print_gimple_stmt (dump_file, ostmt, 0);
              fprintf (dump_file, "\n");
            }
          gimple_call_set_lhs (ostmt, NULL_TREE);
          update_stmt (ostmt);
          statistics_counter_event (cfun, "removed lhs call", 1);
          return;
        }
      /* This an assignment store to the clobbered decl,
         then maybe remove it. */
      if (is_a <gassign*>(ostmt)
          && gimple_store_p (ostmt)
          && !gimple_clobber_p (ostmt)
          && lhs == gimple_assign_lhs (ostmt))
        {
          /* Don't remove stores/statements that are needed for non-call
              eh to work.  */
          if (stmt_unremovable_because_of_non_call_eh_p (cfun, ostmt))
            return;
          /* If we delete a stmt that could throw, mark the block
             in to_purge to cleanup afterwards.  */
          if (stmt_could_throw_p (cfun, ostmt))
            bitmap_set_bit (to_purge, obb->index);
          gimple_stmt_iterator gsi = gsi_for_stmt (ostmt);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing dead store stmt ");
              print_gimple_stmt (dump_file, ostmt, 0);
              fprintf (dump_file, "\n");
            }
          unlink_stmt_vdef (ostmt);
          release_defs (ostmt);
          gsi_remove (&gsi, true);
          statistics_counter_event (cfun, "delete dead store", 1);
          /* Only remove the first store previous statement. */
          return;
        }
      /* If the statement uses or maybe writes to the decl,
         then nothing is to be removed. Don't know if the write
         to the decl is partial write or a full one so the need
         to stop.
         e.g.
         b.c = a;
           Easier to stop here rather than do a full partial
           dse of this statement.
         b = {CLOBBER}; */
      if (stmt_may_clobber_ref_p_1 (ostmt, &read, false)
          || ref_maybe_used_by_stmt_p (ostmt, &read, false))
        return;
      limit--;
    }
}
    1989              : 
/* Optimizes builtin memcmps for small constant sizes.
   GSI_P is the GSI for the call. STMT is the call itself.
   Returns true if the call was changed.  */

static bool
simplify_builtin_memcmp (gimple_stmt_iterator *gsi_p, gcall *stmt)
{
  /* Make sure memcmp arguments are the correct type.  */
  if (gimple_call_num_args (stmt) != 3)
    return false;
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  if (!POINTER_TYPE_P (TREE_TYPE (arg1)))
    return false;
  if (!POINTER_TYPE_P (TREE_TYPE (arg2)))
    return false;
  if (!INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return false;

  /* The return value of the memcmp must be used only in an equality
     comparison against zero; the transforms below lose the sign of
     the three-way result.  */
  tree res = gimple_call_lhs (stmt);

  if (!res || !use_in_zero_equality (res))
    return false;

  unsigned HOST_WIDE_INT leni;

  /* For a constant power-of-two length that fits in a word, replace
     the call by a single wide integer comparison.  */
  if (tree_fits_uhwi_p (len)
      && (leni = tree_to_uhwi (len)) <= GET_MODE_SIZE (word_mode)
      && pow2p_hwi (leni))
    {
      leni *= CHAR_TYPE_SIZE;  /* Length in bits from here on.  */
      unsigned align1 = get_pointer_alignment (arg1);
      unsigned align2 = get_pointer_alignment (arg2);
      unsigned align = MIN (align1, align2);
      scalar_int_mode mode;
      /* Only when an integer mode of that width exists and an
         unaligned access would not be slow on the target.  */
      if (int_mode_for_size (leni, 1).exists (&mode)
          && (align >= leni || !targetm.slow_unaligned_access (mode, align)))
        {
          location_t loc = gimple_location (stmt);
          tree type, off;
          type = build_nonstandard_integer_type (leni, 1);
          gcc_assert (known_eq (GET_MODE_BITSIZE (TYPE_MODE (type)), leni));
          tree ptrtype = build_pointer_type_for_mode (char_type_node,
                                                      ptr_mode, true);
          off = build_int_cst (ptrtype, 0);

          /* Create unaligned types if needed. */
          tree type1 = type, type2 = type;
          if (TYPE_ALIGN (type1) > align1)
            type1 = build_aligned_type (type1, align1);
          if (TYPE_ALIGN (type2) > align2)
            type2 = build_aligned_type (type2, align2);

          arg1 = build2_loc (loc, MEM_REF, type1, arg1, off);
          arg2 = build2_loc (loc, MEM_REF, type2, arg2, off);
          /* Fold reads of constant aggregates (e.g. string literals)
             to constants right away.  */
          tree tem1 = fold_const_aggregate_ref (arg1);
          if (tem1)
            arg1 = tem1;
          tree tem2 = fold_const_aggregate_ref (arg2);
          if (tem2)
            arg2 = tem2;
          res = fold_convert_loc (loc, TREE_TYPE (res),
                                  fold_build2_loc (loc, NE_EXPR,
                                                   boolean_type_node,
                                                   arg1, arg2));
          gimplify_and_update_call_from_tree (gsi_p, res);
          return true;
        }
    }

  /* Replace memcmp with memcmp_eq if the above fails. */
  if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) == BUILT_IN_MEMCMP_EQ)
    return false;
  if (!fold_before_rtl_expansion_p ())
    return false;
  gimple_call_set_fndecl (stmt, builtin_decl_explicit (BUILT_IN_MEMCMP_EQ));
  update_stmt (stmt);
  return true;
}
    2073              : 
/* Optimizes builtin memchrs for small constant sizes with a const string.
   GSI_P is the GSI for the call. STMT is the call itself.
   The result is only used in an equality comparison against zero, so
   the call can be folded into an OR of byte-equality tests:
     memchr ("str", c, n) ==/!= 0
   becomes
     ('s' == c) | ('t' == c) | ...  (n terms).
   Returns true if the call was changed.  */

static bool
simplify_builtin_memchr (gimple_stmt_iterator *gsi_p, gcall *stmt)
{
  /* The byte-by-byte expansion below assumes 8-bit bytes.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return false;

  if (gimple_call_num_args (stmt) != 3)
    return false;

  tree res = gimple_call_lhs (stmt);
  if (!res || !use_in_zero_equality (res))
    return false;

  /* The first argument must be the address of a string literal.  */
  tree ptr = gimple_call_arg (stmt, 0);
  if (TREE_CODE (ptr) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (ptr, 0)) != STRING_CST)
    return false;

  unsigned HOST_WIDE_INT slen
    = TREE_STRING_LENGTH (TREE_OPERAND (ptr, 0));
  /* It must be a non-empty string constant.  */
  if (slen < 2)
    return false;

  /* For -Os, only simplify strings with a single character.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt))
      && slen > 2)
    return false;

  tree size = gimple_call_arg (stmt, 2);
  /* Size must be a constant which is <= UNITS_PER_WORD and
     <= the string length.  */
  if (!tree_fits_uhwi_p (size))
    return false;

  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
  if (sz == 0 || sz > UNITS_PER_WORD || sz >= slen)
    return false;

  tree ch = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  if (!useless_type_conversion_p (char_type_node,
                                  TREE_TYPE (ch)))
    ch = fold_convert_loc (loc, char_type_node, ch);
  const char *p = TREE_STRING_POINTER (TREE_OPERAND (ptr, 0));
  unsigned int isize = sz;
  tree *op = XALLOCAVEC (tree, isize);
  /* Build one `p[i] == ch' test per searched byte.  */
  for (unsigned int i = 0; i < isize; i++)
    {
      op[i] = build_int_cst (char_type_node, p[i]);
      op[i] = fold_build2_loc (loc, EQ_EXPR, boolean_type_node,
                               op[i], ch);
    }
  /* OR the tests together right-to-left, accumulating into op[0].  */
  for (unsigned int i = isize - 1; i >= 1; i--)
    op[i - 1] = fold_convert_loc (loc, boolean_type_node,
                                  fold_build2_loc (loc,
                                                   BIT_IOR_EXPR,
                                                   boolean_type_node,
                                                   op[i - 1],
                                                   op[i]));
  res = fold_convert_loc (loc, TREE_TYPE (res), op[0]);
  gimplify_and_update_call_from_tree (gsi_p, res);
  return true;
}
    2142              : 
    2143              : /* *GSI_P is a GIMPLE_CALL to a builtin function.
    2144              :    Optimize
    2145              :    memcpy (p, "abcd", 4); // STMT1
    2146              :    memset (p + 4, ' ', 3); // STMT2
    2147              :    into
    2148              :    memcpy (p, "abcd   ", 7);
    2149              :    call if the latter can be stored by pieces during expansion.
    2150              : */
    2151              : 
    2152              : static bool
    2153       109249 : simplify_builtin_memcpy_memset (gimple_stmt_iterator *gsi_p, gcall *stmt2)
    2154              : {
    2155       109249 :   if (gimple_call_num_args (stmt2) != 3
    2156       109249 :       || gimple_call_lhs (stmt2)
    2157              :       || CHAR_BIT != 8
    2158       109249 :       || BITS_PER_UNIT != 8)
    2159              :     return false;
    2160              : 
    2161       208595 :   tree vuse = gimple_vuse (stmt2);
    2162       101690 :   if (vuse == NULL)
    2163              :     return false;
    2164       101676 :   gimple *stmt1 = SSA_NAME_DEF_STMT (vuse);
    2165              : 
    2166       101676 :   tree callee1;
    2167       101676 :   tree ptr1, src1, str1, off1, len1, lhs1;
    2168       101676 :   tree ptr2 = gimple_call_arg (stmt2, 0);
    2169       101676 :   tree val2 = gimple_call_arg (stmt2, 1);
    2170       101676 :   tree len2 = gimple_call_arg (stmt2, 2);
    2171       101676 :   tree diff, vdef, new_str_cst;
    2172       101676 :   gimple *use_stmt;
    2173       101676 :   unsigned int ptr1_align;
    2174       101676 :   unsigned HOST_WIDE_INT src_len;
    2175       101676 :   char *src_buf;
    2176       101676 :   use_operand_p use_p;
    2177              : 
    2178       101676 :   if (!tree_fits_shwi_p (val2)
    2179        97652 :       || !tree_fits_uhwi_p (len2)
    2180       164100 :       || compare_tree_int (len2, 1024) == 1)
    2181        44332 :     return false;
    2182              : 
    2183        57344 :   if (is_gimple_call (stmt1))
    2184              :     {
    2185              :       /* If first stmt is a call, it needs to be memcpy
    2186              :          or mempcpy, with string literal as second argument and
    2187              :          constant length.  */
    2188        29696 :       callee1 = gimple_call_fndecl (stmt1);
    2189        29696 :       if (callee1 == NULL_TREE
    2190        29580 :           || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
    2191        55817 :           || gimple_call_num_args (stmt1) != 3)
    2192              :         return false;
    2193        24835 :       if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
    2194        24835 :           && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
    2195              :         return false;
    2196        10867 :       ptr1 = gimple_call_arg (stmt1, 0);
    2197        10867 :       src1 = gimple_call_arg (stmt1, 1);
    2198        10867 :       len1 = gimple_call_arg (stmt1, 2);
    2199        10867 :       lhs1 = gimple_call_lhs (stmt1);
    2200        10867 :       if (!tree_fits_uhwi_p (len1))
    2201              :         return false;
    2202        10780 :       str1 = string_constant (src1, &off1, NULL, NULL);
    2203        10780 :       if (str1 == NULL_TREE)
    2204              :         return false;
    2205         4875 :       if (!tree_fits_uhwi_p (off1)
    2206         4875 :           || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
    2207         4875 :           || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
    2208         4875 :                                      - tree_to_uhwi (off1)) > 0
    2209         4875 :           || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
    2210        14625 :           || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
    2211         4875 :              != TYPE_MODE (char_type_node))
    2212            0 :         return false;
    2213              :     }
    2214        27648 :   else if (gimple_assign_single_p (stmt1))
    2215              :     {
    2216              :       /* Otherwise look for length 1 memcpy optimized into
    2217              :          assignment.  */
    2218        16915 :       ptr1 = gimple_assign_lhs (stmt1);
    2219        16915 :       src1 = gimple_assign_rhs1 (stmt1);
    2220        16915 :       if (TREE_CODE (ptr1) != MEM_REF
    2221         3520 :           || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
    2222        17949 :           || !tree_fits_shwi_p (src1))
    2223        16563 :         return false;
    2224          352 :       ptr1 = build_fold_addr_expr (ptr1);
    2225          352 :       STRIP_USELESS_TYPE_CONVERSION (ptr1);
    2226          352 :       callee1 = NULL_TREE;
    2227          352 :       len1 = size_one_node;
    2228          352 :       lhs1 = NULL_TREE;
    2229          352 :       off1 = size_zero_node;
    2230          352 :       str1 = NULL_TREE;
    2231              :     }
    2232              :   else
    2233              :     return false;
    2234              : 
    2235         5227 :   diff = constant_pointer_difference (ptr1, ptr2);
    2236         5227 :   if (diff == NULL && lhs1 != NULL)
    2237              :     {
    2238            7 :       diff = constant_pointer_difference (lhs1, ptr2);
    2239            7 :       if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
    2240            7 :           && diff != NULL)
    2241            7 :         diff = size_binop (PLUS_EXPR, diff,
    2242              :                            fold_convert (sizetype, len1));
    2243              :     }
    2244              :   /* If the difference between the second and first destination pointer
    2245              :      is not constant, or is bigger than memcpy length, bail out.  */
    2246         5227 :   if (diff == NULL
    2247         4344 :       || !tree_fits_uhwi_p (diff)
    2248         4344 :       || tree_int_cst_lt (len1, diff)
    2249         9327 :       || compare_tree_int (diff, 1024) == 1)
    2250         1127 :     return false;
    2251              : 
    2252              :   /* Use maximum of difference plus memset length and memcpy length
    2253              :      as the new memcpy length, if it is too big, bail out.  */
    2254         4100 :   src_len = tree_to_uhwi (diff);
    2255         4100 :   src_len += tree_to_uhwi (len2);
    2256         4100 :   if (src_len < tree_to_uhwi (len1))
    2257              :     src_len = tree_to_uhwi (len1);
    2258         4100 :   if (src_len > 1024)
    2259              :     return false;
    2260              : 
    2261              :   /* If mempcpy value is used elsewhere, bail out, as mempcpy
    2262              :      with bigger length will return different result.  */
    2263         4100 :   if (lhs1 != NULL_TREE
    2264          193 :       && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
    2265         4107 :       && (TREE_CODE (lhs1) != SSA_NAME
    2266            7 :           || !single_imm_use (lhs1, &use_p, &use_stmt)
    2267            7 :           || use_stmt != stmt2))
    2268            0 :     return false;
    2269              : 
    2270              :   /* If anything reads memory in between memcpy and memset
    2271              :      call, the modified memcpy call might change it.  */
    2272         4100 :   vdef = gimple_vdef (stmt1);
    2273         4100 :   if (vdef != NULL
    2274         4100 :       && (!single_imm_use (vdef, &use_p, &use_stmt)
    2275         3345 :           || use_stmt != stmt2))
    2276              :     return false;
    2277              : 
    2278         3345 :   ptr1_align = get_pointer_alignment (ptr1);
    2279              :   /* Construct the new source string literal.  */
    2280         3345 :   src_buf = XALLOCAVEC (char, src_len + 1);
    2281         3345 :   if (callee1)
    2282         3184 :     memcpy (src_buf,
    2283         3184 :             TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
    2284              :             tree_to_uhwi (len1));
    2285              :   else
    2286          161 :     src_buf[0] = tree_to_shwi (src1);
    2287         3345 :   memset (src_buf + tree_to_uhwi (diff),
    2288         3345 :           tree_to_shwi (val2), tree_to_uhwi (len2));
    2289         3345 :   src_buf[src_len] = '\0';
    2290              :   /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
    2291              :      handle embedded '\0's.  */
    2292         3345 :   if (strlen (src_buf) != src_len)
    2293              :     return false;
    2294         3255 :   rtl_profile_for_bb (gimple_bb (stmt2));
    2295              :   /* If the new memcpy wouldn't be emitted by storing the literal
    2296              :      by pieces, this optimization might enlarge .rodata too much,
    2297              :      as commonly used string literals couldn't be shared any
    2298              :      longer.  */
    2299         3255 :   if (!can_store_by_pieces (src_len,
    2300              :                             builtin_strncpy_read_str,
    2301              :                             src_buf, ptr1_align, false))
    2302              :     return false;
    2303              : 
    2304         2471 :   new_str_cst = build_string_literal (src_len, src_buf);
    2305         2471 :   if (callee1)
    2306              :     {
    2307              :       /* If STMT1 is a mem{,p}cpy call, adjust it and remove
    2308              :          memset call.  */
    2309         2344 :       if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
    2310            7 :         gimple_call_set_lhs (stmt1, NULL_TREE);
    2311         2344 :       gimple_call_set_arg (stmt1, 1, new_str_cst);
    2312         2344 :       gimple_call_set_arg (stmt1, 2,
    2313         2344 :                            build_int_cst (TREE_TYPE (len1), src_len));
    2314         2344 :       update_stmt (stmt1);
    2315         2344 :       unlink_stmt_vdef (stmt2);
    2316         2344 :       gsi_replace (gsi_p, gimple_build_nop (), false);
    2317         2344 :       fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
    2318         2344 :       release_defs (stmt2);
    2319         2344 :       if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
    2320              :         {
    2321            7 :           fwprop_invalidate_lattice (lhs1);
    2322            7 :           release_ssa_name (lhs1);
    2323              :         }
    2324         2344 :       return true;
    2325              :     }
    2326              :   else
    2327              :     {
    2328              :       /* Otherwise, if STMT1 is length 1 memcpy optimized into
    2329              :          assignment, remove STMT1 and change memset call into
    2330              :          memcpy call.  */
    2331          127 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
    2332              : 
    2333          127 :       if (!is_gimple_val (ptr1))
    2334           12 :         ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
    2335              :                                          true, GSI_SAME_STMT);
    2336          127 :       tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
    2337          127 :       gimple_call_set_fndecl (stmt2, fndecl);
    2338          127 :       gimple_call_set_fntype (stmt2,
    2339          127 :                               TREE_TYPE (fndecl));
    2340          127 :       gimple_call_set_arg (stmt2, 0, ptr1);
    2341          127 :       gimple_call_set_arg (stmt2, 1, new_str_cst);
    2342          127 :       gimple_call_set_arg (stmt2, 2,
    2343          127 :                            build_int_cst (TREE_TYPE (len2), src_len));
    2344          127 :       unlink_stmt_vdef (stmt1);
    2345          127 :       gsi_remove (&gsi, true);
    2346          127 :       fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
    2347          127 :       release_defs (stmt1);
    2348          127 :       update_stmt (stmt2);
    2349          127 :       return false;
    2350              :     }
    2351              : }
    2352              : 
    2353              : 
    2354              : /* Try to optimize out __builtin_stack_restore.  Optimize it out
    2355              :    if there is another __builtin_stack_restore in the same basic
    2356              :    block and no calls or ASM_EXPRs are in between, or if this block's
    2357              :    only outgoing edge is to EXIT_BLOCK and there are no calls or
    2358              :    ASM_EXPRs after this __builtin_stack_restore.
    2359              :    Note restore right before a noreturn function is not needed.
    2360              :    And skip some cheap calls that will most likely become an instruction.
    2361              :    Restoring the stack before a call is important to be able to keep
    2362              :    stack usage down so that call does not run out of stack.  */
    2363              : 
    2364              : 
    2365              : static bool
    2366        10404 : optimize_stack_restore (gimple_stmt_iterator *gsi, gimple *call)
    2367              : {
    2368        10404 :   if (!fold_before_rtl_expansion_p ())
    2369              :     return false;
    2370         2537 :   tree callee;
    2371         2537 :   gimple *stmt;
    2372              : 
    2373         2537 :   basic_block bb = gsi_bb (*gsi);
    2374              : 
    2375         2537 :   if (gimple_call_num_args (call) != 1
    2376         2537 :       || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
    2377         5074 :       || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    2378              :     return false;
    2379              : 
    2380         2537 :   gimple_stmt_iterator i = *gsi;
    2381         6366 :   for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    2382              :     {
    2383         4290 :       stmt = gsi_stmt (i);
    2384         4290 :       if (is_a<gasm*> (stmt))
    2385              :         return false;
    2386         4289 :       gcall *call = dyn_cast<gcall*>(stmt);
    2387         4289 :       if (!call)
    2388         3620 :         continue;
    2389              : 
    2390              :       /* We can remove the restore in front of noreturn
    2391              :          calls.  Since the restore will happen either
    2392              :          via an unwind/longjmp or not at all. */
    2393          669 :       if (gimple_call_noreturn_p (call))
    2394              :         break;
    2395              : 
    2396              :       /* Internal calls are ok, to bypass
    2397              :          check first since fndecl will be null. */
    2398          653 :       if (gimple_call_internal_p (call))
    2399            1 :         continue;
    2400              : 
    2401          652 :       callee = gimple_call_fndecl (call);
    2402              :       /* Non-builtin calls are not ok. */
    2403          652 :       if (!callee
    2404          652 :           || !fndecl_built_in_p (callee))
    2405              :         return false;
    2406              : 
    2407              :       /* Do not remove stack updates before strub leave.  */
    2408          576 :       if (fndecl_built_in_p (callee, BUILT_IN___STRUB_LEAVE)
    2409              :           /* Alloca calls are not ok either. */
    2410          576 :           || fndecl_builtin_alloc_p (callee))
    2411              :         return false;
    2412              : 
    2413          364 :       if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
    2414           52 :         goto second_stack_restore;
    2415              : 
    2416              :       /* If not a simple or inexpensive builtin, then it is not ok either. */
    2417          312 :       if (!is_simple_builtin (callee)
    2418          312 :           && !is_inexpensive_builtin (callee))
    2419              :         return false;
    2420              :     }
    2421              : 
    2422              :   /* Allow one successor of the exit block, or zero successors.  */
    2423         2092 :   switch (EDGE_COUNT (bb->succs))
    2424              :     {
    2425              :     case 0:
    2426              :       break;
    2427         2005 :     case 1:
    2428         2005 :       if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    2429              :         return false;
    2430              :       break;
    2431              :     default:
    2432              :       return false;
    2433              :     }
    2434         1735 :  second_stack_restore:
    2435              : 
    2436              :   /* If there's exactly one use, then zap the call to __builtin_stack_save.
    2437              :      If there are multiple uses, then the last one should remove the call.
    2438              :      In any case, whether the call to __builtin_stack_save can be removed
    2439              :      or not is irrelevant to removing the call to __builtin_stack_restore.  */
    2440         1735 :   if (has_single_use (gimple_call_arg (call, 0)))
    2441              :     {
    2442         1557 :       gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
    2443         1557 :       if (is_gimple_call (stack_save))
    2444              :         {
    2445         1555 :           callee = gimple_call_fndecl (stack_save);
    2446         1555 :           if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
    2447              :             {
    2448         1555 :               gimple_stmt_iterator stack_save_gsi;
    2449         1555 :               tree rhs;
    2450              : 
    2451         1555 :               stack_save_gsi = gsi_for_stmt (stack_save);
    2452         1555 :               rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
    2453         1555 :               replace_call_with_value (&stack_save_gsi, rhs);
    2454              :             }
    2455              :         }
    2456              :     }
    2457              : 
    2458              :   /* No effect, so the statement will be deleted.  */
    2459         1735 :   replace_call_with_value (gsi, NULL_TREE);
    2460         1735 :   return true;
    2461              : }
    2462              : 
    2463              : /* *GSI_P is a GIMPLE_CALL to a builtin function.
    2464              :    Optimize
    2465              :    memcpy (p, "abcd", 4);
    2466              :    memset (p + 4, ' ', 3);
    2467              :    into
    2468              :    memcpy (p, "abcd   ", 7);
    2469              :    call if the latter can be stored by pieces during expansion.
    2470              : 
    2471              :    Optimize
    2472              :    memchr ("abcd", a, 4) == 0;
    2473              :    or
    2474              :    memchr ("abcd", a, 4) != 0;
    2475              :    to
    2476              :    (a == 'a' || a == 'b' || a == 'c' || a == 'd') == 0
    2477              :    or
    2478              :    (a == 'a' || a == 'b' || a == 'c' || a == 'd') != 0
    2479              : 
    2480              :    Also canonicalize __atomic_fetch_op (p, x, y) op x
    2481              :    to __atomic_op_fetch (p, x, y) or
    2482              :    __atomic_op_fetch (p, x, y) iop x
    2483              :    to __atomic_fetch_op (p, x, y) when possible (also __sync).  */
    2484              : 
    2485              : static bool
    2486      6169916 : simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2, bool full_walk)
    2487              : {
    2488      6169916 :   gimple *stmt2 = gsi_stmt (*gsi_p);
    2489      6169916 :   enum built_in_function other_atomic = END_BUILTINS;
    2490      6169916 :   enum tree_code atomic_op = ERROR_MARK;
    2491              : 
    2492      6169916 :   switch (DECL_FUNCTION_CODE (callee2))
    2493              :     {
    2494        10404 :     case BUILT_IN_STACK_RESTORE:
    2495        10404 :       return optimize_stack_restore (gsi_p, as_a<gcall*>(stmt2));
    2496       465745 :     case BUILT_IN_MEMCMP:
    2497       465745 :     case BUILT_IN_MEMCMP_EQ:
    2498       465745 :       return simplify_builtin_memcmp (gsi_p, as_a<gcall*>(stmt2));
    2499        14313 :     case BUILT_IN_MEMCHR:
    2500        14313 :       return simplify_builtin_memchr (gsi_p, as_a<gcall*>(stmt2));
    2501              : 
    2502       109249 :     case BUILT_IN_MEMSET:
    2503       109249 :       if (gimple_call_num_args (stmt2) == 3)
    2504              :         {
    2505              :           /* Try to prop the zeroing/value of the memset to memcpy
    2506              :              if the dest is an address and the value is a constant. */
    2507       109249 :           optimize_aggr_zeroprop (stmt2, full_walk);
    2508              :         }
    2509       109249 :       return simplify_builtin_memcpy_memset (gsi_p, as_a<gcall*>(stmt2));
    2510              : 
    2511              :  #define CASE_ATOMIC(NAME, OTHER, OP) \
    2512              :     case BUILT_IN_##NAME##_1:                                           \
    2513              :     case BUILT_IN_##NAME##_2:                                           \
    2514              :     case BUILT_IN_##NAME##_4:                                           \
    2515              :     case BUILT_IN_##NAME##_8:                                           \
    2516              :     case BUILT_IN_##NAME##_16:                                          \
    2517              :       atomic_op = OP;                                                   \
    2518              :       other_atomic                                                      \
    2519              :         = (enum built_in_function) (BUILT_IN_##OTHER##_1                \
    2520              :                                     + (DECL_FUNCTION_CODE (callee2)     \
    2521              :                                        - BUILT_IN_##NAME##_1));         \
    2522              :       goto handle_atomic_fetch_op;
    2523              : 
    2524        48539 :     CASE_ATOMIC (ATOMIC_FETCH_ADD, ATOMIC_ADD_FETCH, PLUS_EXPR)
    2525         7125 :     CASE_ATOMIC (ATOMIC_FETCH_SUB, ATOMIC_SUB_FETCH, MINUS_EXPR)
    2526         2876 :     CASE_ATOMIC (ATOMIC_FETCH_AND, ATOMIC_AND_FETCH, BIT_AND_EXPR)
    2527         2895 :     CASE_ATOMIC (ATOMIC_FETCH_XOR, ATOMIC_XOR_FETCH, BIT_XOR_EXPR)
    2528         3823 :     CASE_ATOMIC (ATOMIC_FETCH_OR, ATOMIC_OR_FETCH, BIT_IOR_EXPR)
    2529              : 
    2530         2365 :     CASE_ATOMIC (SYNC_FETCH_AND_ADD, SYNC_ADD_AND_FETCH, PLUS_EXPR)
    2531         2004 :     CASE_ATOMIC (SYNC_FETCH_AND_SUB, SYNC_SUB_AND_FETCH, MINUS_EXPR)
    2532         1876 :     CASE_ATOMIC (SYNC_FETCH_AND_AND, SYNC_AND_AND_FETCH, BIT_AND_EXPR)
    2533         2144 :     CASE_ATOMIC (SYNC_FETCH_AND_XOR, SYNC_XOR_AND_FETCH, BIT_XOR_EXPR)
    2534         1987 :     CASE_ATOMIC (SYNC_FETCH_AND_OR, SYNC_OR_AND_FETCH, BIT_IOR_EXPR)
    2535              : 
    2536        14351 :     CASE_ATOMIC (ATOMIC_ADD_FETCH, ATOMIC_FETCH_ADD, MINUS_EXPR)
    2537         8528 :     CASE_ATOMIC (ATOMIC_SUB_FETCH, ATOMIC_FETCH_SUB, PLUS_EXPR)
    2538         2380 :     CASE_ATOMIC (ATOMIC_XOR_FETCH, ATOMIC_FETCH_XOR, BIT_XOR_EXPR)
    2539              : 
    2540          846 :     CASE_ATOMIC (SYNC_ADD_AND_FETCH, SYNC_FETCH_AND_ADD, MINUS_EXPR)
    2541          732 :     CASE_ATOMIC (SYNC_SUB_AND_FETCH, SYNC_FETCH_AND_SUB, PLUS_EXPR)
    2542          800 :     CASE_ATOMIC (SYNC_XOR_AND_FETCH, SYNC_FETCH_AND_XOR, BIT_XOR_EXPR)
    2543              : 
    2544              : #undef CASE_ATOMIC
    2545              : 
    2546       103271 :     handle_atomic_fetch_op:
    2547       103271 :       if (gimple_call_num_args (stmt2) >= 2 && gimple_call_lhs (stmt2))
    2548              :         {
    2549        59826 :           tree lhs2 = gimple_call_lhs (stmt2), lhsc = lhs2;
    2550        59826 :           tree arg = gimple_call_arg (stmt2, 1);
    2551        59826 :           gimple *use_stmt, *cast_stmt = NULL;
    2552        59826 :           use_operand_p use_p;
    2553        59826 :           tree ndecl = builtin_decl_explicit (other_atomic);
    2554              : 
    2555        59826 :           if (ndecl == NULL_TREE || !single_imm_use (lhs2, &use_p, &use_stmt))
    2556              :             break;
    2557              : 
    2558        58697 :           if (gimple_assign_cast_p (use_stmt))
    2559              :             {
    2560        31212 :               cast_stmt = use_stmt;
    2561        31212 :               lhsc = gimple_assign_lhs (cast_stmt);
    2562        31212 :               if (lhsc == NULL_TREE
    2563        31212 :                   || !INTEGRAL_TYPE_P (TREE_TYPE (lhsc))
    2564        30661 :                   || (TYPE_PRECISION (TREE_TYPE (lhsc))
    2565        30661 :                       != TYPE_PRECISION (TREE_TYPE (lhs2)))
    2566        60347 :                   || !single_imm_use (lhsc, &use_p, &use_stmt))
    2567              :                 {
    2568         2605 :                   use_stmt = cast_stmt;
    2569         2605 :                   cast_stmt = NULL;
    2570         2605 :                   lhsc = lhs2;
    2571              :                 }
    2572              :             }
    2573              : 
    2574        58697 :           bool ok = false;
    2575        58697 :           tree oarg = NULL_TREE;
    2576        58697 :           enum tree_code ccode = ERROR_MARK;
    2577        58697 :           tree crhs1 = NULL_TREE, crhs2 = NULL_TREE;
    2578        58697 :           if (is_gimple_assign (use_stmt)
    2579        58697 :               && gimple_assign_rhs_code (use_stmt) == atomic_op)
    2580              :             {
    2581         1416 :               if (gimple_assign_rhs1 (use_stmt) == lhsc)
    2582         1016 :                 oarg = gimple_assign_rhs2 (use_stmt);
    2583          400 :               else if (atomic_op != MINUS_EXPR)
    2584              :                 oarg = gimple_assign_rhs1 (use_stmt);
    2585              :             }
    2586        57281 :           else if (atomic_op == MINUS_EXPR
    2587        13188 :                    && is_gimple_assign (use_stmt)
    2588         3612 :                    && gimple_assign_rhs_code (use_stmt) == PLUS_EXPR
    2589          199 :                    && TREE_CODE (arg) == INTEGER_CST
    2590        57480 :                    && (TREE_CODE (gimple_assign_rhs2 (use_stmt))
    2591              :                        == INTEGER_CST))
    2592              :             {
    2593          183 :               tree a = fold_convert (TREE_TYPE (lhs2), arg);
    2594          183 :               tree o = fold_convert (TREE_TYPE (lhs2),
    2595              :                                      gimple_assign_rhs2 (use_stmt));
    2596          183 :               if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
    2597              :                 ok = true;
    2598              :             }
    2599        57098 :           else if (atomic_op == BIT_AND_EXPR || atomic_op == BIT_IOR_EXPR)
    2600              :             ;
    2601        51856 :           else if (gimple_code (use_stmt) == GIMPLE_COND)
    2602              :             {
    2603        19457 :               ccode = gimple_cond_code (use_stmt);
    2604        19457 :               crhs1 = gimple_cond_lhs (use_stmt);
    2605        19457 :               crhs2 = gimple_cond_rhs (use_stmt);
    2606              :             }
    2607        32399 :           else if (is_gimple_assign (use_stmt))
    2608              :             {
    2609         9525 :               if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
    2610              :                 {
    2611         3941 :                   ccode = gimple_assign_rhs_code (use_stmt);
    2612         3941 :                   crhs1 = gimple_assign_rhs1 (use_stmt);
    2613         3941 :                   crhs2 = gimple_assign_rhs2 (use_stmt);
    2614              :                 }
    2615         5584 :               else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
    2616              :                 {
    2617            0 :                   tree cond = gimple_assign_rhs1 (use_stmt);
    2618            0 :                   if (COMPARISON_CLASS_P (cond))
    2619              :                     {
    2620            0 :                       ccode = TREE_CODE (cond);
    2621            0 :                       crhs1 = TREE_OPERAND (cond, 0);
    2622            0 :                       crhs2 = TREE_OPERAND (cond, 1);
    2623              :                     }
    2624              :                 }
    2625              :             }
    2626        24414 :           if (ccode == EQ_EXPR || ccode == NE_EXPR)
    2627              :             {
    2628              :               /* Deal with x - y == 0 or x ^ y == 0
    2629              :                  being optimized into x == y and x + cst == 0
    2630              :                  into x == -cst.  */
    2631        22214 :               tree o = NULL_TREE;
    2632        22214 :               if (crhs1 == lhsc)
    2633              :                 o = crhs2;
    2634          133 :               else if (crhs2 == lhsc)
    2635          133 :                 o = crhs1;
    2636        22214 :               if (o && atomic_op != PLUS_EXPR)
    2637              :                 oarg = o;
    2638        10057 :               else if (o
    2639        10057 :                        && TREE_CODE (o) == INTEGER_CST
    2640        10057 :                        && TREE_CODE (arg) == INTEGER_CST)
    2641              :                 {
    2642         9347 :                   tree a = fold_convert (TREE_TYPE (lhs2), arg);
    2643         9347 :                   o = fold_convert (TREE_TYPE (lhs2), o);
    2644         9347 :                   if (wi::to_wide (a) == wi::neg (wi::to_wide (o)))
    2645        58697 :                     ok = true;
    2646              :                 }
    2647              :             }
    2648        58697 :           if (oarg && !ok)
    2649              :             {
    2650        13573 :               if (operand_equal_p (arg, oarg, 0))
    2651              :                 ok = true;
    2652        12244 :               else if (TREE_CODE (arg) == SSA_NAME
    2653         2203 :                        && TREE_CODE (oarg) == SSA_NAME)
    2654              :                 {
    2655          745 :                   tree oarg2 = oarg;
    2656          745 :                   if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (oarg)))
    2657              :                     {
    2658          104 :                       gimple *g = SSA_NAME_DEF_STMT (oarg);
    2659          104 :                       oarg2 = gimple_assign_rhs1 (g);
    2660          104 :                       if (TREE_CODE (oarg2) != SSA_NAME
    2661          104 :                           || !INTEGRAL_TYPE_P (TREE_TYPE (oarg2))
    2662          208 :                           || (TYPE_PRECISION (TREE_TYPE (oarg2))
    2663          104 :                               != TYPE_PRECISION (TREE_TYPE (oarg))))
    2664              :                         oarg2 = oarg;
    2665              :                     }
    2666          745 :                   if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg)))
    2667              :                     {
    2668          544 :                       gimple *g = SSA_NAME_DEF_STMT (arg);
    2669          544 :                       tree rhs1 = gimple_assign_rhs1 (g);
    2670              :                       /* Handle e.g.
    2671              :                          x.0_1 = (long unsigned int) x_4(D);
    2672              :                          _2 = __atomic_fetch_add_8 (&vlong, x.0_1, 0);
    2673              :                          _3 = (long int) _2;
    2674              :                          _7 = x_4(D) + _3;  */
    2675          544 :                       if (rhs1 == oarg || rhs1 == oarg2)
    2676              :                         ok = true;
    2677              :                       /* Handle e.g.
    2678              :                          x.18_1 = (short unsigned int) x_5(D);
    2679              :                          _2 = (int) x.18_1;
    2680              :                          _3 = __atomic_fetch_xor_2 (&vshort, _2, 0);
    2681              :                          _4 = (short int) _3;
    2682              :                          _8 = x_5(D) ^ _4;
    2683              :                          This happens only for char/short.  */
    2684          160 :                       else if (TREE_CODE (rhs1) == SSA_NAME
    2685          160 :                                && INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
    2686          320 :                                && (TYPE_PRECISION (TREE_TYPE (rhs1))
    2687          160 :                                    == TYPE_PRECISION (TREE_TYPE (lhs2))))
    2688              :                         {
    2689          160 :                           g = SSA_NAME_DEF_STMT (rhs1);
    2690          160 :                           if (gimple_assign_cast_p (g)
    2691          160 :                               && (gimple_assign_rhs1 (g) == oarg
    2692            0 :                                   || gimple_assign_rhs1 (g) == oarg2))
    2693              :                             ok = true;
    2694              :                         }
    2695              :                     }
    2696          745 :                   if (!ok && arg == oarg2)
    2697              :                     /* Handle e.g.
    2698              :                        _1 = __sync_fetch_and_add_4 (&v, x_5(D));
    2699              :                        _2 = (int) _1;
    2700              :                        x.0_3 = (int) x_5(D);
    2701              :                        _7 = _2 + x.0_3;  */
    2702              :                     ok = true;
    2703              :                 }
    2704              :             }
    2705              : 
    2706        57368 :           if (ok)
    2707              :             {
    2708         2546 :               tree new_lhs = make_ssa_name (TREE_TYPE (lhs2));
    2709         2546 :               gimple_call_set_lhs (stmt2, new_lhs);
    2710         2546 :               gimple_call_set_fndecl (stmt2, ndecl);
    2711         2546 :               gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
    2712         2546 :               if (ccode == ERROR_MARK)
    2713         2000 :                 gimple_assign_set_rhs_with_ops (&gsi, cast_stmt
    2714              :                                                 ? NOP_EXPR : SSA_NAME,
    2715              :                                                 new_lhs);
    2716              :               else
    2717              :                 {
    2718         1323 :                   crhs1 = new_lhs;
    2719         1323 :                   crhs2 = build_zero_cst (TREE_TYPE (lhs2));
    2720         1323 :                   if (gimple_code (use_stmt) == GIMPLE_COND)
    2721              :                     {
    2722          984 :                       gcond *cond_stmt = as_a <gcond *> (use_stmt);
    2723          984 :                       gimple_cond_set_lhs (cond_stmt, crhs1);
    2724          984 :                       gimple_cond_set_rhs (cond_stmt, crhs2);
    2725              :                     }
    2726          339 :                   else if (gimple_assign_rhs_class (use_stmt)
    2727              :                            == GIMPLE_BINARY_RHS)
    2728              :                     {
    2729          339 :                       gimple_assign_set_rhs1 (use_stmt, crhs1);
    2730          339 :                       gimple_assign_set_rhs2 (use_stmt, crhs2);
    2731              :                     }
    2732              :                   else
    2733              :                     {
    2734            0 :                       gcc_checking_assert (gimple_assign_rhs_code (use_stmt)
    2735              :                                            == COND_EXPR);
    2736            0 :                       tree cond = build2 (ccode, boolean_type_node,
    2737              :                                           crhs1, crhs2);
    2738            0 :                       gimple_assign_set_rhs1 (use_stmt, cond);
    2739              :                     }
    2740              :                 }
    2741         2546 :               update_stmt (use_stmt);
    2742         2546 :               if (atomic_op != BIT_AND_EXPR
    2743         2546 :                   && atomic_op != BIT_IOR_EXPR
    2744         2546 :                   && !stmt_ends_bb_p (stmt2))
    2745              :                 {
    2746              :                   /* For the benefit of debug stmts, emit stmt(s) to set
    2747              :                      lhs2 to the value it had from the new builtin.
    2748              :                      E.g. if it was previously:
    2749              :                      lhs2 = __atomic_fetch_add_8 (ptr, arg, 0);
    2750              :                      emit:
    2751              :                      new_lhs = __atomic_add_fetch_8 (ptr, arg, 0);
    2752              :                      lhs2 = new_lhs - arg;
    2753              :                      We also keep cast_stmt if any in the IL for
    2754              :                      the same reasons.
    2755              :                      These stmts will be DCEd later and proper debug info
    2756              :                      will be emitted.
    2757              :                      This is only possible for reversible operations
    2758              :                      (+/-/^) and without -fnon-call-exceptions.  */
    2759         2205 :                   gsi = gsi_for_stmt (stmt2);
    2760         2205 :                   tree type = TREE_TYPE (lhs2);
    2761         2205 :                   if (TREE_CODE (arg) == INTEGER_CST)
    2762         1623 :                     arg = fold_convert (type, arg);
    2763          582 :                   else if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
    2764              :                     {
    2765            0 :                       tree narg = make_ssa_name (type);
    2766            0 :                       gimple *g = gimple_build_assign (narg, NOP_EXPR, arg);
    2767            0 :                       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    2768            0 :                       arg = narg;
    2769              :                     }
    2770         2205 :                   enum tree_code rcode;
    2771         2205 :                   switch (atomic_op)
    2772              :                     {
    2773              :                     case PLUS_EXPR: rcode = MINUS_EXPR; break;
    2774          727 :                     case MINUS_EXPR: rcode = PLUS_EXPR; break;
    2775          492 :                     case BIT_XOR_EXPR: rcode = atomic_op; break;
    2776            0 :                     default: gcc_unreachable ();
    2777              :                     }
    2778         2205 :                   gimple *g = gimple_build_assign (lhs2, rcode, new_lhs, arg);
    2779         2205 :                   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    2780         2205 :                   update_stmt (stmt2);
    2781              :                 }
    2782              :               else
    2783              :                 {
    2784              :                   /* For e.g.
    2785              :                      lhs2 = __atomic_fetch_or_8 (ptr, arg, 0);
    2786              :                      after we change it to
    2787              :                      new_lhs = __atomic_or_fetch_8 (ptr, arg, 0);
    2788              :                      there is no way to find out the lhs2 value (i.e.
    2789              :                      what the atomic memory contained before the operation),
    2790              :                      values of some bits are lost.  We have checked earlier
    2791              :                      that we don't have any non-debug users except for what
    2792              :                      we are already changing, so we need to reset the
    2793              :                      debug stmts and remove the cast_stmt if any.  */
    2794          341 :                   imm_use_iterator iter;
    2795          676 :                   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs2)
    2796          335 :                     if (use_stmt != cast_stmt)
    2797              :                       {
    2798          168 :                         gcc_assert (is_gimple_debug (use_stmt));
    2799          168 :                         gimple_debug_bind_reset_value (use_stmt);
    2800          168 :                         update_stmt (use_stmt);
    2801          341 :                       }
    2802          341 :                   if (cast_stmt)
    2803              :                     {
    2804          167 :                       gsi = gsi_for_stmt (cast_stmt);
    2805          167 :                       gsi_remove (&gsi, true);
    2806              :                     }
    2807          341 :                   update_stmt (stmt2);
    2808          341 :                   release_ssa_name (lhs2);
    2809              :                 }
    2810              :             }
    2811              :         }
    2812              :       break;
    2813              : 
    2814              :     default:
    2815              :       break;
    2816              :     }
    2817              :   return false;
    2818              : }
    2819              : 
    2820              : /* Given a ssa_name in NAME see if it was defined by an assignment and
    2821              :    set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
    2822              :    to the second operand on the rhs. */
    2823              : 
    2824              : static inline void
    2825     17206921 : defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
    2826              : {
    2827     17206921 :   gimple *def;
    2828     17206921 :   enum tree_code code1;
    2829     17206921 :   tree arg11;
    2830     17206921 :   tree arg21;
    2831     17206921 :   tree arg31;
    2832     17206921 :   enum gimple_rhs_class grhs_class;
    2833              : 
    2834     17206921 :   code1 = TREE_CODE (name);
    2835     17206921 :   arg11 = name;
    2836     17206921 :   arg21 = NULL_TREE;
    2837     17206921 :   arg31 = NULL_TREE;
    2838     17206921 :   grhs_class = get_gimple_rhs_class (code1);
    2839              : 
    2840     17206921 :   if (code1 == SSA_NAME)
    2841              :     {
    2842     11464137 :       def = SSA_NAME_DEF_STMT (name);
    2843              : 
    2844     11464137 :       if (def && is_gimple_assign (def)
    2845     18565833 :           && can_propagate_from (def))
    2846              :         {
    2847      4889147 :           code1 = gimple_assign_rhs_code (def);
    2848      4889147 :           arg11 = gimple_assign_rhs1 (def);
    2849      4889147 :           arg21 = gimple_assign_rhs2 (def);
    2850      4889147 :           arg31 = gimple_assign_rhs3 (def);
    2851              :         }
    2852              :     }
    2853      5742784 :   else if (grhs_class != GIMPLE_SINGLE_RHS)
    2854            0 :     code1 = ERROR_MARK;
    2855              : 
    2856     17206921 :   *code = code1;
    2857     17206921 :   *arg1 = arg11;
    2858     17206921 :   if (arg2)
    2859     17189685 :     *arg2 = arg21;
    2860     17206921 :   if (arg31)
    2861         2211 :     *code = ERROR_MARK;
    2862     17206921 : }
    2863              : 
    2864              : 
    2865              : /* Recognize rotation patterns.  Return true if a transformation
    2866              :    applied, otherwise return false.
    2867              : 
    2868              :    We are looking for X with unsigned type T with bitsize B, OP being
    2869              :    +, | or ^, some type T2 wider than T.  For:
    2870              :    (X << CNT1) OP (X >> CNT2)                               iff CNT1 + CNT2 == B
    2871              :    ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
    2872              : 
    2873              :    transform these into:
    2874              :    X r<< CNT1
    2875              : 
    2876              :    Or for:
    2877              :    (X << Y) OP (X >> (B - Y))
    2878              :    (X << (int) Y) OP (X >> (int) (B - Y))
    2879              :    ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
    2880              :    ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
    2881              :    (X << Y) | (X >> ((-Y) & (B - 1)))
    2882              :    (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
    2883              :    ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
    2884              :    ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
    2885              : 
    2886              :    transform these into (last 2 only if ranger can prove Y < B
    2887              :    or Y = N * B):
    2888              :    X r<< Y
    2889              :    or
     2890              :    X r<< (Y & (B - 1))
    2891              :    The latter for the forms with T2 wider than T if ranger can't prove Y < B.
    2892              : 
    2893              :    Or for:
    2894              :    (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
    2895              :    (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
    2896              :    ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
    2897              :    ((T) ((T2) X << (int) (Y & (B - 1)))) \
    2898              :      | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
    2899              : 
    2900              :    transform these into:
    2901              :    X r<< (Y & (B - 1))
    2902              : 
    2903              :    Note, in the patterns with T2 type, the type of OP operands
    2904              :    might be even a signed type, but should have precision B.
    2905              :    Expressions with & (B - 1) should be recognized only if B is
    2906              :    a power of 2.  */
    2907              : 
    2908              : static bool
    2909     10110852 : simplify_rotate (gimple_stmt_iterator *gsi)
    2910              : {
    2911     10110852 :   gimple *stmt = gsi_stmt (*gsi);
    2912     10110852 :   tree arg[2], rtype, rotcnt = NULL_TREE;
    2913     10110852 :   tree def_arg1[2], def_arg2[2];
    2914     10110852 :   enum tree_code def_code[2];
    2915     10110852 :   tree lhs;
    2916     10110852 :   int i;
    2917     10110852 :   bool swapped_p = false;
    2918     10110852 :   gimple *g;
    2919     10110852 :   gimple *def_arg_stmt[2] = { NULL, NULL };
    2920     10110852 :   int wider_prec = 0;
    2921     10110852 :   bool add_masking = false;
    2922              : 
    2923     10110852 :   arg[0] = gimple_assign_rhs1 (stmt);
    2924     10110852 :   arg[1] = gimple_assign_rhs2 (stmt);
    2925     10110852 :   rtype = TREE_TYPE (arg[0]);
    2926              : 
    2927              :   /* Only create rotates in complete modes.  Other cases are not
    2928              :      expanded properly.  */
    2929     10110852 :   if (!INTEGRAL_TYPE_P (rtype)
    2930     10110852 :       || !type_has_mode_precision_p (rtype))
    2931      1557182 :     return false;
    2932              : 
    2933     25661010 :   for (i = 0; i < 2; i++)
    2934              :     {
    2935     17107340 :       defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
    2936     17107340 :       if (TREE_CODE (arg[i]) == SSA_NAME)
    2937     11364556 :         def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
    2938              :     }
    2939              : 
    2940              :   /* Look through narrowing (or same precision) conversions.  */
    2941      7603726 :   if (CONVERT_EXPR_CODE_P (def_code[0])
    2942       949944 :       && CONVERT_EXPR_CODE_P (def_code[1])
    2943       139619 :       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
    2944       116495 :       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
    2945       109523 :       && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
    2946       109523 :          == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
    2947        63835 :       && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
    2948        44201 :       && has_single_use (arg[0])
    2949      8586543 :       && has_single_use (arg[1]))
    2950              :     {
    2951        28557 :       wider_prec = TYPE_PRECISION (TREE_TYPE (def_arg1[0]));
    2952        85671 :       for (i = 0; i < 2; i++)
    2953              :         {
    2954        57114 :           arg[i] = def_arg1[i];
    2955        57114 :           defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
    2956        57114 :           if (TREE_CODE (arg[i]) == SSA_NAME)
    2957        57114 :             def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
    2958              :         }
    2959              :     }
    2960              :   else
    2961              :     {
    2962              :       /* Handle signed rotate; the RSHIFT_EXPR has to be done
    2963              :          in unsigned type but LSHIFT_EXPR could be signed.  */
    2964      8525113 :       i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
    2965      7586904 :       if (CONVERT_EXPR_CODE_P (def_code[i])
    2966       938209 :           && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
    2967        28142 :           && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
    2968        27058 :           && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
    2969      8528463 :           && has_single_use (arg[i]))
    2970              :         {
    2971         1995 :           arg[i] = def_arg1[i];
    2972         1995 :           defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
    2973         1995 :           if (TREE_CODE (arg[i]) == SSA_NAME)
    2974         1995 :             def_arg_stmt[i] = SSA_NAME_DEF_STMT (arg[i]);
    2975              :         }
    2976              :     }
    2977              : 
    2978              :   /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
    2979      8751170 :   for (i = 0; i < 2; i++)
    2980      8726579 :     if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
    2981              :       return false;
    2982       237416 :     else if (!has_single_use (arg[i]))
    2983              :       return false;
    2984        24591 :   if (def_code[0] == def_code[1])
    2985              :     return false;
    2986              : 
    2987              :   /* If we've looked through narrowing conversions before, look through
    2988              :      widening conversions from unsigned type with the same precision
    2989              :      as rtype here.  */
    2990        20287 :   if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    2991        19348 :     for (i = 0; i < 2; i++)
    2992              :       {
    2993        12900 :         tree tem;
    2994        12900 :         enum tree_code code;
    2995        12900 :         defcodefor_name (def_arg1[i], &code, &tem, NULL);
    2996            4 :         if (!CONVERT_EXPR_CODE_P (code)
    2997        12896 :             || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
    2998        25796 :             || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
    2999            4 :           return false;
    3000        12896 :         def_arg1[i] = tem;
    3001              :       }
    3002              :   /* Both shifts have to use the same first operand.  */
    3003        20283 :   if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
    3004        32212 :       || !types_compatible_p (TREE_TYPE (def_arg1[0]),
    3005        11929 :                               TREE_TYPE (def_arg1[1])))
    3006              :     {
    3007         8354 :       if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
    3008         8354 :            != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
    3009         8354 :           || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
    3010         8354 :               == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
    3011         8330 :         return false;
    3012              : 
    3013              :       /* Handle signed rotate; the RSHIFT_EXPR has to be done
    3014              :          in unsigned type but LSHIFT_EXPR could be signed.  */
    3015          540 :       i = def_code[0] != RSHIFT_EXPR;
    3016          540 :       if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
    3017              :         return false;
    3018              : 
    3019          507 :       tree tem;
    3020          507 :       enum tree_code code;
    3021          507 :       defcodefor_name (def_arg1[i], &code, &tem, NULL);
    3022          304 :       if (!CONVERT_EXPR_CODE_P (code)
    3023          203 :           || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
    3024          710 :           || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
    3025              :         return false;
    3026          194 :       def_arg1[i] = tem;
    3027          194 :       if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
    3028          218 :           || !types_compatible_p (TREE_TYPE (def_arg1[0]),
    3029           24 :                                   TREE_TYPE (def_arg1[1])))
    3030          170 :         return false;
    3031              :     }
    3032        11929 :   else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    3033              :     return false;
    3034              : 
    3035              :   /* CNT1 + CNT2 == B case above.  */
    3036        10698 :   if (tree_fits_uhwi_p (def_arg2[0])
    3037         1210 :       && tree_fits_uhwi_p (def_arg2[1])
    3038        10698 :       && tree_to_uhwi (def_arg2[0])
    3039         1210 :          + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    3040              :     rotcnt = def_arg2[0];
    3041         9768 :   else if (TREE_CODE (def_arg2[0]) != SSA_NAME
    3042         9488 :            || TREE_CODE (def_arg2[1]) != SSA_NAME)
    3043              :     return false;
    3044              :   else
    3045              :     {
    3046         9488 :       tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
    3047         9488 :       enum tree_code cdef_code[2];
    3048         9488 :       gimple *def_arg_alt_stmt[2] = { NULL, NULL };
    3049         9488 :       int check_range = 0;
    3050         9488 :       gimple *check_range_stmt = NULL;
    3051              :       /* Look through conversion of the shift count argument.
    3052              :          The C/C++ FE cast any shift count argument to integer_type_node.
    3053              :          The only problem might be if the shift count type maximum value
    3054              :          is equal or smaller than number of bits in rtype.  */
    3055        28464 :       for (i = 0; i < 2; i++)
    3056              :         {
    3057        18976 :           def_arg2_alt[i] = def_arg2[i];
    3058        18976 :           defcodefor_name (def_arg2[i], &cdef_code[i],
    3059              :                            &cdef_arg1[i], &cdef_arg2[i]);
    3060        14716 :           if (CONVERT_EXPR_CODE_P (cdef_code[i])
    3061         4260 :               && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
    3062         4260 :               && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
    3063         8520 :                  > floor_log2 (TYPE_PRECISION (rtype))
    3064        23236 :               && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
    3065              :             {
    3066         4260 :               def_arg2_alt[i] = cdef_arg1[i];
    3067         4260 :               if (TREE_CODE (def_arg2[i]) == SSA_NAME)
    3068         4260 :                 def_arg_alt_stmt[i] = SSA_NAME_DEF_STMT (def_arg2[i]);
    3069         4260 :               defcodefor_name (def_arg2_alt[i], &cdef_code[i],
    3070              :                                &cdef_arg1[i], &cdef_arg2[i]);
    3071              :             }
    3072              :           else
    3073        14716 :             def_arg_alt_stmt[i] = def_arg_stmt[i];
    3074              :         }
    3075        25796 :       for (i = 0; i < 2; i++)
    3076              :         /* Check for one shift count being Y and the other B - Y,
    3077              :            with optional casts.  */
    3078        18625 :         if (cdef_code[i] == MINUS_EXPR
    3079          862 :             && tree_fits_shwi_p (cdef_arg1[i])
    3080          862 :             && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
    3081        19447 :             && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
    3082              :           {
    3083          822 :             tree tem;
    3084          822 :             enum tree_code code;
    3085              : 
    3086          822 :             if (cdef_arg2[i] == def_arg2[1 - i]
    3087          472 :                 || cdef_arg2[i] == def_arg2_alt[1 - i])
    3088              :               {
    3089          350 :                 rotcnt = cdef_arg2[i];
    3090          350 :                 check_range = -1;
    3091          350 :                 if (cdef_arg2[i] == def_arg2[1 - i])
    3092          350 :                   check_range_stmt = def_arg_stmt[1 - i];
    3093              :                 else
    3094            0 :                   check_range_stmt = def_arg_alt_stmt[1 - i];
    3095          806 :                 break;
    3096              :               }
    3097          472 :             defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
    3098           16 :             if (CONVERT_EXPR_CODE_P (code)
    3099          456 :                 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
    3100          456 :                 && TYPE_PRECISION (TREE_TYPE (tem))
    3101          912 :                    > floor_log2 (TYPE_PRECISION (rtype))
    3102          456 :                 && type_has_mode_precision_p (TREE_TYPE (tem))
    3103          928 :                 && (tem == def_arg2[1 - i]
    3104          288 :                     || tem == def_arg2_alt[1 - i]))
    3105              :               {
    3106          456 :                 rotcnt = tem;
    3107          456 :                 check_range = -1;
    3108          456 :                 if (tem == def_arg2[1 - i])
    3109          168 :                   check_range_stmt = def_arg_stmt[1 - i];
    3110              :                 else
    3111          288 :                   check_range_stmt = def_arg_alt_stmt[1 - i];
    3112              :                 break;
    3113              :               }
    3114              :           }
    3115              :         /* The above sequence isn't safe for Y being 0,
    3116              :            because then one of the shifts triggers undefined behavior.
    3117              :            This alternative is safe even for rotation count of 0.
    3118              :            One shift count is Y and the other (-Y) & (B - 1).
    3119              :            Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
    3120        17803 :         else if (cdef_code[i] == BIT_AND_EXPR
    3121        28724 :                  && pow2p_hwi (TYPE_PRECISION (rtype))
    3122        12416 :                  && tree_fits_shwi_p (cdef_arg2[i])
    3123        24832 :                  && tree_to_shwi (cdef_arg2[i])
    3124        12416 :                     == TYPE_PRECISION (rtype) - 1
    3125        12356 :                  && TREE_CODE (cdef_arg1[i]) == SSA_NAME
    3126        30159 :                  && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
    3127              :           {
    3128         2296 :             tree tem;
    3129         2296 :             enum tree_code code;
    3130              : 
    3131         2296 :             defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
    3132         2099 :             if (CONVERT_EXPR_CODE_P (code)
    3133          197 :                 && INTEGRAL_TYPE_P (TREE_TYPE (tem))
    3134          197 :                 && TYPE_PRECISION (TREE_TYPE (tem))
    3135          394 :                    > floor_log2 (TYPE_PRECISION (rtype))
    3136         2493 :                 && type_has_mode_precision_p (TREE_TYPE (tem)))
    3137          197 :               defcodefor_name (tem, &code, &tem, NULL);
    3138              : 
    3139         2296 :             if (code == NEGATE_EXPR)
    3140              :               {
    3141         1525 :                 if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
    3142              :                   {
    3143          854 :                     rotcnt = tem;
    3144          854 :                     check_range = 1;
    3145          854 :                     if (tem == def_arg2[1 - i])
    3146          846 :                       check_range_stmt = def_arg_stmt[1 - i];
    3147              :                     else
    3148            8 :                       check_range_stmt = def_arg_alt_stmt[1 - i];
    3149         1511 :                     break;
    3150              :                   }
    3151          671 :                 tree tem2;
    3152          671 :                 defcodefor_name (tem, &code, &tem2, NULL);
    3153          237 :                 if (CONVERT_EXPR_CODE_P (code)
    3154          434 :                     && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
    3155          434 :                     && TYPE_PRECISION (TREE_TYPE (tem2))
    3156          868 :                        > floor_log2 (TYPE_PRECISION (rtype))
    3157         1105 :                     && type_has_mode_precision_p (TREE_TYPE (tem2)))
    3158              :                   {
    3159          434 :                     if (tem2 == def_arg2[1 - i]
    3160          434 :                         || tem2 == def_arg2_alt[1 - i])
    3161              :                       {
    3162          228 :                         rotcnt = tem2;
    3163          228 :                         check_range = 1;
    3164          228 :                         if (tem2 == def_arg2[1 - i])
    3165            0 :                           check_range_stmt = def_arg_stmt[1 - i];
    3166              :                         else
    3167          228 :                           check_range_stmt = def_arg_alt_stmt[1 - i];
    3168              :                         break;
    3169              :                       }
    3170              :                   }
    3171              :                 else
    3172          237 :                   tem2 = NULL_TREE;
    3173              : 
    3174          443 :                 if (cdef_code[1 - i] == BIT_AND_EXPR
    3175          430 :                     && tree_fits_shwi_p (cdef_arg2[1 - i])
    3176          860 :                     && tree_to_shwi (cdef_arg2[1 - i])
    3177          430 :                        == TYPE_PRECISION (rtype) - 1
    3178          873 :                     && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
    3179              :                   {
    3180          430 :                     if (tem == cdef_arg1[1 - i]
    3181          205 :                         || tem2 == cdef_arg1[1 - i])
    3182              :                       {
    3183              :                         rotcnt = def_arg2[1 - i];
    3184          429 :                         break;
    3185              :                       }
    3186          193 :                     tree tem3;
    3187          193 :                     defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
    3188            0 :                     if (CONVERT_EXPR_CODE_P (code)
    3189          193 :                         && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
    3190          193 :                         && TYPE_PRECISION (TREE_TYPE (tem3))
    3191          386 :                            > floor_log2 (TYPE_PRECISION (rtype))
    3192          386 :                         && type_has_mode_precision_p (TREE_TYPE (tem3)))
    3193              :                       {
    3194          193 :                         if (tem == tem3 || tem2 == tem3)
    3195              :                           {
    3196              :                             rotcnt = def_arg2[1 - i];
    3197              :                             break;
    3198              :                           }
    3199              :                       }
    3200              :                   }
    3201              :               }
    3202              :           }
    3203         2317 :       if (check_range && wider_prec > TYPE_PRECISION (rtype))
    3204              :         {
    3205         1533 :           if (TREE_CODE (rotcnt) != SSA_NAME)
    3206          573 :             return false;
    3207         1533 :           int_range_max r;
    3208         1533 :           range_query *q = get_range_query (cfun);
    3209         1533 :           if (q == get_global_range_query ())
    3210         1522 :             q = enable_ranger (cfun);
    3211         1533 :           if (!q->range_of_expr (r, rotcnt, check_range_stmt))
    3212              :             {
    3213            0 :               if (check_range > 0)
    3214              :                 return false;
    3215            0 :               r.set_varying (TREE_TYPE (rotcnt));
    3216              :             }
    3217         1533 :           int prec = TYPE_PRECISION (TREE_TYPE (rotcnt));
    3218         1533 :           signop sign = TYPE_SIGN (TREE_TYPE (rotcnt));
    3219         1533 :           wide_int min = wide_int::from (TYPE_PRECISION (rtype), prec, sign);
    3220         1533 :           wide_int max = wide_int::from (wider_prec - 1, prec, sign);
    3221         1533 :           if (check_range < 0)
    3222          616 :             max = min;
    3223         1533 :           int_range<1> r2 (TREE_TYPE (rotcnt), min, max);
    3224         1533 :           r.intersect (r2);
    3225         1533 :           if (!r.undefined_p ())
    3226              :             {
    3227         1181 :               if (check_range > 0)
    3228              :                 {
    3229          589 :                   int_range_max r3;
    3230         1844 :                   for (int i = TYPE_PRECISION (rtype) + 1; i < wider_prec;
    3231         1255 :                        i += TYPE_PRECISION (rtype))
    3232              :                     {
    3233         1255 :                       int j = i + TYPE_PRECISION (rtype) - 2;
    3234         1255 :                       min = wide_int::from (i, prec, sign);
    3235         1255 :                       max = wide_int::from (MIN (j, wider_prec - 1),
    3236         1255 :                                             prec, sign);
    3237         1255 :                       int_range<1> r4 (TREE_TYPE (rotcnt), min, max);
    3238         1255 :                       r3.union_ (r4);
    3239         1255 :                     }
    3240          589 :                   r.intersect (r3);
    3241          589 :                   if (!r.undefined_p ())
    3242          573 :                     return false;
    3243          589 :                 }
    3244              :               add_masking = true;
    3245              :             }
    3246         1533 :         }
    3247         8915 :       if (rotcnt == NULL_TREE)
    3248              :         return false;
    3249         1744 :       swapped_p = i != 1;
    3250              :     }
    3251              : 
    3252         2674 :   if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
    3253         2674 :                                   TREE_TYPE (rotcnt)))
    3254              :     {
    3255          496 :       g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
    3256              :                                NOP_EXPR, rotcnt);
    3257          496 :       gsi_insert_before (gsi, g, GSI_SAME_STMT);
    3258          496 :       rotcnt = gimple_assign_lhs (g);
    3259              :     }
    3260         2674 :   if (add_masking)
    3261              :     {
    3262          608 :       g = gimple_build_assign (make_ssa_name (TREE_TYPE (rotcnt)),
    3263              :                                BIT_AND_EXPR, rotcnt,
    3264          608 :                                build_int_cst (TREE_TYPE (rotcnt),
    3265          608 :                                               TYPE_PRECISION (rtype) - 1));
    3266          608 :       gsi_insert_before (gsi, g, GSI_SAME_STMT);
    3267          608 :       rotcnt = gimple_assign_lhs (g);
    3268              :     }
    3269         2674 :   lhs = gimple_assign_lhs (stmt);
    3270         2674 :   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    3271         1010 :     lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
    3272         2674 :   g = gimple_build_assign (lhs,
    3273         2674 :                            ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
    3274              :                            ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
    3275         2674 :   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    3276              :     {
    3277         1010 :       gsi_insert_before (gsi, g, GSI_SAME_STMT);
    3278         1010 :       g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    3279              :     }
    3280         2674 :   gsi_replace (gsi, g, false);
    3281         2674 :   return true;
    3282              : }
    3283              : 
    3284              : 
/* Check whether an array contains a valid table according to VALIDATE_FN.
   CTOR is the CONSTRUCTOR holding the array initializer; it may contain
   INTEGER_CST elements and/or RAW_DATA_CST runs of bytes.  BITS is the
   number of distinct table values (the bit width of the input type).
   On success the element stored at array index 0 is returned in ZERO_VAL.
   VALIDATE_FN (value, index) decides whether a single table entry is
   consistent with the idiom being matched.  */
template<typename ValidateFn>
static bool
check_table_array (tree ctor, HOST_WIDE_INT &zero_val, unsigned bits,
                  ValidateFn validate_fn)
{
  tree elt, idx;
  /* RAW_IDX is the position within the current RAW_DATA_CST element.  */
  unsigned HOST_WIDE_INT i, raw_idx = 0;
  unsigned matched = 0;

  zero_val = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
    {
      /* Both the index and the value must be small host integers
	 (or a raw-data run for the value).  */
      if (!tree_fits_shwi_p (idx))
        return false;
      if (!tree_fits_shwi_p (elt) && TREE_CODE (elt) != RAW_DATA_CST)
        return false;

      unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
      HOST_WIDE_INT val;

      if (TREE_CODE (elt) == INTEGER_CST)
        val = tree_to_shwi (elt);
      else
        {
	  /* RAW_DATA_CST: consume one byte per iteration.  When the run
	     is exhausted, reset and move on to the next element.  */
          if (raw_idx == (unsigned) RAW_DATA_LENGTH (elt))
            {
              raw_idx = 0;
              continue;
            }
          if (TYPE_UNSIGNED (TREE_TYPE (elt)))
            val = RAW_DATA_UCHAR_ELT (elt, raw_idx);
          else
            val = RAW_DATA_SCHAR_ELT (elt, raw_idx);
          index += raw_idx;
          raw_idx++;
	  /* Decrement I so FOR_EACH_CONSTRUCTOR_ELT revisits this same
	     element on the next iteration for the next raw byte.  */
          i--;
        }

      /* The table may legitimately have up to 2 * BITS entries
	 (e.g. a string table twice the size); anything beyond that
	 cannot belong to a valid table.  */
      if (index > bits * 2)
        return false;

      /* Record the entry at index 0 for the caller's ctz (0) / clz (0)
	 handling; it also counts towards MATCHED.  */
      if (index == 0)
        {
          zero_val = val;
          matched++;
        }

      if (val >= 0 && val < bits && validate_fn (val, index))
        matched++;

      /* Index 0 is counted twice when it validates, so a full table
	 yields strictly more than BITS matches.  */
      if (matched > bits)
        return true;
    }

  return false;
}
    3343              : 
    3344              : /* Check whether a string contains a valid table according to VALIDATE_FN.  */
    3345              : template<typename ValidateFn>
    3346              : static bool
    3347            4 : check_table_string (tree string, HOST_WIDE_INT &zero_val,unsigned bits,
    3348              :                     ValidateFn validate_fn)
    3349              : {
    3350            4 :   unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
    3351            4 :   unsigned matched = 0;
    3352            4 :   const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);
    3353              : 
    3354            4 :   if (len < bits || len > bits * 2)
    3355              :     return false;
    3356              : 
    3357            4 :   zero_val = p[0];
    3358              : 
    3359          164 :   for (unsigned i = 0; i < len; i++)
    3360          160 :     if (p[i] < bits && validate_fn (p[i], i))
    3361          160 :       matched++;
    3362              : 
    3363            4 :   return matched == bits;
    3364              : }
    3365              : 
    3366              : /* Check whether CTOR contains a valid table according to VALIDATE_FN.  */
    3367              : template<typename ValidateFn>
    3368              : static bool
    3369           26 : check_table (tree ctor, tree type, HOST_WIDE_INT &zero_val, unsigned bits,
    3370              :              ValidateFn validate_fn)
    3371              : {
    3372           26 :   if (TREE_CODE (ctor) == CONSTRUCTOR)
    3373           14 :     return check_table_array (ctor, zero_val, bits, validate_fn);
    3374              :   else if (TREE_CODE (ctor) == STRING_CST
    3375           12 :            && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
    3376            4 :     return check_table_string (ctor, zero_val, bits, validate_fn);
    3377              :   return false;
    3378              : }
    3379              : 
    3380              : /* Match.pd function to match the ctz expression.  */
    3381              : extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));
    3382              : extern bool gimple_clz_table_index (tree, tree *, tree (*)(tree));
    3383              : 
/* Recognize count leading and trailing zeroes idioms.
   The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
   constant which when multiplied by a power of 2 creates a unique value
   in the top 5 or 6 bits.  This is then indexed into a table which maps it
   to the number of trailing zeroes.  Array[0] is returned so the caller can
   emit an appropriate sequence depending on whether ctz (0) is defined on
   the target.  Returns true and replaces the statement at *GSI with an
   IFN_CTZ/IFN_CLZ based sequence if the idiom was recognized.  */

static bool
simplify_count_zeroes (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree array_ref = gimple_assign_rhs1 (stmt);
  /* res_ops[0] is the input value X, res_ops[1] the multiplier C,
     res_ops[2] the shift amount, as matched by the match.pd helpers.  */
  tree res_ops[3];

  gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);

  internal_fn fn = IFN_LAST;
  /* For CTZ we recognize ((x & -x) * C) >> SHIFT where the array data
     represents the number of trailing zeros.  */
  if (gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
    fn = IFN_CTZ;
  /* For CLZ we recognize
       x |= x >> 1;
       x |= x >> 2;
       x |= x >> 4;
       x |= x >> 8;
       x |= x >> 16;
       (x * C) >> SHIFT
     where 31 minus the array data represents the number of leading zeros.  */
  else if (gimple_clz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0],
                                   NULL))
    fn = IFN_CLZ;
  else
    return false;

  HOST_WIDE_INT zero_val;
  tree type = TREE_TYPE (array_ref);
  tree array = TREE_OPERAND (array_ref, 0);
  tree input_type = TREE_TYPE (res_ops[0]);
  unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));

  /* Check the array element type is not wider than 32 bits and the input is
     an unsigned 32-bit or 64-bit type.  */
  if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
    return false;
  if (input_bits != 32 && input_bits != 64)
    return false;

  /* Only transform if the target has a usable CTZ/CLZ internal fn.  */
  if (!direct_internal_fn_supported_p (fn, input_type, OPTIMIZE_FOR_BOTH))
    return false;

  /* Check the lower bound of the array is zero.  */
  tree low = array_ref_low_bound (array_ref);
  if (!low || !integer_zerop (low))
    return false;

  /* Check the shift extracts the top 5..7 bits.  */
  unsigned shiftval = tree_to_shwi (res_ops[2]);
  if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
    return false;

  /* The array initializer must be available for folding.  */
  tree ctor = ctor_for_folding (array);
  if (!ctor)
    return false;
  unsigned HOST_WIDE_INT mulval = tree_to_uhwi (res_ops[1]);
  if (fn == IFN_CTZ)
    {
      /* Entry INDEX of the table must hold DATA iff multiplying the magic
	 constant by 2**DATA (the isolated lowest set bit) and extracting
	 the top bits yields INDEX.  */
      auto checkfn = [&](unsigned data, unsigned i) -> bool
        {
          unsigned HOST_WIDE_INT mask
            = ((HOST_WIDE_INT_1U << (input_bits - shiftval)) - 1) << shiftval;
          return (((mulval << data) & mask) >> shiftval) == i;
        };
      if (!check_table (ctor, type, zero_val, input_bits, checkfn))
        return false;
    }
  else if (fn == IFN_CLZ)
    {
      /* For CLZ the or-chain smears the topmost set bit downwards, so an
	 input whose table value is DATA has the DATA + 1 low bits set:
	 (2**(DATA + 1) - 1).  Validate the table entry against that.  */
      auto checkfn = [&](unsigned data, unsigned i) -> bool
        {
          unsigned HOST_WIDE_INT mask
            = ((HOST_WIDE_INT_1U << (input_bits - shiftval)) - 1) << shiftval;
          return (((((HOST_WIDE_INT_1U << (data + 1)) - 1) * mulval) & mask)
                  >> shiftval) == i;
        };
    if (!check_table (ctor, type, zero_val, input_bits, checkfn))
      return false;
    }

  /* CTZ_VAL is the value IFN_CTZ/IFN_CLZ produces for a zero input;
     the *_DEFINED_VALUE_AT_ZERO macros return 2 when that value is
     defined by the target and store it into CTZ_VAL.  */
  HOST_WIDE_INT ctz_val = -1;
  bool zero_ok;
  if (fn == IFN_CTZ)
    {
      ctz_val = 0;
      zero_ok = CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (input_type),
                                           ctz_val) == 2;
    }
  else if (fn == IFN_CLZ)
    {
      ctz_val = 32;
      zero_ok = CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (input_type),
                                           ctz_val) == 2;
      /* The table stores PRECISION - 1 minus the leading-zero count, so
	 convert the recorded table[0] into the equivalent clz value.  */
      zero_val = input_bits - 1 - zero_val;
    }
  int nargs = 2;

  /* If the input value can't be zero, don't special case ctz (0).  */
  range_query *q = get_range_query (cfun);
  if (q == get_global_range_query ())
    q = enable_ranger (cfun);
  int_range_max vr;
  if (q->range_of_expr (vr, res_ops[0], stmt)
      && !range_includes_zero_p (vr))
    {
      zero_ok = true;
      zero_val = 0;
      ctz_val = 0;
      /* Use the single-argument IFN form, which has no defined-at-zero
	 value argument.  */
      nargs = 1;
    }

  /* Build the replacement sequence: the IFN call, then any fixups.  */
  gimple_seq seq = NULL;
  gimple *g;
  gcall *call = gimple_build_call_internal (fn, nargs, res_ops[0],
                                            nargs == 1 ? NULL_TREE
                                            : build_int_cst (integer_type_node,
                                                             ctz_val));
  gimple_set_location (call, gimple_location (stmt));
  gimple_set_lhs (call, make_ssa_name (integer_type_node));
  gimple_seq_add_stmt (&seq, call);

  tree prev_lhs = gimple_call_lhs (call);

  /* If the IFN already returns what the table returns for zero, the call
     result can be used directly.  */
  if (zero_ok && zero_val == ctz_val)
    ;
  /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0.  */
  else if (zero_ok && zero_val == 0 && ctz_val == input_bits)
    {
      g = gimple_build_assign (make_ssa_name (integer_type_node),
                               BIT_AND_EXPR, prev_lhs,
                               build_int_cst (integer_type_node,
                                              input_bits - 1));
      gimple_set_location (g, gimple_location (stmt));
      gimple_seq_add_stmt (&seq, g);
      prev_lhs = gimple_assign_lhs (g);
    }
  /* As fallback emit a conditional move.  */
  else
    {
      g = gimple_build_assign (make_ssa_name (boolean_type_node), EQ_EXPR,
                               res_ops[0], build_zero_cst (input_type));
      gimple_set_location (g, gimple_location (stmt));
      gimple_seq_add_stmt (&seq, g);
      tree cond = gimple_assign_lhs (g);
      /* x == 0 ? zero_val : ifn-result.  */
      g = gimple_build_assign (make_ssa_name (integer_type_node),
                               COND_EXPR, cond,
                               build_int_cst (integer_type_node, zero_val),
                               prev_lhs);
      gimple_set_location (g, gimple_location (stmt));
      gimple_seq_add_stmt (&seq, g);
      prev_lhs = gimple_assign_lhs (g);
    }

  /* The CLZ table yields PRECISION - 1 - clz, so subtract the IFN result
     from PRECISION - 1 to match the original load's value.  */
  if (fn == IFN_CLZ)
    {
      g = gimple_build_assign (make_ssa_name (integer_type_node),
                               MINUS_EXPR,
                               build_int_cst (integer_type_node,
                                              input_bits - 1),
                               prev_lhs);
      gimple_set_location (g, gimple_location (stmt));
      gimple_seq_add_stmt (&seq, g);
      prev_lhs = gimple_assign_lhs (g);
    }

  /* Convert to the original result type and replace the array load.  */
  g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
  gimple_seq_add_stmt (&seq, g);
  gsi_replace_with_seq (gsi, seq, true);
  return true;
}
    3564              : 
    3565              : 
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the inputs.  Returns 1 if the combined permutation
   selects the first operand of the inner VEC_PERM_EXPR, 2 if it selects
   the second, and 0 if it is not an identity.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned HOST_WIDE_INT nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);

  /* For VLA masks, check for the following pattern:
     v1 = VEC_PERM_EXPR (v0, ..., mask1)
     v2 = VEC_PERM_EXPR (v1, ..., mask2)
     -->
     v2 = v0
     if mask1 == mask2 == {nelts - 1, nelts - 2, ...}.  */

  if (operand_equal_p (mask1, mask2, 0)
      && !VECTOR_CST_NELTS (mask1).is_constant ())
    {
      vec_perm_builder builder;
      if (tree_to_vec_perm_builder (&builder, mask1))
        {
          poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask1));
          vec_perm_indices sel (builder, 1, nelts);
	  /* A reversal {nelts - 1, nelts - 2, ...} applied twice is the
	     identity on the first input.  */
          if (sel.series_p (0, 1, nelts - 1, -1))
            return 1;
        }
    }

  /* Compose the two selectors: element I of MASK is
     MASK1[MASK2[I] mod (2 * nelts)], i.e. the overall index into the
     inner permutation's operand pair.  */
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    return 0;

  if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    return 0;
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      /* Reduce the selector modulo 2 * nelts; J == I selects lane I of
	 the first operand, J == I + nelts lane I of the second.  */
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  /* Identity on the first operand wins if every lane came from it;
     otherwise identity on the second; otherwise no identity.  */
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
    3620              : 
    3621              : /* Combine a shuffle with its arguments.  Returns true if there were any
    3622              :    changes made.  */
    3623              : 
    3624              : static bool
    3625       185078 : simplify_permutation (gimple_stmt_iterator *gsi)
    3626              : {
    3627       185078 :   gimple *stmt = gsi_stmt (*gsi);
    3628       185078 :   gimple *def_stmt = NULL;
    3629       185078 :   tree op0, op1, op2, op3, arg0, arg1;
    3630       185078 :   enum tree_code code, code2 = ERROR_MARK;
    3631       185078 :   bool single_use_op0 = false;
    3632              : 
    3633       185078 :   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
    3634              : 
    3635       185078 :   op0 = gimple_assign_rhs1 (stmt);
    3636       185078 :   op1 = gimple_assign_rhs2 (stmt);
    3637       185078 :   op2 = gimple_assign_rhs3 (stmt);
    3638              : 
    3639       185078 :   if (TREE_CODE (op2) != VECTOR_CST)
    3640              :     return false;
    3641              : 
    3642       182341 :   if (TREE_CODE (op0) == VECTOR_CST)
    3643              :     {
    3644              :       code = VECTOR_CST;
    3645              :       arg0 = op0;
    3646              :     }
    3647       180479 :   else if (TREE_CODE (op0) == SSA_NAME)
    3648              :     {
    3649       180479 :       def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
    3650       180479 :       if (!def_stmt)
    3651              :         return false;
    3652       172411 :       code = gimple_assign_rhs_code (def_stmt);
    3653       172411 :       if (code == VIEW_CONVERT_EXPR)
    3654              :         {
    3655         1423 :           tree rhs = gimple_assign_rhs1 (def_stmt);
    3656         1423 :           tree name = TREE_OPERAND (rhs, 0);
    3657         1423 :           if (TREE_CODE (name) != SSA_NAME)
    3658              :             return false;
    3659         1423 :           if (!has_single_use (name))
    3660          218 :             single_use_op0 = false;
    3661              :           /* Here we update the def_stmt through this VIEW_CONVERT_EXPR,
    3662              :              but still keep the code to indicate it comes from
    3663              :              VIEW_CONVERT_EXPR.  */
    3664         1423 :           def_stmt = SSA_NAME_DEF_STMT (name);
    3665         1423 :           if (!def_stmt || !is_gimple_assign (def_stmt))
    3666              :             return false;
    3667          636 :           if (gimple_assign_rhs_code (def_stmt) != CONSTRUCTOR)
    3668              :             return false;
    3669              :         }
    3670       171125 :       if (!can_propagate_from (def_stmt))
    3671              :         return false;
    3672        21789 :       arg0 = gimple_assign_rhs1 (def_stmt);
    3673              :     }
    3674              :   else
    3675              :     return false;
    3676              : 
    3677              :   /* Two consecutive shuffles.  */
    3678        21789 :   if (code == VEC_PERM_EXPR)
    3679              :     {
    3680         6475 :       tree orig;
    3681         6475 :       int ident;
    3682              : 
    3683         6475 :       if (op0 != op1)
    3684              :         return false;
    3685           34 :       op3 = gimple_assign_rhs3 (def_stmt);
    3686           34 :       if (TREE_CODE (op3) != VECTOR_CST)
    3687              :         return false;
    3688           34 :       ident = is_combined_permutation_identity (op3, op2);
    3689           34 :       if (!ident)
    3690              :         return false;
    3691            0 :       orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
    3692            0 :                           : gimple_assign_rhs2 (def_stmt);
    3693            0 :       gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
    3694            0 :       gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
    3695            0 :       gimple_set_num_ops (stmt, 2);
    3696            0 :       update_stmt (stmt);
    3697            0 :       remove_prop_source_from_use (op0);
    3698            0 :       return true;
    3699              :     }
    3700        17176 :   else if (code == CONSTRUCTOR
    3701        17176 :            || code == VECTOR_CST
    3702              :            || code == VIEW_CONVERT_EXPR)
    3703              :     {
    3704         3316 :       if (op0 != op1)
    3705              :         {
    3706         3121 :           if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
    3707              :             return false;
    3708              : 
    3709         2754 :           if (TREE_CODE (op1) == VECTOR_CST)
    3710              :             arg1 = op1;
    3711         2254 :           else if (TREE_CODE (op1) == SSA_NAME)
    3712              :             {
    3713         2254 :               gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
    3714         2254 :               if (!def_stmt2)
    3715              :                 return false;
    3716          785 :               code2 = gimple_assign_rhs_code (def_stmt2);
    3717          785 :               if (code2 == VIEW_CONVERT_EXPR)
    3718              :                 {
    3719            4 :                   tree rhs = gimple_assign_rhs1 (def_stmt2);
    3720            4 :                   tree name = TREE_OPERAND (rhs, 0);
    3721            4 :                   if (TREE_CODE (name) != SSA_NAME)
    3722              :                     return false;
    3723            4 :                   if (!has_single_use (name))
    3724              :                     return false;
    3725            3 :                   def_stmt2 = SSA_NAME_DEF_STMT (name);
    3726            3 :                   if (!def_stmt2 || !is_gimple_assign (def_stmt2))
    3727              :                     return false;
    3728            0 :                   if (gimple_assign_rhs_code (def_stmt2) != CONSTRUCTOR)
    3729              :                     return false;
    3730              :                 }
    3731          781 :               else if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
    3732              :                 return false;
    3733          651 :               if (!can_propagate_from (def_stmt2))
    3734              :                 return false;
    3735          651 :               arg1 = gimple_assign_rhs1 (def_stmt2);
    3736              :             }
    3737              :           else
    3738              :             return false;
    3739              :         }
    3740              :       else
    3741              :         {
    3742              :           /* Already used twice in this statement.  */
    3743          195 :           if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
    3744              :             return false;
    3745              :           arg1 = arg0;
    3746              :         }
    3747              : 
    3748              :       /* If there are any VIEW_CONVERT_EXPRs found when finding permutation
    3749              :          operands source, check whether it's valid to transform and prepare
    3750              :          the required new operands.  */
    3751         1249 :       if (code == VIEW_CONVERT_EXPR || code2 == VIEW_CONVERT_EXPR)
    3752              :         {
    3753              :           /* Figure out the target vector type to which operands should be
    3754              :              converted.  If both are CONSTRUCTOR, the types should be the
    3755              :              same, otherwise, use the one of CONSTRUCTOR.  */
    3756           18 :           tree tgt_type = NULL_TREE;
    3757           18 :           if (code == VIEW_CONVERT_EXPR)
    3758              :             {
    3759           18 :               gcc_assert (gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR);
    3760           18 :               code = CONSTRUCTOR;
    3761           18 :               tgt_type = TREE_TYPE (arg0);
    3762              :             }
    3763           18 :           if (code2 == VIEW_CONVERT_EXPR)
    3764              :             {
    3765            0 :               tree arg1_type = TREE_TYPE (arg1);
    3766            0 :               if (tgt_type == NULL_TREE)
    3767              :                 tgt_type = arg1_type;
    3768            0 :               else if (tgt_type != arg1_type)
    3769           17 :                 return false;
    3770              :             }
    3771              : 
    3772           18 :           if (!VECTOR_TYPE_P (tgt_type))
    3773              :             return false;
    3774           18 :           tree op2_type = TREE_TYPE (op2);
    3775              : 
    3776              :           /* Figure out the shrunk factor.  */
    3777           18 :           poly_uint64 tgt_units = TYPE_VECTOR_SUBPARTS (tgt_type);
    3778           18 :           poly_uint64 op2_units = TYPE_VECTOR_SUBPARTS (op2_type);
    3779           18 :           if (maybe_gt (tgt_units, op2_units))
    3780              :             return false;
    3781           18 :           unsigned int factor;
    3782           35 :           if (!constant_multiple_p (op2_units, tgt_units, &factor))
    3783              :             return false;
    3784              : 
    3785              :           /* Build the new permutation control vector as target vector.  */
    3786           18 :           vec_perm_builder builder;
    3787           18 :           if (!tree_to_vec_perm_builder (&builder, op2))
    3788              :             return false;
    3789           18 :           vec_perm_indices indices (builder, 2, op2_units);
    3790           18 :           vec_perm_indices new_indices;
    3791           18 :           if (new_indices.new_shrunk_vector (indices, factor))
    3792              :             {
    3793            1 :               tree mask_type = tgt_type;
    3794            1 :               if (!VECTOR_INTEGER_TYPE_P (mask_type))
    3795              :                 {
    3796            0 :                   tree elem_type = TREE_TYPE (mask_type);
    3797            0 :                   unsigned elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
    3798            0 :                   tree int_type = build_nonstandard_integer_type (elem_size, 0);
    3799            0 :                   mask_type = build_vector_type (int_type, tgt_units);
    3800              :                 }
    3801            1 :               op2 = vec_perm_indices_to_tree (mask_type, new_indices);
    3802              :             }
    3803              :           else
    3804           17 :             return false;
    3805              : 
    3806              :           /* Convert the VECTOR_CST to the appropriate vector type.  */
    3807            1 :           if (tgt_type != TREE_TYPE (arg0))
    3808            0 :             arg0 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg0);
    3809            1 :           else if (tgt_type != TREE_TYPE (arg1))
    3810            0 :             arg1 = fold_build1 (VIEW_CONVERT_EXPR, tgt_type, arg1);
    3811           35 :         }
    3812              : 
    3813              :       /* VIEW_CONVERT_EXPR should be updated to CONSTRUCTOR before.  */
    3814         1232 :       gcc_assert (code == CONSTRUCTOR || code == VECTOR_CST);
    3815              : 
    3816              :       /* Shuffle of a constructor.  */
    3817         1232 :       tree res_type
    3818         1232 :         = build_vector_type (TREE_TYPE (TREE_TYPE (arg0)),
    3819         1232 :                              TYPE_VECTOR_SUBPARTS (TREE_TYPE (op2)));
    3820         1232 :       tree opt = fold_ternary (VEC_PERM_EXPR, res_type, arg0, arg1, op2);
    3821         1232 :       if (!opt
    3822          280 :           || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
    3823              :         return false;
    3824              :       /* Found VIEW_CONVERT_EXPR before, need one explicit conversion.  */
    3825          280 :       if (res_type != TREE_TYPE (op0))
    3826              :         {
    3827            1 :           tree name = make_ssa_name (TREE_TYPE (opt));
    3828            1 :           gimple *ass_stmt = gimple_build_assign (name, opt);
    3829            1 :           gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
    3830            1 :           opt = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), name);
    3831              :         }
    3832          280 :       gimple_assign_set_rhs_from_tree (gsi, opt);
    3833          280 :       update_stmt (gsi_stmt (*gsi));
    3834          280 :       if (TREE_CODE (op0) == SSA_NAME)
    3835            1 :         remove_prop_source_from_use (op0);
    3836          280 :       if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
    3837            0 :         remove_prop_source_from_use (op1);
    3838          280 :       return true;
    3839              :     }
    3840              : 
    3841              :   return false;
    3842              : }
    3843              : 
     3844              : /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
     3845              :    conversions with code CONV_CODE or update it if still ERROR_MARK.
     3846              :    Return NULL_TREE if no such matching def was found.  */
     3847              : 
     3848              : static tree
     3849       402691 : get_bit_field_ref_def (tree val, enum tree_code &conv_code)
     3850              : {
                       :   /* Only an SSA name has a defining statement to inspect.  */
     3851       402691 :   if (TREE_CODE (val) != SSA_NAME)
     3852              :     return NULL_TREE ;
     3853       402691 :   gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
     3854       376992 :   if (!def_stmt)
     3855              :     return NULL_TREE;
     3856       302537 :   enum tree_code code = gimple_assign_rhs_code (def_stmt);
                       :   /* Look through at most one conversion (FLOAT_EXPR, FIX_TRUNC_EXPR
                       :      or a CONVERT/NOP).  The first conversion code encountered is
                       :      recorded into CONV_CODE (when it is still ERROR_MARK); any later
                       :      call with the same CONV_CODE reference must see the identical
                       :      conversion, so all elements fed to the caller agree on one
                       :      conversion operation.  */
     3857       302537 :   if (code == FLOAT_EXPR
     3858       302537 :       || code == FIX_TRUNC_EXPR
     3859              :       || CONVERT_EXPR_CODE_P (code))
     3860              :     {
     3861       180016 :       tree op1 = gimple_assign_rhs1 (def_stmt);
     3862       180016 :       if (conv_code == ERROR_MARK)
     3863        86151 :         conv_code = code;
     3864        93865 :       else if (conv_code != code)
     3865              :         return NULL_TREE;
     3866       179991 :       if (TREE_CODE (op1) != SSA_NAME)
     3867              :         return NULL_TREE;
                       :       /* Step to the statement defining the conversion's source.
                       :          Note this uses SSA_NAME_DEF_STMT directly, not
                       :          get_prop_source_stmt, so no single-use restriction is
                       :          imposed on the inner value.  */
     3868        74162 :       def_stmt = SSA_NAME_DEF_STMT (op1);
     3869        74162 :       if (! is_gimple_assign (def_stmt))
     3870              :         return NULL_TREE;
     3871        59079 :       code = gimple_assign_rhs_code (def_stmt);
     3872              :     }
                       :   /* Succeed only when the (possibly converted) value is defined by
                       :      a BIT_FIELD_REF; return that reference tree itself.  */
     3873       181600 :   if (code != BIT_FIELD_REF)
     3874              :     return NULL_TREE;
     3875        23673 :   return gimple_assign_rhs1 (def_stmt);
     3876              : }
    3877              : 
    3878              : /* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
    3879              : 
    3880              : static bool
    3881       152785 : simplify_vector_constructor (gimple_stmt_iterator *gsi)
    3882              : {
    3883       152785 :   gimple *stmt = gsi_stmt (*gsi);
    3884       152785 :   tree op, orig[2], type;
    3885       152785 :   unsigned i;
    3886       152785 :   unsigned HOST_WIDE_INT nelts;
    3887       152785 :   unsigned HOST_WIDE_INT refnelts;
    3888       152785 :   enum tree_code conv_code;
    3889       152785 :   constructor_elt *elt;
    3890              : 
    3891       152785 :   op = gimple_assign_rhs1 (stmt);
    3892       152785 :   type = TREE_TYPE (op);
    3893       152785 :   gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
    3894              :                        && TREE_CODE (type) == VECTOR_TYPE);
    3895              : 
    3896       152785 :   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    3897              :     return false;
    3898              : 
    3899       152785 :   orig[0] = NULL;
    3900       152785 :   orig[1] = NULL;
    3901       152785 :   tree orig_elem_type[2] = {};
    3902       152785 :   conv_code = ERROR_MARK;
    3903       152785 :   bool maybe_ident = true;
    3904       152785 :   bool maybe_blend[2] = { true, true };
    3905       152785 :   tree one_constant = NULL_TREE;
    3906       152785 :   tree one_nonconstant = NULL_TREE;
    3907       152785 :   tree subelt;
    3908       152785 :   auto_vec<tree> constants;
    3909       152785 :   constants.safe_grow_cleared (nelts, true);
    3910       152785 :   auto_vec<std::pair<unsigned, unsigned>, 64> elts;
    3911       152785 :   unsigned int tsubelts = 0;
    3912       434962 :   FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    3913              :     {
    3914       402691 :       tree ref, op1;
    3915       402691 :       unsigned int elem, src_elem_size;
    3916       402691 :       unsigned HOST_WIDE_INT nsubelts = 1;
    3917              : 
    3918       402691 :       if (i >= nelts)
    3919       152785 :         return false;
    3920              : 
    3921              :       /* Look for elements extracted and possibly converted from
    3922              :          another vector.  */
    3923       402691 :       op1 = get_bit_field_ref_def (elt->value, conv_code);
    3924       402691 :       if (op1
    3925        23673 :           && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
    3926         5646 :           && VECTOR_TYPE_P (TREE_TYPE (ref))
    3927         5639 :           && (tree_nop_conversion_p (TREE_TYPE (op1),
    3928         5639 :                                      TREE_TYPE (TREE_TYPE (ref)))
    3929          840 :               || (VECTOR_TYPE_P (TREE_TYPE (op1))
    3930           76 :                   && tree_nop_conversion_p (TREE_TYPE (TREE_TYPE (op1)),
    3931           76 :                                             TREE_TYPE (TREE_TYPE (ref)))
    3932           76 :                   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (op1))
    3933           76 :                         .is_constant (&nsubelts)))
    3934         4875 :           && constant_multiple_p (bit_field_size (op1), nsubelts,
    3935              :                                   &src_elem_size)
    3936       407566 :           && constant_multiple_p (bit_field_offset (op1), src_elem_size, &elem)
    3937       407566 :           && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
    3938              :         {
    3939              :           unsigned int j;
    3940         5160 :           for (j = 0; j < 2; ++j)
    3941              :             {
    3942         5143 :               if (!orig[j])
    3943              :                 {
    3944         2341 :                   if (j == 0
    3945         2540 :                       || useless_type_conversion_p (TREE_TYPE (orig[0]),
    3946          199 :                                                     TREE_TYPE (ref)))
    3947              :                     break;
    3948              :                 }
    3949         2802 :               else if (ref == orig[j])
    3950              :                 break;
    3951              :             }
    3952              :           /* Found a suitable vector element.  */
    3953         4875 :           if (j < 2)
    3954              :             {
    3955         4858 :               orig[j] = ref;
    3956              :               /* Track what element type was actually extracted (which may
    3957              :                  differ in signedness from the vector's element type due to
    3958              :                  tree_nop_conversion_p).  */
    3959         4858 :               if (!orig_elem_type[j])
    3960         2335 :                 orig_elem_type[j] = TREE_TYPE (op1);
    3961         4858 :               if (elem != i || j != 0)
    3962         2155 :                 maybe_ident = false;
    3963         4858 :               if (elem != i)
    3964         2086 :                 maybe_blend[j] = false;
    3965         9851 :               for (unsigned int k = 0; k < nsubelts; ++k)
    3966         4993 :                 elts.safe_push (std::make_pair (j, elem + k));
    3967         4858 :               tsubelts += nsubelts;
    3968         4858 :               continue;
    3969         4858 :             }
    3970              :           /* Else fallthru.  */
    3971              :         }
    3972              :       /* Handle elements not extracted from a vector.
    3973              :           1. constants by permuting with constant vector
    3974              :           2. a unique non-constant element by permuting with a splat vector  */
    3975       397833 :       if (orig[1]
    3976       246187 :           && orig[1] != error_mark_node)
    3977              :         return false;
    3978       397817 :       orig[1] = error_mark_node;
    3979       397817 :       if (VECTOR_TYPE_P (TREE_TYPE (elt->value))
    3980       397817 :           && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (elt->value))
    3981         3323 :                         .is_constant (&nsubelts))
    3982              :         return false;
    3983       397817 :       if (CONSTANT_CLASS_P (elt->value))
    3984              :         {
    3985        25695 :           if (one_nonconstant)
    3986              :             return false;
    3987        17357 :           if (!one_constant)
    3988         8103 :             one_constant = TREE_CODE (elt->value) == VECTOR_CST
    3989         8103 :                            ? VECTOR_CST_ELT (elt->value, 0)
    3990              :                            : elt->value;
    3991        17357 :           if (TREE_CODE (elt->value) == VECTOR_CST)
    3992              :             {
    3993          347 :               for (unsigned int k = 0; k < nsubelts; k++)
    3994          255 :                 constants[tsubelts + k] = VECTOR_CST_ELT (elt->value, k);
    3995              :             }
    3996              :           else
    3997        17265 :             constants[tsubelts] = elt->value;
    3998              :         }
    3999              :       else
    4000              :         {
    4001       372122 :           if (one_constant)
    4002              :             return false;
    4003       364352 :           subelt = VECTOR_TYPE_P (TREE_TYPE (elt->value))
    4004       364352 :                    ? ssa_uniform_vector_p (elt->value)
    4005              :                    : elt->value;
    4006       364352 :           if (!subelt)
    4007              :             return false;
    4008       361223 :           if (!one_nonconstant)
    4009              :             one_nonconstant = subelt;
    4010       220809 :           else if (!operand_equal_p (one_nonconstant, subelt, 0))
    4011              :             return false;
    4012              :         }
    4013       554803 :       for (unsigned int k = 0; k < nsubelts; ++k)
    4014       277484 :         elts.safe_push (std::make_pair (1, tsubelts + k));
    4015       277319 :       tsubelts += nsubelts;
    4016       277319 :       maybe_ident = false;
    4017              :     }
    4018              : 
    4019        64542 :   if (elts.length () < nelts)
    4020              :     return false;
    4021              : 
    4022        31023 :   if (! orig[0]
    4023        31023 :       || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
    4024              :     return false;
    4025         1635 :   refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
    4026              :   /* We currently do not handle larger destination vectors.  */
    4027         1635 :   if (refnelts < nelts)
    4028              :     return false;
    4029              : 
    4030              :   /* Determine the element type for the conversion source.
    4031              :      As orig_elem_type keeps track of the original type, check
    4032              :      if we need to perform a sign swap after permuting.
    4033              :      We need to be able to construct a vector type from the element
    4034              :      type which is not possible for e.g. BitInt or pointers
    4035              :      so pun with an integer type if needed.   */
    4036         1491 :   tree perm_eltype = TREE_TYPE (TREE_TYPE (orig[0]));
    4037         1491 :   bool sign_change_p = false;
    4038         1491 :   if (conv_code != ERROR_MARK
    4039          366 :       && orig_elem_type[0]
    4040         1857 :       && TYPE_SIGN (orig_elem_type[0]) != TYPE_SIGN (perm_eltype))
    4041              :     {
    4042           34 :       perm_eltype = signed_or_unsigned_type_for
    4043           34 :         (TYPE_UNSIGNED (orig_elem_type[0]), perm_eltype);
    4044           34 :       sign_change_p = true;
    4045              :     }
    4046         1491 :   tree conv_src_type = build_vector_type (perm_eltype, nelts);
    4047              : 
    4048         1491 :   if (maybe_ident)
    4049              :     {
    4050              :       /* When there is no conversion, use the target type directly.  */
    4051          448 :       if (conv_code == ERROR_MARK && nelts != refnelts)
    4052          448 :         conv_src_type = type;
    4053          448 :       if (conv_code != ERROR_MARK
    4054          448 :           && !supportable_convert_operation (conv_code, type, conv_src_type,
    4055              :                                              &conv_code))
    4056              :         {
    4057              :           /* Only few targets implement direct conversion patterns so try
    4058              :              some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR.  */
    4059           46 :           optab optab;
    4060           46 :           insn_code icode;
    4061           46 :           tree halfvectype, dblvectype;
    4062           46 :           enum tree_code unpack_op;
    4063              : 
    4064           46 :           if (!BYTES_BIG_ENDIAN)
    4065           84 :             unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
    4066           46 :                          ? VEC_UNPACK_FLOAT_LO_EXPR
    4067              :                          : VEC_UNPACK_LO_EXPR);
    4068              :           else
    4069              :             unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
    4070              :                          ? VEC_UNPACK_FLOAT_HI_EXPR
    4071              :                          : VEC_UNPACK_HI_EXPR);
    4072              : 
    4073              :           /* Conversions between DFP and FP have no special tree code
    4074              :              but we cannot handle those since all relevant vector conversion
    4075              :              optabs only have a single mode.  */
    4076            6 :           if (CONVERT_EXPR_CODE_P (conv_code)
    4077           40 :               && FLOAT_TYPE_P (TREE_TYPE (type))
    4078           50 :               && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
    4079            2 :                   != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
    4080              :             return false;
    4081              : 
    4082            6 :           if (CONVERT_EXPR_CODE_P (conv_code)
    4083           39 :               && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
    4084           39 :                   == TYPE_PRECISION (TREE_TYPE (type)))
    4085            0 :               && orig_elem_type[0]
    4086            0 :               && useless_type_conversion_p (orig_elem_type[0],
    4087            0 :                                             TREE_TYPE (TREE_TYPE (orig[0])))
    4088            0 :               && mode_for_vector (as_a <scalar_mode>
    4089            0 :                                   (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
    4090            0 :                                   nelts * 2).exists ()
    4091            0 :               && (dblvectype
    4092            0 :                   = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
    4093            0 :                                        nelts * 2))
    4094              :               /* Only use it for vector modes or for vector booleans
    4095              :                  represented as scalar bitmasks.  See PR95528.  */
    4096            0 :               && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
    4097            0 :                   || VECTOR_BOOLEAN_TYPE_P (dblvectype))
    4098            0 :               && (optab = optab_for_tree_code (unpack_op,
    4099              :                                                dblvectype,
    4100              :                                                optab_default))
    4101            0 :               && ((icode = optab_handler (optab, TYPE_MODE (dblvectype)))
    4102              :                   != CODE_FOR_nothing)
    4103           45 :               && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
    4104              :             {
    4105            0 :               gimple_seq stmts = NULL;
    4106            0 :               tree dbl;
    4107            0 :               if (refnelts == nelts)
    4108              :                 {
    4109              :                   /* ???  Paradoxical subregs don't exist, so insert into
    4110              :                      the lower half of a wider zero vector.  */
    4111            0 :                   dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
    4112              :                                       build_zero_cst (dblvectype), orig[0],
    4113            0 :                                       bitsize_zero_node);
    4114              :                 }
    4115            0 :               else if (refnelts == 2 * nelts)
    4116              :                 dbl = orig[0];
    4117              :               else
    4118            0 :                 dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
    4119            0 :                                     orig[0], TYPE_SIZE (dblvectype),
    4120            0 :                                     bitsize_zero_node);
    4121            0 :               gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    4122            0 :               gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
    4123              :             }
    4124            6 :           else if (CONVERT_EXPR_CODE_P (conv_code)
    4125           39 :                    && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
    4126           39 :                        == 2 * TYPE_PRECISION (TREE_TYPE (type)))
    4127            1 :                    && orig_elem_type[0]
    4128            1 :                    && useless_type_conversion_p (orig_elem_type[0],
    4129            1 :                                                  TREE_TYPE (TREE_TYPE (orig[0])))
    4130            1 :                    && mode_for_vector (as_a <scalar_mode>
    4131            1 :                                          (TYPE_MODE
    4132              :                                            (TREE_TYPE (TREE_TYPE (orig[0])))),
    4133            2 :                                        nelts / 2).exists ()
    4134            1 :                    && (halfvectype
    4135            1 :                          = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
    4136            1 :                                               nelts / 2))
    4137              :                    /* Only use it for vector modes or for vector booleans
    4138              :                       represented as scalar bitmasks.  See PR95528.  */
    4139            1 :                    && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
    4140            0 :                        || VECTOR_BOOLEAN_TYPE_P (halfvectype))
    4141            1 :                    && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
    4142              :                                                     halfvectype,
    4143              :                                                     optab_default))
    4144            1 :                    && ((icode = optab_handler (optab, TYPE_MODE (halfvectype)))
    4145              :                        != CODE_FOR_nothing)
    4146           46 :                    && (insn_data[icode].operand[0].mode == TYPE_MODE (type)))
    4147              :             {
    4148            0 :               gimple_seq stmts = NULL;
    4149            0 :               tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
    4150            0 :                                        orig[0], TYPE_SIZE (halfvectype),
    4151            0 :                                        bitsize_zero_node);
    4152            0 :               tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
    4153            0 :                                        orig[0], TYPE_SIZE (halfvectype),
    4154            0 :                                        TYPE_SIZE (halfvectype));
    4155            0 :               gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    4156            0 :               gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
    4157              :                                               low, hig);
    4158              :             }
    4159              :           else
    4160           45 :             return false;
    4161            0 :           update_stmt (gsi_stmt (*gsi));
    4162            0 :           return true;
    4163              :         }
    4164          402 :       if (nelts != refnelts)
    4165              :         {
    4166           20 :           gassign *lowpart
    4167           20 :             = gimple_build_assign (make_ssa_name (conv_src_type),
    4168              :                                    build3 (BIT_FIELD_REF, conv_src_type,
    4169           20 :                                            orig[0], TYPE_SIZE (conv_src_type),
    4170              :                                            bitsize_zero_node));
    4171           20 :           gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
    4172           20 :           orig[0] = gimple_assign_lhs (lowpart);
    4173              :         }
    4174          382 :       else if (sign_change_p)
    4175              :         {
    4176            0 :           gassign *conv
    4177            0 :             = gimple_build_assign (make_ssa_name (conv_src_type),
    4178              :                                    build1 (VIEW_CONVERT_EXPR, conv_src_type,
    4179              :                                            orig[0]));
    4180            0 :           gsi_insert_before (gsi, conv, GSI_SAME_STMT);
    4181            0 :           orig[0] = gimple_assign_lhs (conv);
    4182              :         }
    4183          402 :       if (conv_code == ERROR_MARK)
    4184              :         {
    4185          385 :           tree src_type = TREE_TYPE (orig[0]);
    4186          385 :           if (!useless_type_conversion_p (type, src_type))
    4187              :             {
    4188            0 :               gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
    4189              :                                     TYPE_VECTOR_SUBPARTS (src_type))
    4190              :                           && tree_nop_conversion_p (TREE_TYPE (type),
    4191              :                                                     TREE_TYPE (src_type)));
    4192            0 :               tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
    4193            0 :               orig[0] = make_ssa_name (type);
    4194            0 :               gassign *assign = gimple_build_assign (orig[0], rhs);
    4195            0 :               gsi_insert_before (gsi, assign, GSI_SAME_STMT);
    4196              :             }
    4197          385 :           gimple_assign_set_rhs_from_tree (gsi, orig[0]);
    4198              :         }
    4199              :       else
    4200           17 :         gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
    4201              :                                         NULL_TREE, NULL_TREE);
    4202              :     }
    4203              :   else
    4204              :     {
    4205              :       /* If we combine a vector with a non-vector avoid cases where
    4206              :          we'll obviously end up with more GIMPLE stmts which is when
    4207              :          we'll later not fold this to a single insert into the vector
    4208              :          and we had a single extract originally.  See PR92819.  */
    4209         1043 :       if (nelts == 2
    4210          771 :           && refnelts > 2
    4211          168 :           && orig[1] == error_mark_node
    4212           33 :           && !maybe_blend[0])
    4213          392 :         return false;
    4214         1016 :       tree mask_type, perm_type;
    4215         1016 :       perm_type = TREE_TYPE (orig[0]);
    4216         1016 :       if (conv_code != ERROR_MARK
    4217         1016 :           && !supportable_convert_operation (conv_code, type, conv_src_type,
    4218              :                                              &conv_code))
    4219              :         return false;
    4220              : 
    4221              :       /* Now that we know the number of elements of the source build the
    4222              :          permute vector.
    4223              :          ???  When the second vector has constant values we can shuffle
    4224              :          it and its source indexes to make the permutation supported.
    4225              :          For now it mimics a blend.  */
    4226          778 :       vec_perm_builder sel (refnelts, refnelts, 1);
    4227          778 :       bool all_same_p = true;
    4228         7318 :       for (i = 0; i < elts.length (); ++i)
    4229              :         {
    4230         2881 :           sel.quick_push (elts[i].second + elts[i].first * refnelts);
    4231         2881 :           all_same_p &= known_eq (sel[i], sel[0]);
    4232              :         }
    4233              :       /* And fill the tail with "something".  It's really don't care,
    4234              :          and ideally we'd allow VEC_PERM to have a smaller destination
    4235              :          vector.  As a heuristic:
    4236              : 
    4237              :          (a) if what we have so far duplicates a single element, make the
    4238              :              tail do the same
    4239              : 
    4240              :          (b) otherwise preserve a uniform orig[0].  This facilitates
    4241              :              later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR.  */
    4242         1609 :       for (; i < refnelts; ++i)
    4243         1662 :         sel.quick_push (all_same_p
    4244         2493 :                         ? sel[0]
    4245           96 :                         : (elts[0].second == 0 && elts[0].first == 0
    4246         1108 :                            ? 0 : refnelts) + i);
    4247         1021 :       vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
    4248          778 :       machine_mode vmode = TYPE_MODE (perm_type);
    4249          778 :       if (!can_vec_perm_const_p (vmode, vmode, indices))
    4250              :         return false;
    4251          651 :       mask_type = build_vector_type (ssizetype, refnelts);
    4252          651 :       tree op2 = vec_perm_indices_to_tree (mask_type, indices);
    4253          651 :       bool converted_orig1 = false;
    4254          651 :       gimple_seq stmts = NULL;
    4255          651 :       if (!orig[1])
    4256          200 :         orig[1] = orig[0];
    4257          451 :       else if (orig[1] == error_mark_node
    4258          302 :                && one_nonconstant)
    4259              :         {
    4260              :           /* ???  We can see if we can safely convert to the original
    4261              :              element type.  */
    4262          165 :           converted_orig1 = conv_code != ERROR_MARK;
    4263          165 :           tree target_type = converted_orig1 ? type : perm_type;
    4264          165 :           tree nonconstant_for_splat = one_nonconstant;
    4265              :           /* If there's a nop conversion between the target element type and
    4266              :              the nonconstant's type, convert it.  */
    4267          165 :           if (!useless_type_conversion_p (TREE_TYPE (target_type),
    4268          165 :                                           TREE_TYPE (one_nonconstant)))
    4269            0 :             nonconstant_for_splat
    4270            0 :               = gimple_build (&stmts, NOP_EXPR, TREE_TYPE (target_type),
    4271              :                               one_nonconstant);
    4272          165 :           orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
    4273              :                                                   target_type,
    4274              :                                                   nonconstant_for_splat);
    4275          165 :         }
    4276          286 :       else if (orig[1] == error_mark_node)
    4277              :         {
    4278              :           /* ???  See if we can convert the vector to the original type.  */
    4279          137 :           converted_orig1 = conv_code != ERROR_MARK;
    4280          137 :           unsigned n = converted_orig1 ? nelts : refnelts;
    4281          120 :           tree target_type = converted_orig1 ? type : perm_type;
    4282          137 :           tree_vector_builder vec (target_type, n, 1);
    4283         1041 :           for (unsigned i = 0; i < n; ++i)
    4284         1732 :             if (i < nelts && constants[i])
    4285              :               {
    4286          461 :                 tree constant = constants[i];
    4287              :                 /* If there's a nop conversion, convert the constant.  */
    4288          461 :                 if (!useless_type_conversion_p (TREE_TYPE (target_type),
    4289          461 :                                                 TREE_TYPE (constant)))
    4290            0 :                   constant = fold_convert (TREE_TYPE (target_type), constant);
    4291          461 :                 vec.quick_push (constant);
    4292              :               }
    4293              :             else
    4294              :               {
    4295              :                 /* ??? Push a don't-care value.  */
    4296          443 :                 tree constant = one_constant;
    4297          443 :                 if (!useless_type_conversion_p (TREE_TYPE (target_type),
    4298          443 :                                                 TREE_TYPE (constant)))
    4299            0 :                   constant = fold_convert (TREE_TYPE (target_type), constant);
    4300          443 :                 vec.quick_push (constant);
    4301              :               }
    4302          137 :           orig[1] = vec.build ();
    4303          137 :         }
    4304          502 :       tree blend_op2 = NULL_TREE;
    4305          502 :       if (converted_orig1)
    4306              :         {
    4307              :           /* Make sure we can do a blend in the target type.  */
    4308           19 :           vec_perm_builder sel (nelts, nelts, 1);
    4309           87 :           for (i = 0; i < elts.length (); ++i)
    4310           68 :             sel.quick_push (elts[i].first
    4311           68 :                             ? elts[i].second + nelts : i);
    4312           19 :           vec_perm_indices indices (sel, 2, nelts);
    4313           19 :           machine_mode vmode = TYPE_MODE (type);
    4314           19 :           if (!can_vec_perm_const_p (vmode, vmode, indices))
    4315            0 :             return false;
    4316           19 :           mask_type = build_vector_type (ssizetype, nelts);
    4317           19 :           blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
    4318           19 :         }
    4319              : 
    4320              :       /* For a real orig[1] (no splat, constant etc.) we might need to
    4321              :          nop-convert it.  Do so here.  */
    4322          651 :       if (orig[1] && orig[1] != error_mark_node
    4323          651 :           && !useless_type_conversion_p (perm_type, TREE_TYPE (orig[1]))
    4324          670 :           && tree_nop_conversion_p (TREE_TYPE (perm_type),
    4325           19 :                                     TREE_TYPE (TREE_TYPE (orig[1]))))
    4326            0 :         orig[1] = gimple_build (&stmts, VIEW_CONVERT_EXPR, perm_type,
    4327              :                                 orig[1]);
    4328              : 
    4329          651 :       tree orig1_for_perm
    4330          651 :         = converted_orig1 ? build_zero_cst (perm_type) : orig[1];
    4331          651 :       tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
    4332              :                                orig[0], orig1_for_perm, op2);
    4333              :       /* If we're building a smaller vector, extract the element
    4334              :          with the proper type.  */
    4335          651 :       if (nelts != refnelts)
    4336          312 :         res = gimple_build (&stmts, BIT_FIELD_REF,
    4337              :                             conv_code != ERROR_MARK ? conv_src_type : type,
    4338              :                             res,
    4339          156 :                             TYPE_SIZE (conv_code != ERROR_MARK ? conv_src_type
    4340              :                                                                : type),
    4341          156 :                             bitsize_zero_node);
    4342              :       /* Otherwise, we can still have an intermediate sign change.
    4343              :          ??? In that case we have two subsequent conversions.
    4344              :          We should be able to merge them.  */
    4345          495 :       else if (sign_change_p)
    4346           14 :         res = gimple_build (&stmts, VIEW_CONVERT_EXPR, conv_src_type, res);
    4347              :       /* Finally, apply the conversion.  */
    4348          651 :       if (conv_code != ERROR_MARK)
    4349           52 :         res = gimple_build (&stmts, conv_code, type, res);
    4350          599 :       else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
    4351              :         {
    4352            1 :           gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
    4353              :                                 TYPE_VECTOR_SUBPARTS (perm_type))
    4354              :                       && tree_nop_conversion_p (TREE_TYPE (type),
    4355              :                                                 TREE_TYPE (perm_type)));
    4356            1 :           res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
    4357              :         }
    4358              :       /* Blend in the actual constant.  */
    4359          651 :       if (converted_orig1)
    4360           19 :         res = gimple_build (&stmts, VEC_PERM_EXPR, type,
    4361           19 :                             res, orig[1], blend_op2);
    4362          651 :       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    4363          651 :       gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
    4364          778 :     }
    4365         1053 :   update_stmt (gsi_stmt (*gsi));
    4366         1053 :   return true;
    4367       152785 : }
    4368              : 
    4369              : /* Prepare a TARGET_MEM_REF ref so that it can be subsetted as
    4370              :    lvalue.  This splits out an address computation stmt before *GSI
    4371              :    and returns a MEM_REF wrapping the address.  */
    4372              : 
    4373              : static tree
    4374         1096 : prepare_target_mem_ref_lvalue (tree ref, gimple_stmt_iterator *gsi)
    4375              : {
    4376         1096 :   if (TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    4377          215 :     mark_addressable (TREE_OPERAND (TREE_OPERAND (ref, 0), 0));
    4378         1096 :   tree ptrtype = build_pointer_type (TREE_TYPE (ref));
    4379         1096 :   tree tem = make_ssa_name (ptrtype);
    4380         1096 :   gimple *new_stmt
    4381         1096 :     = gimple_build_assign (tem, build1 (ADDR_EXPR, TREE_TYPE (tem),
    4382              :                                         unshare_expr (ref)));
    4383         1096 :   gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    4384         2192 :   ref = build2_loc (EXPR_LOCATION (ref),
    4385         1096 :                     MEM_REF, TREE_TYPE (ref), tem,
    4386         1096 :                     build_int_cst (TREE_TYPE (TREE_OPERAND (ref, 1)), 0));
    4387         1096 :   return ref;
    4388              : }
    4389              : 
/* Rewrite the vector load at *GSI to component-wise loads if the load
   is only used in BIT_FIELD_REF extractions with eventual intermediate
   widening.  On success all rewritten uses and the load itself are
   removed; otherwise *GSI is simply advanced.  */

static void
optimize_vector_load (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs = gimple_assign_rhs1 (stmt);
  /* Remember the VUSE so the replacement component loads can be placed
     at the original load position with the same memory state.  */
  tree vuse = gimple_vuse (stmt);

  /* Gather BIT_FIELD_REFs to rewrite, looking through
     VEC_UNPACK_{LO,HI}_EXPR.  */
  use_operand_p use_p;
  imm_use_iterator iter;
  bool rewrite = true;
  bool scalar_use = false;
  bool unpack_use = false;
  auto_vec<gimple *, 8> bf_stmts;
  /* Worklist of defs whose uses still need to be inspected; seeded with
     the load result, extended by unpack results.  */
  auto_vec<tree, 8> worklist;
  worklist.quick_push (lhs);
  do
    {
      tree def = worklist.pop ();
      unsigned HOST_WIDE_INT def_eltsize
	= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (def))));
      FOR_EACH_IMM_USE_FAST (use_p, iter, def)
	{
	  gimple *use_stmt = USE_STMT (use_p);
	  if (is_gimple_debug (use_stmt))
	    continue;
	  if (!is_gimple_assign (use_stmt))
	    {
	      rewrite = false;
	      break;
	    }
	  enum tree_code use_code = gimple_assign_rhs_code (use_stmt);
	  tree use_rhs = gimple_assign_rhs1 (use_stmt);
	  if (use_code == BIT_FIELD_REF
	      && TREE_OPERAND (use_rhs, 0) == def
	      /* If it's on the VEC_UNPACK_{HI,LO}_EXPR
		 def need to verify it is element aligned.  */
	      && (def == lhs
		  || (known_eq (bit_field_size (use_rhs), def_eltsize)
		      && constant_multiple_p (bit_field_offset (use_rhs),
					      def_eltsize)
		      /* We can simulate the VEC_UNPACK_{HI,LO}_EXPR
			 via a NOP_EXPR only for integral types.
			 ???  Support VEC_UNPACK_FLOAT_{HI,LO}_EXPR.  */
		      && INTEGRAL_TYPE_P (TREE_TYPE (use_rhs)))))
	    {
	      /* Record whether any extraction produces a scalar - that
		 makes the rewrite profitable below.  */
	      if (!VECTOR_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
		scalar_use = true;
	      bf_stmts.safe_push (use_stmt);
	      continue;
	    }
	  /* Walk through one level of VEC_UNPACK_{LO,HI}_EXPR.  */
	  if (def == lhs
	      && (use_code == VEC_UNPACK_HI_EXPR
		  || use_code == VEC_UNPACK_LO_EXPR)
	      && use_rhs == lhs)
	    {
	      unpack_use = true;
	      worklist.safe_push (gimple_assign_lhs (use_stmt));
	      continue;
	    }
	  /* Any other kind of use blocks the transform.  */
	  rewrite = false;
	  break;
	}
      if (!rewrite)
	break;
    }
  while (!worklist.is_empty ());

  /* Only rewrite when some use is scalar, we look through an unpack, or
     the vector mode cannot be moved natively anyway.  */
  rewrite = rewrite && (scalar_use
			|| unpack_use
			|| !can_implement_p (mov_optab,
					     TYPE_MODE (TREE_TYPE (lhs))));
  if (!rewrite)
    {
      gsi_next (gsi);
      return;
    }
  /* We now have all ultimate uses of the load to rewrite in bf_stmts.  */

  /* Prepare the original ref to be wrapped in adjusted BIT_FIELD_REFs.
     For TARGET_MEM_REFs we have to separate the LEA from the reference.  */
  tree load_rhs = rhs;
  if (TREE_CODE (load_rhs) == TARGET_MEM_REF)
    load_rhs = prepare_target_mem_ref_lvalue (load_rhs, gsi);

  /* Rewrite the BIT_FIELD_REFs to be actual loads, re-emitting them at
     the place of the original load.  */
  for (gimple *use_stmt : bf_stmts)
    {
      tree bfr = gimple_assign_rhs1 (use_stmt);
      tree new_rhs = unshare_expr (load_rhs);
      if (TREE_OPERAND (bfr, 0) != lhs)
	{
	  /* When the BIT_FIELD_REF is on the promoted vector we have to
	     adjust it and emit a conversion afterwards.  */
	  gimple *def_stmt
	      = SSA_NAME_DEF_STMT (TREE_OPERAND (bfr, 0));
	  enum tree_code def_code
	      = gimple_assign_rhs_code (def_stmt);

	  /* The adjusted BIT_FIELD_REF is of the promotion source
	     vector size and at half of the offset...  */
	  new_rhs = fold_build3 (BIT_FIELD_REF,
				 TREE_TYPE (TREE_TYPE (lhs)),
				 new_rhs,
				 TYPE_SIZE (TREE_TYPE (TREE_TYPE (lhs))),
				 size_binop (EXACT_DIV_EXPR,
					     TREE_OPERAND (bfr, 2),
					     bitsize_int (2)));
	  /* ... and offsetted by half of the vector if VEC_UNPACK_HI_EXPR.
	     Which half is "high" depends on endianness.  */
	  if (def_code == (!BYTES_BIG_ENDIAN
			   ? VEC_UNPACK_HI_EXPR : VEC_UNPACK_LO_EXPR))
	    TREE_OPERAND (new_rhs, 2)
	      = size_binop (PLUS_EXPR, TREE_OPERAND (new_rhs, 2),
			    size_binop (EXACT_DIV_EXPR,
					TYPE_SIZE (TREE_TYPE (lhs)),
					bitsize_int (2)));
	  tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (lhs)));
	  gimple *new_stmt = gimple_build_assign (tem, new_rhs);
	  location_t loc = gimple_location (use_stmt);
	  gimple_set_location (new_stmt, loc);
	  /* The component load gets the memory state of the original
	     vector load.  */
	  gimple_set_vuse (new_stmt, vuse);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	  /* Perform scalar promotion.  */
	  new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
					  NOP_EXPR, tem);
	  gimple_set_location (new_stmt, loc);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      else
	{
	  /* When the BIT_FIELD_REF is on the original load result
	     we can just wrap that.  */
	  tree new_rhs = fold_build3 (BIT_FIELD_REF, TREE_TYPE (bfr),
				      unshare_expr (load_rhs),
				      TREE_OPERAND (bfr, 1),
				      TREE_OPERAND (bfr, 2));
	  gimple *new_stmt = gimple_build_assign (gimple_assign_lhs (use_stmt),
						  new_rhs);
	  location_t loc = gimple_location (use_stmt);
	  gimple_set_location (new_stmt, loc);
	  gimple_set_vuse (new_stmt, vuse);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      /* Remove the now-replaced extraction statement.  */
      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
      unlink_stmt_vdef (use_stmt);
      gsi_remove (&gsi2, true);
    }

  /* Finally get rid of the intermediate stmts.  Remaining uses of LHS
     are the VEC_UNPACKs and debug stmts; reset the latter, delete the
     former.  */
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    {
      if (is_gimple_debug (use_stmt))
	{
	  if (gimple_debug_bind_p (use_stmt))
	    {
	      gimple_debug_bind_reset_value (use_stmt);
	      update_stmt (use_stmt);
	    }
	  continue;
	}
      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
      unlink_stmt_vdef (use_stmt);
      release_defs (use_stmt);
      gsi_remove (&gsi2, true);
    }
  /* And the original load.  */
  release_defs (stmt);
  gsi_remove (gsi, true);
}
    4568              : 
    4569              : 
    4570              : /* Primitive "lattice" function for gimple_simplify.  */
    4571              : 
    4572              : static tree
    4573   1588711835 : fwprop_ssa_val (tree name)
    4574              : {
    4575              :   /* First valueize NAME.  */
    4576   1588711835 :   if (TREE_CODE (name) == SSA_NAME
    4577   1588711835 :       && SSA_NAME_VERSION (name) < lattice.length ())
    4578              :     {
    4579   1587873901 :       tree val = lattice[SSA_NAME_VERSION (name)];
    4580   1587873901 :       if (val)
    4581   1588711835 :         name = val;
    4582              :     }
    4583              :   /* We continue matching along SSA use-def edges for SSA names
    4584              :      that are not single-use.  Currently there are no patterns
    4585              :      that would cause any issues with that.  */
    4586   1588711835 :   return name;
    4587              : }
    4588              : 
    4589              : /* Search for opportunities to free half of the lanes in the following pattern:
    4590              : 
    4591              :      v_in = {e0, e1, e2, e3}
    4592              :      v_1 = VEC_PERM <v_in, v_in, {0, 2, 0, 2}>
    4593              :      // v_1 = {e0, e2, e0, e2}
    4594              :      v_2 = VEC_PERM <v_in, v_in, {1, 3, 1, 3}>
    4595              :      // v_2 = {e1, e3, e1, e3}
    4596              : 
    4597              :      v_x = v_1 + v_2
    4598              :      // v_x = {e0+e1, e2+e3, e0+e1, e2+e3}
    4599              :      v_y = v_1 - v_2
    4600              :      // v_y = {e0-e1, e2-e3, e0-e1, e2-e3}
    4601              : 
    4602              :      v_out = VEC_PERM <v_x, v_y, {0, 1, 6, 7}>
    4603              :      // v_out = {e0+e1, e2+e3, e0-e1, e2-e3}
    4604              : 
    4605              :    The last statement could be simplified to:
    4606              :      v_out' = VEC_PERM <v_x, v_y, {0, 1, 4, 5}>
    4607              :      // v_out' = {e0+e1, e2+e3, e0-e1, e2-e3}
    4608              : 
    4609              :    Characteristic properties:
    4610              :    - v_1 and v_2 are created from the same input vector v_in and introduce the
    4611              :      lane duplication (in the selection operand) that we can eliminate.
    4612              :    - v_x and v_y are results from lane-preserving operations that use v_1 and
    4613              :      v_2 as inputs.
    4614              :    - v_out is created by selecting from duplicated lanes.  */
    4615              : 
    4616              : static bool
    4617       182827 : recognise_vec_perm_simplify_seq (gassign *stmt, vec_perm_simplify_seq *seq)
    4618              : {
    4619       182827 :   unsigned HOST_WIDE_INT nelts;
    4620              : 
    4621       182827 :   gcc_checking_assert (stmt);
    4622       182827 :   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
    4623       182827 :   basic_block bb = gimple_bb (stmt);
    4624              : 
    4625              :   /* Decompose the final vec permute statement.  */
    4626       182827 :   tree v_x = gimple_assign_rhs1 (stmt);
    4627       182827 :   tree v_y = gimple_assign_rhs2 (stmt);
    4628       182827 :   tree sel = gimple_assign_rhs3 (stmt);
    4629              : 
    4630       182827 :   if (TREE_CODE (sel) != VECTOR_CST
    4631       180090 :       || !VECTOR_CST_NELTS (sel).is_constant (&nelts)
    4632       180090 :       || TREE_CODE (v_x) != SSA_NAME
    4633       178238 :       || TREE_CODE (v_y) != SSA_NAME
    4634       174298 :       || !has_single_use (v_x)
    4635       292551 :       || !has_single_use (v_y))
    4636        74865 :     return false;
    4637              : 
    4638              :   /* Don't analyse sequences with many lanes.  */
    4639       107962 :   if (nelts > 4)
    4640              :     return false;
    4641              : 
    4642              :   /* Lookup the definition of v_x and v_y.  */
    4643       105231 :   gassign *v_x_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x));
    4644       105231 :   gassign *v_y_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_y));
    4645       104854 :   if (!v_x_stmt || gimple_bb (v_x_stmt) != bb
    4646       210085 :       || !v_y_stmt || gimple_bb (v_y_stmt) != bb)
    4647              :     return false;
    4648              : 
    4649              :   /* Check the operations that define v_x and v_y.  */
    4650       104847 :   if (TREE_CODE_CLASS (gimple_assign_rhs_code (v_x_stmt)) != tcc_binary
    4651       106890 :       || TREE_CODE_CLASS (gimple_assign_rhs_code (v_y_stmt)) != tcc_binary)
    4652              :     return false;
    4653              : 
    4654         2043 :   tree v_x_1 = gimple_assign_rhs1 (v_x_stmt);
    4655         2043 :   tree v_x_2 = gimple_assign_rhs2 (v_x_stmt);
    4656         2043 :   tree v_y_1 = gimple_assign_rhs1 (v_y_stmt);
    4657         2043 :   tree v_y_2 = gimple_assign_rhs2 (v_y_stmt);
    4658              : 
    4659         2043 :   if (v_x_stmt == v_y_stmt
    4660         2043 :       || TREE_CODE (v_x_1) != SSA_NAME
    4661         2040 :       || TREE_CODE (v_x_2) != SSA_NAME
    4662         2028 :       || num_imm_uses (v_x_1) != 2
    4663         3913 :       || num_imm_uses (v_x_2) != 2)
    4664              :     return false;
    4665              : 
    4666         1834 :   if (v_x_1 != v_y_1 || v_x_2 != v_y_2)
    4667              :     {
    4668              :       /* Allow operands of commutative operators to swap.  */
    4669          646 :       if (commutative_tree_code (gimple_assign_rhs_code (v_x_stmt)))
    4670              :         {
    4671              :           /* Keep v_x_1 the first operand for non-commutative operators.  */
    4672          266 :           std::swap (v_x_1, v_x_2);
    4673          266 :           if (v_x_1 != v_y_1 || v_x_2 != v_y_2)
    4674              :             return false;
    4675              :         }
    4676          380 :       else if (commutative_tree_code (gimple_assign_rhs_code (v_y_stmt)))
    4677              :         {
    4678          380 :           if (v_x_1 != v_y_2 || v_x_2 != v_y_1)
    4679              :             return false;
    4680              :         }
    4681              :       else
    4682              :         return false;
    4683              :     }
    4684         1834 :   gassign *v_1_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x_1));
    4685         1834 :   gassign *v_2_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (v_x_2));
    4686         1770 :   if (!v_1_stmt || gimple_bb (v_1_stmt) != bb
    4687         3604 :       || !v_2_stmt || gimple_bb (v_2_stmt) != bb)
    4688              :     return false;
    4689              : 
    4690         1766 :   if (gimple_assign_rhs_code (v_1_stmt) != VEC_PERM_EXPR
    4691         1888 :       || gimple_assign_rhs_code (v_2_stmt) != VEC_PERM_EXPR)
    4692              :     return false;
    4693              : 
    4694              :   /* Decompose initial VEC_PERM_EXPRs.  */
    4695          108 :   tree v_in = gimple_assign_rhs1 (v_1_stmt);
    4696          108 :   tree v_1_sel = gimple_assign_rhs3 (v_1_stmt);
    4697          108 :   tree v_2_sel = gimple_assign_rhs3 (v_2_stmt);
    4698          108 :   if (v_in != gimple_assign_rhs2 (v_1_stmt)
    4699          103 :       || v_in != gimple_assign_rhs1 (v_2_stmt)
    4700          209 :       || v_in != gimple_assign_rhs2 (v_2_stmt))
    4701              :     return false;
    4702              : 
    4703          101 :   unsigned HOST_WIDE_INT v_1_nelts, v_2_nelts;
    4704          101 :   if (TREE_CODE (v_1_sel) != VECTOR_CST
    4705          101 :       || !VECTOR_CST_NELTS (v_1_sel).is_constant (&v_1_nelts)
    4706          101 :       || TREE_CODE (v_2_sel) != VECTOR_CST
    4707          202 :       || !VECTOR_CST_NELTS (v_2_sel).is_constant (&v_2_nelts))
    4708            0 :     return false;
    4709              : 
    4710          101 :   if (nelts != v_1_nelts || nelts != v_2_nelts)
    4711              :     return false;
    4712              : 
    4713              :   /* Create the new selector.  */
    4714          101 :   vec_perm_builder new_sel_perm (nelts, nelts, 1);
    4715          101 :   auto_vec<bool> lanes (nelts);
    4716          101 :   lanes.quick_grow_cleared (nelts);
    4717          505 :   for (unsigned int i = 0; i < nelts; i++)
    4718              :     {
    4719              :       /* Extract the i-th value from the selector.  */
    4720          404 :       unsigned int sel_cst = TREE_INT_CST_LOW (VECTOR_CST_ELT (sel, i));
    4721          404 :       unsigned int lane = sel_cst % nelts;
    4722          404 :       unsigned int offs = sel_cst / nelts;
    4723              : 
    4724              :       /* Check what's in the lane.  */
    4725          404 :       unsigned int e_1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (v_1_sel, lane));
    4726          404 :       unsigned int e_2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (v_2_sel, lane));
    4727              : 
    4728              :       /* Reuse previous lane (if any).  */
    4729          404 :       unsigned int l = 0;
    4730          687 :       for (; l < lane; l++)
    4731              :         {
    4732          481 :           if ((TREE_INT_CST_LOW (VECTOR_CST_ELT (v_1_sel, l)) == e_1)
    4733          481 :               && (TREE_INT_CST_LOW (VECTOR_CST_ELT (v_2_sel, l)) == e_2))
    4734              :             break;
    4735              :         }
    4736              : 
    4737              :       /* Add to narrowed selector.  */
    4738          404 :       new_sel_perm.quick_push (l + offs * nelts);
    4739              : 
    4740              :       /* Mark lane as used.  */
    4741          404 :       lanes[l] = true;
    4742              :     }
    4743              : 
  /* Count how many lanes are needed.  */
    4745              :   unsigned int cnt = 0;
    4746          505 :   for (unsigned int i = 0; i < nelts; i++)
    4747          404 :     cnt += lanes[i];
    4748              : 
    4749              :   /* If more than (nelts/2) lanes are needed, skip the sequence.  */
    4750          101 :   if (cnt > nelts / 2)
    4751              :     return false;
    4752              : 
    4753              :   /* Check if the resulting permutation is cheap.  */
    4754          101 :   vec_perm_indices new_indices (new_sel_perm, 2, nelts);
    4755          101 :   tree vectype = TREE_TYPE (gimple_assign_lhs (stmt));
    4756          101 :   machine_mode vmode = TYPE_MODE (vectype);
    4757          101 :   if (!can_vec_perm_const_p (vmode, vmode, new_indices, false))
    4758              :     return false;
    4759              : 
    4760          101 :   *seq = XNEW (struct _vec_perm_simplify_seq);
    4761          101 :   (*seq)->stmt = stmt;
    4762          101 :   (*seq)->v_1_stmt = v_1_stmt;
    4763          101 :   (*seq)->v_2_stmt = v_2_stmt;
    4764          101 :   (*seq)->v_x_stmt = v_x_stmt;
    4765          101 :   (*seq)->v_y_stmt = v_y_stmt;
    4766          101 :   (*seq)->nelts = nelts;
    4767          101 :   (*seq)->new_sel = vect_gen_perm_mask_checked (vectype, new_indices);
    4768              : 
    4769          101 :   if (dump_file)
    4770              :     {
    4771           28 :       fprintf (dump_file, "Found vec perm simplify sequence ending with:\n\t");
    4772           28 :       print_gimple_stmt (dump_file, stmt, 0);
    4773              : 
    4774           28 :       if (dump_flags & TDF_DETAILS)
    4775              :         {
    4776           28 :           fprintf (dump_file, "\tNarrowed vec_perm selector: ");
    4777           28 :           print_generic_expr (dump_file, (*seq)->new_sel);
    4778           28 :           fprintf (dump_file, "\n");
    4779              :         }
    4780              :     }
    4781              : 
    4782              :   return true;
    4783          202 : }
    4784              : 
    4785              : /* Reduce the lane consumption of a simplifiable vec perm sequence.  */
    4786              : 
    4787              : static void
    4788           74 : narrow_vec_perm_simplify_seq (const vec_perm_simplify_seq &seq)
    4789              : {
    4790           74 :   gassign *stmt = seq->stmt;
    4791           74 :   if (dump_file && (dump_flags & TDF_DETAILS))
    4792              :     {
    4793           22 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    4794           22 :       fprintf (dump_file, "Old stmt: ");
    4795           22 :       print_gimple_stmt (dump_file, stmt, 0);
    4796              :     }
    4797              : 
    4798              :   /* Update the last VEC_PERM statement.  */
    4799           74 :   gimple_assign_set_rhs3 (stmt, seq->new_sel);
    4800           74 :   update_stmt (stmt);
    4801              : 
    4802           74 :   if (dump_file && (dump_flags & TDF_DETAILS))
    4803              :     {
    4804           22 :       fprintf (dump_file, "New stmt: ");
    4805           22 :       print_gimple_stmt (dump_file, stmt, 0);
    4806              :     }
    4807           74 : }
    4808              : 
    4809              : /* Test if we can blend two simplifiable vec permute sequences.
    4810              :    NEED_SWAP will be set, if sequences must be swapped for blending.  */
    4811              : 
    4812              : static bool
    4813           47 : can_blend_vec_perm_simplify_seqs_p (vec_perm_simplify_seq seq1,
    4814              :                                     vec_perm_simplify_seq seq2,
    4815              :                                     bool *need_swap)
    4816              : {
    4817           47 :   unsigned int nelts = seq1->nelts;
    4818           47 :   basic_block bb = gimple_bb (seq1->stmt);
    4819              : 
    4820           47 :   gcc_assert (gimple_bb (seq2->stmt) == bb);
    4821              : 
    4822              :   /* BBs and number of elements must be equal.  */
    4823           47 :   if (gimple_bb (seq2->stmt) != bb || seq2->nelts != nelts)
    4824              :     return false;
    4825              : 
    4826              :   /* We need vectors of the same type.  */
    4827           47 :   if (TREE_TYPE (gimple_assign_lhs (seq1->stmt))
    4828           47 :       != TREE_TYPE (gimple_assign_lhs (seq2->stmt)))
    4829              :     return false;
    4830              : 
    4831              :   /* We require isomorphic operators.  */
    4832           41 :   if (((gimple_assign_rhs_code (seq1->v_x_stmt)
    4833           41 :         != gimple_assign_rhs_code (seq2->v_x_stmt))
    4834           41 :        || (gimple_assign_rhs_code (seq1->v_y_stmt)
    4835           41 :            != gimple_assign_rhs_code (seq2->v_y_stmt))))
    4836              :     return false;
    4837              : 
    4838              :   /* We cannot have any dependencies between the sequences.
    4839              : 
    4840              :      For merging, we will reuse seq1->v_1_stmt and seq1->v_2_stmt.
    4841              :      seq1's v_in is defined before these statements, but we need
    4842              :      to check if seq2's v_in is defined before them as well.
    4843              : 
    4844              :      Further, we will reuse seq2->stmt.  We need to ensure that
    4845              :      seq1->v_x_stmt and seq1->v_y_stmt are before it.
    4846              : 
    4847              :      Note, that we don't need to check the BBs here, because all
    4848              :      statements of both sequences have to be in the same BB.  */
    4849              : 
    4850           41 :   tree seq2_v_in = gimple_assign_rhs1 (seq2->v_1_stmt);
    4851           41 :   if (TREE_CODE (seq2_v_in) != SSA_NAME)
    4852              :     return false;
    4853              : 
    4854           41 :   gassign *seq2_v_in_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (seq2_v_in));
    4855           41 :   if (!seq2_v_in_stmt || gimple_bb (seq2_v_in_stmt) != bb
    4856           41 :       || (gimple_uid (seq2_v_in_stmt) > gimple_uid (seq1->v_1_stmt))
    4857           37 :       || (gimple_uid (seq1->v_x_stmt) > gimple_uid (seq2->stmt))
    4858           37 :       || (gimple_uid (seq1->v_y_stmt) > gimple_uid (seq2->stmt)))
    4859              :     {
    4860            4 :       tree seq1_v_in = gimple_assign_rhs1 (seq1->v_1_stmt);
    4861            4 :       if (TREE_CODE (seq1_v_in) != SSA_NAME)
    4862              :         return false;
    4863              : 
    4864            4 :       gassign *seq1_v_in_stmt
    4865            4 :         = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (seq1_v_in));
    4866              :       /* Let's try to see if we succeed when swapping the sequences.  */
    4867            4 :       if (!seq1_v_in_stmt || gimple_bb (seq1_v_in_stmt)
    4868            0 :           || (gimple_uid (seq1_v_in_stmt) > gimple_uid (seq2->v_1_stmt))
    4869            0 :           || (gimple_uid (seq2->v_x_stmt) > gimple_uid (seq1->stmt))
    4870            0 :           || (gimple_uid (seq2->v_y_stmt) > gimple_uid (seq1->stmt)))
    4871              :         return false;
    4872            0 :       *need_swap = true;
    4873              :     }
    4874              :   else
    4875           37 :     *need_swap = false;
    4876              : 
    4877           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    4878           11 :     fprintf (dump_file, "Found vec perm simplify sequence pair.\n");
    4879              : 
    4880              :   return true;
    4881              : }
    4882              : 
    4883              : /* Calculate the permutations for blending the two given vec permute
    4884              :    sequences.  This may fail if the resulting permutation is not
    4885              :    supported.  */
    4886              : 
static bool
calc_perm_vec_perm_simplify_seqs (vec_perm_simplify_seq seq1,
				  vec_perm_simplify_seq seq2,
				  vec_perm_indices *seq2_stmt_indices,
				  vec_perm_indices *seq1_v_1_stmt_indices,
				  vec_perm_indices *seq1_v_2_stmt_indices)
{
  unsigned int i;
  unsigned int nelts = seq1->nelts;
  /* Per-lane ownership map.  Encoding (established by the assignments
     below): 0 = lane free, 1 = lane used by seq1, 2 + K = lane holds
     seq2's original lane K (the +2 bias keeps 0/1 distinct).  */
  auto_vec<unsigned int> lane_assignment;
  lane_assignment.create (nelts);

  /* Mark all lanes as free.  */
  lane_assignment.quick_grow_cleared (nelts);

  /* Allocate lanes for seq1.  Seq1 keeps its lanes in place.  */
  for (i = 0; i < nelts; i++)
    {
      unsigned int l = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq1->new_sel, i));
      l %= nelts;
      lane_assignment[l] = 1;
    }

  /* Allocate lanes for seq2 and calculate selector for seq2->stmt.
     Seq2's lanes may need to be relocated to lanes seq1 left free.  */
  vec_perm_builder seq2_stmt_sel_perm (nelts, nelts, 1);
  for (i = 0; i < nelts; i++)
    {
      unsigned int sel = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq2->new_sel, i));
      unsigned int lane = sel % nelts;
      /* OFFS selects between the two input vectors of the permute.  */
      unsigned int offs = sel / nelts;
      unsigned int new_sel;

      /* Check if we already allocated the lane for seq2.  */
      unsigned int j = 0;
      for (; j < i; j++)
	{
	  unsigned int sel_old;
	  sel_old = TREE_INT_CST_LOW (VECTOR_CST_ELT (seq2->new_sel, j));
	  unsigned int lane_old = sel_old % nelts;
	  if (lane == lane_old)
	    {
	      /* Reuse the lane chosen earlier, but keep this element's
		 own vector offset.  */
	      new_sel = seq2_stmt_sel_perm[j].to_constant ();
	      new_sel = (new_sel % nelts) + offs * nelts;
	      break;
	    }
	}

      /* If the lane is not allocated, we need to do that now.
	 Linear-probe from the requested lane to the next free one.  */
      if (j == i)
	{
	  unsigned int l_orig = lane;
	  while (lane_assignment[lane] != 0)
	    {
	      lane = (lane + 1) % nelts;

	      /* This should not happen if both sequences utilize no more than
		 half of the lanes.  Test anyway to guarantee termination.  */
	      if (lane == l_orig)
		return false;
	    }

	  /* Allocate lane.  Record which original seq2 lane lives here.  */
	  lane_assignment[lane] = 2 + l_orig;
	  new_sel = lane + offs * nelts;
	}

      seq2_stmt_sel_perm.quick_push (new_sel);
    }

  /* Check if the resulting permutation is cheap.  */
  seq2_stmt_indices->new_vector (seq2_stmt_sel_perm, 2, nelts);
  tree vectype = TREE_TYPE (gimple_assign_lhs (seq2->stmt));
  machine_mode vmode = TYPE_MODE (vectype);
  if (!can_vec_perm_const_p (vmode, vmode, *seq2_stmt_indices, false))
    return false;

  /* Calculate selectors for seq1->v_1_stmt and seq1->v_2_stmt.
     Lanes owned by seq1 keep their indices (first input vector);
     lanes holding relocated seq2 data pull from the second input
     vector (the + nelts below).  */
  vec_perm_builder seq1_v_1_stmt_sel_perm (nelts, nelts, 1);
  vec_perm_builder seq1_v_2_stmt_sel_perm (nelts, nelts, 1);
  for (i = 0; i < nelts; i++)
    {
      bool use_seq1 = lane_assignment[i] < 2;
      unsigned int l1, l2;

      if (use_seq1)
	{
	  /* Just reuse the selector indices.  */
	  tree s1 = gimple_assign_rhs3 (seq1->v_1_stmt);
	  tree s2 = gimple_assign_rhs3 (seq1->v_2_stmt);
	  l1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s1, i));
	  l2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s2, i));
	}
      else
	{
	  /* We moved the lanes for seq2, so we need to adjust for that.
	     lane_assignment[i] - 2 recovers seq2's original lane.  */
	  tree s1 = gimple_assign_rhs3 (seq2->v_1_stmt);
	  tree s2 = gimple_assign_rhs3 (seq2->v_2_stmt);
	  l1 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s1, lane_assignment[i] - 2));
	  l2 = TREE_INT_CST_LOW (VECTOR_CST_ELT (s2, lane_assignment[i] - 2));
	}

      l1 %= nelts;
      l2 %= nelts;
      seq1_v_1_stmt_sel_perm.quick_push (l1 + (use_seq1 ? 0 : nelts));
      seq1_v_2_stmt_sel_perm.quick_push (l2 + (use_seq1 ? 0 : nelts));
    }

  /* Both rewritten input permutes must also be supported by the target.  */
  seq1_v_1_stmt_indices->new_vector (seq1_v_1_stmt_sel_perm, 2, nelts);
  vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_1_stmt));
  vmode = TYPE_MODE (vectype);
  if (!can_vec_perm_const_p (vmode, vmode, *seq1_v_1_stmt_indices, false))
    return false;

  seq1_v_2_stmt_indices->new_vector (seq1_v_2_stmt_sel_perm, 2, nelts);
  vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_2_stmt));
  vmode = TYPE_MODE (vectype);
  if (!can_vec_perm_const_p (vmode, vmode, *seq1_v_2_stmt_indices, false))
    return false;

  return true;
}
    5008              : 
    5009              : /* Blend the two given simplifiable vec permute sequences using the
    5010              :    given permutations.  */
    5011              : 
    5012              : static void
    5013           37 : blend_vec_perm_simplify_seqs (vec_perm_simplify_seq seq1,
    5014              :                               vec_perm_simplify_seq seq2,
    5015              :                               const vec_perm_indices &seq2_stmt_indices,
    5016              :                               const vec_perm_indices &seq1_v_1_stmt_indices,
    5017              :                               const vec_perm_indices &seq1_v_2_stmt_indices)
    5018              : {
    5019              :   /* We don't need to adjust seq1->stmt because its lanes consumption
    5020              :      was already narrowed before entering this function.  */
    5021              : 
    5022              :   /* Adjust seq2->stmt: copy RHS1/RHS2 from seq1->stmt and set new sel.  */
    5023           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5024              :     {
    5025           11 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    5026           11 :       fprintf (dump_file, "Old stmt: ");
    5027           11 :       print_gimple_stmt (dump_file, seq2->stmt, 0);
    5028              :     }
    5029              : 
    5030           37 :   gimple_assign_set_rhs1 (seq2->stmt, gimple_assign_rhs1 (seq1->stmt));
    5031           74 :   gimple_assign_set_rhs2 (seq2->stmt, gimple_assign_rhs2 (seq1->stmt));
    5032           37 :   tree vectype = TREE_TYPE (gimple_assign_lhs (seq2->stmt));
    5033           37 :   tree sel = vect_gen_perm_mask_checked (vectype, seq2_stmt_indices);
    5034           37 :   gimple_assign_set_rhs3 (seq2->stmt, sel);
    5035           37 :   update_stmt (seq2->stmt);
    5036              : 
    5037           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5038              :     {
    5039           11 :       fprintf (dump_file, "New stmt: ");
    5040           11 :       print_gimple_stmt (dump_file, seq2->stmt, 0);
    5041              :     }
    5042              : 
    5043              :   /* Adjust seq1->v_1_stmt: copy RHS2 from seq2->v_1_stmt and set new sel.  */
    5044           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5045              :     {
    5046           11 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    5047           11 :       fprintf (dump_file, "Old stmt: ");
    5048           11 :       print_gimple_stmt (dump_file, seq1->v_1_stmt, 0);
    5049              :     }
    5050              : 
    5051           37 :   gimple_assign_set_rhs2 (seq1->v_1_stmt, gimple_assign_rhs1 (seq2->v_1_stmt));
    5052           37 :   vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_1_stmt));
    5053           37 :   sel = vect_gen_perm_mask_checked (vectype, seq1_v_1_stmt_indices);
    5054           37 :   gimple_assign_set_rhs3 (seq1->v_1_stmt, sel);
    5055           37 :   update_stmt (seq1->v_1_stmt);
    5056              : 
    5057           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5058              :     {
    5059           11 :       fprintf (dump_file, "New stmt: ");
    5060           11 :       print_gimple_stmt (dump_file, seq1->v_1_stmt, 0);
    5061              :     }
    5062              : 
    5063              :   /* Adjust seq1->v_2_stmt: copy RHS2 from seq2->v_2_stmt and set new sel.  */
    5064           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5065              :     {
    5066           11 :       fprintf (dump_file, "Updating VEC_PERM statement:\n");
    5067           11 :       fprintf (dump_file, "Old stmt: ");
    5068           11 :       print_gimple_stmt (dump_file, seq1->v_2_stmt, 0);
    5069              :     }
    5070              : 
    5071           37 :   gimple_assign_set_rhs2 (seq1->v_2_stmt, gimple_assign_rhs1 (seq2->v_2_stmt));
    5072           37 :   vectype = TREE_TYPE (gimple_assign_lhs (seq1->v_2_stmt));
    5073           37 :   sel = vect_gen_perm_mask_checked (vectype, seq1_v_2_stmt_indices);
    5074           37 :   gimple_assign_set_rhs3 (seq1->v_2_stmt, sel);
    5075           37 :   update_stmt (seq1->v_2_stmt);
    5076              : 
    5077           37 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5078              :     {
    5079           11 :       fprintf (dump_file, "New stmt: ");
    5080           11 :       print_gimple_stmt (dump_file, seq1->v_2_stmt, 0);
    5081              :     }
    5082              : 
    5083              :   /* At this point, we have four unmodified seq2 stmts, which will be
    5084              :      eliminated by DCE.  */
    5085              : 
    5086           37 :   if (dump_file)
    5087           11 :     fprintf (dump_file, "Vec perm simplify sequences have been blended.\n\n");
    5088           37 : }
    5089              : 
    5090              : /* Try to blend narrowed vec_perm_simplify_seqs pairwise.
    5091              :    The provided list will be empty after this call.  */
    5092              : 
    5093              : static void
    5094    313706756 : process_vec_perm_simplify_seq_list (vec<vec_perm_simplify_seq> *l)
    5095              : {
    5096    313706756 :   unsigned int i, j;
    5097    313706756 :   vec_perm_simplify_seq seq1, seq2;
    5098              : 
    5099    313706756 :   if (l->is_empty ())
    5100    313706711 :     return;
    5101              : 
    5102           45 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5103           13 :     fprintf (dump_file, "\nProcessing %u vec perm simplify sequences.\n",
    5104              :              l->length ());
    5105              : 
    5106          109 :   FOR_EACH_VEC_ELT (*l, i, seq1)
    5107              :     {
    5108           64 :       if (i + 1 < l->length ())
    5109              :         {
    5110           51 :           FOR_EACH_VEC_ELT_FROM (*l, j, seq2, i + 1)
    5111              :             {
    5112           47 :               bool swap = false;
    5113           47 :               if (can_blend_vec_perm_simplify_seqs_p (seq1, seq2, &swap))
    5114              :                 {
    5115           37 :                   vec_perm_indices seq2_stmt_indices;
    5116           37 :                   vec_perm_indices seq1_v_1_stmt_indices;
    5117           37 :                   vec_perm_indices seq1_v_2_stmt_indices;
    5118          111 :                   if (calc_perm_vec_perm_simplify_seqs (swap ? seq2 : seq1,
    5119              :                                                         swap ? seq1 : seq2,
    5120              :                                                         &seq2_stmt_indices,
    5121              :                                                         &seq1_v_1_stmt_indices,
    5122              :                                                         &seq1_v_2_stmt_indices))
    5123              :                     {
    5124              :                       /* Narrow lane usage.  */
    5125           37 :                       narrow_vec_perm_simplify_seq (seq1);
    5126           37 :                       narrow_vec_perm_simplify_seq (seq2);
    5127              : 
    5128              :                       /* Blend sequences.  */
    5129           37 :                       blend_vec_perm_simplify_seqs (swap ? seq2 : seq1,
    5130              :                                                     swap ? seq1 : seq2,
    5131              :                                                     seq2_stmt_indices,
    5132              :                                                     seq1_v_1_stmt_indices,
    5133              :                                                     seq1_v_2_stmt_indices);
    5134              : 
    5135              :                       /* We can use unordered_remove as we break the loop.  */
    5136           37 :                       l->unordered_remove (j);
    5137           37 :                       XDELETE (seq2);
    5138           37 :                       break;
    5139              :                     }
    5140           37 :                 }
    5141              :             }
    5142              :         }
    5143              : 
    5144              :       /* We don't need to call l->remove for seq1.  */
    5145           64 :       XDELETE (seq1);
    5146              :     }
    5147              : 
    5148           45 :   l->truncate (0);
    5149              : }
    5150              : 
    5151              : static void
    5152          101 : append_vec_perm_simplify_seq_list (vec<vec_perm_simplify_seq> *l,
    5153              :                                    const vec_perm_simplify_seq &seq)
    5154              : {
    5155              :   /* If no space on list left, then process the list.  */
    5156          101 :   if (!l->space (1))
    5157            0 :       process_vec_perm_simplify_seq_list (l);
    5158              : 
    5159          101 :   l->quick_push (seq);
    5160          101 : }
    5161              : 
    5162              : /* Main entry point for the forward propagation and statement combine
    5163              :    optimizer.  */
    5164              : 
    5165              : namespace {
    5166              : 
/* Pass descriptor for forward propagation; field meanings are defined
   by struct pass_data (see tree-pass.h).  */
const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
    5179              : 
class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt), last_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_forwprop (m_ctxt); }
  /* Pass parameter 0 controls m_full_walk, parameter 1 controls last_p
     (see the member comments below).  */
  void set_pass_param (unsigned int n, bool param) final override
    {
      switch (n)
	{
	  case 0:
	    m_full_walk = param;
	    break;
	  case 1:
	    last_p = param;
	    break;
	  default:
	  gcc_unreachable();
	}
    }
  /* Run only when tree forward propagation is enabled.  */
  bool gate (function *) final override { return flag_tree_forwprop; }
  unsigned int execute (function *) final override;

 private:
  /* Determines whether the pass instance should set PROP_last_full_fold.  */
  bool last_p;

  /* True if the aggregate props are doing a full walk or not.  */
  bool m_full_walk = false;
}; // class pass_forwprop
    5213              : 
/* Attempt to make the BB block of __builtin_unreachable unreachable by changing
    5215              :    the incoming jumps.  Return true if at least one jump was changed.  */
    5216              : 
    5217              : static bool
    5218         2252 : optimize_unreachable (basic_block bb)
    5219              : {
    5220         2252 :   gimple_stmt_iterator gsi;
    5221         2252 :   gimple *stmt;
    5222         2252 :   edge_iterator ei;
    5223         2252 :   edge e;
    5224         2252 :   bool ret;
    5225              : 
    5226         2252 :   ret = false;
    5227         5186 :   FOR_EACH_EDGE (e, ei, bb->preds)
    5228              :     {
    5229         2934 :       gsi = gsi_last_bb (e->src);
    5230         2934 :       if (gsi_end_p (gsi))
    5231          330 :         continue;
    5232              : 
    5233         2604 :       stmt = gsi_stmt (gsi);
    5234         2604 :       if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    5235              :         {
    5236              :           /* If the condition is already true/false
    5237              :              ignore it. This can happen during copy prop of forwprop. */
    5238          659 :           if (gimple_cond_true_p (cond_stmt)
    5239          651 :               || gimple_cond_false_p (cond_stmt))
    5240            8 :             continue;
    5241          643 :           else if (e->flags & EDGE_TRUE_VALUE)
    5242          561 :             gimple_cond_make_false (cond_stmt);
    5243           82 :           else if (e->flags & EDGE_FALSE_VALUE)
    5244           82 :             gimple_cond_make_true (cond_stmt);
    5245              :           else
    5246            0 :             gcc_unreachable ();
    5247          643 :           update_stmt (cond_stmt);
    5248              :         }
    5249              :       else
    5250              :         {
    5251              :           /* Todo: handle other cases.  Note that unreachable switch case
    5252              :              statements have already been removed.  */
    5253         1953 :           continue;
    5254              :         }
    5255              : 
    5256          643 :       ret = true;
    5257              :     }
    5258              : 
    5259         2252 :   return ret;
    5260              : }
    5261              : 
    5262              : unsigned int
    5263      5565823 : pass_forwprop::execute (function *fun)
    5264              : {
    5265      5565823 :   unsigned int todoflags = 0;
    5266              :   /* Handle a full walk only when expensive optimizations are on.  */
    5267      5565823 :   bool full_walk = m_full_walk && flag_expensive_optimizations;
    5268              : 
    5269      5565823 :   cfg_changed = false;
    5270      5565823 :   if (last_p)
    5271      1045506 :     fun->curr_properties |= PROP_last_full_fold;
    5272              : 
    5273      5565823 :   calculate_dominance_info (CDI_DOMINATORS);
    5274              : 
    5275              :   /* Combine stmts with the stmts defining their operands.  Do that
    5276              :      in an order that guarantees visiting SSA defs before SSA uses.  */
    5277     11131646 :   lattice.create (num_ssa_names);
    5278     11131646 :   lattice.quick_grow_cleared (num_ssa_names);
    5279      5565823 :   int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
    5280      5565823 :   int postorder_num = pre_and_rev_post_order_compute_fn (fun, NULL,
    5281              :                                                          postorder, false);
    5282      5565823 :   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fun));
    5283     50082409 :   for (int i = 0; i < postorder_num; ++i)
    5284              :     {
    5285     44516586 :       bb_to_rpo[postorder[i]] = i;
    5286     44516586 :       edge_iterator ei;
    5287     44516586 :       edge e;
    5288    107161504 :       FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fun, postorder[i])->succs)
    5289     62644918 :         e->flags &= ~EDGE_EXECUTABLE;
    5290              :     }
    5291      5565823 :   single_succ_edge (BASIC_BLOCK_FOR_FN (fun, ENTRY_BLOCK))->flags
    5292      5565823 :     |= EDGE_EXECUTABLE;
    5293      5565823 :   auto_vec<gimple *, 4> to_fixup;
    5294      5565823 :   auto_vec<gimple *, 32> to_remove;
    5295      5565823 :   auto_vec<unsigned, 32> to_remove_defs;
    5296      5565823 :   auto_vec<std::pair<int, int>, 10> edges_to_remove;
    5297      5565823 :   auto_bitmap simple_dce_worklist;
    5298      5565823 :   auto_bitmap need_ab_cleanup;
    5299      5565823 :   to_purge = BITMAP_ALLOC (NULL);
    5300      5565823 :   auto_vec<vec_perm_simplify_seq, 8> vec_perm_simplify_seq_list;
    5301     50082409 :   for (int i = 0; i < postorder_num; ++i)
    5302              :     {
    5303     44516586 :       gimple_stmt_iterator gsi;
    5304     44516586 :       basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
    5305     44516586 :       edge_iterator ei;
    5306     44516586 :       edge e;
    5307              : 
    5308              :       /* Skip processing not executable blocks.  We could improve
    5309              :          single_use tracking by at least unlinking uses from unreachable
    5310              :          blocks but since blocks with uses are not processed in a
    5311              :          meaningful order this is probably not worth it.  */
    5312     44516586 :       bool any = false;
    5313     45647880 :       FOR_EACH_EDGE (e, ei, bb->preds)
    5314              :         {
    5315     45633589 :           if ((e->flags & EDGE_EXECUTABLE)
    5316              :               /* We can handle backedges in natural loops correctly but
    5317              :                  for irreducible regions we have to take all backedges
    5318              :                  conservatively when we did not visit the source yet.  */
    5319     45633589 :               || (bb_to_rpo[e->src->index] > i
    5320       660218 :                   && !dominated_by_p (CDI_DOMINATORS, e->src, e->dest)))
    5321              :             {
    5322              :               any = true;
    5323              :               break;
    5324              :             }
    5325              :         }
    5326     44516586 :       if (!any)
    5327        14902 :         continue;
    5328              : 
    5329              :       /* Remove conditions that go directly to unreachable when this is the last forwprop.  */
    5330     44502295 :       if (last_p
    5331      9733891 :           && !(flag_sanitize & SANITIZE_UNREACHABLE))
    5332              :         {
    5333      9728921 :           gimple_stmt_iterator gsi;
    5334      9728921 :           gsi = gsi_start_nondebug_after_labels_bb (bb);
    5335      9729532 :           if (!gsi_end_p (gsi)
    5336      8910397 :               && gimple_call_builtin_p (*gsi, BUILT_IN_UNREACHABLE)
    5337      9731173 :               && optimize_unreachable (bb))
    5338              :             {
    5339          611 :               cfg_changed = true;
    5340          611 :               continue;
    5341              :             }
    5342              :         }
    5343              : 
    5344              :       /* Record degenerate PHIs in the lattice.  */
    5345     60246924 :       for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
    5346     15745240 :            gsi_next (&si))
    5347              :         {
    5348     15745240 :           gphi *phi = si.phi ();
    5349     15745240 :           tree res = gimple_phi_result (phi);
    5350     31490480 :           if (virtual_operand_p (res))
    5351      7221707 :             continue;
    5352              : 
    5353      8523533 :           tree first = NULL_TREE;
    5354      8523533 :           bool all_same = true;
    5355      8523533 :           edge_iterator ei;
    5356      8523533 :           edge e;
    5357     17526326 :           FOR_EACH_EDGE (e, ei, bb->preds)
    5358              :             {
    5359              :               /* Ignore not executable forward edges.  */
    5360     17306708 :               if (!(e->flags & EDGE_EXECUTABLE))
    5361              :                 {
    5362      4005699 :                   if (bb_to_rpo[e->src->index] < i)
    5363         5535 :                     continue;
    5364              :                   /* Avoid equivalences from backedges - while we might
    5365              :                      be able to make irreducible regions reducible and
    5366              :                      thus turning a back into a forward edge we do not
    5367              :                      want to deal with the intermediate SSA issues that
    5368              :                      exposes.  */
    5369              :                   all_same = false;
    5370              :                 }
    5371     17301173 :               tree use = PHI_ARG_DEF_FROM_EDGE (phi, e);
    5372     17301173 :               if (use == res)
    5373              :                 /* The PHI result can also appear on a backedge, if so
    5374              :                    we can ignore this case for the purpose of determining
    5375              :                    the singular value.  */
    5376              :                 ;
    5377     17288603 :               else if (! first)
    5378              :                 first = use;
    5379      8765070 :               else if (! operand_equal_p (first, use, 0))
    5380              :                 {
    5381              :                   all_same = false;
    5382              :                   break;
    5383              :                 }
    5384              :             }
    5385      8523533 :           if (all_same)
    5386              :             {
    5387       214883 :               if (may_propagate_copy (res, first))
    5388       214394 :                 to_remove_defs.safe_push (SSA_NAME_VERSION (res));
    5389       214883 :               fwprop_set_lattice_val (res, first);
    5390              :             }
    5391              :         }
    5392              : 
    5393              :       /* Apply forward propagation to all stmts in the basic-block.
    5394              :          Note we update GSI within the loop as necessary.  */
    5395     44501684 :       unsigned int uid = 1;
    5396    423827771 :       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    5397              :         {
    5398    334824403 :           gimple *stmt = gsi_stmt (gsi);
    5399    334824403 :           tree lhs, rhs;
    5400    334824403 :           enum tree_code code;
    5401              : 
    5402    334824403 :           gimple_set_uid (stmt, uid++);
    5403              : 
    5404    334824403 :           if (!is_gimple_assign (stmt))
    5405              :             {
    5406    231462237 :               process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
    5407    231462237 :               gsi_next (&gsi);
    5408    231462237 :               continue;
    5409              :             }
    5410              : 
    5411    103362166 :           lhs = gimple_assign_lhs (stmt);
    5412    103362166 :           rhs = gimple_assign_rhs1 (stmt);
    5413    103362166 :           code = gimple_assign_rhs_code (stmt);
    5414              : 
    5415    141105001 :           if (TREE_CODE (lhs) != SSA_NAME
    5416    103362166 :               || has_zero_uses (lhs))
    5417              :             {
    5418     37742835 :               process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
    5419     37742835 :               gsi_next (&gsi);
    5420     37742835 :               continue;
    5421              :             }
    5422              : 
    5423              :           /* If this statement sets an SSA_NAME to an address,
    5424              :              try to propagate the address into the uses of the SSA_NAME.  */
    5425     65619331 :           if ((code == ADDR_EXPR
    5426              :                /* Handle pointer conversions on invariant addresses
    5427              :                   as well, as this is valid gimple.  */
    5428     63365609 :                || (CONVERT_EXPR_CODE_P (code)
    5429      8729866 :                    && TREE_CODE (rhs) == ADDR_EXPR
    5430       350023 :                    && POINTER_TYPE_P (TREE_TYPE (lhs))))
    5431     65619555 :               && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
    5432              :             {
    5433      2253292 :               tree base = get_base_address (TREE_OPERAND (rhs, 0));
    5434      2253292 :               if ((!base
    5435      2253292 :                    || !DECL_P (base)
    5436       131990 :                    || decl_address_invariant_p (base))
    5437      2253292 :                   && !stmt_references_abnormal_ssa_name (stmt)
    5438      4506568 :                   && forward_propagate_addr_expr (lhs, rhs, true))
    5439              :                 {
    5440       456859 :                   fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    5441       456859 :                   release_defs (stmt);
    5442       456859 :                   gsi_remove (&gsi, true);
    5443              :                 }
    5444              :               else
    5445      1796433 :                 gsi_next (&gsi);
    5446              :             }
    5447     63366039 :           else if (code == POINTER_PLUS_EXPR)
    5448              :             {
    5449      3567452 :               tree off = gimple_assign_rhs2 (stmt);
    5450      3567452 :               if (TREE_CODE (off) == INTEGER_CST
    5451      1095351 :                   && can_propagate_from (stmt)
    5452      1094998 :                   && !simple_iv_increment_p (stmt)
    5453              :                   /* ???  Better adjust the interface to that function
    5454              :                      instead of building new trees here.  */
    5455      4375599 :                   && forward_propagate_addr_expr
    5456      2424441 :                        (lhs,
    5457              :                         build1_loc (gimple_location (stmt),
    5458       808147 :                                     ADDR_EXPR, TREE_TYPE (rhs),
    5459       808147 :                                     fold_build2 (MEM_REF,
    5460              :                                                  TREE_TYPE (TREE_TYPE (rhs)),
    5461              :                                                  rhs,
    5462              :                                                  fold_convert (ptr_type_node,
    5463              :                                                                off))), true))
    5464              :                 {
    5465       311813 :                   fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    5466       311813 :                   release_defs (stmt);
    5467       311813 :                   gsi_remove (&gsi, true);
    5468              :                 }
    5469      3255639 :               else if (is_gimple_min_invariant (rhs))
    5470              :                 {
    5471              :                   /* Make sure to fold &a[0] + off_1 here.  */
    5472       404242 :                   fold_stmt_inplace (&gsi);
    5473       404242 :                   update_stmt (stmt);
    5474       404242 :                   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    5475       404224 :                     gsi_next (&gsi);
    5476              :                 }
    5477              :               else
    5478      2851397 :                 gsi_next (&gsi);
    5479              :             }
    5480     59798587 :           else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
    5481       211865 :                    && gimple_assign_load_p (stmt)
    5482       134479 :                    && !gimple_has_volatile_ops (stmt)
    5483        40404 :                    && TREE_CODE (rhs) != TARGET_MEM_REF
    5484        40375 :                    && TREE_CODE (rhs) != BIT_FIELD_REF
    5485     59838958 :                    && !stmt_can_throw_internal (fun, stmt))
    5486              :             {
    5487              :               /* Rewrite loads used only in real/imagpart extractions to
    5488              :                  component-wise loads.  */
    5489        40246 :               use_operand_p use_p;
    5490        40246 :               imm_use_iterator iter;
    5491        40246 :               tree vuse = gimple_vuse (stmt);
    5492        40246 :               bool rewrite = true;
    5493        85249 :               FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
    5494              :                 {
    5495        42918 :                   gimple *use_stmt = USE_STMT (use_p);
    5496        42918 :                   if (is_gimple_debug (use_stmt))
    5497          691 :                     continue;
    5498        42227 :                   if (!is_gimple_assign (use_stmt)
    5499        27602 :                       || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
    5500        25554 :                           && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
    5501        46293 :                       || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
    5502              :                     {
    5503              :                       rewrite = false;
    5504              :                       break;
    5505              :                     }
    5506        40246 :                 }
    5507        40246 :               if (rewrite)
    5508              :                 {
    5509         2085 :                   gimple *use_stmt;
    5510         8661 :                   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
    5511              :                     {
    5512         4491 :                       if (is_gimple_debug (use_stmt))
    5513              :                         {
    5514          454 :                           if (gimple_debug_bind_p (use_stmt))
    5515              :                             {
    5516          454 :                               gimple_debug_bind_reset_value (use_stmt);
    5517          454 :                               update_stmt (use_stmt);
    5518              :                             }
    5519          454 :                           continue;
    5520              :                         }
    5521              : 
    5522         8074 :                       tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
    5523         4037 :                                              TREE_TYPE (TREE_TYPE (rhs)),
    5524              :                                              unshare_expr (rhs));
    5525         4037 :                       gimple *new_stmt
    5526         4037 :                         = gimple_build_assign (gimple_assign_lhs (use_stmt),
    5527              :                                                new_rhs);
    5528              : 
    5529         4037 :                       location_t loc = gimple_location (use_stmt);
    5530         4037 :                       gimple_set_location (new_stmt, loc);
    5531         4037 :                       gimple_set_vuse (new_stmt, vuse);
    5532         4037 :                       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5533         4037 :                       unlink_stmt_vdef (use_stmt);
    5534         4037 :                       gsi_remove (&gsi2, true);
    5535              : 
    5536         4037 :                       gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
    5537         2085 :                     }
    5538              : 
    5539         2085 :                   release_defs (stmt);
    5540         2085 :                   gsi_remove (&gsi, true);
    5541              :                 }
    5542              :               else
    5543        38161 :                 gsi_next (&gsi);
    5544              :             }
    5545     59758341 :           else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
    5546      1692818 :                    && (TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
    5547              :                        /* After vector lowering rewrite all loads, but
    5548              :                           initially do not since this conflicts with
    5549              :                           vector CONSTRUCTOR to shuffle optimization.  */
    5550      1670836 :                        || (fun->curr_properties & PROP_gimple_lvec))
    5551       902232 :                    && gimple_assign_load_p (stmt)
    5552       302689 :                    && !gimple_has_volatile_ops (stmt)
    5553       288820 :                    && !stmt_can_throw_internal (fun, stmt)
    5554     60047161 :                    && (!VAR_P (rhs) || !DECL_HARD_REGISTER (rhs)))
    5555       288318 :             optimize_vector_load (&gsi);
    5556              : 
    5557     59470023 :           else if (code == COMPLEX_EXPR)
    5558              :             {
    5559              :               /* Rewrite stores of a single-use complex build expression
    5560              :                  to component-wise stores.  */
    5561        36580 :               use_operand_p use_p;
    5562        36580 :               gimple *use_stmt, *def1, *def2;
    5563        36580 :               tree rhs2;
    5564        36580 :               if (single_imm_use (lhs, &use_p, &use_stmt)
    5565        34420 :                   && gimple_store_p (use_stmt)
    5566        41038 :                   && !gimple_has_volatile_ops (use_stmt)
    5567         2603 :                   && is_gimple_assign (use_stmt)
    5568         2599 :                   && (TREE_CODE (TREE_TYPE (gimple_assign_lhs (use_stmt)))
    5569              :                       == COMPLEX_TYPE)
    5570        39174 :                   && (TREE_CODE (gimple_assign_lhs (use_stmt))
    5571              :                       != TARGET_MEM_REF))
    5572              :                 {
    5573         2590 :                   tree use_lhs = gimple_assign_lhs (use_stmt);
    5574         2590 :                   if (auto_var_p (use_lhs))
    5575          601 :                     DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
    5576         5180 :                   tree new_lhs = build1 (REALPART_EXPR,
    5577         2590 :                                          TREE_TYPE (TREE_TYPE (use_lhs)),
    5578              :                                          unshare_expr (use_lhs));
    5579         2590 :                   gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
    5580         2590 :                   location_t loc = gimple_location (use_stmt);
    5581         2590 :                   gimple_set_location (new_stmt, loc);
    5582         5180 :                   gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
    5583         2590 :                   gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (fun)));
    5584         5180 :                   SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
    5585         5180 :                   gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
    5586         2590 :                   gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5587         2590 :                   gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
    5588              : 
    5589         5180 :                   new_lhs = build1 (IMAGPART_EXPR,
    5590         2590 :                                     TREE_TYPE (TREE_TYPE (use_lhs)),
    5591              :                                     unshare_expr (use_lhs));
    5592         2590 :                   gimple_assign_set_lhs (use_stmt, new_lhs);
    5593         2590 :                   gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
    5594         2590 :                   update_stmt (use_stmt);
    5595              : 
    5596         2590 :                   release_defs (stmt);
    5597         2590 :                   gsi_remove (&gsi, true);
    5598              :                 }
    5599              :               /* Rewrite a component-wise load of a complex to a complex
    5600              :                  load if the components are not used separately.  */
    5601        33990 :               else if (TREE_CODE (rhs) == SSA_NAME
    5602        33549 :                        && has_single_use (rhs)
    5603        30061 :                        && ((rhs2 = gimple_assign_rhs2 (stmt)), true)
    5604        30061 :                        && TREE_CODE (rhs2) == SSA_NAME
    5605        28316 :                        && has_single_use (rhs2)
    5606        27895 :                        && (def1 = SSA_NAME_DEF_STMT (rhs),
    5607        27895 :                            gimple_assign_load_p (def1))
    5608         1097 :                        && (def2 = SSA_NAME_DEF_STMT (rhs2),
    5609         1097 :                            gimple_assign_load_p (def2))
    5610         1606 :                        && (gimple_vuse (def1) == gimple_vuse (def2))
    5611          800 :                        && !gimple_has_volatile_ops (def1)
    5612          800 :                        && !gimple_has_volatile_ops (def2)
    5613          800 :                        && !stmt_can_throw_internal (fun, def1)
    5614          800 :                        && !stmt_can_throw_internal (fun, def2)
    5615          800 :                        && gimple_assign_rhs_code (def1) == REALPART_EXPR
    5616          542 :                        && gimple_assign_rhs_code (def2) == IMAGPART_EXPR
    5617        34532 :                        && operand_equal_p (TREE_OPERAND (gimple_assign_rhs1
    5618              :                                                                  (def1), 0),
    5619          542 :                                            TREE_OPERAND (gimple_assign_rhs1
    5620              :                                                                  (def2), 0)))
    5621              :                 {
    5622          542 :                   tree cl = TREE_OPERAND (gimple_assign_rhs1 (def1), 0);
    5623          542 :                   gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (cl));
    5624          542 :                   gcc_assert (gsi_stmt (gsi) == stmt);
    5625         1084 :                   gimple_set_vuse (stmt, gimple_vuse (def1));
    5626          542 :                   gimple_set_modified (stmt, true);
    5627          542 :                   gimple_stmt_iterator gsi2 = gsi_for_stmt (def1);
    5628          542 :                   gsi_remove (&gsi, false);
    5629          542 :                   gsi_insert_after (&gsi2, stmt, GSI_SAME_STMT);
    5630              :                 }
    5631              :               else
    5632        33448 :                 gsi_next (&gsi);
    5633              :             }
    5634     59433443 :           else if (code == CONSTRUCTOR
    5635       154873 :                    && VECTOR_TYPE_P (TREE_TYPE (rhs))
    5636       154873 :                    && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
    5637         3443 :                    && CONSTRUCTOR_NELTS (rhs) > 0
    5638     59436886 :                    && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
    5639         1050 :                        || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
    5640              :                            != BLKmode)))
    5641              :             {
    5642              :               /* Rewrite stores of a single-use vector constructors
    5643              :                  to component-wise stores if the mode isn't supported.  */
    5644         3348 :               use_operand_p use_p;
    5645         3348 :               gimple *use_stmt;
    5646         3348 :               if (single_imm_use (lhs, &use_p, &use_stmt)
    5647         2911 :                   && gimple_store_p (use_stmt)
    5648         2956 :                   && !gimple_has_volatile_ops (use_stmt)
    5649         1472 :                   && !stmt_can_throw_internal (fun, use_stmt)
    5650         4810 :                   && is_gimple_assign (use_stmt))
    5651              :                 {
    5652         1462 :                   tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
    5653         1462 :                   unsigned HOST_WIDE_INT elt_w
    5654         1462 :                     = tree_to_uhwi (TYPE_SIZE (elt_t));
    5655         1462 :                   unsigned HOST_WIDE_INT n
    5656         1462 :                     = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
    5657         1462 :                   tree use_lhs = gimple_assign_lhs (use_stmt);
    5658         1462 :                   if (auto_var_p (use_lhs))
    5659          554 :                     DECL_NOT_GIMPLE_REG_P (use_lhs) = 1;
    5660          908 :                   else if (TREE_CODE (use_lhs) == TARGET_MEM_REF)
    5661              :                     {
    5662            1 :                       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5663            1 :                       use_lhs = prepare_target_mem_ref_lvalue (use_lhs, &gsi2);
    5664              :                     }
    5665        32844 :                   for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
    5666              :                     {
    5667        31382 :                       unsigned HOST_WIDE_INT ci = bi / elt_w;
    5668        31382 :                       tree new_rhs;
    5669        31382 :                       if (ci < CONSTRUCTOR_NELTS (rhs))
    5670        30764 :                         new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
    5671              :                       else
    5672          618 :                         new_rhs = build_zero_cst (elt_t);
    5673        31382 :                       tree new_lhs = build3 (BIT_FIELD_REF,
    5674              :                                              elt_t,
    5675              :                                              unshare_expr (use_lhs),
    5676        31382 :                                              bitsize_int (elt_w),
    5677        31382 :                                              bitsize_int (bi));
    5678        31382 :                       gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
    5679        31382 :                       location_t loc = gimple_location (use_stmt);
    5680        31382 :                       gimple_set_location (new_stmt, loc);
    5681        62764 :                       gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
    5682        31382 :                       gimple_set_vdef (new_stmt,
    5683              :                                        make_ssa_name (gimple_vop (fun)));
    5684        62764 :                       SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
    5685        62764 :                       gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
    5686        31382 :                       gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5687        31382 :                       gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
    5688              :                     }
    5689         1462 :                   gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
    5690         1462 :                   unlink_stmt_vdef (use_stmt);
    5691         1462 :                   release_defs (use_stmt);
    5692         1462 :                   gsi_remove (&gsi2, true);
    5693         1462 :                   release_defs (stmt);
    5694         1462 :                   gsi_remove (&gsi, true);
    5695              :                 }
    5696              :               else
    5697         1886 :                 gsi_next (&gsi);
    5698              :             }
    5699     59430095 :           else if (code == VEC_PERM_EXPR)
    5700              :             {
    5701              :               /* Find vectorized sequences, where we can reduce the lane
    5702              :                  utilization.  The narrowing will be donw later and only
    5703              :                  if we find a pair of sequences that can be blended.  */
    5704       182827 :               gassign *assign = dyn_cast <gassign *> (stmt);
    5705       182827 :               vec_perm_simplify_seq seq;
    5706       182827 :               if (recognise_vec_perm_simplify_seq (assign, &seq))
    5707          101 :                 append_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list,
    5708              :                                                    seq);
    5709              : 
    5710       182827 :               gsi_next (&gsi);
    5711              :           }
    5712              :           else
    5713     59247268 :             gsi_next (&gsi);
    5714              :         }
    5715              : 
    5716     44501684 :       process_vec_perm_simplify_seq_list (&vec_perm_simplify_seq_list);
    5717              : 
    5718              :       /* Combine stmts with the stmts defining their operands.
    5719              :          Note we update GSI within the loop as necessary.  */
    5720    423481671 :       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    5721              :         {
    5722    334478303 :           gimple *stmt = gsi_stmt (gsi);
    5723              : 
    5724              :           /* Mark stmt as potentially needing revisiting.  */
    5725    334478303 :           gimple_set_plf (stmt, GF_PLF_1, false);
    5726              : 
    5727    334478303 :           bool can_make_abnormal_goto = (is_gimple_call (stmt)
    5728    334478303 :                                          && stmt_can_make_abnormal_goto (stmt));
    5729              : 
    5730              :           /* Substitute from our lattice.  We need to do so only once.  */
    5731    334478303 :           bool substituted_p = false;
    5732    334478303 :           use_operand_p usep;
    5733    334478303 :           ssa_op_iter iter;
    5734    494308823 :           FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
    5735              :             {
    5736    159830520 :               tree use = USE_FROM_PTR (usep);
    5737    159830520 :               tree val = fwprop_ssa_val (use);
    5738    159830520 :               if (val && val != use)
    5739              :                 {
    5740      1862948 :                   if (!is_gimple_debug (stmt))
    5741      1548348 :                     bitmap_set_bit (simple_dce_worklist, SSA_NAME_VERSION (use));
    5742      1862948 :                   if (may_propagate_copy (use, val))
    5743              :                     {
    5744      1859765 :                       propagate_value (usep, val);
    5745      1859765 :                       substituted_p = true;
    5746              :                     }
    5747              :                 }
    5748              :             }
    5749    334478303 :           if (substituted_p)
    5750      1806221 :             update_stmt (stmt);
    5751      1806221 :           if (substituted_p
    5752      1806221 :               && is_gimple_assign (stmt)
    5753      1084912 :               && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    5754        20171 :             recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
    5755    334478303 :           if (substituted_p
    5756    334478303 :               && can_make_abnormal_goto
    5757    334478303 :               && !stmt_can_make_abnormal_goto (stmt))
    5758            3 :             bitmap_set_bit (need_ab_cleanup, bb->index);
    5759              : 
    5760    337271823 :           bool changed;
    5761    674543646 :           do
    5762              :             {
    5763    337271823 :               gimple *orig_stmt = stmt = gsi_stmt (gsi);
    5764    337271823 :               bool was_call = is_gimple_call (stmt);
    5765    337271823 :               bool was_noreturn = (was_call
    5766    337271823 :                                    && gimple_call_noreturn_p (stmt));
    5767    337271823 :               changed = false;
    5768              : 
    5769    337271823 :               auto_vec<tree, 8> uses;
    5770    500075836 :               FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
    5771    162804013 :                 if (uses.space (1))
    5772    162415969 :                   uses.quick_push (USE_FROM_PTR (usep));
    5773              : 
    5774    337271823 :               if (fold_stmt (&gsi, fwprop_ssa_val, simple_dce_worklist))
    5775              :                 {
    5776      2476819 :                   changed = true;
    5777              :                   /* There is no updating of the address
    5778              :                      taken after the last forwprop so update
    5779              :                      the addresses when a folding happened to a call.
    5780              :                      The va_* builtins can remove taking of the address so
    5781              :                      can the sincos->cexpi transformation.  See PR 39643 and PR 20983. */
    5782      2476819 :                   if (was_call && last_p)
    5783      2476819 :                     todoflags |= TODO_update_address_taken;
    5784      2476819 :                   stmt = gsi_stmt (gsi);
    5785              :                   /* Cleanup the CFG if we simplified a condition to
    5786              :                      true or false.  */
    5787      2476819 :                   if (gcond *cond = dyn_cast <gcond *> (stmt))
    5788       974429 :                     if (gimple_cond_true_p (cond)
    5789       974429 :                         || gimple_cond_false_p (cond))
    5790        15424 :                       cfg_changed = true;
    5791              :                   /* Queue old uses for simple DCE if not debug statement.  */
    5792      2476819 :                   if (!is_gimple_debug (stmt))
    5793     10477461 :                     for (tree use : uses)
    5794      3067533 :                       if (TREE_CODE (use) == SSA_NAME
    5795      3067533 :                           && !SSA_NAME_IS_DEFAULT_DEF (use))
    5796      2872118 :                         bitmap_set_bit (simple_dce_worklist,
    5797      2872118 :                                         SSA_NAME_VERSION (use));
    5798      2476819 :                   update_stmt (stmt);
    5799              :                 }
    5800              : 
    5801    337271823 :               switch (gimple_code (stmt))
    5802              :                 {
    5803    104362271 :                 case GIMPLE_ASSIGN:
    5804    104362271 :                   {
    5805    104362271 :                     tree rhs1 = gimple_assign_rhs1 (stmt);
    5806    104362271 :                     enum tree_code code = gimple_assign_rhs_code (stmt);
    5807    104362271 :                     if (gimple_clobber_p (stmt))
    5808      6825015 :                       do_simple_agr_dse (as_a<gassign*>(stmt), full_walk);
    5809     97537256 :                     else if (gimple_store_p (stmt))
    5810              :                       {
    5811     30295630 :                         optimize_aggr_zeroprop (stmt, full_walk);
    5812     30295630 :                         if (gimple_assign_load_p (stmt))
    5813      3698751 :                           optimize_agr_copyprop (stmt);
    5814              :                       }
    5815     67241626 :                     else if (TREE_CODE_CLASS (code) == tcc_comparison)
    5816      2543129 :                       changed |= forward_propagate_into_comparison (&gsi);
    5817     64698497 :                     else if ((code == PLUS_EXPR
    5818     64698497 :                               || code == BIT_IOR_EXPR
    5819     54717998 :                               || code == BIT_XOR_EXPR)
    5820     64828850 :                              && simplify_rotate (&gsi))
    5821              :                       changed = true;
    5822     64695823 :                     else if (code == VEC_PERM_EXPR)
    5823       185078 :                       changed |= simplify_permutation (&gsi);
    5824     64510745 :                     else if (code == CONSTRUCTOR
    5825     64510745 :                              && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
    5826       152785 :                       changed |= simplify_vector_constructor (&gsi);
    5827     64357960 :                     else if (code == ARRAY_REF)
    5828      1954349 :                       changed |= simplify_count_zeroes (&gsi);
    5829              :                     break;
    5830              :                   }
    5831              : 
    5832       103300 :                 case GIMPLE_SWITCH:
    5833       103300 :                   changed |= simplify_gimple_switch (as_a <gswitch *> (stmt),
    5834              :                                                      edges_to_remove,
    5835              :                                                      simple_dce_worklist);
    5836       103300 :                   break;
    5837              : 
    5838     19254666 :                 case GIMPLE_COND:
    5839     19254666 :                   {
    5840     19254666 :                     int did_something = forward_propagate_into_gimple_cond
    5841     19254666 :                                                         (as_a <gcond *> (stmt));
    5842     19254666 :                     if (did_something == 2)
    5843         1691 :                       cfg_changed = true;
    5844     19254666 :                     changed |= did_something != 0;
    5845     19254666 :                     break;
    5846              :                   }
    5847              : 
    5848     23219995 :                 case GIMPLE_CALL:
    5849     23219995 :                   {
    5850     23219995 :                     tree callee = gimple_call_fndecl (stmt);
    5851     23219995 :                     if (callee != NULL_TREE
    5852     23219995 :                         && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
    5853      6169916 :                       changed |= simplify_builtin_call (&gsi, callee, full_walk);
    5854              :                     break;
    5855              :                   }
    5856              : 
    5857    337269149 :                 default:;
    5858              :                 }
    5859              : 
    5860    337269149 :               if (changed || substituted_p)
    5861              :                 {
    5862      4068397 :                   substituted_p = false;
    5863      4068397 :                   stmt = gsi_stmt (gsi);
    5864      4068397 :                   if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
    5865           70 :                     bitmap_set_bit (to_purge, bb->index);
    5866      4068397 :                   if (!was_noreturn
    5867      4068397 :                       && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
    5868           12 :                     to_fixup.safe_push (stmt);
    5869              :                 }
    5870      4068397 :               if (changed)
    5871              :                 {
    5872              :                   /* If the stmt changed then re-visit it and the statements
    5873              :                      inserted before it.  */
    5874      8743566 :                   for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    5875      5544500 :                     if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
    5876              :                       break;
    5877      2793520 :                   if (gsi_end_p (gsi))
    5878       448086 :                     gsi = gsi_start_bb (bb);
    5879              :                   else
    5880      2569477 :                     gsi_next (&gsi);
    5881              :                 }
    5882    337271823 :             }
    5883              :           while (changed);
    5884              : 
    5885              :           /* Stmt no longer needs to be revisited.  */
    5886    334478303 :           stmt = gsi_stmt (gsi);
    5887    334478303 :           gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
    5888    334478303 :           gimple_set_plf (stmt, GF_PLF_1, true);
    5889              : 
    5890              :           /* Fill up the lattice.  */
    5891    334478303 :           if (gimple_assign_single_p (stmt))
    5892              :             {
    5893     68968853 :               tree lhs = gimple_assign_lhs (stmt);
    5894     68968853 :               tree rhs = gimple_assign_rhs1 (stmt);
    5895     68968853 :               if (TREE_CODE (lhs) == SSA_NAME)
    5896              :                 {
    5897     31861089 :                   tree val = lhs;
    5898     31861089 :                   if (TREE_CODE (rhs) == SSA_NAME)
    5899       787144 :                     val = fwprop_ssa_val (rhs);
    5900     31073945 :                   else if (is_gimple_min_invariant (rhs))
    5901       425136 :                     val = rhs;
    5902              :                   /* If we can propagate the lattice-value mark the
    5903              :                      stmt for removal.  */
    5904     31861089 :                   if (val != lhs
    5905     31861089 :                       && may_propagate_copy (lhs, val))
    5906      1208912 :                     to_remove_defs.safe_push (SSA_NAME_VERSION (lhs));
    5907     31861089 :                   fwprop_set_lattice_val (lhs, val);
    5908              :                 }
    5909              :             }
    5910    265509450 :           else if (gimple_nop_p (stmt))
    5911        88610 :             to_remove.safe_push (stmt);
    5912              :         }
    5913              : 
    5914              :       /* Substitute in destination PHI arguments.  */
    5915    107135834 :       FOR_EACH_EDGE (e, ei, bb->succs)
    5916     62634150 :         for (gphi_iterator gsi = gsi_start_phis (e->dest);
    5917    104263447 :              !gsi_end_p (gsi); gsi_next (&gsi))
    5918              :           {
    5919     41629297 :             gphi *phi = gsi.phi ();
    5920     41629297 :             use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
    5921     41629297 :             tree arg = USE_FROM_PTR (use_p);
    5922     68620715 :             if (TREE_CODE (arg) != SSA_NAME
    5923     41629297 :                 || virtual_operand_p (arg))
    5924     26991418 :               continue;
    5925     14637879 :             tree val = fwprop_ssa_val (arg);
    5926     14637879 :             if (val != arg
    5927     14637879 :                 && may_propagate_copy (arg, val, !(e->flags & EDGE_ABNORMAL)))
    5928       236922 :               propagate_value (use_p, val);
    5929              :           }
    5930              : 
    5931              :       /* Mark outgoing exectuable edges.  */
    5932     44501684 :       if (edge e = find_taken_edge (bb, NULL))
    5933              :         {
    5934     18879563 :           e->flags |= EDGE_EXECUTABLE;
    5935     44523009 :           if (EDGE_COUNT (bb->succs) > 1)
    5936        21325 :             cfg_changed = true;
    5937              :         }
    5938              :       else
    5939              :         {
    5940     69355382 :           FOR_EACH_EDGE (e, ei, bb->succs)
    5941     43733261 :             e->flags |= EDGE_EXECUTABLE;
    5942              :         }
    5943              :     }
    5944      5565823 :   free (postorder);
    5945      5565823 :   free (bb_to_rpo);
    5946      5565823 :   lattice.release ();
    5947              : 
    5948              :   /* First remove chains of stmts where we check no uses remain.  */
    5949      5565823 :   simple_dce_from_worklist (simple_dce_worklist, to_purge);
    5950              : 
    5951      5905289 :   auto remove = [](gimple *stmt)
    5952              :     {
    5953       339466 :       if (dump_file && (dump_flags & TDF_DETAILS))
    5954              :         {
    5955            1 :           fprintf (dump_file, "Removing dead stmt ");
    5956            1 :           print_gimple_stmt (dump_file, stmt, 0);
    5957            1 :           fprintf (dump_file, "\n");
    5958              :         }
    5959       339466 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    5960       339466 :       if (gimple_code (stmt) == GIMPLE_PHI)
    5961        84335 :         remove_phi_node (&gsi, true);
    5962              :       else
    5963              :         {
    5964       255131 :           unlink_stmt_vdef (stmt);
    5965       255131 :           gsi_remove (&gsi, true);
    5966       255131 :           release_defs (stmt);
    5967              :         }
    5968       339466 :     };
    5969              : 
    5970              :   /* Then remove stmts we know we can remove even though we did not
    5971              :      substitute in dead code regions, so uses can remain.  Do so in reverse
    5972              :      order to make debug stmt creation possible.  */
    5973     12554952 :   while (!to_remove_defs.is_empty())
    5974              :     {
    5975      1423306 :       tree def = ssa_name (to_remove_defs.pop ());
    5976              :       /* For example remove_prop_source_from_use can remove stmts queued
    5977              :          for removal.  Deal with this gracefully.  */
    5978      1423306 :       if (!def)
    5979      1172450 :         continue;
    5980       250856 :       gimple *stmt = SSA_NAME_DEF_STMT (def);
    5981       250856 :       remove (stmt);
    5982              :     }
    5983              : 
    5984              :   /* Wipe other queued stmts that do not have SSA defs.  */
    5985      5654433 :   while (!to_remove.is_empty())
    5986              :     {
    5987        88610 :       gimple *stmt = to_remove.pop ();
    5988        88610 :       remove (stmt);
    5989              :     }
    5990              : 
    5991              :   /* Fixup stmts that became noreturn calls.  This may require splitting
    5992              :      blocks and thus isn't possible during the walk.  Do this
    5993              :      in reverse order so we don't inadvertedly remove a stmt we want to
    5994              :      fixup by visiting a dominating now noreturn call first.  */
    5995      5565835 :   while (!to_fixup.is_empty ())
    5996              :     {
    5997           12 :       gimple *stmt = to_fixup.pop ();
    5998           12 :       if (dump_file && dump_flags & TDF_DETAILS)
    5999              :         {
    6000            0 :           fprintf (dump_file, "Fixing up noreturn call ");
    6001            0 :           print_gimple_stmt (dump_file, stmt, 0);
    6002            0 :           fprintf (dump_file, "\n");
    6003              :         }
    6004           12 :       cfg_changed |= fixup_noreturn_call (stmt);
    6005              :     }
    6006              : 
    6007      5565823 :   cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
    6008      5565823 :   cfg_changed |= gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
    6009      5565823 :   BITMAP_FREE (to_purge);
    6010              : 
    6011              :   /* Remove edges queued from switch stmt simplification.  */
    6012     16697469 :   for (auto ep : edges_to_remove)
    6013              :     {
    6014            0 :       basic_block src = BASIC_BLOCK_FOR_FN (fun, ep.first);
    6015            0 :       basic_block dest = BASIC_BLOCK_FOR_FN (fun, ep.second);
    6016            0 :       edge e;
    6017            0 :       if (src && dest && (e = find_edge (src, dest)))
    6018              :         {
    6019            0 :           free_dominance_info (CDI_DOMINATORS);
    6020            0 :           remove_edge (e);
    6021            0 :           cfg_changed = true;
    6022              :         }
    6023              :     }
    6024              : 
    6025     11130106 :   if (get_range_query (fun) != get_global_range_query ())
    6026         1540 :     disable_ranger (fun);
    6027              : 
    6028      5565823 :   if (cfg_changed)
    6029         9946 :     todoflags |= TODO_cleanup_cfg;
    6030              : 
    6031      5565823 :   return todoflags;
    6032      5565823 : }
    6033              : 
    6034              : } // anon namespace
    6035              : 
    6036              : gimple_opt_pass *
    6037       288775 : make_pass_forwprop (gcc::context *ctxt)
    6038              : {
    6039       288775 :   return new pass_forwprop (ctxt);
    6040              : }
        

Generated by: LCOV version 2.4-beta

The LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.