LCOV - code coverage report
Current view: top level - gcc - tree-ssa-sccvn.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 95.7 % 4628 4429
Test Date: 2026-03-28 14:25:54 Functions: 98.4 % 124 122
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* SCC value numbering for trees
       2              :    Copyright (C) 2006-2026 Free Software Foundation, Inc.
       3              :    Contributed by Daniel Berlin <dan@dberlin.org>
       4              : 
       5              : This file is part of GCC.
       6              : 
       7              : GCC is free software; you can redistribute it and/or modify
       8              : it under the terms of the GNU General Public License as published by
       9              : the Free Software Foundation; either version 3, or (at your option)
      10              : any later version.
      11              : 
      12              : GCC is distributed in the hope that it will be useful,
      13              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      14              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      15              : GNU General Public License for more details.
      16              : 
      17              : You should have received a copy of the GNU General Public License
      18              : along with GCC; see the file COPYING3.  If not see
      19              : <http://www.gnu.org/licenses/>.  */
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "rtl.h"
      26              : #include "tree.h"
      27              : #include "gimple.h"
      28              : #include "ssa.h"
      29              : #include "expmed.h"
      30              : #include "insn-config.h"
      31              : #include "memmodel.h"
      32              : #include "emit-rtl.h"
      33              : #include "cgraph.h"
      34              : #include "gimple-pretty-print.h"
      35              : #include "splay-tree-utils.h"
      36              : #include "alias.h"
      37              : #include "fold-const.h"
      38              : #include "stor-layout.h"
      39              : #include "cfganal.h"
      40              : #include "tree-inline.h"
      41              : #include "internal-fn.h"
      42              : #include "gimple-iterator.h"
      43              : #include "gimple-fold.h"
      44              : #include "tree-eh.h"
      45              : #include "gimplify.h"
      46              : #include "flags.h"
      47              : #include "dojump.h"
      48              : #include "explow.h"
      49              : #include "calls.h"
      50              : #include "varasm.h"
      51              : #include "stmt.h"
      52              : #include "expr.h"
      53              : #include "tree-dfa.h"
      54              : #include "tree-ssa.h"
      55              : #include "dumpfile.h"
      56              : #include "cfgloop.h"
      57              : #include "tree-ssa-propagate.h"
      58              : #include "tree-cfg.h"
      59              : #include "domwalk.h"
      60              : #include "gimple-match.h"
      61              : #include "stringpool.h"
      62              : #include "attribs.h"
      63              : #include "tree-pass.h"
      64              : #include "statistics.h"
      65              : #include "langhooks.h"
      66              : #include "ipa-utils.h"
      67              : #include "dbgcnt.h"
      68              : #include "tree-cfgcleanup.h"
      69              : #include "tree-ssa-loop.h"
      70              : #include "tree-scalar-evolution.h"
      71              : #include "tree-ssa-loop-niter.h"
      72              : #include "builtins.h"
      73              : #include "fold-const-call.h"
      74              : #include "ipa-modref-tree.h"
      75              : #include "ipa-modref.h"
      76              : #include "tree-ssa-sccvn.h"
      77              : #include "alloc-pool.h"
      78              : #include "symbol-summary.h"
      79              : #include "sreal.h"
      80              : #include "ipa-cp.h"
      81              : #include "ipa-prop.h"
      82              : #include "target.h"
      83              : 
      84              : /* This algorithm is based on the SCC algorithm presented by Keith
      85              :    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
      86              :    (http://citeseer.ist.psu.edu/41805.html).  In
      87              :    straight line code, it is equivalent to a regular hash based value
      88              :    numbering that is performed in reverse postorder.
      89              : 
      90              :    For code with cycles, there are two alternatives, both of which
      91              :    require keeping the hashtables separate from the actual list of
      92              :    value numbers for SSA names.
      93              : 
      94              :    1. Iterate value numbering in an RPO walk of the blocks, removing
      95              :    all the entries from the hashtable after each iteration (but
      96              :    keeping the SSA name->value number mapping between iterations).
      97              :    Iterate until it does not change.
      98              : 
      99              :    2. Perform value numbering as part of an SCC walk on the SSA graph,
     100              :    iterating only the cycles in the SSA graph until they do not change
     101              :    (using a separate, optimistic hashtable for value numbering the SCC
     102              :    operands).
     103              : 
     104              :    The second is not just faster in practice (because most SSA graph
     105              :    cycles do not involve all the variables in the graph), it also has
     106              :    some nice properties.
     107              : 
     108              :    One of these nice properties is that when we pop an SCC off the
     109              :    stack, we are guaranteed to have processed all the operands coming from
     110              :    *outside of that SCC*, so we do not need to do anything special to
     111              :    ensure they have value numbers.
     112              : 
     113              :    Another nice property is that the SCC walk is done as part of a DFS
     114              :    of the SSA graph, which makes it easy to perform combining and
     115              :    simplifying operations at the same time.
     116              : 
     117              :    The code below is deliberately written in a way that makes it easy
     118              :    to separate the SCC walk from the other work it does.
     119              : 
     120              :    In order to propagate constants through the code, we track which
     121              :    expressions contain constants, and use those while folding.  In
     122              :    theory, we could also track expressions whose value numbers are
     123              :    replaced, in case we end up folding based on expression
     124              :    identities.
     125              : 
     126              :    In order to value number memory, we assign value numbers to vuses.
     127              :    This enables us to note that, for example, stores to the same
     128              :    address of the same value from the same starting memory states are
     129              :    equivalent.
     130              :    TODO:
     131              : 
     132              :    1. We can iterate only the changing portions of the SCC's, but
     133              :    I have not seen an SCC big enough for this to be a win.
     134              :    2. If you differentiate between phi nodes for loops and phi nodes
     135              :    for if-then-else, you can properly consider phi nodes in different
     136              :    blocks for equivalence.
     137              :    3. We could value number vuses in more cases, particularly, whole
     138              :    structure copies.
     139              : */
     140              : 
/* There's no BB_EXECUTABLE flag on basic blocks, but BB_VISITED is
   unused here, so reuse it to mark blocks proven executable.  */
#define BB_EXECUTABLE BB_VISITED

/* The lookup kind to use when none is specified explicitly.  */
static vn_lookup_kind default_vn_walk_kind;
     145              : 
/* vn_nary_op hashtable helpers.  Entries are not freed by the table
   (nofree_ptr_hash); they live on the VN obstacks.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};
     154              : 
/* Return the hashcode cached in nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}
     162              : 
     163              : /* Compare nary operations P1 and P2 and return true if they are
     164              :    equivalent.  */
     165              : 
     166              : inline bool
     167    972482103 : vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
     168              : {
     169    972482103 :   return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
     170              : }
     171              : 
     172              : typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
     173              : typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
     174              : 
     175              : 
/* vn_phi hashtable helpers.  */

/* Forward declaration; the comparison logic is defined later in this
   file.  */
static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};
     186              : 
/* Return the hashcode cached in phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}
     194              : 
     195              : /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
     196              : 
     197              : inline bool
     198     46330421 : vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
     199              : {
     200     46330421 :   return vp1 == vp2 || vn_phi_eq (vp1, vp2);
     201              : }
     202              : 
     203              : typedef hash_table<vn_phi_hasher> vn_phi_table_type;
     204              : typedef vn_phi_table_type::iterator vn_phi_iterator_type;
     205              : 
     206              : 
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2)
          /* For calls the clique participates in the key as well.  */
          && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
}
     227              : 
/* Free a reference operation structure VR.  Only the heap-allocated
   operand vector is released; VR itself is obstack-allocated.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
     235              : 
     236              : 
/* vn_reference hashtable helpers.  Entries are not freed by the table
   (nofree_ptr_hash).  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};
     244              : 
/* Return the hashcode cached in reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}
     252              : 
     253              : inline bool
     254   4379088354 : vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
     255              : {
     256   4379088354 :   return v == c || vn_reference_eq (v, c);
     257              : }
     258              : 
     259              : typedef hash_table<vn_reference_hasher> vn_reference_table_type;
     260              : typedef vn_reference_table_type::iterator vn_reference_iterator_type;
     261              : 
     262              : /* Pretty-print OPS to OUTFILE.  */
     263              : 
     264              : void
     265          287 : print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
     266              : {
     267          287 :   vn_reference_op_t vro;
     268          287 :   unsigned int i;
     269          287 :   fprintf (outfile, "{");
     270         1304 :   for (i = 0; ops.iterate (i, &vro); i++)
     271              :     {
     272         1017 :       bool closebrace = false;
     273         1017 :       if (vro->opcode != SSA_NAME
     274          803 :           && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
     275              :         {
     276          803 :           fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
     277          803 :           if (vro->op0 || vro->opcode == CALL_EXPR)
     278              :             {
     279          803 :               fprintf (outfile, "<");
     280          803 :               closebrace = true;
     281              :             }
     282              :         }
     283         1017 :       if (vro->opcode == MEM_REF || vro->opcode == TARGET_MEM_REF)
     284          275 :         fprintf (outfile, "(A%d)", TYPE_ALIGN (vro->type));
     285         1017 :       if (vro->op0 || vro->opcode == CALL_EXPR)
     286              :         {
     287         1017 :           if (!vro->op0)
     288            0 :             fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
     289              :           else
     290              :             {
     291         1017 :               if (vro->opcode == MEM_REF || vro->opcode == TARGET_MEM_REF)
     292              :                 {
     293          275 :                   fprintf (outfile, "(");
     294          275 :                   print_generic_expr (outfile, TREE_TYPE (vro->op0));
     295          275 :                   fprintf (outfile, ")");
     296              :                 }
     297         1017 :               print_generic_expr (outfile, vro->op0);
     298              :             }
     299         1017 :           if (vro->op1)
     300              :             {
     301          185 :               fprintf (outfile, ",");
     302          185 :               print_generic_expr (outfile, vro->op1);
     303              :             }
     304         1017 :           if (vro->op2)
     305              :             {
     306          185 :               fprintf (outfile, ",");
     307          185 :               print_generic_expr (outfile, vro->op2);
     308              :             }
     309              :         }
     310         1017 :       if (closebrace)
     311          803 :         fprintf (outfile, ">");
     312         1017 :       if (i != ops.length () - 1)
     313          730 :         fprintf (outfile, ",");
     314              :     }
     315          287 :   fprintf (outfile, "}");
     316          287 : }
     317              : 
/* Dump the reference operand sequence OPS to stderr followed by a
   newline; intended for use from the debugger.  */

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}
     324              : 
/* The set of VN hashtables: one table each for nary operations, phi
   nodes and memory references.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
     333              : 
     334              : 
/* vn_constant hashtable helpers.  Entries are freed by the table on
   removal (free_ptr_hash).  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};
     342              : 
/* Hash table hash function for vn_constant_t: return the cached
   hashcode.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}
     350              : 
     351              : /* Hash table equality function for vn_constant_t.  */
     352              : 
     353              : inline bool
     354     14596306 : vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
     355              : {
     356     14596306 :   if (vc1->hashcode != vc2->hashcode)
     357              :     return false;
     358              : 
     359      2197267 :   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
     360              : }
     361              : 
/* Hashtable mapping constant trees to their value ids.  */
static hash_table<vn_constant_hasher> *constant_to_value_id;


/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

/* Most recent insertions into each table/lattice; these head the chains
   used to undo insertions.  */
static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
static vn_ssa_aux_t last_pushed_avail;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;

/* Global RPO state for access from hooks.  */
static class eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;
int *vn_bb_to_rpo;
     383              : 
     384              : 
     385              : /* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
     386              :    an SSA name, otherwise just return it.  */
     387              : tree (*vn_valueize) (tree);
     388              : static tree
     389        84483 : vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
     390              : {
     391        84483 :   basic_block saved_vn_context_bb = vn_context_bb;
     392              :   /* Look for sth available at the definition block of the argument.
     393              :      This avoids inconsistencies between availability there which
     394              :      decides if the stmt can be removed and availability at the
     395              :      use site.  The SSA property ensures that things available
     396              :      at the definition are also available at uses.  */
     397        84483 :   if (!SSA_NAME_IS_DEFAULT_DEF (t))
     398        80588 :     vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
     399        84483 :   tree res = vn_valueize (t);
     400        84483 :   vn_context_bb = saved_vn_context_bb;
     401        84483 :   return res;
     402              : }
     403              : 
     404              : 
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Separate counter for the value ids of constants.  */
static int next_constant_value_id;
     414              : 
     415              : 
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  /* Entries are never individually deleted, so deletion marking is a
     no-op and is_deleted is always false.  */
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};
     432              : 
/* Hash ENTRY by the version number of the SSA name it describes.  */

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}
     438              : 
/* ENTRY matches the lookup key NAME when it describes exactly that
   SSA name (pointer identity is sufficient for trees here).  */

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}
     444              : 
/* The SSA name -> vn_ssa_aux info map and the obstack its entries are
   allocated from.  */
static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;

/* Forward declarations for functions defined later in this file.  */
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
                                            vn_nary_op_table_type *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
                                         enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
          (tree, alias_set_type, alias_set_type, poly_int64, poly_int64, tree,
           vec<vn_reference_op_s, va_heap>, tree);
     458              : 
     459              : /* Return whether there is value numbering information for a given SSA name.  */
     460              : 
     461              : bool
     462      5142795 : has_VN_INFO (tree name)
     463              : {
     464      5142795 :   return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
     465              : }
     466              : 
/* Return the value-numbering info for SSA name NAME, creating and
   initializing it on first request.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
                                            INSERT);
  if (*res != NULL)
    return *res;

  /* Not present yet - allocate a zeroed entry on the aux obstack.  */
  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
        /* All undefined vars are VARYING.  */
        newinfo->valnum = name;
        newinfo->visited = true;
        break;

      case PARM_DECL:
        /* Parameters are VARYING but we can record a condition
           if we know it is a non-NULL pointer.  */
        newinfo->visited = true;
        newinfo->valnum = name;
        if (POINTER_TYPE_P (TREE_TYPE (name))
            && nonnull_arg_p (SSA_NAME_VAR (name)))
          {
            tree ops[2];
            ops[0] = name;
            ops[1] = build_int_cst (TREE_TYPE (name), 0);
            vn_nary_op_t nary;
            /* Allocate from non-unwinding stack.  */
            nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
            /* Record NAME != 0 as known true ...  */
            init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
                                         boolean_type_node, ops);
            nary->predicated_values = 0;
            nary->u.result = boolean_true_node;
            vn_nary_op_insert_into (nary, valid_info->nary);
            gcc_assert (nary->unwind_to == NULL);
            /* Also do not link it into the undo chain.  */
            last_inserted_nary = nary->next;
            nary->next = (vn_nary_op_t)(void *)-1;
            /* ... and NAME == 0 as known false.  */
            nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
            init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
                                         boolean_type_node, ops);
            nary->predicated_values = 0;
            nary->u.result = boolean_false_node;
            vn_nary_op_insert_into (nary, valid_info->nary);
            gcc_assert (nary->unwind_to == NULL);
            /* Again keep this entry out of the undo chain.  */
            last_inserted_nary = nary->next;
            nary->next = (vn_nary_op_t)(void *)-1;
            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "Recording ");
                print_generic_expr (dump_file, name, TDF_SLIM);
                fprintf (dump_file, " != 0\n");
              }
          }
        break;

      case RESULT_DECL:
        /* If the result is passed by invisible reference the default
           def is initialized, otherwise it's uninitialized.  Still
           undefined is varying.  */
        newinfo->visited = true;
        newinfo->valnum = name;
        break;

      default:
        gcc_unreachable ();
      }
  return newinfo;
}
     549              : 
     550              : /* Return the SSA value of X.  */
     551              : 
     552              : inline tree
     553   3387572835 : SSA_VAL (tree x, bool *visited = NULL)
     554              : {
     555   3387572835 :   vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
     556   3387572835 :   if (visited)
     557   1373119105 :     *visited = tem && tem->visited;
     558   3387572835 :   return tem && tem->visited ? tem->valnum : x;
     559              : }
     560              : 
     561              : /* Return the SSA value of the VUSE x, supporting released VDEFs
     562              :    during elimination which will value-number the VDEF to the
     563              :    associated VUSE (but not substitute in the whole lattice).  */
     564              : 
     565              : static inline tree
     566   1249193783 : vuse_ssa_val (tree x)
     567              : {
     568   1249193783 :   if (!x)
     569              :     return NULL_TREE;
     570              : 
     571   1245885272 :   do
     572              :     {
     573   1245885272 :       x = SSA_VAL (x);
     574   1245885272 :       gcc_assert (x != VN_TOP);
     575              :     }
     576   1245885272 :   while (SSA_NAME_IN_FREE_LIST (x));
     577              : 
     578              :   return x;
     579              : }
     580              : 
     581              : /* Similar to the above but used as callback for walk_non_aliased_vuses
     582              :    and thus should stop at unvisited VUSE to not walk across region
     583              :    boundaries.  */
     584              : 
     585              : static tree
     586   1055500868 : vuse_valueize (tree vuse)
     587              : {
     588   1055500868 :   do
     589              :     {
     590   1055500868 :       bool visited;
     591   1055500868 :       vuse = SSA_VAL (vuse, &visited);
     592   1055500868 :       if (!visited)
     593     15771642 :         return NULL_TREE;
     594   1039729226 :       gcc_assert (vuse != VN_TOP);
     595              :     }
     596   1039729226 :   while (SSA_NAME_IN_FREE_LIST (vuse));
     597              :   return vuse;
     598              : }
     599              : 
     600              : 
     601              : /* Return the vn_kind the expression computed by the stmt should be
     602              :    associated with.  */
     603              : 
     604              : enum vn_kind
     605    101659380 : vn_get_stmt_kind (gimple *stmt)
     606              : {
     607    101659380 :   switch (gimple_code (stmt))
     608              :     {
     609              :     case GIMPLE_CALL:
     610              :       return VN_REFERENCE;
     611              :     case GIMPLE_PHI:
     612              :       return VN_PHI;
     613    101659380 :     case GIMPLE_ASSIGN:
     614    101659380 :       {
     615    101659380 :         enum tree_code code = gimple_assign_rhs_code (stmt);
     616    101659380 :         tree rhs1 = gimple_assign_rhs1 (stmt);
     617    101659380 :         switch (get_gimple_rhs_class (code))
     618              :           {
     619              :           case GIMPLE_UNARY_RHS:
     620              :           case GIMPLE_BINARY_RHS:
     621              :           case GIMPLE_TERNARY_RHS:
     622              :             return VN_NARY;
     623     47458585 :           case GIMPLE_SINGLE_RHS:
     624     47458585 :             switch (TREE_CODE_CLASS (code))
     625              :               {
     626     35705305 :               case tcc_reference:
     627              :                 /* VOP-less references can go through unary case.  */
     628     35705305 :                 if ((code == REALPART_EXPR
     629              :                      || code == IMAGPART_EXPR
     630     35705305 :                      || code == VIEW_CONVERT_EXPR
     631     35705305 :                      || code == BIT_FIELD_REF)
     632     35705305 :                     && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
     633       678724 :                         || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
     634      2021277 :                   return VN_NARY;
     635              : 
     636              :                 /* Fallthrough.  */
     637              :               case tcc_declaration:
     638              :                 return VN_REFERENCE;
     639              : 
     640              :               case tcc_constant:
     641              :                 return VN_CONSTANT;
     642              : 
     643      5860458 :               default:
     644      5860458 :                 if (code == ADDR_EXPR)
     645      3169751 :                   return (is_gimple_min_invariant (rhs1)
     646      3169751 :                           ? VN_CONSTANT : VN_REFERENCE);
     647      2690707 :                 else if (code == CONSTRUCTOR)
     648              :                   return VN_NARY;
     649              :                 return VN_NONE;
     650              :               }
     651              :           default:
     652              :             return VN_NONE;
     653              :           }
     654              :       }
     655              :     default:
     656              :       return VN_NONE;
     657              :     }
     658              : }
     659              : 
     660              : /* Lookup a value id for CONSTANT and return it.  If it does not
     661              :    exist returns 0.  */
     662              : 
     663              : unsigned int
     664            0 : get_constant_value_id (tree constant)
     665              : {
     666            0 :   vn_constant_s **slot;
     667            0 :   struct vn_constant_s vc;
     668              : 
     669            0 :   vc.hashcode = vn_hash_constant_with_type (constant);
     670            0 :   vc.constant = constant;
     671            0 :   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
     672            0 :   if (slot)
     673            0 :     return (*slot)->value_id;
     674              :   return 0;
     675              : }
     676              : 
     677              : /* Lookup a value id for CONSTANT, and if it does not exist, create a
     678              :    new one and return it.  If it does exist, return it.  */
     679              : 
     680              : unsigned int
     681     28446401 : get_or_alloc_constant_value_id (tree constant)
     682              : {
     683     28446401 :   vn_constant_s **slot;
     684     28446401 :   struct vn_constant_s vc;
     685     28446401 :   vn_constant_t vcp;
     686              : 
     687              :   /* If the hashtable isn't initialized we're not running from PRE and thus
     688              :      do not need value-ids.  */
     689     28446401 :   if (!constant_to_value_id)
     690              :     return 0;
     691              : 
     692      4694304 :   vc.hashcode = vn_hash_constant_with_type (constant);
     693      4694304 :   vc.constant = constant;
     694      4694304 :   slot = constant_to_value_id->find_slot (&vc, INSERT);
     695      4694304 :   if (*slot)
     696      2179739 :     return (*slot)->value_id;
     697              : 
     698      2514565 :   vcp = XNEW (struct vn_constant_s);
     699      2514565 :   vcp->hashcode = vc.hashcode;
     700      2514565 :   vcp->constant = constant;
     701      2514565 :   vcp->value_id = get_next_constant_value_id ();
     702      2514565 :   *slot = vcp;
     703      2514565 :   return vcp->value_id;
     704              : }
     705              : 
     706              : /* Compute the hash for a reference operand VRO1.  */
     707              : 
     708              : static void
     709    134817386 : vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
     710              : {
     711    134817386 :   hstate.add_int (vro1->opcode);
     712    134817386 :   if (vro1->opcode == CALL_EXPR && !vro1->op0)
     713       542212 :     hstate.add_int (vro1->clique);
     714    134817386 :   if (vro1->op0)
     715    128522914 :     inchash::add_expr (vro1->op0, hstate);
     716    134817386 :   if (vro1->op1)
     717     12105656 :     inchash::add_expr (vro1->op1, hstate);
     718    134817386 :   if (vro1->op2)
     719     13802353 :     inchash::add_expr (vro1->op2, hstate);
     720    134817386 : }
     721              : 
/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  /* Running sum of consecutive constant operand offsets; -1 means no
     run of known offsets is in progress.  */
  poly_offset_int off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      /* Track whether we are immediately below a MEM_REF so that a
	 following ADDR_EXPR can be recognized as the canonical
	 MEM[&decl] form.  */
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  /* Accumulate constant offsets so distinct access paths with
	     the same total offset hash identically.  */
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  /* Operand with unknown offset: flush any accumulated nonzero
	     offset into the hash, then hash this operand itself.  */
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_hwi (off.force_shwi ());
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      /* Hash the MEM[&decl] pair by the underlying decl only, so
		 it matches a direct access of the decl.  */
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  /* Do not hash vr1->offset or vr1->max_size, we want to get collisions
     to be able to identify compatible results.  */
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in. */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
     775              : 
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  If LEXICAL
   is true then the full access path has to be the same.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2,
		 bool lexical)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* The offset/max_size used for the ao_ref during lookup has to be
     the same.  */
  if (maybe_ne (vr1->offset, vr2->offset)
      || maybe_ne (vr1->max_size, vr2->max_size))
    {
      /* But nothing known in the prevailing entry is OK to be used.  */
      if (maybe_ne (vr1->offset, 0) || known_size_p (vr1->max_size))
	return false;
    }

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  /* Compare the access types; a NULL type only matches another NULL.  */
  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
	return false;
    }
  else if (vr1->type == vr2->type)
    ;
  /* Differing completeness or differing sizes make the accesses
     incompatible.  */
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
	   || (COMPLETE_TYPE_P (vr1->type)
	       && !expressions_equal_p (TYPE_SIZE (vr1->type),
					TYPE_SIZE (vr2->type))))
    return false;
  /* For calls the returned types have to be compatible.  */
  else if (vr1->operands[0].opcode == CALL_EXPR
	   && !types_compatible_p (vr1->type, vr2->type))
    return false;
  /* Two integral types of equal size still need equal precision.  */
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  /* An integral type with padding bits (precision < size) does not
     transfer all bits and cannot match a non-integral type.  */
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;
  else if (VECTOR_BOOLEAN_TYPE_P (vr1->type)
	   && VECTOR_BOOLEAN_TYPE_P (vr2->type))
    {
      /* Vector boolean types can have padding, verify we are dealing with
	 the same number of elements, aka the precision of the types.
	 For example, In most architecture the precision_size of vbool*_t
	 types are caculated like below:
	 precision_size = type_size * 8

	 Unfortunately, the RISC-V will adjust the precision_size for the
	 vbool*_t in order to align the ISA as below:
	 type_size      = [1, 1, 1, 1,  2,  4,  8]
	 precision_size = [1, 2, 4, 8, 16, 32, 64]

	 Then the precision_size of RISC-V vbool*_t will not be the multiple
	 of the type_size.  We take care of this case consolidated here.  */
      if (maybe_ne (TYPE_VECTOR_SUBPARTS (vr1->type),
		    TYPE_VECTOR_SUBPARTS (vr2->type)))
	return false;
    }
  /* Otherwise differing modes must both be able to transfer all bits
     for the accesses to produce the same value.  */
  else if (TYPE_MODE (vr1->type) != TYPE_MODE (vr2->type)
	   && (!mode_can_transfer_bits (TYPE_MODE (vr1->type))
	       || !mode_can_transfer_bits (TYPE_MODE (vr2->type))))
    return false;

  /* Walk both operand vectors in parallel.  Each iteration first sums
     runs of constant-offset operands (unless LEXICAL), compares the
     totals, then compares the next "real" operand, canonicalizing a
     MEM_REF/ADDR_EXPR pair to the underlying decl on either side.  */
  i = 0;
  j = 0;
  do
    {
      poly_offset_int off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      /* Advance over VR1 operands with known offsets, accumulating.  */
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  reverse1 |= vro1->reverse;
	  if (lexical || known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      /* Likewise for VR2.  */
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  reverse2 |= vro2->reverse;
	  if (lexical || known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
	return false;
      /* Replace a MEM_REF/ADDR_EXPR pair by a synthesized operand for
	 the underlying decl so MEM[&decl] compares equal to decl.  */
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      /* Both alignment and alias set are not relevant for the produced
	 value but need to be included when doing lexical comparison.
	 We also need to make sure that the access path ends in an
	 access of the same size as otherwise we might assume an access
	 may not trap while in fact it might.  */
      if (lexical
	  && (vro1->opcode == MEM_REF
	      || vro1->opcode == TARGET_MEM_REF)
	  && (TYPE_ALIGN (vro1->type) != TYPE_ALIGN (vro2->type)
	      || (TYPE_SIZE (vro1->type) != TYPE_SIZE (vro2->type)
		  && (! TYPE_SIZE (vro1->type)
		      || ! TYPE_SIZE (vro2->type)
		      || ! operand_equal_p (TYPE_SIZE (vro1->type),
					    TYPE_SIZE (vro2->type))))
	      || (get_deref_alias_set (vro1->opcode == MEM_REF
				       ? TREE_TYPE (vro1->op0)
				       : TREE_TYPE (vro1->op2))
		  != get_deref_alias_set (vro2->opcode == MEM_REF
					  ? TREE_TYPE (vro2->op0)
					  : TREE_TYPE (vro2->op2)))))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
     948              : 
     949              : /* Copy the operations present in load/store REF into RESULT, a vector of
     950              :    vn_reference_op_s's.  */
     951              : 
     952              : void
     953    219077652 : copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
     954              : {
     955              :   /* For non-calls, store the information that makes up the address.  */
     956    219077652 :   tree orig = ref;
     957    759906063 :   while (ref)
     958              :     {
     959    540828411 :       vn_reference_op_s temp;
     960              : 
     961    540828411 :       memset (&temp, 0, sizeof (temp));
     962    540828411 :       temp.type = TREE_TYPE (ref);
     963    540828411 :       temp.opcode = TREE_CODE (ref);
     964    540828411 :       temp.off = -1;
     965              : 
     966    540828411 :       switch (temp.opcode)
     967              :         {
     968     14846948 :         case MODIFY_EXPR:
     969     14846948 :           temp.op0 = TREE_OPERAND (ref, 1);
     970     14846948 :           break;
     971          137 :         case WITH_SIZE_EXPR:
     972          137 :           temp.op0 = TREE_OPERAND (ref, 1);
     973          137 :           temp.off = 0;
     974          137 :           break;
     975    114995315 :         case MEM_REF:
     976              :           /* The base address gets its own vn_reference_op_s structure.  */
     977    114995315 :           temp.op0 = TREE_OPERAND (ref, 1);
     978    114995315 :           if (!mem_ref_offset (ref).to_shwi (&temp.off))
     979            0 :             temp.off = -1;
     980    114995315 :           temp.clique = MR_DEPENDENCE_CLIQUE (ref);
     981    114995315 :           temp.base = MR_DEPENDENCE_BASE (ref);
     982    114995315 :           temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
     983    114995315 :           break;
     984      2542223 :         case TARGET_MEM_REF:
     985              :           /* The base address gets its own vn_reference_op_s structure.  */
     986      2542223 :           temp.op0 = TMR_INDEX (ref);
     987      2542223 :           temp.op1 = TMR_STEP (ref);
     988      2542223 :           temp.op2 = TMR_OFFSET (ref);
     989      2542223 :           temp.clique = MR_DEPENDENCE_CLIQUE (ref);
     990      2542223 :           temp.base = MR_DEPENDENCE_BASE (ref);
     991      2542223 :           result->safe_push (temp);
     992      2542223 :           memset (&temp, 0, sizeof (temp));
     993      2542223 :           temp.type = NULL_TREE;
     994      2542223 :           temp.opcode = ERROR_MARK;
     995      2542223 :           temp.op0 = TMR_INDEX2 (ref);
     996      2542223 :           temp.off = -1;
     997      2542223 :           break;
     998       804554 :         case BIT_FIELD_REF:
     999              :           /* Record bits, position and storage order.  */
    1000       804554 :           temp.op0 = TREE_OPERAND (ref, 1);
    1001       804554 :           temp.op1 = TREE_OPERAND (ref, 2);
    1002      1608398 :           if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
    1003          710 :             temp.off = -1;
    1004       804554 :           temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
    1005       804554 :           break;
    1006    143893195 :         case COMPONENT_REF:
    1007              :           /* The field decl is enough to unambiguously specify the field,
    1008              :              so use its type here.  */
    1009    143893195 :           temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
    1010    143893195 :           temp.op0 = TREE_OPERAND (ref, 1);
    1011    143893195 :           temp.op1 = TREE_OPERAND (ref, 2);
    1012    287783951 :           temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
    1013    287783686 :                           && TYPE_REVERSE_STORAGE_ORDER
    1014              :                                (TREE_TYPE (TREE_OPERAND (ref, 0))));
    1015    143893195 :           {
    1016    143893195 :             tree this_offset = component_ref_field_offset (ref);
    1017    143893195 :             if (this_offset
    1018    143893195 :                 && poly_int_tree_p (this_offset))
    1019              :               {
    1020    143891059 :                 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
    1021    143891059 :                 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
    1022              :                   {
    1023    143359071 :                     poly_offset_int off
    1024    143359071 :                       = (wi::to_poly_offset (this_offset)
    1025    143359071 :                          + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
    1026              :                     /* Prohibit value-numbering zero offset components
    1027              :                        of addresses the same before the pass folding
    1028              :                        __builtin_object_size had a chance to run.  Likewise
    1029              :                        for components of zero size at arbitrary offset.  */
    1030    143359071 :                     if (TREE_CODE (orig) != ADDR_EXPR
    1031      4761466 :                         || (TYPE_SIZE (temp.type)
    1032      4748457 :                             && integer_nonzerop (TYPE_SIZE (temp.type))
    1033      6129080 :                             && maybe_ne (off, 0))
    1034    146301984 :                         || (cfun->curr_properties & PROP_objsz))
    1035    141976278 :                       off.to_shwi (&temp.off);
    1036              :                   }
    1037              :               }
    1038              :           }
    1039              :           break;
    1040     38184684 :         case ARRAY_RANGE_REF:
    1041     38184684 :         case ARRAY_REF:
    1042     38184684 :           {
    1043     38184684 :             tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
    1044              :             /* Record index as operand.  */
    1045     38184684 :             temp.op0 = TREE_OPERAND (ref, 1);
    1046              :             /* Always record lower bounds and element size.  */
    1047     38184684 :             temp.op1 = array_ref_low_bound (ref);
    1048              :             /* But record element size in units of the type alignment.  */
    1049     38184684 :             temp.op2 = TREE_OPERAND (ref, 3);
    1050     38184684 :             temp.align = eltype->type_common.align;
    1051     38184684 :             if (! temp.op2)
    1052     37974403 :               temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
    1053              :                                      size_int (TYPE_ALIGN_UNIT (eltype)));
    1054              :             /* Prohibit value-numbering addresses of one-after-the-last
    1055              :                element ARRAY_REFs the same as addresses of other components
    1056              :                before the pass folding __builtin_object_size had a chance
    1057              :                to run.  */
    1058     38184684 :             bool avoid_oob = true;
    1059     38184684 :             if (TREE_CODE (orig) != ADDR_EXPR
    1060       469421 :                 || cfun->curr_properties & PROP_objsz)
    1061              :               avoid_oob = false;
    1062       221508 :             else if (poly_int_tree_p (temp.op0))
    1063              :               {
    1064        74339 :                 tree ub = array_ref_up_bound (ref);
    1065        74339 :                 if (ub
    1066        72711 :                     && poly_int_tree_p (ub)
    1067              :                     /* ???  The C frontend for T[0] uses [0:] and the
    1068              :                        C++ frontend [0:-1U].  See layout_type for how
    1069              :                        awkward this is.  */
    1070        64508 :                     && !integer_minus_onep (ub)
    1071       147050 :                     && known_le (wi::to_poly_offset (temp.op0),
    1072              :                                  wi::to_poly_offset (ub)))
    1073        63661 :                   avoid_oob = false;
    1074              :               }
    1075     38184684 :             if (poly_int_tree_p (temp.op0)
    1076     21918877 :                 && poly_int_tree_p (temp.op1)
    1077     21918849 :                 && TREE_CODE (temp.op2) == INTEGER_CST
    1078     60042925 :                 && !avoid_oob)
    1079              :               {
    1080     43696836 :                 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
    1081     65545254 :                                         - wi::to_poly_offset (temp.op1))
    1082     43696836 :                                        * wi::to_offset (temp.op2)
    1083     21848418 :                                        * vn_ref_op_align_unit (&temp));
    1084     21848418 :                 off.to_shwi (&temp.off);
    1085              :               }
    1086     38184684 :             temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
    1087     38184684 :                             && TYPE_REVERSE_STORAGE_ORDER
    1088              :                                  (TREE_TYPE (TREE_OPERAND (ref, 0))));
    1089              :           }
    1090     38184684 :           break;
    1091     81174975 :         case VAR_DECL:
    1092     81174975 :           if (DECL_HARD_REGISTER (ref))
    1093              :             {
    1094        20295 :               temp.op0 = ref;
    1095        20295 :               break;
    1096              :             }
    1097              :           /* Fallthru.  */
    1098     84581371 :         case PARM_DECL:
    1099     84581371 :         case CONST_DECL:
    1100     84581371 :         case RESULT_DECL:
    1101              :           /* Canonicalize decls to MEM[&decl] which is what we end up with
    1102              :              when valueizing MEM[ptr] with ptr = &decl.  */
    1103     84581371 :           temp.opcode = MEM_REF;
    1104     84581371 :           temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
    1105     84581371 :           temp.off = 0;
    1106     84581371 :           result->safe_push (temp);
    1107     84581371 :           temp.opcode = ADDR_EXPR;
    1108     84581371 :           temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
    1109     84581371 :           temp.type = TREE_TYPE (temp.op0);
    1110     84581371 :           temp.off = -1;
    1111     84581371 :           break;
    1112     94523234 :         case STRING_CST:
    1113     94523234 :         case INTEGER_CST:
    1114     94523234 :         case POLY_INT_CST:
    1115     94523234 :         case COMPLEX_CST:
    1116     94523234 :         case VECTOR_CST:
    1117     94523234 :         case REAL_CST:
    1118     94523234 :         case FIXED_CST:
    1119     94523234 :         case CONSTRUCTOR:
    1120     94523234 :         case SSA_NAME:
    1121     94523234 :           temp.op0 = ref;
    1122     94523234 :           break;
    1123     44035365 :         case ADDR_EXPR:
    1124     44035365 :           if (is_gimple_min_invariant (ref))
    1125              :             {
    1126     39952752 :               temp.op0 = ref;
    1127     39952752 :               break;
    1128              :             }
    1129              :           break;
    1130              :           /* These are only interesting for their operands, their
    1131              :              existence, and their type.  They will never be the last
    1132              :              ref in the chain of references (IE they require an
    1133              :              operand), so we don't have to put anything
    1134              :              for op* as it will be handled by the iteration  */
    1135       484124 :         case REALPART_EXPR:
    1136       484124 :           temp.off = 0;
    1137       484124 :           break;
    1138      1427619 :         case VIEW_CONVERT_EXPR:
    1139      1427619 :           temp.off = 0;
    1140      1427619 :           temp.reverse = storage_order_barrier_p (ref);
    1141      1427619 :           break;
    1142       489347 :         case IMAGPART_EXPR:
    1143              :           /* This is only interesting for its constant offset.  */
    1144       489347 :           temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
    1145       489347 :           break;
    1146            0 :         default:
    1147            0 :           gcc_unreachable ();
    1148              :         }
    1149    540828411 :       result->safe_push (temp);
    1150              : 
    1151    540828411 :       if (REFERENCE_CLASS_P (ref)
    1152    238007350 :           || TREE_CODE (ref) == MODIFY_EXPR
    1153    223160402 :           || TREE_CODE (ref) == WITH_SIZE_EXPR
    1154    763988676 :           || (TREE_CODE (ref) == ADDR_EXPR
    1155     44035365 :               && !is_gimple_min_invariant (ref)))
    1156    321750759 :         ref = TREE_OPERAND (ref, 0);
    1157              :       else
    1158              :         ref = NULL_TREE;
    1159              :     }
    1160    219077652 : }
    1161              : 
    1162              : /* Build a alias-oracle reference abstraction in *REF from the vn_reference
    1163              :    operands in *OPS, the reference alias set SET and the reference type TYPE.
    1164              :    Return true if something useful was produced.  */
    1165              : 
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, alias_set_type base_set,
			       tree type, const vec<vn_reference_op_s> &ops)
{
  unsigned i;
  tree base = NULL_TREE;
  /* OP0_P points at the slot where the next inner base tree is to be
     stored while we rebuild a skeleton MEM_REF/TARGET_MEM_REF chain;
     it starts at BASE and is cleared once the ultimate base is found.  */
  tree *op0_p = &base;
  /* Cumulative bit offset of the access from BASE.  */
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;

  /* We don't handle calls.  */
  if (!type)
    return false;

  /* Derive the access size in bits from the access type; BLKmode types
     carry it in TYPE_SIZE, others in the mode.  */
  machine_mode mode = TYPE_MODE (type);
  if (mode == BLKmode)
    size_tree = TYPE_SIZE (type);
  else
    size = GET_MODE_BITSIZE (mode);
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Lower the final access size from the outermost expression.  */
  const_vn_reference_op_t cst_op = &ops[0];
  /* Cast away constness for the sake of the const-unsafe
     FOR_EACH_VEC_ELT().  */
  vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
  size_tree = NULL_TREE;
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  /* Only shrink the size; the outermost component/bit-field ref cannot
     widen the access beyond the mode size computed above.  */
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree)
      && (!known_size_p (size)
	  || known_lt (wi::to_poly_offset (size_tree), size)))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  /* Build a skeleton MEM_REF with a yet-unknown base operand;
	     the inner ops fill in *op0_p.  */
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case TARGET_MEM_REF:
	  *op0_p = build5 (TARGET_MEM_REF, op->type,
			   NULL_TREE, op->op2, op->op0,
			   op->op1, ops[i+1].op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  /* TARGET_MEM_REF consumes the following op as well.  */
	  ++i;
	  break;

	/* Unwrap some of the wrapped decls.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      /* MEM_REF[&decl, off] — use the decl itself as base and
		 fold the MEM_REF's constant offset (from the previous
		 op) into OFFSET.  */
	      const_vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += poly_offset_int (pop->off) * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* ???  We shouldn't see these, but un-canonicalize what
	     copy_reference_ops_from_ref does when visiting MEM_REF.  */
	case VAR_DECL:
	  /* ???  And for this only have DECL_HARD_REGISTER.  */
	case STRING_CST:
	  /* This can show up in ARRAY_REF bases.  */
	case INTEGER_CST:
	case SSA_NAME:
	  /* The innermost op — it becomes the base of the reference.  */
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  /* op1 is the recorded bit position.  */
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    /* A variable field offset (op1 set or non-constant
	       DECL_FIELD_OFFSET) makes the access range unknown.  */
	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Use the recorded constant offset.  */
	  if (maybe_eq (op->off, -1))
	    max_size = -1;
	  else
	    offset += poly_offset_int (op->off) * BITS_PER_UNIT;
	  break;

	case REALPART_EXPR:
	  /* Real part is at offset zero.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows a real part of the same size.  */
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case POLY_INT_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  ref->base_alias_set = base_set;
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  /* An unrepresentable or negative size degrades to an access of
     unknown extent at the base.  */
  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  /* Likewise an offset that does not fit a HOST_WIDE_INT.  */
  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
    1363              : 
    1364              : /* Copy the operations present in load/store/call REF into RESULT, a vector of
    1365              :    vn_reference_op_s's.  */
    1366              : 
    1367              : static void
    1368      9109684 : copy_reference_ops_from_call (gcall *call,
    1369              :                               vec<vn_reference_op_s> *result)
    1370              : {
    1371      9109684 :   vn_reference_op_s temp;
    1372      9109684 :   unsigned i;
    1373      9109684 :   tree lhs = gimple_call_lhs (call);
    1374      9109684 :   int lr;
    1375              : 
    1376              :   /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
    1377              :      different.  By adding the lhs here in the vector, we ensure that the
    1378              :      hashcode is different, guaranteeing a different value number.  */
    1379      9109684 :   if (lhs && TREE_CODE (lhs) != SSA_NAME)
    1380              :     {
    1381       441461 :       memset (&temp, 0, sizeof (temp));
    1382       441461 :       temp.opcode = MODIFY_EXPR;
    1383       441461 :       temp.type = TREE_TYPE (lhs);
    1384       441461 :       temp.op0 = lhs;
    1385       441461 :       temp.off = -1;
    1386       441461 :       result->safe_push (temp);
    1387              :     }
    1388              : 
    1389              :   /* Copy the type, opcode, function, static chain and EH region, if any.  */
    1390      9109684 :   memset (&temp, 0, sizeof (temp));
    1391      9109684 :   temp.type = gimple_call_fntype (call);
    1392      9109684 :   temp.opcode = CALL_EXPR;
    1393      9109684 :   temp.op0 = gimple_call_fn (call);
    1394      9109684 :   if (gimple_call_internal_p (call))
    1395       527998 :     temp.clique = gimple_call_internal_fn (call);
    1396      9109684 :   temp.op1 = gimple_call_chain (call);
    1397      9109684 :   if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    1398       592150 :     temp.op2 = size_int (lr);
    1399      9109684 :   temp.off = -1;
    1400      9109684 :   result->safe_push (temp);
    1401              : 
    1402              :   /* Copy the call arguments.  As they can be references as well,
    1403              :      just chain them together.  */
    1404     26838718 :   for (i = 0; i < gimple_call_num_args (call); ++i)
    1405              :     {
    1406     17729034 :       tree callarg = gimple_call_arg (call, i);
    1407     17729034 :       copy_reference_ops_from_ref (callarg, result);
    1408              :     }
    1409      9109684 : }
    1410              : 
    1411              : /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
    1412              :    *I_P to point to the last element of the replacement.  */
    1413              : static bool
    1414    125050787 : vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
    1415              :                             unsigned int *i_p)
    1416              : {
    1417    125050787 :   unsigned int i = *i_p;
    1418    125050787 :   vn_reference_op_t op = &(*ops)[i];
    1419    125050787 :   vn_reference_op_t mem_op = &(*ops)[i - 1];
    1420    125050787 :   tree addr_base;
    1421    125050787 :   poly_int64 addr_offset = 0;
    1422              : 
    1423              :   /* The only thing we have to do is from &OBJ.foo.bar add the offset
    1424              :      from .foo.bar to the preceding MEM_REF offset and replace the
    1425              :      address with &OBJ.  */
    1426    125050787 :   addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
    1427              :                                                &addr_offset, vn_valueize);
    1428    125050787 :   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
    1429    125050787 :   if (addr_base != TREE_OPERAND (op->op0, 0))
    1430              :     {
    1431       662921 :       poly_offset_int off
    1432       662921 :         = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
    1433              :                                   SIGNED)
    1434       662921 :            + addr_offset);
    1435       662921 :       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
    1436       662921 :       op->op0 = build_fold_addr_expr (addr_base);
    1437       662921 :       if (tree_fits_shwi_p (mem_op->op0))
    1438       662854 :         mem_op->off = tree_to_shwi (mem_op->op0);
    1439              :       else
    1440           67 :         mem_op->off = -1;
    1441       662921 :       return true;
    1442              :     }
    1443              :   return false;
    1444              : }
    1445              : 
    1446              : /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
    1447              :    *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  /* Iterate as long as the address at *I_P is an SSA name whose
     definition we can fold into the preceding MEM_REF.  */
  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      /* Only &OBJ and pointer arithmetic definitions can be folded.  */
      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      /* Start from the MEM_REF's current constant offset.  */
      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
						       &addr_offset,
						       vn_valueize);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      /* Replace the trailing MEM_REF/address pair with the ops
		 of the referenced expression itself.  */
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  /* Re-type the new MEM_REF offset with the old MEM_REF's
		     pointer type to keep the TBAA information.  */
		  new_mem_op->op0
		      = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					  wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  /* Give up on non-MEM_REF bases or addresses flowing through
	     abnormal PHIs (coalescing restrictions).  */
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  /* code == POINTER_PLUS_EXPR: fold ptr + cst into the
	     MEM_REF offset.  */
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      /* Commit the accumulated offset and keep the cached shwi offset
	 in sync.  */
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ???  Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
    1571              : 
    1572              : /* Optimize the reference REF to a constant if possible or return
    1573              :    NULL_TREE if not.  */
    1574              : 
tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      /* op0 is NULL for internal functions (the fn is then in clique),
	 otherwise it must be the address of a normal builtin.  */
      && (!op->op0
	  || (TREE_CODE (op->op0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
				    BUILT_IN_NORMAL)))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      /* Only bother folding when at least one argument is constant
	 (or an invariant address).  */
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  combined_fn fn;
	  if (op->op0)
	    fn = as_combined_fn (DECL_FUNCTION_CODE
					(TREE_OPERAND (op->op0, 0)));
	  else
	    /* Internal function code was stashed in the clique field by
	       copy_reference_ops_from_call.  */
	    fn = as_combined_fn ((internal_fn) op->clique);
	  tree folded;
	  if (arg1)
	    folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
	  else
	    folded = fold_const_call (fn, ref->type, arg0->op0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && ref->type
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      /* Only handle byte-aligned accesses that fit the encoding buffer.  */
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      /* Walk the ops accumulating a constant byte offset until we reach
	 the base (a constant op or a MEM_REF); I ends up one past it.  */
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  /* Reverse storage order or an unknown offset defeats the
	     native encoding below.  */
	  if (operands[i].reverse)
	    return NULL_TREE;
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      /* MEM_REF[&decl/&string, off] — fetch the decl's constant
	 initializer if there is one.  */
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (VAR_P (TREE_OPERAND (base[1].op0, 0))
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      /* A NULL ctor means the object is known zero-initialized.  */
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  /* No decl: the base is itself a constant — read the bytes
	     directly via native encode/interpret.  */
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
    1709              : 
    1710              : /* Return true if OPS contain a storage order barrier.  */
    1711              : 
    1712              : static bool
    1713     58278563 : contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
    1714              : {
    1715     58278563 :   vn_reference_op_t op;
    1716     58278563 :   unsigned i;
    1717              : 
    1718    228288891 :   FOR_EACH_VEC_ELT (ops, i, op)
    1719    170010328 :     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
    1720              :       return true;
    1721              : 
    1722              :   return false;
    1723              : }
    1724              : 
    1725              : /* Return true if OPS represent an access with reverse storage order.  */
    1726              : 
    1727              : static bool
    1728     58286834 : reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
    1729              : {
    1730     58286834 :   unsigned i = 0;
    1731     58286834 :   if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
    1732              :     ++i;
    1733     58286834 :   switch (ops[i].opcode)
    1734              :     {
    1735     56211602 :     case ARRAY_REF:
    1736     56211602 :     case COMPONENT_REF:
    1737     56211602 :     case BIT_FIELD_REF:
    1738     56211602 :     case MEM_REF:
    1739     56211602 :       return ops[i].reverse;
    1740              :     default:
    1741              :       return false;
    1742              :     }
    1743              : }
    1744              : 
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  With WITH_AVAIL true operands
   are valueized with vn_valueize, otherwise with SSA_VAL.  */

static void
valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
                 bool with_avail = false)
{
  *valueized_anything = false;

  for (unsigned i = 0; i < orig->length (); ++i)
    {
re_valueize:
      vn_reference_op_t vro = &(*orig)[i];
      /* Valueize op0, which is either an SSA name itself (opcode
	 SSA_NAME) or an SSA name operand of a component.  */
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      /* Likewise valueize op1 and op2 when they are SSA names.  */
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  Note the helpers may adjust
         I when they collapse operands.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && (*orig)[i - 1].opcode == MEM_REF)
        {
          if (vn_reference_fold_indirect (orig, &i))
            *valueized_anything = true;
        }
      else if (i > 0
               && vro->opcode == SSA_NAME
               && (*orig)[i - 1].opcode == MEM_REF)
        {
          if (vn_reference_maybe_forwprop_address (orig, &i))
            {
              *valueized_anything = true;
              /* Re-valueize the current operand.  */
              goto re_valueize;
            }
        }
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if ((vro->opcode == ARRAY_REF
                || vro->opcode == ARRAY_RANGE_REF)
               && known_eq (vro->off, -1)
               && poly_int_tree_p (vro->op0)
               && poly_int_tree_p (vro->op1)
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
            /* Prohibit value-numbering addresses of one-after-the-last
               element ARRAY_REFs the same as addresses of other components
               before the pass folding __builtin_object_size had a chance
               to run.  */
          if (!(cfun->curr_properties & PROP_objsz)
              && (*orig)[0].opcode == ADDR_EXPR)
            {
              tree dom = TYPE_DOMAIN ((*orig)[i + 1].type);
              /* Without a known maximum index we cannot tell whether
		 the access is one-after-the-last, so keep OFF at -1.  */
              if (!dom
                  || !TYPE_MAX_VALUE (dom)
                  || !poly_int_tree_p (TYPE_MAX_VALUE (dom))
                  || integer_minus_onep (TYPE_MAX_VALUE (dom)))
                continue;
              if (!known_le (wi::to_poly_offset (vro->op0),
                             wi::to_poly_offset (TYPE_MAX_VALUE (dom))))
                continue;
            }

          /* off = (index - low_bound) * element_size * align_unit.  */
          poly_offset_int off = ((wi::to_poly_offset (vro->op0)
                                  - wi::to_poly_offset (vro->op1))
                                 * wi::to_offset (vro->op2)
                                 * vn_ref_op_align_unit (vro));
          off.to_shwi (&vro->off);
        }
    }
}
    1848              : 
    1849              : static void
    1850     12374311 : valueize_refs (vec<vn_reference_op_s> *orig)
    1851              : {
    1852     12374311 :   bool tem;
    1853            0 :   valueize_refs_1 (orig, &tem);
    1854            0 : }
    1855              : 
    1856              : static vec<vn_reference_op_s> shared_lookup_references;
    1857              : 
    1858              : /* Create a vector of vn_reference_op_s structures from REF, a
    1859              :    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
    1860              :    this function.  *VALUEIZED_ANYTHING will specify whether any
    1861              :    operands were valueized.  */
    1862              : 
    1863              : static vec<vn_reference_op_s>
    1864    178070246 : valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
    1865              : {
    1866    178070246 :   if (!ref)
    1867            0 :     return vNULL;
    1868    178070246 :   shared_lookup_references.truncate (0);
    1869    178070246 :   copy_reference_ops_from_ref (ref, &shared_lookup_references);
    1870    178070246 :   valueize_refs_1 (&shared_lookup_references, valueized_anything);
    1871    178070246 :   return shared_lookup_references;
    1872              : }
    1873              : 
    1874              : /* Create a vector of vn_reference_op_s structures from CALL, a
    1875              :    call statement.  The vector is shared among all callers of
    1876              :    this function.  */
    1877              : 
    1878              : static vec<vn_reference_op_s>
    1879      9109684 : valueize_shared_reference_ops_from_call (gcall *call)
    1880              : {
    1881      9109684 :   if (!call)
    1882            0 :     return vNULL;
    1883      9109684 :   shared_lookup_references.truncate (0);
    1884      9109684 :   copy_reference_ops_from_call (call, &shared_lookup_references);
    1885      9109684 :   valueize_refs (&shared_lookup_references);
    1886      9109684 :   return shared_lookup_references;
    1887              : }
    1888              : 
    1889              : /* Lookup a SCCVN reference operation VR in the current hash table.
    1890              :    Returns the resulting value number if it exists in the hash table,
    1891              :    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
    1892              :    vn_reference_t stored in the hashtable if something is found.  */
    1893              : 
    1894              : static tree
    1895     64478004 : vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
    1896              : {
    1897     64478004 :   vn_reference_s **slot;
    1898     64478004 :   hashval_t hash;
    1899              : 
    1900     64478004 :   hash = vr->hashcode;
    1901     64478004 :   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
    1902     64478004 :   if (slot)
    1903              :     {
    1904      8088831 :       if (vnresult)
    1905      8088831 :         *vnresult = (vn_reference_t)*slot;
    1906      8088831 :       return ((vn_reference_t)*slot)->result;
    1907              :     }
    1908              : 
    1909              :   return NULL_TREE;
    1910              : }
    1911              : 
    1912              : 
/* Partial definition tracking support.  A pd_range describes a merged
   region covered by one or more partial definitions.  */

struct pd_range
{
  /* Start of the covered region, in bits.  */
  HOST_WIDE_INT offset;
  /* Size of the covered region, in bits.  */
  HOST_WIDE_INT size;
  /* Child links used when the range is a node in the known_ranges
     splay tree (see default_splay_tree<pd_range *>).  */
  pd_range *m_children[2];
};
    1921              : 
/* A single partial definition as recorded during the alias walk.  */

struct pd_data
{
  /* The stored value; an empty CONSTRUCTOR stands for a zeroing store
     and error_mark_node for a non-constant store.  */
  tree rhs;
  /* Bit offset into RHS to read from; a negative value indicates the
     leading bits should be chopped off (see push_partial_def).  */
  HOST_WIDE_INT rhs_off;
  /* Offset of the definition relative to the looked-up reference,
     in bits.  */
  HOST_WIDE_INT offset;
  /* Size of the definition, in bits.  */
  HOST_WIDE_INT size;
};
    1929              : 
/* Context for alias walking.  Carries the lookup state threaded through
   the walk_non_aliased_vuses callbacks and the partial definition
   tracking machinery.  */

struct vn_walk_cb_data
{
  /* VR_ is the reference to look up, ORIG_REF_ the original reference
     tree (may be NULL).  LAST_VUSE_PTR_, if non-NULL, receives the last
     VUSE visited.  MASK_, if non-NULL, is a constant bit mask applied
     to the loaded value; bits that are zero in the mask are seeded as
     artificial zero partial defs below.  */
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
                   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
                   bool redundant_store_removal_p_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
      vn_walk_kind (vn_walk_kind_),
      tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
      saved_operands (vNULL), first_range (), first_set (-2),
      first_base_set (-2)
  {
    /* When the caller does not care about the last VUSE record it
       into the local member so the rest of the code can rely on
       *last_vuse_ptr being valid.  */
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
    if (mask)
      {
        wide_int w = wi::to_wide (mask);
        unsigned int pos = 0, prec = w.get_precision ();
        pd_data pd;
        pd.rhs = build_constructor (NULL_TREE, NULL);
        pd.rhs_off = 0;
        /* When bitwise and with a constant is done on a memory load,
           we don't really need all the bits to be defined or defined
           to constants, we don't really care what is in the position
           corresponding to 0 bits in the mask.
           So, push the ranges of those 0 bits in the mask as artificial
           zero stores and let the partial def handling code do the
           rest.  */
        while (pos < prec)
          {
            /* TZ is the length of the run of zero bits at POS.  */
            int tz = wi::ctz (w);
            if (pos + tz > prec)
              tz = prec - pos;
            if (tz)
              {
                if (BYTES_BIG_ENDIAN)
                  pd.offset = prec - pos - tz;
                else
                  pd.offset = pos;
                pd.size = tz;
                void *r = push_partial_def (pd, 0, 0, 0, prec);
                gcc_assert (r == NULL_TREE);
              }
            pos += tz;
            if (pos == prec)
              break;
            /* Skip over the following run of one bits.  */
            w = wi::lrshift (w, tz);
            tz = wi::ctz (wi::bit_not (w));
            if (pos + tz > prec)
              tz = prec - pos;
            pos += tz;
            w = wi::lrshift (w, tz);
          }
      }
  }
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, alias_set_type, tree);
  void *push_partial_def (pd_data pd,
                          alias_set_type, alias_set_type, HOST_WIDE_INT,
                          HOST_WIDE_INT);

  /* The reference being looked up.  */
  vn_reference_t vr;
  /* ao_ref initialized from the original reference tree.  */
  ao_ref orig_ref;
  /* Where to record the last VUSE visited; defaults to &last_vuse.  */
  tree *last_vuse_ptr;
  tree last_vuse;
  /* Constant bit mask for masked loads, NULL_TREE if none.  */
  tree mask;
  /* Result value handed back by finish () when MASK is active.  */
  tree masked_result;
  /* If set, finish () fails unless the found value equals this.  */
  tree same_val;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  bool redundant_store_removal_p;
  /* Operands to record the result with; when set finish () prefers
     these over vr->operands.  */
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  /* Alias sets of the first partial def; -2 means not yet recorded.  */
  alias_set_type first_set;
  alias_set_type first_base_set;
  /* Splay tree of merged pd_range's, set up lazily on the second
     partial def (see push_partial_def).  */
  default_splay_tree<pd_range *> known_ranges;
  /* Obstack the splay tree nodes are allocated from; only initialized
     together with KNOWN_RANGES.  */
  obstack ranges_obstack;
  /* Buffer size, in bytes, used for native encoding of partial defs.  */
  static constexpr HOST_WIDE_INT bufsize = 64;
};
    2016              : 
vn_walk_cb_data::~vn_walk_cb_data ()
{
  /* ranges_obstack is only gcc_obstack_init'ed once a second partial
     def puts the splay tree into use, so only free it when
     known_ranges is non-empty.  */
  if (known_ranges)
    obstack_free (&ranges_obstack, NULL);
  saved_operands.release ();
}
    2023              : 
    2024              : void *
    2025      1457048 : vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
    2026              : {
    2027      1457048 :   if (first_set != -2)
    2028              :     {
    2029       350388 :       set = first_set;
    2030       350388 :       base_set = first_base_set;
    2031              :     }
    2032      1457048 :   if (mask)
    2033              :     {
    2034          440 :       masked_result = val;
    2035          440 :       return (void *) -1;
    2036              :     }
    2037      1456608 :   if (same_val && !operand_equal_p (val, same_val))
    2038              :     return (void *) -1;
    2039      1452822 :   vec<vn_reference_op_s> &operands
    2040      1452822 :     = saved_operands.exists () ? saved_operands : vr->operands;
    2041      1452822 :   return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
    2042              :                                                    vr->offset, vr->max_size,
    2043      1452822 :                                                    vr->type, operands, val);
    2044              : }
    2045              : 
    2046              : /* Push PD to the vector of partial definitions returning a
    2047              :    value when we are ready to combine things with VUSE, SET and MAXSIZEI,
    2048              :    NULL when we want to continue looking for partial defs or -1
    2049              :    on failure.  */
    2050              : 
    2051              : void *
    2052       581692 : vn_walk_cb_data::push_partial_def (pd_data pd,
    2053              :                                    alias_set_type set, alias_set_type base_set,
    2054              :                                    HOST_WIDE_INT offseti,
    2055              :                                    HOST_WIDE_INT maxsizei)
    2056              : {
    2057              :   /* We're using a fixed buffer for encoding so fail early if the object
    2058              :      we want to interpret is bigger.  */
    2059       581692 :   if (maxsizei > bufsize * BITS_PER_UNIT
    2060              :       || CHAR_BIT != 8
    2061              :       || BITS_PER_UNIT != 8
    2062              :       /* Not prepared to handle PDP endian.  */
    2063              :       || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
    2064              :     return (void *)-1;
    2065              : 
    2066              :   /* Turn too large constant stores into non-constant stores.  */
    2067       581622 :   if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
    2068            0 :     pd.rhs = error_mark_node;
    2069              : 
    2070              :   /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
    2071              :      most a partial byte before and/or after the region.  */
    2072       581622 :   if (!CONSTANT_CLASS_P (pd.rhs))
    2073              :     {
    2074       544217 :       if (pd.offset < offseti)
    2075              :         {
    2076         8005 :           HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
    2077         8005 :           gcc_assert (pd.size > o);
    2078         8005 :           pd.size -= o;
    2079         8005 :           pd.offset += o;
    2080              :         }
    2081       544217 :       if (pd.size > maxsizei)
    2082         7160 :         pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
    2083              :     }
    2084              : 
    2085       581622 :   pd.offset -= offseti;
    2086              : 
    2087      1163244 :   bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
    2088       581622 :                         || CONSTANT_CLASS_P (pd.rhs));
    2089       581622 :   pd_range *r;
    2090       581622 :   if (partial_defs.is_empty ())
    2091              :     {
    2092              :       /* If we get a clobber upfront, fail.  */
    2093       372221 :       if (TREE_CLOBBER_P (pd.rhs))
    2094              :         return (void *)-1;
    2095       371864 :       if (!pd_constant_p)
    2096              :         return (void *)-1;
    2097       342464 :       partial_defs.safe_push (pd);
    2098       342464 :       first_range.offset = pd.offset;
    2099       342464 :       first_range.size = pd.size;
    2100       342464 :       first_set = set;
    2101       342464 :       first_base_set = base_set;
    2102       342464 :       last_vuse_ptr = NULL;
    2103       342464 :       r = &first_range;
    2104              :       /* Go check if the first partial definition was a full one in case
    2105              :          the caller didn't optimize for this.  */
    2106              :     }
    2107              :   else
    2108              :     {
    2109       209401 :       if (!known_ranges)
    2110              :         {
    2111              :           /* ???  Optimize the case where the 2nd partial def completes
    2112              :              things.  */
    2113       178788 :           gcc_obstack_init (&ranges_obstack);
    2114       178788 :           known_ranges.insert_max_node (&first_range);
    2115              :         }
    2116              :       /* Lookup the offset and see if we need to merge.  */
    2117       209401 :       int comparison = known_ranges.lookup_le
    2118       423061 :         ([&] (pd_range *r) { return pd.offset < r->offset; },
    2119       190286 :          [&] (pd_range *r) { return pd.offset > r->offset; });
    2120       209401 :       r = known_ranges.root ();
    2121       209401 :       if (comparison >= 0
    2122       209401 :           && ranges_known_overlap_p (r->offset, r->size + 1,
    2123              :                                      pd.offset, pd.size))
    2124              :         {
    2125              :           /* Ignore partial defs already covered.  Here we also drop shadowed
    2126              :              clobbers arriving here at the floor.  */
    2127         5718 :           if (known_subrange_p (pd.offset, pd.size, r->offset, r->size))
    2128              :             return NULL;
    2129         4889 :           r->size = MAX (r->offset + r->size, pd.offset + pd.size) - r->offset;
    2130              :         }
    2131              :       else
    2132              :         {
    2133              :           /* pd.offset wasn't covered yet, insert the range.  */
    2134       203683 :           void *addr = XOBNEW (&ranges_obstack, pd_range);
    2135       203683 :           r = new (addr) pd_range { pd.offset, pd.size, {} };
    2136       203683 :           known_ranges.insert_relative (comparison, r);
    2137              :         }
    2138              :       /* Merge r which now contains pd's range and is a member of the splay
    2139              :          tree with adjacent overlapping ranges.  */
    2140       208572 :       if (known_ranges.splay_next_node ())
    2141        21213 :         do
    2142              :           {
    2143        21213 :             pd_range *rafter = known_ranges.root ();
    2144        21213 :             if (!ranges_known_overlap_p (r->offset, r->size + 1,
    2145        21213 :                                          rafter->offset, rafter->size))
    2146              :               break;
    2147        20967 :             r->size = MAX (r->offset + r->size,
    2148        20967 :                            rafter->offset + rafter->size) - r->offset;
    2149              :           }
    2150        20967 :         while (known_ranges.remove_root_and_splay_next ());
    2151              :       /* If we get a clobber, fail.  */
    2152       208572 :       if (TREE_CLOBBER_P (pd.rhs))
    2153              :         return (void *)-1;
    2154              :       /* Non-constants are OK as long as they are shadowed by a constant.  */
    2155       206448 :       if (!pd_constant_p)
    2156              :         return (void *)-1;
    2157       200389 :       partial_defs.safe_push (pd);
    2158              :     }
    2159              : 
    2160              :   /* Now we have merged pd's range into the range tree.  When we have covered
    2161              :      [offseti, sizei] then the tree will contain exactly one node which has
    2162              :      the desired properties and it will be 'r'.  */
    2163       542853 :   if (!known_subrange_p (0, maxsizei, r->offset, r->size))
    2164              :     /* Continue looking for partial defs.  */
    2165              :     return NULL;
    2166              : 
    2167              :   /* Now simply native encode all partial defs in reverse order.  */
    2168         8360 :   unsigned ndefs = partial_defs.length ();
    2169              :   /* We support up to 512-bit values (for V8DFmode).  */
    2170         8360 :   unsigned char buffer[bufsize + 1];
    2171         8360 :   unsigned char this_buffer[bufsize + 1];
    2172         8360 :   int len;
    2173              : 
    2174         8360 :   memset (buffer, 0, bufsize + 1);
    2175         8360 :   unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
    2176        40858 :   while (!partial_defs.is_empty ())
    2177              :     {
    2178        24138 :       pd_data pd = partial_defs.pop ();
    2179        24138 :       unsigned int amnt;
    2180        24138 :       if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
    2181              :         {
    2182              :           /* Empty CONSTRUCTOR.  */
    2183         1965 :           if (pd.size >= needed_len * BITS_PER_UNIT)
    2184         1965 :             len = needed_len;
    2185              :           else
    2186         1732 :             len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
    2187         1965 :           memset (this_buffer, 0, len);
    2188              :         }
    2189        22173 :       else if (pd.rhs_off >= 0)
    2190              :         {
    2191        44346 :           len = native_encode_expr (pd.rhs, this_buffer, bufsize,
    2192        22173 :                                     (MAX (0, -pd.offset)
    2193        22173 :                                      + pd.rhs_off) / BITS_PER_UNIT);
    2194        22173 :           if (len <= 0
    2195        22173 :               || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
    2196        22173 :                         - MAX (0, -pd.offset) / BITS_PER_UNIT))
    2197              :             {
    2198            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    2199            0 :                 fprintf (dump_file, "Failed to encode %u "
    2200              :                          "partial definitions\n", ndefs);
    2201            0 :               return (void *)-1;
    2202              :             }
    2203              :         }
    2204              :       else /* negative pd.rhs_off indicates we want to chop off first bits */
    2205              :         {
    2206            0 :           if (-pd.rhs_off >= bufsize)
    2207              :             return (void *)-1;
    2208            0 :           len = native_encode_expr (pd.rhs,
    2209            0 :                                     this_buffer + -pd.rhs_off / BITS_PER_UNIT,
    2210            0 :                                     bufsize - -pd.rhs_off / BITS_PER_UNIT,
    2211            0 :                                     MAX (0, -pd.offset) / BITS_PER_UNIT);
    2212            0 :           if (len <= 0
    2213            0 :               || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
    2214            0 :                         - MAX (0, -pd.offset) / BITS_PER_UNIT))
    2215              :             {
    2216            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    2217            0 :                 fprintf (dump_file, "Failed to encode %u "
    2218              :                          "partial definitions\n", ndefs);
    2219            0 :               return (void *)-1;
    2220              :             }
    2221              :         }
    2222              : 
    2223        24138 :       unsigned char *p = buffer;
    2224        24138 :       HOST_WIDE_INT size = pd.size;
    2225        24138 :       if (pd.offset < 0)
    2226          240 :         size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
    2227        24138 :       this_buffer[len] = 0;
    2228        24138 :       if (BYTES_BIG_ENDIAN)
    2229              :         {
    2230              :           /* LSB of this_buffer[len - 1] byte should be at
    2231              :              pd.offset + pd.size - 1 bits in buffer.  */
    2232              :           amnt = ((unsigned HOST_WIDE_INT) pd.offset
    2233              :                   + pd.size) % BITS_PER_UNIT;
    2234              :           if (amnt)
    2235              :             shift_bytes_in_array_right (this_buffer, len + 1, amnt);
    2236              :           unsigned char *q = this_buffer;
    2237              :           unsigned int off = 0;
    2238              :           if (pd.offset >= 0)
    2239              :             {
    2240              :               unsigned int msk;
    2241              :               off = pd.offset / BITS_PER_UNIT;
    2242              :               gcc_assert (off < needed_len);
    2243              :               p = buffer + off;
    2244              :               if (size <= amnt)
    2245              :                 {
    2246              :                   msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
    2247              :                   *p = (*p & ~msk) | (this_buffer[len] & msk);
    2248              :                   size = 0;
    2249              :                 }
    2250              :               else
    2251              :                 {
    2252              :                   if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
    2253              :                     q = (this_buffer + len
    2254              :                          - (ROUND_UP (size - amnt, BITS_PER_UNIT)
    2255              :                             / BITS_PER_UNIT));
    2256              :                   if (pd.offset % BITS_PER_UNIT)
    2257              :                     {
    2258              :                       msk = -1U << (BITS_PER_UNIT
    2259              :                                     - (pd.offset % BITS_PER_UNIT));
    2260              :                       *p = (*p & msk) | (*q & ~msk);
    2261              :                       p++;
    2262              :                       q++;
    2263              :                       off++;
    2264              :                       size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
    2265              :                       gcc_assert (size >= 0);
    2266              :                     }
    2267              :                 }
    2268              :             }
    2269              :           else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
    2270              :             {
    2271              :               q = (this_buffer + len
    2272              :                    - (ROUND_UP (size - amnt, BITS_PER_UNIT)
    2273              :                       / BITS_PER_UNIT));
    2274              :               if (pd.offset % BITS_PER_UNIT)
    2275              :                 {
    2276              :                   q++;
    2277              :                   size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
    2278              :                                            % BITS_PER_UNIT);
    2279              :                   gcc_assert (size >= 0);
    2280              :                 }
    2281              :             }
    2282              :           if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
    2283              :               > needed_len)
    2284              :             size = (needed_len - off) * BITS_PER_UNIT;
    2285              :           memcpy (p, q, size / BITS_PER_UNIT);
    2286              :           if (size % BITS_PER_UNIT)
    2287              :             {
    2288              :               unsigned int msk
    2289              :                 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
    2290              :               p += size / BITS_PER_UNIT;
    2291              :               q += size / BITS_PER_UNIT;
    2292              :               *p = (*q & msk) | (*p & ~msk);
    2293              :             }
    2294              :         }
    2295              :       else
    2296              :         {
    2297        24138 :           if (pd.offset >= 0)
    2298              :             {
    2299              :               /* LSB of this_buffer[0] byte should be at pd.offset bits
    2300              :                  in buffer.  */
    2301        23898 :               unsigned int msk;
    2302        23898 :               size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
    2303        23898 :               amnt = pd.offset % BITS_PER_UNIT;
    2304        23898 :               if (amnt)
    2305         1514 :                 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
    2306        23898 :               unsigned int off = pd.offset / BITS_PER_UNIT;
    2307        23898 :               gcc_assert (off < needed_len);
    2308        23898 :               size = MIN (size,
    2309              :                           (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
    2310        23898 :               p = buffer + off;
    2311        23898 :               if (amnt + size < BITS_PER_UNIT)
    2312              :                 {
    2313              :                   /* Low amnt bits come from *p, then size bits
    2314              :                      from this_buffer[0] and the remaining again from
    2315              :                      *p.  */
    2316         1084 :                   msk = ((1 << size) - 1) << amnt;
    2317         1084 :                   *p = (*p & ~msk) | (this_buffer[0] & msk);
    2318         1084 :                   size = 0;
    2319              :                 }
    2320        22814 :               else if (amnt)
    2321              :                 {
    2322         1120 :                   msk = -1U << amnt;
    2323         1120 :                   *p = (*p & ~msk) | (this_buffer[0] & msk);
    2324         1120 :                   p++;
    2325         1120 :                   size -= (BITS_PER_UNIT - amnt);
    2326              :                 }
    2327              :             }
    2328              :           else
    2329              :             {
    2330          240 :               amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
    2331          240 :               if (amnt)
    2332           16 :                 size -= BITS_PER_UNIT - amnt;
    2333          240 :               size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
    2334          240 :               if (amnt)
    2335           16 :                 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
    2336              :             }
    2337        24138 :           memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
    2338        24138 :           p += size / BITS_PER_UNIT;
    2339        24138 :           if (size % BITS_PER_UNIT)
    2340              :             {
    2341          625 :               unsigned int msk = -1U << (size % BITS_PER_UNIT);
    2342          625 :               *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
    2343          625 :                     & ~msk) | (*p & msk);
    2344              :             }
    2345              :         }
    2346              :     }
    2347              : 
    2348         8360 :   tree type = vr->type;
    2349              :   /* Make sure to interpret in a type that has a range covering the whole
    2350              :      access size.  */
    2351         8360 :   if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
    2352              :     {
    2353           13 :       if (TREE_CODE (vr->type) == BITINT_TYPE
    2354           26 :           && maxsizei > MAX_FIXED_MODE_SIZE)
    2355           13 :         type = build_bitint_type (maxsizei, TYPE_UNSIGNED (type));
    2356              :       else
    2357            0 :         type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
    2358              :     }
    2359         8360 :   tree val;
    2360         8360 :   if (BYTES_BIG_ENDIAN)
    2361              :     {
    2362              :       unsigned sz = needed_len;
    2363              :       if (maxsizei % BITS_PER_UNIT)
    2364              :         shift_bytes_in_array_right (buffer, needed_len,
    2365              :                                     BITS_PER_UNIT
    2366              :                                     - (maxsizei % BITS_PER_UNIT));
    2367              :       if (INTEGRAL_TYPE_P (type))
    2368              :         {
    2369              :           if (TYPE_MODE (type) != BLKmode)
    2370              :             sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
    2371              :           else
    2372              :             sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
    2373              :         }
    2374              :       if (sz > needed_len)
    2375              :         {
    2376              :           memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
    2377              :           val = native_interpret_expr (type, this_buffer, sz);
    2378              :         }
    2379              :       else
    2380              :         val = native_interpret_expr (type, buffer, needed_len);
    2381              :     }
    2382              :   else
    2383         8360 :     val = native_interpret_expr (type, buffer, bufsize);
    2384              :   /* If we chop off bits because the types precision doesn't match the memory
    2385              :      access size this is ok when optimizing reads but not when called from
    2386              :      the DSE code during elimination.  */
    2387         8360 :   if (val && type != vr->type)
    2388              :     {
    2389           13 :       if (! int_fits_type_p (val, vr->type))
    2390              :         val = NULL_TREE;
    2391              :       else
    2392           13 :         val = fold_convert (vr->type, val);
    2393              :     }
    2394              : 
    2395         8356 :   if (val)
    2396              :     {
    2397         8356 :       if (dump_file && (dump_flags & TDF_DETAILS))
    2398            0 :         fprintf (dump_file,
    2399              :                  "Successfully combined %u partial definitions\n", ndefs);
    2400              :       /* We are using the alias-set of the first store we encounter which
    2401              :          should be appropriate here.  */
    2402         8356 :       return finish (first_set, first_base_set, val);
    2403              :     }
    2404              :   else
    2405              :     {
    2406            4 :       if (dump_file && (dump_flags & TDF_DETAILS))
    2407            0 :         fprintf (dump_file,
    2408              :                  "Failed to interpret %u encoded partial definitions\n", ndefs);
    2409            4 :       return (void *)-1;
    2410              :     }
    2411              : }
    2412              : 
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op, tree vuse, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  vn_reference_s **slot;
  hashval_t hash;

  /* If we have partial definitions recorded we have to go through
     vn_reference_lookup_3.  */
  if (!data->partial_defs.is_empty ())
    return NULL;

  /* Remember the last VUSE we walked to for the caller.  */
  if (data->last_vuse_ptr)
    {
      *data->last_vuse_ptr = vuse;
      data->last_vuse = vuse;
    }

  /* Fixup vuse and hash.  The hashcode incorporates the SSA name version
     of the vuse, so back out the old contribution before substituting
     the valueized VUSE and add the new one back in.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  /* Look up the adjusted reference in the valid table without inserting.  */
  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if ((*slot)->result && data->saved_operands.exists ())
	return data->finish (vr->set, vr->base_set, (*slot)->result);
      return *slot;
    }

  /* At a default-def VUSE (function entry) try whether IPA-CP recorded
     a known aggregate constant for the accessed parameter pieces.  */
  if (SSA_NAME_IS_DEFAULT_DEF (vuse))
    {
      HOST_WIDE_INT op_offset, op_size;
      tree v = NULL_TREE;
      tree base = ao_ref_base (op);

      /* Only exact, constant-offset/size accesses can be matched against
	 the by-offset aggregate constants IPA-CP records.  */
      if (base
	  && op->offset.is_constant (&op_offset)
	  && op->size.is_constant (&op_size)
	  && op->max_size_known_p ()
	  && known_eq (op->size, op->max_size))
	{
	  /* Direct access to an aggregate PARM_DECL.  */
	  if (TREE_CODE (base) == PARM_DECL)
	    v = ipcp_get_aggregate_const (cfun, base, false, op_offset,
					  op_size);
	  /* Or an indirect access through the default-def SSA name of a
	     pointer parameter at offset zero.  */
	  else if (TREE_CODE (base) == MEM_REF
		   && integer_zerop (TREE_OPERAND (base, 1))
		   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
		   && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
		   && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0)))
		       == PARM_DECL))
	    v = ipcp_get_aggregate_const (cfun,
					  SSA_NAME_VAR (TREE_OPERAND (base, 0)),
					  true, op_offset, op_size);
	}
      if (v)
	return data->finish (vr->set, vr->base_set, v);
    }

  return NULL;
}
    2482              : 
    2483              : /* Lookup an existing or insert a new vn_reference entry into the
    2484              :    value table for the VUSE, SET, TYPE, OPERANDS reference which
    2485              :    has the value VALUE which is either a constant or an SSA name.  */
    2486              : 
    2487              : static vn_reference_t
    2488      1452822 : vn_reference_lookup_or_insert_for_pieces (tree vuse,
    2489              :                                           alias_set_type set,
    2490              :                                           alias_set_type base_set,
    2491              :                                           poly_int64 offset,
    2492              :                                           poly_int64 max_size,
    2493              :                                           tree type,
    2494              :                                           vec<vn_reference_op_s,
    2495              :                                                 va_heap> operands,
    2496              :                                           tree value)
    2497              : {
    2498      1452822 :   vn_reference_s vr1;
    2499      1452822 :   vn_reference_t result;
    2500      1452822 :   unsigned value_id;
    2501      1452822 :   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
    2502      1452822 :   vr1.operands = operands;
    2503      1452822 :   vr1.type = type;
    2504      1452822 :   vr1.set = set;
    2505      1452822 :   vr1.base_set = base_set;
    2506      1452822 :   vr1.offset = offset;
    2507      1452822 :   vr1.max_size = max_size;
    2508      1452822 :   vr1.hashcode = vn_reference_compute_hash (&vr1);
    2509      1452822 :   if (vn_reference_lookup_1 (&vr1, &result))
    2510         8160 :     return result;
    2511              : 
    2512      1444662 :   if (TREE_CODE (value) == SSA_NAME)
    2513       272760 :     value_id = VN_INFO (value)->value_id;
    2514              :   else
    2515      1171902 :     value_id = get_or_alloc_constant_value_id (value);
    2516      1444662 :   return vn_reference_insert_pieces (vuse, set, base_set, offset, max_size,
    2517      1444662 :                                      type, operands.copy (), value, value_id);
    2518              : }
    2519              : 
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the possibly simplified result or by inserting the
   operation if INSERT is true.  If SIMPLIFY is false, return a value
   number for the unsimplified expression.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
			   bool simplify)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  Replace each SSA operand by its
     current value; stop at the first operand without one.  */
  unsigned i = 0;
  if (simplify)
    for (i = 0; i < res_op->num_ops; ++i)
      if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
	{
	  tree tem = vn_valueize (res_op->ops[i]);
	  if (!tem)
	    break;
	  res_op->ops[i] = tem;
	}
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  bool res = false;
  if (i == res_op->num_ops)
    {
      /* Do not leak not available operands into the simplified expression
	 when called from PRE context.  */
      if (rpo_avail)
	mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      /* Not simplified to a single value - look up the expression itself.  */
      tree val = vn_lookup_simplify_result (res_op);
      /* ???  In weird cases we can end up with internal-fn calls,
	 but this isn't expected so throw the result away.  See
	 PR123040 for an example.  */
      if (!val && insert && res_op->code.is_tree_code ())
	{
	  /* Materialize the expression as a single new statement.  */
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      /* Mark the name so elimination knows its definition still has to be
	 inserted into the IL.  */
      result_info->needs_insertion = true;
      /* ???  PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
	  vn_nary_op_insert_into (vno1, valid_info->nary);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
    2642              : 
    2643              : /* Return a value-number for RCODE OPS... either by looking up an existing
    2644              :    value-number for the simplified result or by inserting the operation.  */
    2645              : 
    2646              : static tree
    2647       179804 : vn_nary_build_or_lookup (gimple_match_op *res_op)
    2648              : {
    2649            0 :   return vn_nary_build_or_lookup_1 (res_op, true, true);
    2650              : }
    2651              : 
/* Try to simplify the n-ary operation NARY and return its value if
   present.  Update NARY in place with the simplified expression if
   it fits the vn_nary_op_t representation.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS
      /* For CONSTRUCTOR the vn_nary_op_t and gimple_match_op representation
	 does not match.  */
      || nary->opcode == CONSTRUCTOR)
    return NULL_TREE;
  /* Transfer NARY into a gimple_match_op and simplify without inserting
     a new name.  */
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  tree res = vn_nary_build_or_lookup_1 (&op, false, true);
  /* Do not update *NARY with a simplified result that contains abnormals.
     This matches what maybe_push_res_to_seq does when requesting insertion.  */
  for (unsigned i = 0; i < op.num_ops; ++i)
    if (TREE_CODE (op.ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op.ops[i]))
      return res;
  /* Copy the (possibly simplified) operation back when it still fits.  */
  if (op.code.is_tree_code ()
      && op.num_ops <= nary->length
      && (tree_code) op.code != CONSTRUCTOR)
    {
      nary->opcode = (tree_code) op.code;
      nary->length = op.num_ops;
      for (unsigned i = 0; i < op.num_ops; ++i)
	nary->op[i] = op.ops[i];
    }
  return res;
}
    2685              : 
/* Elimination engine.  Walks the dominator tree replacing redundant
   expressions by their recorded value numbers.  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  /* Dominator walk callbacks.  */
  edge before_dom_children (basic_block) final override;
  void after_dom_children (basic_block) final override;

  /* Availability interface; overridable so rpo_elim can supply
     RPO-based availability.  */
  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  /* Insert the definition of VAL before *GSI, returning the new name.  */
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  /* Perform elimination on the statement at the iterator.  */
  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  /* Remove queued statements and fix up the IL; returns a TODO set.  */
  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  /* Statistics.  */
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
    2725              : 
/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  /* Walk dominators starting at ENTRY_; no inserted-exprs bitmap.  */
  rpo_elim(basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
      m_avail_freelist (NULL) {}

  /* RPO-availability based overrides of the engine's hooks.  */
  tree eliminate_avail (basic_block, tree op) final override;

  void eliminate_push_avail (basic_block, tree) final override;

  /* Entry block of the region the walk covers.  */
  basic_block entry;
  /* Freelist of avail entries which are allocated from the vn_ssa_aux
     obstack.  */
  vn_avail *m_avail_freelist;
};
    2744              : 
    2745              : /* Return true if BASE1 and BASE2 can be adjusted so they have the
    2746              :    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
    2747              :    Otherwise return false.  */
    2748              : 
    2749              : static bool
    2750      6803027 : adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
    2751              :                                        tree base2, poly_int64 *offset2)
    2752              : {
    2753      6803027 :   poly_int64 soff;
    2754      6803027 :   if (TREE_CODE (base1) == MEM_REF
    2755      3088618 :       && TREE_CODE (base2) == MEM_REF)
    2756              :     {
    2757      2475156 :       if (mem_ref_offset (base1).to_shwi (&soff))
    2758              :         {
    2759      2475156 :           base1 = TREE_OPERAND (base1, 0);
    2760      2475156 :           *offset1 += soff * BITS_PER_UNIT;
    2761              :         }
    2762      2475156 :       if (mem_ref_offset (base2).to_shwi (&soff))
    2763              :         {
    2764      2475156 :           base2 = TREE_OPERAND (base2, 0);
    2765      2475156 :           *offset2 += soff * BITS_PER_UNIT;
    2766              :         }
    2767      2475156 :       return operand_equal_p (base1, base2, 0);
    2768              :     }
    2769      4327871 :   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
    2770              : }
    2771              : 
    2772              : /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
    2773              :    from the statement defining VUSE and if not successful tries to
    2774              :    translate *REFP and VR_ through an aggregate copy at the definition
    2775              :    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
    2776              :    of *REF and *VR.  If only disambiguation was performed then
    2777              :    *DISAMBIGUATE_ONLY is set to true.  */
    2778              : 
    2779              : static void *
    2780     42003346 : vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
    2781              :                        translate_flags *disambiguate_only)
    2782              : {
    2783     42003346 :   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
    2784     42003346 :   vn_reference_t vr = data->vr;
    2785     42003346 :   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
    2786     42003346 :   tree base = ao_ref_base (ref);
    2787     42003346 :   HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
    2788     42003346 :   static vec<vn_reference_op_s> lhs_ops;
    2789     42003346 :   ao_ref lhs_ref;
    2790     42003346 :   bool lhs_ref_ok = false;
    2791     42003346 :   poly_int64 copy_size;
    2792              : 
    2793              :   /* First try to disambiguate after value-replacing in the definitions LHS.  */
    2794     42003346 :   if (is_gimple_assign (def_stmt))
    2795              :     {
    2796     20699565 :       tree lhs = gimple_assign_lhs (def_stmt);
    2797     20699565 :       bool valueized_anything = false;
    2798              :       /* Avoid re-allocation overhead.  */
    2799     20699565 :       lhs_ops.truncate (0);
    2800     20699565 :       basic_block saved_rpo_bb = vn_context_bb;
    2801     20699565 :       vn_context_bb = gimple_bb (def_stmt);
    2802     20699565 :       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
    2803              :         {
    2804     13439452 :           copy_reference_ops_from_ref (lhs, &lhs_ops);
    2805     13439452 :           valueize_refs_1 (&lhs_ops, &valueized_anything, true);
    2806              :         }
    2807     20699565 :       vn_context_bb = saved_rpo_bb;
    2808     20699565 :       ao_ref_init (&lhs_ref, lhs);
    2809     20699565 :       lhs_ref_ok = true;
    2810     20699565 :       if (valueized_anything
    2811      1943828 :           && ao_ref_init_from_vn_reference
    2812      1943828 :                (&lhs_ref, ao_ref_alias_set (&lhs_ref),
    2813      1943828 :                 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
    2814     22643393 :           && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
    2815              :         {
    2816      1640001 :           *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
    2817      8145817 :           return NULL;
    2818              :         }
    2819              : 
    2820              :       /* When the def is a CLOBBER we can optimistically disambiguate
    2821              :          against it since any overlap it would be undefined behavior.
    2822              :          Avoid this for obvious must aliases to save compile-time though.
    2823              :          We also may not do this when the query is used for redundant
    2824              :          store removal.  */
    2825     19059564 :       if (!data->redundant_store_removal_p
    2826     10445409 :           && gimple_clobber_p (def_stmt)
    2827     19570551 :           && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
    2828              :         {
    2829       478157 :           *disambiguate_only = TR_DISAMBIGUATE;
    2830       478157 :           return NULL;
    2831              :         }
    2832              : 
    2833              :       /* Besides valueizing the LHS we can also use access-path based
    2834              :          disambiguation on the original non-valueized ref.  */
    2835     18581407 :       if (!ref->ref
    2836              :           && lhs_ref_ok
    2837      2626900 :           && data->orig_ref.ref)
    2838              :         {
    2839              :           /* We want to use the non-valueized LHS for this, but avoid redundant
    2840              :              work.  */
    2841      1825313 :           ao_ref *lref = &lhs_ref;
    2842      1825313 :           ao_ref lref_alt;
    2843      1825313 :           if (valueized_anything)
    2844              :             {
    2845       121463 :               ao_ref_init (&lref_alt, lhs);
    2846       121463 :               lref = &lref_alt;
    2847              :             }
    2848      1825313 :           if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
    2849              :             {
    2850       237910 :               *disambiguate_only = (valueized_anything
    2851       118955 :                                     ? TR_VALUEIZE_AND_DISAMBIGUATE
    2852              :                                     : TR_DISAMBIGUATE);
    2853       118955 :               return NULL;
    2854              :             }
    2855              :         }
    2856              : 
    2857              :       /* If we reach a clobbering statement try to skip it and see if
    2858              :          we find a VN result with exactly the same value as the
    2859              :          possible clobber.  In this case we can ignore the clobber
    2860              :          and return the found value.  */
    2861     18462452 :       if (!gimple_has_volatile_ops (def_stmt)
    2862     17100545 :           && ((is_gimple_reg_type (TREE_TYPE (lhs))
    2863     12625522 :                && types_compatible_p (TREE_TYPE (lhs), vr->type)
    2864      9821015 :                && !storage_order_barrier_p (lhs)
    2865      9821015 :                && !reverse_storage_order_for_component_p (lhs))
    2866      7279534 :               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == CONSTRUCTOR)
    2867     10874803 :           && (ref->ref || data->orig_ref.ref)
    2868     10411821 :           && !data->mask
    2869     10390327 :           && data->partial_defs.is_empty ()
    2870     10388177 :           && multiple_p (get_object_alignment
    2871              :                            (ref->ref ? ref->ref : data->orig_ref.ref),
    2872              :                            ref->size)
    2873     41271174 :           && multiple_p (get_object_alignment (lhs), ref->size))
    2874              :         {
    2875      9976880 :           HOST_WIDE_INT offset2i, size2i;
    2876      9976880 :           poly_int64 offset = ref->offset;
    2877      9976880 :           poly_int64 maxsize = ref->max_size;
    2878              : 
    2879      9976880 :           gcc_assert (lhs_ref_ok);
    2880      9976880 :           tree base2 = ao_ref_base (&lhs_ref);
    2881      9976880 :           poly_int64 offset2 = lhs_ref.offset;
    2882      9976880 :           poly_int64 size2 = lhs_ref.size;
    2883      9976880 :           poly_int64 maxsize2 = lhs_ref.max_size;
    2884              : 
    2885      9976880 :           tree rhs = gimple_assign_rhs1 (def_stmt);
    2886      9976880 :           if (TREE_CODE (rhs) == CONSTRUCTOR)
    2887      1026205 :             rhs = integer_zero_node;
    2888              :           /* ???  We may not compare to ahead values which might be from
    2889              :              a different loop iteration but only to loop invariants.  Use
    2890              :              CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
    2891              :              The one-hop lookup below doesn't have this issue since there's
    2892              :              a virtual PHI before we ever reach a backedge to cross.
    2893              :              We can skip multiple defs as long as they are from the same
    2894              :              value though.  */
    2895      9976880 :           if (data->same_val
    2896      9976880 :               && !operand_equal_p (data->same_val, rhs))
    2897              :             ;
    2898              :           /* When this is a (partial) must-def, leave it to handling
    2899              :              below in case we are interested in the value.  */
    2900      9683689 :           else if (!(*disambiguate_only > TR_TRANSLATE)
    2901      3318501 :                    && base2
    2902      3318501 :                    && known_eq (maxsize2, size2)
    2903      2320419 :                    && adjust_offsets_for_equal_base_address (base, &offset,
    2904              :                                                              base2, &offset2)
    2905      1135820 :                    && offset2.is_constant (&offset2i)
    2906      1135820 :                    && size2.is_constant (&size2i)
    2907      1135820 :                    && maxsize.is_constant (&maxsizei)
    2908      1135820 :                    && offset.is_constant (&offseti)
    2909     10819509 :                    && ranges_known_overlap_p (offseti, maxsizei, offset2i,
    2910              :                                               size2i))
    2911              :             ;
    2912      8633838 :           else if (CONSTANT_CLASS_P (rhs))
    2913              :             {
    2914      4206331 :               if (dump_file && (dump_flags & TDF_DETAILS))
    2915              :                 {
    2916         1940 :                   fprintf (dump_file,
    2917              :                            "Skipping possible redundant definition ");
    2918         1940 :                   print_gimple_stmt (dump_file, def_stmt, 0);
    2919              :                 }
    2920              :               /* Delay the actual compare of the values to the end of the walk
    2921              :                  but do not update last_vuse from here.  */
    2922      4206331 :               data->last_vuse_ptr = NULL;
    2923      4206331 :               data->same_val = rhs;
    2924      4268703 :               return NULL;
    2925              :             }
    2926              :           else
    2927              :             {
    2928      4427507 :               tree saved_vuse = vr->vuse;
    2929      4427507 :               hashval_t saved_hashcode = vr->hashcode;
    2930      4427507 :               if (vr->vuse)
    2931      4427507 :                 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
    2932      8855014 :               vr->vuse = vuse_ssa_val (gimple_vuse (def_stmt));
    2933      4427507 :               if (vr->vuse)
    2934      4427507 :                 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
    2935      4427507 :               vn_reference_t vnresult = NULL;
    2936              :               /* Do not use vn_reference_lookup_2 since that might perform
    2937              :                  expression hashtable insertion but this lookup crosses
    2938              :                  a possible may-alias making such insertion conditionally
    2939              :                  invalid.  */
    2940      4427507 :               vn_reference_lookup_1 (vr, &vnresult);
    2941              :               /* Need to restore vr->vuse and vr->hashcode.  */
    2942      4427507 :               vr->vuse = saved_vuse;
    2943      4427507 :               vr->hashcode = saved_hashcode;
    2944      4427507 :               if (vnresult)
    2945              :                 {
    2946       240851 :                   if (TREE_CODE (rhs) == SSA_NAME)
    2947       239332 :                     rhs = SSA_VAL (rhs);
    2948       240851 :                   if (vnresult->result
    2949       240851 :                       && operand_equal_p (vnresult->result, rhs, 0))
    2950        62372 :                     return vnresult;
    2951              :                 }
    2952              :             }
    2953              :         }
    2954              :     }
    2955     21303781 :   else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
    2956     19132369 :            && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
    2957     23355007 :            && gimple_call_num_args (def_stmt) <= 4)
    2958              :     {
    2959              :       /* For builtin calls valueize its arguments and call the
    2960              :          alias oracle again.  Valueization may improve points-to
    2961              :          info of pointers and constify size and position arguments.
    2962              :          Originally this was motivated by PR61034 which has
    2963              :          conditional calls to free falsely clobbering ref because
    2964              :          of imprecise points-to info of the argument.  */
    2965              :       tree oldargs[4];
    2966              :       bool valueized_anything = false;
    2967      4848981 :       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
    2968              :         {
    2969      3337027 :           oldargs[i] = gimple_call_arg (def_stmt, i);
    2970      3337027 :           tree val = vn_valueize (oldargs[i]);
    2971      3337027 :           if (val != oldargs[i])
    2972              :             {
    2973       120585 :               gimple_call_set_arg (def_stmt, i, val);
    2974       120585 :               valueized_anything = true;
    2975              :             }
    2976              :         }
    2977      1511954 :       if (valueized_anything)
    2978              :         {
    2979       186502 :           bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
    2980        93251 :                                                ref, data->tbaa_p);
    2981       339361 :           for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
    2982       246110 :             gimple_call_set_arg (def_stmt, i, oldargs[i]);
    2983        93251 :           if (!res)
    2984              :             {
    2985        28473 :               *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
    2986        28473 :               return NULL;
    2987              :             }
    2988              :         }
    2989              :     }
    2990              : 
    2991     35469057 :   if (*disambiguate_only > TR_TRANSLATE)
    2992              :     return (void *)-1;
    2993              : 
    2994              :   /* If we cannot constrain the size of the reference we cannot
    2995              :      test if anything kills it.  */
    2996     23587918 :   if (!ref->max_size_known_p ())
    2997              :     return (void *)-1;
    2998              : 
    2999     23161146 :   poly_int64 offset = ref->offset;
    3000     23161146 :   poly_int64 maxsize = ref->max_size;
    3001              : 
    3002              :   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
    3003              :      from that definition.
    3004              :      1) Memset.  */
    3005     23161146 :   if (is_gimple_reg_type (vr->type)
    3006     23153239 :       && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
    3007     23062750 :           || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
    3008        91031 :       && (integer_zerop (gimple_call_arg (def_stmt, 1))
    3009        33490 :           || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
    3010         9163 :                || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
    3011              :               && CHAR_BIT == 8
    3012              :               && BITS_PER_UNIT == 8
    3013              :               && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
    3014        32151 :               && offset.is_constant (&offseti)
    3015        32151 :               && ref->size.is_constant (&sizei)
    3016        32151 :               && (offseti % BITS_PER_UNIT == 0
    3017           39 :                   || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
    3018        89692 :       && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
    3019        37150 :           || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
    3020        37150 :               && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
    3021     23214251 :       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
    3022        30399 :           || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    3023              :     {
    3024        53064 :       tree base2;
    3025        53064 :       poly_int64 offset2, size2, maxsize2;
    3026        53064 :       bool reverse;
    3027        53064 :       tree ref2 = gimple_call_arg (def_stmt, 0);
    3028        53064 :       if (TREE_CODE (ref2) == SSA_NAME)
    3029              :         {
    3030        30358 :           ref2 = SSA_VAL (ref2);
    3031        30358 :           if (TREE_CODE (ref2) == SSA_NAME
    3032        30358 :               && (TREE_CODE (base) != MEM_REF
    3033        19390 :                   || TREE_OPERAND (base, 0) != ref2))
    3034              :             {
    3035        24037 :               gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
    3036        24037 :               if (gimple_assign_single_p (def_stmt)
    3037        24037 :                   && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
    3038          802 :                 ref2 = gimple_assign_rhs1 (def_stmt);
    3039              :             }
    3040              :         }
    3041        53064 :       if (TREE_CODE (ref2) == ADDR_EXPR)
    3042              :         {
    3043        26493 :           ref2 = TREE_OPERAND (ref2, 0);
    3044        26493 :           base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
    3045              :                                            &reverse);
    3046        26493 :           if (!known_size_p (maxsize2)
    3047        26453 :               || !known_eq (maxsize2, size2)
    3048        52872 :               || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
    3049        56562 :             return (void *)-1;
    3050              :         }
    3051        26571 :       else if (TREE_CODE (ref2) == SSA_NAME)
    3052              :         {
    3053        26571 :           poly_int64 soff;
    3054        26571 :           if (TREE_CODE (base) != MEM_REF
    3055        45378 :               || !(mem_ref_offset (base)
    3056        37611 :                    << LOG2_BITS_PER_UNIT).to_shwi (&soff))
    3057        22623 :             return (void *)-1;
    3058        18804 :           offset += soff;
    3059        18804 :           offset2 = 0;
    3060        18804 :           if (TREE_OPERAND (base, 0) != ref2)
    3061              :             {
    3062        15468 :               gimple *def = SSA_NAME_DEF_STMT (ref2);
    3063        15468 :               if (is_gimple_assign (def)
    3064        14151 :                   && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
    3065        12177 :                   && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
    3066        16110 :                   && poly_int_tree_p (gimple_assign_rhs2 (def)))
    3067              :                 {
    3068          612 :                   tree rhs2 = gimple_assign_rhs2 (def);
    3069          612 :                   if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
    3070              :                                                SIGNED)
    3071          612 :                         << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
    3072              :                     return (void *)-1;
    3073          612 :                   ref2 = gimple_assign_rhs1 (def);
    3074          612 :                   if (TREE_CODE (ref2) == SSA_NAME)
    3075          612 :                     ref2 = SSA_VAL (ref2);
    3076              :                 }
    3077              :               else
    3078              :                 return (void *)-1;
    3079              :             }
    3080              :         }
    3081              :       else
    3082              :         return (void *)-1;
    3083        26558 :       tree len = gimple_call_arg (def_stmt, 2);
    3084        26558 :       HOST_WIDE_INT leni, offset2i;
    3085        26558 :       if (TREE_CODE (len) == SSA_NAME)
    3086          255 :         len = SSA_VAL (len);
    3087              :       /* Sometimes the above trickery is smarter than alias analysis.  Take
    3088              :          advantage of that.  */
    3089        26558 :       if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
    3090        53116 :                                    (wi::to_poly_offset (len)
    3091        26558 :                                     << LOG2_BITS_PER_UNIT)))
    3092              :         return NULL;
    3093        53066 :       if (data->partial_defs.is_empty ()
    3094        26508 :           && known_subrange_p (offset, maxsize, offset2,
    3095        26508 :                                wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
    3096              :         {
    3097        26011 :           tree val;
    3098        26011 :           if (integer_zerop (gimple_call_arg (def_stmt, 1)))
    3099        21178 :             val = build_zero_cst (vr->type);
    3100         4833 :           else if (INTEGRAL_TYPE_P (vr->type)
    3101         3693 :                    && known_eq (ref->size, 8)
    3102         7771 :                    && offseti % BITS_PER_UNIT == 0)
    3103              :             {
    3104         2938 :               gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
    3105         2938 :                                       vr->type, gimple_call_arg (def_stmt, 1));
    3106         2938 :               val = vn_nary_build_or_lookup (&res_op);
    3107         2938 :               if (!val
    3108         2938 :                   || (TREE_CODE (val) == SSA_NAME
    3109          626 :                       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
    3110            0 :                 return (void *)-1;
    3111              :             }
    3112              :           else
    3113              :             {
    3114         1895 :               unsigned buflen
    3115         1895 :                 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
    3116         1895 :               if (INTEGRAL_TYPE_P (vr->type)
    3117         1895 :                   && TYPE_MODE (vr->type) != BLKmode)
    3118         1508 :                 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
    3119         1895 :               unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
    3120         1895 :               memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
    3121              :                       buflen);
    3122         1895 :               if (BYTES_BIG_ENDIAN)
    3123              :                 {
    3124              :                   unsigned int amnt
    3125              :                     = (((unsigned HOST_WIDE_INT) offseti + sizei)
    3126              :                        % BITS_PER_UNIT);
    3127              :                   if (amnt)
    3128              :                     {
    3129              :                       shift_bytes_in_array_right (buf, buflen,
    3130              :                                                   BITS_PER_UNIT - amnt);
    3131              :                       buf++;
    3132              :                       buflen--;
    3133              :                     }
    3134              :                 }
    3135         1895 :               else if (offseti % BITS_PER_UNIT != 0)
    3136              :                 {
    3137            7 :                   unsigned int amnt
    3138              :                     = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
    3139            7 :                                        % BITS_PER_UNIT);
    3140            7 :                   shift_bytes_in_array_left (buf, buflen, amnt);
    3141            7 :                   buf++;
    3142            7 :                   buflen--;
    3143              :                 }
    3144         1895 :               val = native_interpret_expr (vr->type, buf, buflen);
    3145         1895 :               if (!val)
    3146              :                 return (void *)-1;
    3147              :             }
    3148        26011 :           return data->finish (0, 0, val);
    3149              :         }
    3150              :       /* For now handle clearing memory with partial defs.  */
    3151          547 :       else if (known_eq (ref->size, maxsize)
    3152          478 :                && integer_zerop (gimple_call_arg (def_stmt, 1))
    3153          166 :                && tree_fits_poly_int64_p (len)
    3154          162 :                && tree_to_poly_int64 (len).is_constant (&leni)
    3155          162 :                && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
    3156          162 :                && offset.is_constant (&offseti)
    3157          162 :                && offset2.is_constant (&offset2i)
    3158          162 :                && maxsize.is_constant (&maxsizei)
    3159          547 :                && ranges_known_overlap_p (offseti, maxsizei, offset2i,
    3160          547 :                                           leni << LOG2_BITS_PER_UNIT))
    3161              :         {
    3162          162 :           pd_data pd;
    3163          162 :           pd.rhs = build_constructor (NULL_TREE, NULL);
    3164          162 :           pd.rhs_off = 0;
    3165          162 :           pd.offset = offset2i;
    3166          162 :           pd.size = leni << LOG2_BITS_PER_UNIT;
    3167          162 :           return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
    3168              :         }
    3169              :     }
    3170              : 
    3171              :   /* 2) Assignment from an empty CONSTRUCTOR.  */
    3172     23108082 :   else if (is_gimple_reg_type (vr->type)
    3173     23100175 :            && gimple_assign_single_p (def_stmt)
    3174      7659700 :            && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
    3175      1953450 :            && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0
    3176     25061532 :            && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt)))
    3177              :     {
    3178      1953418 :       tree base2;
    3179      1953418 :       poly_int64 offset2, size2, maxsize2;
    3180      1953418 :       HOST_WIDE_INT offset2i, size2i;
    3181      1953418 :       gcc_assert (lhs_ref_ok);
    3182      1953418 :       base2 = ao_ref_base (&lhs_ref);
    3183      1953418 :       offset2 = lhs_ref.offset;
    3184      1953418 :       size2 = lhs_ref.size;
    3185      1953418 :       maxsize2 = lhs_ref.max_size;
    3186      1953418 :       if (known_size_p (maxsize2)
    3187      1953380 :           && known_eq (maxsize2, size2)
    3188      3906752 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3189              :                                                     base2, &offset2))
    3190              :         {
    3191      1926268 :           if (data->partial_defs.is_empty ()
    3192      1922880 :               && known_subrange_p (offset, maxsize, offset2, size2))
    3193              :             {
    3194              :               /* While technically undefined behavior do not optimize
    3195              :                  a full read from a clobber.  */
    3196      1922028 :               if (gimple_clobber_p (def_stmt))
    3197      1926214 :                 return (void *)-1;
    3198       976349 :               tree val = build_zero_cst (vr->type);
    3199       976349 :               return data->finish (ao_ref_alias_set (&lhs_ref),
    3200       976349 :                                    ao_ref_base_alias_set (&lhs_ref), val);
    3201              :             }
    3202         4240 :           else if (known_eq (ref->size, maxsize)
    3203         4186 :                    && maxsize.is_constant (&maxsizei)
    3204         4186 :                    && offset.is_constant (&offseti)
    3205         4186 :                    && offset2.is_constant (&offset2i)
    3206         4186 :                    && size2.is_constant (&size2i)
    3207         4240 :                    && ranges_known_overlap_p (offseti, maxsizei,
    3208              :                                               offset2i, size2i))
    3209              :             {
    3210              :               /* Let clobbers be consumed by the partial-def tracker
    3211              :                  which can choose to ignore them if they are shadowed
    3212              :                  by a later def.  */
    3213         4186 :               pd_data pd;
    3214         4186 :               pd.rhs = gimple_assign_rhs1 (def_stmt);
    3215         4186 :               pd.rhs_off = 0;
    3216         4186 :               pd.offset = offset2i;
    3217         4186 :               pd.size = size2i;
    3218         4186 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3219              :                                              ao_ref_base_alias_set (&lhs_ref),
    3220              :                                              offseti, maxsizei);
    3221              :             }
    3222              :         }
    3223              :     }
    3224              : 
    3225              :   /* 3) Assignment from a constant.  We can use folds native encode/interpret
    3226              :      routines to extract the assigned bits.  */
    3227     21154664 :   else if (known_eq (ref->size, maxsize)
    3228     20620825 :            && is_gimple_reg_type (vr->type)
    3229     20612918 :            && !reverse_storage_order_for_component_p (vr->operands)
    3230     20610161 :            && !contains_storage_order_barrier_p (vr->operands)
    3231     20610161 :            && gimple_assign_single_p (def_stmt)
    3232      5368916 :            && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt))
    3233              :            && CHAR_BIT == 8
    3234              :            && BITS_PER_UNIT == 8
    3235              :            && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
    3236              :            /* native_encode and native_decode operate on arrays of bytes
    3237              :               and so fundamentally need a compile-time size and offset.  */
    3238      5365955 :            && maxsize.is_constant (&maxsizei)
    3239      5365955 :            && offset.is_constant (&offseti)
    3240     26520619 :            && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
    3241      4534427 :                || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
    3242      1872426 :                    && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    3243              :     {
    3244       847966 :       tree lhs = gimple_assign_lhs (def_stmt);
    3245       847966 :       tree base2;
    3246       847966 :       poly_int64 offset2, size2, maxsize2;
    3247       847966 :       HOST_WIDE_INT offset2i, size2i;
    3248       847966 :       bool reverse;
    3249       847966 :       gcc_assert (lhs_ref_ok);
    3250       847966 :       base2 = ao_ref_base (&lhs_ref);
    3251       847966 :       offset2 = lhs_ref.offset;
    3252       847966 :       size2 = lhs_ref.size;
    3253       847966 :       maxsize2 = lhs_ref.max_size;
    3254       847966 :       reverse = reverse_storage_order_for_component_p (lhs);
    3255       847966 :       if (base2
    3256       847966 :           && !reverse
    3257       847138 :           && !storage_order_barrier_p (lhs)
    3258       847138 :           && known_eq (maxsize2, size2)
    3259       815832 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3260              :                                                     base2, &offset2)
    3261        79820 :           && offset.is_constant (&offseti)
    3262        79820 :           && offset2.is_constant (&offset2i)
    3263       847966 :           && size2.is_constant (&size2i))
    3264              :         {
    3265        79820 :           if (data->partial_defs.is_empty ()
    3266        64021 :               && known_subrange_p (offseti, maxsizei, offset2, size2))
    3267              :             {
    3268              :               /* We support up to 512-bit values (for V8DFmode).  */
    3269        42304 :               unsigned char buffer[65];
    3270        42304 :               int len;
    3271              : 
    3272        42304 :               tree rhs = gimple_assign_rhs1 (def_stmt);
    3273        42304 :               if (TREE_CODE (rhs) == SSA_NAME)
    3274         1536 :                 rhs = SSA_VAL (rhs);
    3275        84608 :               len = native_encode_expr (rhs,
    3276              :                                         buffer, sizeof (buffer) - 1,
    3277        42304 :                                         (offseti - offset2i) / BITS_PER_UNIT);
    3278        42304 :               if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
    3279              :                 {
    3280        39282 :                   tree type = vr->type;
    3281        39282 :                   unsigned char *buf = buffer;
    3282        39282 :                   unsigned int amnt = 0;
    3283              :                   /* Make sure to interpret in a type that has a range
    3284              :                      covering the whole access size.  */
    3285        39282 :                   if (INTEGRAL_TYPE_P (vr->type)
    3286        39282 :                       && maxsizei != TYPE_PRECISION (vr->type))
    3287         1830 :                     type = build_nonstandard_integer_type (maxsizei,
    3288          915 :                                                            TYPE_UNSIGNED (type));
    3289        39282 :                   if (BYTES_BIG_ENDIAN)
    3290              :                     {
    3291              :                       /* For big-endian native_encode_expr stored the rhs
    3292              :                          such that the LSB of it is the LSB of buffer[len - 1].
    3293              :                          That bit is stored into memory at position
    3294              :                          offset2 + size2 - 1, i.e. in byte
    3295              :                          base + (offset2 + size2 - 1) / BITS_PER_UNIT.
    3296              :                          E.g. for offset2 1 and size2 14, rhs -1 and memory
    3297              :                          previously cleared that is:
    3298              :                          0        1
    3299              :                          01111111|11111110
    3300              :                          Now, if we want to extract offset 2 and size 12 from
    3301              :                          it using native_interpret_expr (which actually works
    3302              :                          for integral bitfield types in terms of byte size of
    3303              :                          the mode), the native_encode_expr stored the value
    3304              :                          into buffer as
    3305              :                          XX111111|11111111
    3306              :                          and returned len 2 (the X bits are outside of
    3307              :                          precision).
    3308              :                          Let sz be maxsize / BITS_PER_UNIT if not extracting
    3309              :                          a bitfield, and GET_MODE_SIZE otherwise.
    3310              :                          We need to align the LSB of the value we want to
    3311              :                          extract as the LSB of buf[sz - 1].
    3312              :                          The LSB from memory we need to read is at position
    3313              :                          offset + maxsize - 1.  */
    3314              :                       HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
    3315              :                       if (INTEGRAL_TYPE_P (type))
    3316              :                         {
    3317              :                           if (TYPE_MODE (type) != BLKmode)
    3318              :                             sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
    3319              :                           else
    3320              :                             sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
    3321              :                         }
    3322              :                       amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
    3323              :                               - offseti - maxsizei) % BITS_PER_UNIT;
    3324              :                       if (amnt)
    3325              :                         shift_bytes_in_array_right (buffer, len, amnt);
    3326              :                       amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
    3327              :                               - offseti - maxsizei - amnt) / BITS_PER_UNIT;
    3328              :                       if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
    3329              :                         len = 0;
    3330              :                       else
    3331              :                         {
    3332              :                           buf = buffer + len - sz - amnt;
    3333              :                           len -= (buf - buffer);
    3334              :                         }
    3335              :                     }
    3336              :                   else
    3337              :                     {
    3338        39282 :                       amnt = ((unsigned HOST_WIDE_INT) offset2i
    3339        39282 :                               - offseti) % BITS_PER_UNIT;
    3340        39282 :                       if (amnt)
    3341              :                         {
    3342          315 :                           buffer[len] = 0;
    3343          315 :                           shift_bytes_in_array_left (buffer, len + 1, amnt);
    3344          315 :                           buf = buffer + 1;
    3345              :                         }
    3346              :                     }
    3347        39282 :                   tree val = native_interpret_expr (type, buf, len);
    3348              :                   /* If we chop off bits because the type's precision doesn't
    3349              :                      match the memory access size, this is ok when optimizing
    3350              :                      reads but not when called from the DSE code during
    3351              :                      elimination.  */
    3352        39282 :                   if (val
    3353        39280 :                       && type != vr->type)
    3354              :                     {
    3355          915 :                       if (! int_fits_type_p (val, vr->type))
    3356              :                         val = NULL_TREE;
    3357              :                       else
    3358          915 :                         val = fold_convert (vr->type, val);
    3359              :                     }
    3360              : 
    3361        39280 :                   if (val)
    3362        39280 :                     return data->finish (ao_ref_alias_set (&lhs_ref),
    3363        39280 :                                          ao_ref_base_alias_set (&lhs_ref), val);
    3364              :                 }
    3365              :             }
    3366        37516 :           else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
    3367              :                                            size2i))
    3368              :             {
    3369        37516 :               pd_data pd;
    3370        37516 :               tree rhs = gimple_assign_rhs1 (def_stmt);
    3371        37516 :               if (TREE_CODE (rhs) == SSA_NAME)
    3372         2176 :                 rhs = SSA_VAL (rhs);
    3373        37516 :               pd.rhs = rhs;
    3374        37516 :               pd.rhs_off = 0;
    3375        37516 :               pd.offset = offset2i;
    3376        37516 :               pd.size = size2i;
    3377        37516 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3378              :                                              ao_ref_base_alias_set (&lhs_ref),
    3379              :                                              offseti, maxsizei);
    3380              :             }
    3381              :         }
    3382              :     }
    3383              : 
    3384              :   /* 4) Assignment from an SSA name which definition we may be able
    3385              :      to access pieces from or we can combine to a larger entity.  */
    3386     20306698 :   else if (known_eq (ref->size, maxsize)
    3387     19772859 :            && is_gimple_reg_type (vr->type)
    3388     19764952 :            && !reverse_storage_order_for_component_p (vr->operands)
    3389     19762195 :            && !contains_storage_order_barrier_p (vr->operands)
    3390     19762195 :            && gimple_assign_single_p (def_stmt)
    3391      4520950 :            && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt))
    3392     24824687 :            && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    3393              :     {
    3394      1855988 :       tree lhs = gimple_assign_lhs (def_stmt);
    3395      1855988 :       tree base2;
    3396      1855988 :       poly_int64 offset2, size2, maxsize2;
    3397      1855988 :       HOST_WIDE_INT offset2i, size2i, offseti;
    3398      1855988 :       bool reverse;
    3399      1855988 :       gcc_assert (lhs_ref_ok);
    3400      1855988 :       base2 = ao_ref_base (&lhs_ref);
    3401      1855988 :       offset2 = lhs_ref.offset;
    3402      1855988 :       size2 = lhs_ref.size;
    3403      1855988 :       maxsize2 = lhs_ref.max_size;
    3404      1855988 :       reverse = reverse_storage_order_for_component_p (lhs);
    3405      1855988 :       tree def_rhs = gimple_assign_rhs1 (def_stmt);
    3406      1855988 :       if (!reverse
    3407      1855776 :           && !storage_order_barrier_p (lhs)
    3408      1855776 :           && known_size_p (maxsize2)
    3409      1831151 :           && known_eq (maxsize2, size2)
    3410      3569416 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3411              :                                                     base2, &offset2))
    3412              :         {
    3413        80400 :           if (data->partial_defs.is_empty ()
    3414        74351 :               && known_subrange_p (offset, maxsize, offset2, size2)
    3415              :               /* ???  We can't handle bitfield precision extracts without
    3416              :                  either using an alternate type for the BIT_FIELD_REF and
    3417              :                  then doing a conversion or possibly adjusting the offset
    3418              :                  according to endianness.  */
    3419        50909 :               && (! INTEGRAL_TYPE_P (vr->type)
    3420        37214 :                   || known_eq (ref->size, TYPE_PRECISION (vr->type)))
    3421        93283 :               && multiple_p (ref->size, BITS_PER_UNIT))
    3422              :             {
    3423        44921 :               tree val = NULL_TREE;
    3424        89836 :               if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
    3425        49553 :                   || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
    3426              :                 {
    3427        43760 :                   gimple_match_op op (gimple_match_cond::UNCOND,
    3428        43760 :                                       BIT_FIELD_REF, vr->type,
    3429              :                                       SSA_VAL (def_rhs),
    3430              :                                       bitsize_int (ref->size),
    3431        43760 :                                       bitsize_int (offset - offset2));
    3432        43760 :                   val = vn_nary_build_or_lookup (&op);
    3433              :                 }
    3434         1161 :               else if (known_eq (ref->size, size2))
    3435              :                 {
    3436         1087 :                   gimple_match_op op (gimple_match_cond::UNCOND,
    3437         1087 :                                       VIEW_CONVERT_EXPR, vr->type,
    3438         1087 :                                       SSA_VAL (def_rhs));
    3439         1087 :                   val = vn_nary_build_or_lookup (&op);
    3440              :                 }
    3441        44847 :               if (val
    3442        44847 :                   && (TREE_CODE (val) != SSA_NAME
    3443        44003 :                       || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
    3444        44828 :                 return data->finish (ao_ref_alias_set (&lhs_ref),
    3445        80307 :                                      ao_ref_base_alias_set (&lhs_ref), val);
    3446              :             }
    3447        35479 :           else if (maxsize.is_constant (&maxsizei)
    3448        35479 :                    && offset.is_constant (&offseti)
    3449        35479 :                    && offset2.is_constant (&offset2i)
    3450        35479 :                    && size2.is_constant (&size2i)
    3451        35479 :                    && ranges_known_overlap_p (offset, maxsize, offset2, size2))
    3452              :             {
    3453        35479 :               pd_data pd;
    3454        35479 :               pd.rhs = SSA_VAL (def_rhs);
    3455        35479 :               pd.rhs_off = 0;
    3456        35479 :               pd.offset = offset2i;
    3457        35479 :               pd.size = size2i;
    3458        35479 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3459              :                                              ao_ref_base_alias_set (&lhs_ref),
    3460              :                                              offseti, maxsizei);
    3461              :             }
    3462              :         }
    3463              :     }
    3464              : 
    3465              :   /* 4b) Assignment done via one of the vectorizer internal store
    3466              :      functions where we may be able to access pieces from or we can
    3467              :      combine to a larger entity.  */
    3468     18450710 :   else if (known_eq (ref->size, maxsize)
    3469     17916871 :            && is_gimple_reg_type (vr->type)
    3470     17908964 :            && !reverse_storage_order_for_component_p (vr->operands)
    3471     17906207 :            && !contains_storage_order_barrier_p (vr->operands)
    3472     17906207 :            && is_gimple_call (def_stmt)
    3473     14450589 :            && gimple_call_internal_p (def_stmt)
    3474     18702638 :            && internal_store_fn_p (gimple_call_internal_fn (def_stmt)))
    3475              :     {
    3476           46 :       gcall *call = as_a <gcall *> (def_stmt);
    3477           46 :       internal_fn fn = gimple_call_internal_fn (call);
    3478              : 
    3479           46 :       tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
    3480           46 :       switch (fn)
    3481              :         {
    3482           46 :         case IFN_MASK_STORE:
    3483           46 :           mask = gimple_call_arg (call, internal_fn_mask_index (fn));
    3484           46 :           mask = vn_valueize (mask);
    3485           46 :           if (TREE_CODE (mask) != VECTOR_CST)
    3486           38 :             return (void *)-1;
    3487              :           break;
    3488            0 :         case IFN_LEN_STORE:
    3489            0 :           {
    3490            0 :             int len_index = internal_fn_len_index (fn);
    3491            0 :             len = gimple_call_arg (call, len_index);
    3492            0 :             bias = gimple_call_arg (call, len_index + 1);
    3493            0 :             if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias))
    3494              :               return (void *) -1;
    3495              :             break;
    3496              :           }
    3497              :         default:
    3498              :           return (void *)-1;
    3499              :         }
    3500           14 :       tree def_rhs = gimple_call_arg (call,
    3501           14 :                                       internal_fn_stored_value_index (fn));
    3502           14 :       def_rhs = vn_valueize (def_rhs);
    3503           14 :       if (TREE_CODE (def_rhs) != VECTOR_CST)
    3504              :         return (void *)-1;
    3505              : 
    3506           14 :       ao_ref_init_from_ptr_and_size (&lhs_ref,
    3507              :                                      vn_valueize (gimple_call_arg (call, 0)),
    3508           14 :                                      TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
    3509           14 :       tree base2;
    3510           14 :       poly_int64 offset2, size2, maxsize2;
    3511           14 :       HOST_WIDE_INT offset2i, size2i, offseti;
    3512           14 :       base2 = ao_ref_base (&lhs_ref);
    3513           14 :       offset2 = lhs_ref.offset;
    3514           14 :       size2 = lhs_ref.size;
    3515           14 :       maxsize2 = lhs_ref.max_size;
    3516           14 :       if (known_size_p (maxsize2)
    3517           14 :           && known_eq (maxsize2, size2)
    3518           14 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3519              :                                                     base2, &offset2)
    3520            6 :           && maxsize.is_constant (&maxsizei)
    3521            6 :           && offset.is_constant (&offseti)
    3522            6 :           && offset2.is_constant (&offset2i)
    3523           14 :           && size2.is_constant (&size2i))
    3524              :         {
    3525            6 :           if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2))
    3526              :             /* Poor-man's disambiguation.  */
    3527              :             return NULL;
    3528            6 :           else if (ranges_known_overlap_p (offset, maxsize, offset2, size2))
    3529              :             {
    3530            6 :               pd_data pd;
    3531            6 :               pd.rhs = def_rhs;
    3532            6 :               tree aa = gimple_call_arg (call, 1);
    3533            6 :               alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
    3534            6 :               tree vectype = TREE_TYPE (def_rhs);
    3535            6 :               unsigned HOST_WIDE_INT elsz
    3536            6 :                 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
    3537            6 :               if (mask)
    3538              :                 {
    3539              :                   HOST_WIDE_INT start = 0, length = 0;
    3540              :                   unsigned mask_idx = 0;
    3541           48 :                   do
    3542              :                     {
    3543           48 :                       if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
    3544              :                         {
    3545           24 :                           if (length != 0)
    3546              :                             {
    3547           18 :                               pd.rhs_off = start;
    3548           18 :                               pd.offset = offset2i + start;
    3549           18 :                               pd.size = length;
    3550           18 :                               if (ranges_known_overlap_p
    3551           18 :                                     (offset, maxsize, pd.offset, pd.size))
    3552              :                                 {
    3553            0 :                                   void *res = data->push_partial_def
    3554            0 :                                               (pd, set, set, offseti, maxsizei);
    3555            0 :                                   if (res != NULL)
    3556            6 :                                     return res;
    3557              :                                 }
    3558              :                             }
    3559           24 :                           start = (mask_idx + 1) * elsz;
    3560           24 :                           length = 0;
    3561              :                         }
    3562              :                       else
    3563           24 :                         length += elsz;
    3564           48 :                       mask_idx++;
    3565              :                     }
    3566           48 :                   while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
    3567            6 :                   if (length != 0)
    3568              :                     {
    3569            6 :                       pd.rhs_off = start;
    3570            6 :                       pd.offset = offset2i + start;
    3571            6 :                       pd.size = length;
    3572            6 :                       if (ranges_known_overlap_p (offset, maxsize,
    3573              :                                                   pd.offset, pd.size))
    3574            2 :                         return data->push_partial_def (pd, set, set,
    3575            2 :                                                        offseti, maxsizei);
    3576              :                     }
    3577              :                 }
    3578            0 :               else if (fn == IFN_LEN_STORE)
    3579              :                 {
    3580            0 :                   pd.offset = offset2i;
    3581            0 :                   pd.size = (tree_to_uhwi (len)
    3582            0 :                              + -tree_to_shwi (bias)) * BITS_PER_UNIT;
    3583            0 :                   if (BYTES_BIG_ENDIAN)
    3584              :                     pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
    3585              :                   else
    3586            0 :                     pd.rhs_off = 0;
    3587            0 :                   if (ranges_known_overlap_p (offset, maxsize,
    3588              :                                               pd.offset, pd.size))
    3589            0 :                     return data->push_partial_def (pd, set, set,
    3590            0 :                                                    offseti, maxsizei);
    3591              :                 }
    3592              :               else
    3593            0 :                 gcc_unreachable ();
    3594            4 :               return NULL;
    3595              :             }
    3596              :         }
    3597              :     }
    3598              : 
    3599              :   /* 5) For aggregate copies translate the reference through them if
    3600              :      the copy kills ref.  */
    3601     18450664 :   else if (data->vn_walk_kind == VN_WALKREWRITE
    3602     14879500 :            && gimple_assign_single_p (def_stmt)
    3603      2466039 :            && !gimple_has_volatile_ops (def_stmt)
    3604     20914405 :            && (DECL_P (gimple_assign_rhs1 (def_stmt))
    3605      1984339 :                || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
    3606      1578814 :                || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    3607              :     {
    3608      2243766 :       tree base2;
    3609      2243766 :       int i, j, k;
    3610      2243766 :       auto_vec<vn_reference_op_s> rhs;
    3611      2243766 :       vn_reference_op_t vro;
    3612      2243766 :       ao_ref r;
    3613              : 
    3614      2243766 :       gcc_assert (lhs_ref_ok);
    3615              : 
    3616              :       /* See if the assignment kills REF.  */
    3617      2243766 :       base2 = ao_ref_base (&lhs_ref);
    3618      2243766 :       if (!lhs_ref.max_size_known_p ()
    3619      2243317 :           || (base != base2
    3620        86898 :               && (TREE_CODE (base) != MEM_REF
    3621        72886 :                   || TREE_CODE (base2) != MEM_REF
    3622        57458 :                   || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
    3623        24172 :                   || !tree_int_cst_equal (TREE_OPERAND (base, 1),
    3624        24172 :                                           TREE_OPERAND (base2, 1))))
    3625      4422879 :           || !stmt_kills_ref_p (def_stmt, ref))
    3626       392334 :         return (void *)-1;
    3627              : 
    3628              :       /* Find the common base of ref and the lhs.  lhs_ops already
    3629              :          contains valueized operands for the lhs.  */
    3630      1851432 :       poly_int64 extra_off = 0;
    3631      1851432 :       i = vr->operands.length () - 1;
    3632      1851432 :       j = lhs_ops.length () - 1;
    3633              : 
    3634              :       /* The base should be always equal due to the above check.  */
    3635      1851432 :       if (! vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
    3636              :         return (void *)-1;
    3637      1851170 :       i--, j--;
    3638              : 
    3639              :       /* The 2nd component should always exist and be a MEM_REF.  */
    3640      1851170 :       if (!(i >= 0 && j >= 0))
    3641              :         ;
    3642      1851170 :       else if (vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
    3643       847335 :         i--, j--;
    3644      1003835 :       else if (vr->operands[i].opcode == MEM_REF
    3645      1002254 :                && lhs_ops[j].opcode == MEM_REF
    3646      1002254 :                && known_ne (lhs_ops[j].off, -1)
    3647      2006089 :                && known_ne (vr->operands[i].off, -1))
    3648              :         {
    3649      1002254 :           bool found = false;
    3650              :           /* When we get a mismatch at a MEM_REF that is not the sole component
    3651              :              try finding a match in one of the outer components and continue
    3652              :              stripping there.  This happens when addresses of components get
    3653              :              forwarded into dereferences.  */
    3654      1002254 :           if (i > 0)
    3655              :             {
    3656       106902 :               int temi = i - 1;
    3657       106902 :               poly_int64 tem_extra_off = extra_off + vr->operands[i].off;
    3658       106902 :               while (temi >= 0
    3659       232500 :                      && known_ne (vr->operands[temi].off, -1))
    3660              :                 {
    3661       127097 :                   if (vr->operands[temi].type
    3662       127097 :                       && lhs_ops[j].type
    3663       254194 :                       && (TYPE_MAIN_VARIANT (vr->operands[temi].type)
    3664       127097 :                           == TYPE_MAIN_VARIANT (lhs_ops[j].type)))
    3665              :                     {
    3666         1499 :                       i = temi;
    3667              :                       /* Strip the component that was type matched to
    3668              :                          the MEM_REF.  */
    3669         1499 :                       extra_off = (tem_extra_off
    3670         1499 :                                    + vr->operands[i].off - lhs_ops[j].off);
    3671         1499 :                       i--, j--;
    3672              :                       /* Strip further equal components.  */
    3673         1499 :                       found = true;
    3674         1499 :                       break;
    3675              :                     }
    3676       125598 :                   tem_extra_off += vr->operands[temi].off;
    3677       125598 :                   temi--;
    3678              :                 }
    3679              :             }
    3680      1002254 :           if (!found && j > 0)
    3681              :             {
    3682        26278 :               int temj = j - 1;
    3683        26278 :               poly_int64 tem_extra_off = extra_off - lhs_ops[j].off;
    3684        26278 :               while (temj >= 0
    3685        50667 :                      && known_ne (lhs_ops[temj].off, -1))
    3686              :                 {
    3687        28245 :                   if (vr->operands[i].type
    3688        28245 :                       && lhs_ops[temj].type
    3689        56490 :                       && (TYPE_MAIN_VARIANT (vr->operands[i].type)
    3690        28245 :                           == TYPE_MAIN_VARIANT (lhs_ops[temj].type)))
    3691              :                     {
    3692         3856 :                       j = temj;
    3693              :                       /* Strip the component that was type matched to
    3694              :                          the MEM_REF.  */
    3695         3856 :                       extra_off = (tem_extra_off
    3696         3856 :                                    + vr->operands[i].off - lhs_ops[j].off);
    3697         3856 :                       i--, j--;
    3698              :                       /* Strip further equal components.  */
    3699         3856 :                       found = true;
    3700         3856 :                       break;
    3701              :                     }
    3702        24389 :                   tem_extra_off += -lhs_ops[temj].off;
    3703        24389 :                   temj--;
    3704              :                 }
    3705              :             }
    3706              :           /* When we cannot find a common base to reconstruct the full
    3707              :              reference instead try to reduce the lookup to the new
    3708              :              base plus a constant offset.  */
    3709      1002254 :           if (!found)
    3710              :             {
    3711              :               while (j >= 0
    3712      2017849 :                      && known_ne (lhs_ops[j].off, -1))
    3713              :                 {
    3714      1020950 :                   extra_off += -lhs_ops[j].off;
    3715      1020950 :                   j--;
    3716              :                 }
    3717       996899 :               if (j != -1)
    3718              :                 return (void *)-1;
    3719              :               while (i >= 0
    3720      2114163 :                      && known_ne (vr->operands[i].off, -1))
    3721              :                 {
    3722              :                   /* Punt if the additional ops contain a storage order
    3723              :                      barrier.  */
    3724      1117264 :                   if (vr->operands[i].opcode == VIEW_CONVERT_EXPR
    3725      1117264 :                       && vr->operands[i].reverse)
    3726              :                     break;
    3727      1117264 :                   extra_off += vr->operands[i].off;
    3728      1117264 :                   i--;
    3729              :                 }
    3730       996899 :               if (i != -1)
    3731              :                 return (void *)-1;
    3732              :               found = true;
    3733              :             }
    3734              :           /* If we did find a match we'd eventually append a MEM_REF
    3735              :              as component.  Don't.  */
    3736              :           if (!found)
    3737              :             return (void *)-1;
    3738              :         }
    3739              :       else
    3740              :         return (void *)-1;
    3741              : 
    3742              :       /* Strip further common components, attempting to consume lhs_ops
    3743              :          in full.  */
    3744      1851476 :       while (j >= 0 && i >= 0
    3745      1851476 :              && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
    3746              :         {
    3747        28699 :           i--;
    3748        28699 :           j--;
    3749              :         }
    3750              : 
    3751              :       /* i now points to the first additional op.
    3752              :          ???  LHS may not be completely contained in VR, one or more
    3753              :          VIEW_CONVERT_EXPRs could be in its way.  We could at least
    3754              :          try handling outermost VIEW_CONVERT_EXPRs.  */
    3755      1822777 :       if (j != -1)
    3756              :         return (void *)-1;
    3757              : 
    3758              :       /* Punt if the additional ops contain a storage order barrier.  */
    3759      2824522 :       for (k = i; k >= 0; k--)
    3760              :         {
    3761      1004539 :           vro = &vr->operands[k];
    3762      1004539 :           if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
    3763              :             return (void *)-1;
    3764              :         }
    3765              : 
    3766              :       /* Now re-write REF to be based on the rhs of the assignment.  */
    3767      1819983 :       tree rhs1 = gimple_assign_rhs1 (def_stmt);
    3768      1819983 :       copy_reference_ops_from_ref (rhs1, &rhs);
    3769              : 
    3770              :       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
    3771      1819983 :       bool force_no_tbaa = false;
    3772      1819983 :       if (maybe_ne (extra_off, 0))
    3773              :         {
    3774       724006 :           if (rhs.length () < 2)
    3775              :             return (void *)-1;
    3776       724006 :           int ix = rhs.length () - 2;
    3777       724006 :           if (rhs[ix].opcode != MEM_REF
    3778       724006 :               || known_eq (rhs[ix].off, -1))
    3779              :             return (void *)-1;
    3780       723988 :           rhs[ix].off += extra_off;
    3781       723988 :           rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
    3782       723988 :                                          build_int_cst (TREE_TYPE (rhs[ix].op0),
    3783              :                                                         extra_off));
    3784              :           /* When we have offsetted the RHS, reading only parts of it,
    3785              :              we can no longer use the original TBAA type, force alias-set
    3786              :              zero.  */
    3787       723988 :           force_no_tbaa = true;
    3788              :         }
    3789              : 
    3790              :       /* Save the operands since we need to use the original ones for
    3791              :          the hash entry we use.  */
    3792      1819965 :       if (!data->saved_operands.exists ())
    3793      1722363 :         data->saved_operands = vr->operands.copy ();
    3794              : 
    3795              :       /* We need to pre-pend vr->operands[0..i] to rhs.  */
    3796      1819965 :       vec<vn_reference_op_s> old = vr->operands;
    3797      5459895 :       if (i + 1 + rhs.length () > vr->operands.length ())
    3798      1150238 :         vr->operands.safe_grow (i + 1 + rhs.length (), true);
    3799              :       else
    3800       669727 :         vr->operands.truncate (i + 1 + rhs.length ());
    3801      6656024 :       FOR_EACH_VEC_ELT (rhs, j, vro)
    3802      4836059 :         vr->operands[i + 1 + j] = *vro;
    3803      1819965 :       valueize_refs (&vr->operands);
    3804      3639930 :       if (old == shared_lookup_references)
    3805      1819965 :         shared_lookup_references = vr->operands;
    3806      1819965 :       vr->hashcode = vn_reference_compute_hash (vr);
    3807              : 
    3808              :       /* Try folding the new reference to a constant.  */
    3809      1819965 :       tree val = fully_constant_vn_reference_p (vr);
    3810      1819965 :       if (val)
    3811              :         {
    3812        21924 :           if (data->partial_defs.is_empty ())
    3813        21915 :             return data->finish (ao_ref_alias_set (&lhs_ref),
    3814        21915 :                                  ao_ref_base_alias_set (&lhs_ref), val);
    3815              :           /* This is the only interesting case for partial-def handling
    3816              :              coming from targets that like to gimplify init-ctors as
    3817              :              aggregate copies from constant data like aarch64 for
    3818              :              PR83518.  */
    3819            9 :           if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
    3820              :             {
    3821            9 :               pd_data pd;
    3822            9 :               pd.rhs = val;
    3823            9 :               pd.rhs_off = 0;
    3824            9 :               pd.offset = 0;
    3825            9 :               pd.size = maxsizei;
    3826            9 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3827              :                                              ao_ref_base_alias_set (&lhs_ref),
    3828              :                                              0, maxsizei);
    3829              :             }
    3830              :         }
    3831              : 
    3832              :       /* Continuing with partial defs isn't easily possible here, we
    3833              :          have to find a full def from further lookups from here.  Probably
    3834              :          not worth the special-casing everywhere.  */
    3835      2228807 :       if (!data->partial_defs.is_empty ())
    3836              :         return (void *)-1;
    3837              : 
    3838              :       /* Adjust *ref from the new operands.  */
    3839      1791076 :       ao_ref rhs1_ref;
    3840      1791076 :       ao_ref_init (&rhs1_ref, rhs1);
    3841      2872645 :       if (!ao_ref_init_from_vn_reference (&r,
    3842              :                                           force_no_tbaa ? 0
    3843      1081569 :                                           : ao_ref_alias_set (&rhs1_ref),
    3844              :                                           force_no_tbaa ? 0
    3845      1081569 :                                           : ao_ref_base_alias_set (&rhs1_ref),
    3846              :                                           vr->type, vr->operands))
    3847              :         return (void *)-1;
    3848              :       /* This can happen with bitfields.  */
    3849      1791076 :       if (maybe_ne (ref->size, r.size))
    3850              :         {
    3851              :           /* If the access lacks some subsetting simply apply that by
    3852              :              shortening it.  That in the end can only be successful
    3853              :              if we can pun the lookup result which in turn requires
    3854              :              exact offsets.  */
    3855         1347 :           if (known_eq (r.size, r.max_size)
    3856         1347 :               && known_lt (ref->size, r.size))
    3857         1347 :             r.size = r.max_size = ref->size;
    3858              :           else
    3859              :             return (void *)-1;
    3860              :         }
    3861      1791076 :       *ref = r;
    3862      1791076 :       vr->offset = r.offset;
    3863      1791076 :       vr->max_size = r.max_size;
    3864              : 
    3865              :       /* Do not update last seen VUSE after translating.  */
    3866      1791076 :       data->last_vuse_ptr = NULL;
    3867              :       /* Invalidate the original access path since it now contains
    3868              :          the wrong base.  */
    3869      1791076 :       data->orig_ref.ref = NULL_TREE;
    3870              :       /* Use the alias-set of this LHS for recording an eventual result.  */
    3871      1791076 :       if (data->first_set == -2)
    3872              :         {
    3873      1695003 :           data->first_set = ao_ref_alias_set (&lhs_ref);
    3874      1695003 :           data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
    3875              :         }
    3876              : 
    3877              :       /* Keep looking for the adjusted *REF / VR pair.  */
    3878      1791076 :       return NULL;
    3879      2243766 :     }
    3880              : 
    3881              :   /* 6) For memcpy copies translate the reference through them if the copy
    3882              :      kills ref.  But we cannot (easily) do this translation if the memcpy is
    3883              :      a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
    3884              :      can modify the storage order of objects (see storage_order_barrier_p).  */
    3885     16206898 :   else if (data->vn_walk_kind == VN_WALKREWRITE
    3886     12635734 :            && is_gimple_reg_type (vr->type)
    3887              :            /* ???  Handle BCOPY as well.  */
    3888     12627861 :            && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
    3889     12561070 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
    3890     12560647 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
    3891     12559461 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
    3892     12559219 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
    3893     12533072 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
    3894        95117 :            && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
    3895        85062 :                || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
    3896        95083 :            && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
    3897        68519 :                || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
    3898        95068 :            && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
    3899        56190 :                || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
    3900        56190 :                    && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
    3901              :                                        &copy_size)))
    3902              :            /* Handling this is more complicated, give up for now.  */
    3903     16248224 :            && data->partial_defs.is_empty ())
    3904              :     {
    3905        40718 :       tree lhs, rhs;
    3906        40718 :       ao_ref r;
    3907        40718 :       poly_int64 rhs_offset, lhs_offset;
    3908        40718 :       vn_reference_op_s op;
    3909        40718 :       poly_uint64 mem_offset;
    3910        40718 :       poly_int64 at, byte_maxsize;
    3911              : 
    3912              :       /* Only handle non-variable, addressable refs.  */
    3913        40718 :       if (maybe_ne (ref->size, maxsize)
    3914        40249 :           || !multiple_p (offset, BITS_PER_UNIT, &at)
    3915        40718 :           || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
    3916          469 :         return (void *)-1;
    3917              : 
    3918              :       /* Extract a pointer base and an offset for the destination.  */
    3919        40249 :       lhs = gimple_call_arg (def_stmt, 0);
    3920        40249 :       lhs_offset = 0;
    3921        40249 :       if (TREE_CODE (lhs) == SSA_NAME)
    3922              :         {
    3923        31780 :           lhs = vn_valueize (lhs);
    3924        31780 :           if (TREE_CODE (lhs) == SSA_NAME)
    3925              :             {
    3926        31460 :               gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
    3927        31460 :               if (gimple_assign_single_p (def_stmt)
    3928        31460 :                   && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
    3929         2381 :                 lhs = gimple_assign_rhs1 (def_stmt);
    3930              :             }
    3931              :         }
    3932        40249 :       if (TREE_CODE (lhs) == ADDR_EXPR)
    3933              :         {
    3934        15404 :           if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
    3935        15107 :               && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
    3936              :             return (void *)-1;
    3937        11030 :           tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
    3938              :                                                     &lhs_offset);
    3939        11030 :           if (!tem)
    3940              :             return (void *)-1;
    3941        10338 :           if (TREE_CODE (tem) == MEM_REF
    3942        10338 :               && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
    3943              :             {
    3944         1689 :               lhs = TREE_OPERAND (tem, 0);
    3945         1689 :               if (TREE_CODE (lhs) == SSA_NAME)
    3946         1689 :                 lhs = vn_valueize (lhs);
    3947         1689 :               lhs_offset += mem_offset;
    3948              :             }
    3949         8649 :           else if (DECL_P (tem))
    3950         8649 :             lhs = build_fold_addr_expr (tem);
    3951              :           else
    3952              :             return (void *)-1;
    3953              :         }
    3954        39417 :       if (TREE_CODE (lhs) != SSA_NAME
    3955         8650 :           && TREE_CODE (lhs) != ADDR_EXPR)
    3956              :         return (void *)-1;
    3957              : 
    3958              :       /* Extract a pointer base and an offset for the source.  */
    3959        39417 :       rhs = gimple_call_arg (def_stmt, 1);
    3960        39417 :       rhs_offset = 0;
    3961        39417 :       if (TREE_CODE (rhs) == SSA_NAME)
    3962        18480 :         rhs = vn_valueize (rhs);
    3963        39417 :       if (TREE_CODE (rhs) == ADDR_EXPR)
    3964              :         {
    3965        33295 :           if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
    3966        22954 :               && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
    3967              :             return (void *)-1;
    3968        22510 :           tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
    3969              :                                                     &rhs_offset);
    3970        22510 :           if (!tem)
    3971              :             return (void *)-1;
    3972        22510 :           if (TREE_CODE (tem) == MEM_REF
    3973        22510 :               && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
    3974              :             {
    3975            0 :               rhs = TREE_OPERAND (tem, 0);
    3976            0 :               rhs_offset += mem_offset;
    3977              :             }
    3978        22510 :           else if (DECL_P (tem)
    3979        16622 :                    || TREE_CODE (tem) == STRING_CST)
    3980        22510 :             rhs = build_fold_addr_expr (tem);
    3981              :           else
    3982              :             return (void *)-1;
    3983              :         }
    3984        39417 :       if (TREE_CODE (rhs) == SSA_NAME)
    3985        16907 :         rhs = SSA_VAL (rhs);
    3986        22510 :       else if (TREE_CODE (rhs) != ADDR_EXPR)
    3987              :         return (void *)-1;
    3988              : 
    3989              :       /* The bases of the destination and the references have to agree.  */
    3990        39417 :       if (TREE_CODE (base) == MEM_REF)
    3991              :         {
    3992        15231 :           if (TREE_OPERAND (base, 0) != lhs
    3993        15231 :               || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
    3994        11587 :             return (void *) -1;
    3995        11096 :           at += mem_offset;
    3996              :         }
    3997        24186 :       else if (!DECL_P (base)
    3998        23281 :                || TREE_CODE (lhs) != ADDR_EXPR
    3999        31639 :                || TREE_OPERAND (lhs, 0) != base)
    4000              :         return (void *)-1;
    4001              : 
    4002              :       /* If the access is completely outside of the memcpy destination
    4003              :          area there is no aliasing.  */
    4004        11096 :       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
    4005              :         return NULL;
    4006              :       /* And the access has to be contained within the memcpy destination.  */
    4007        11063 :       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
    4008              :         return (void *)-1;
    4009              : 
    4010              :       /* Save the operands since we need to use the original ones for
    4011              :          the hash entry we use.  */
    4012        10610 :       if (!data->saved_operands.exists ())
    4013        10188 :         data->saved_operands = vr->operands.copy ();
    4014              : 
    4015              :       /* Make room for 2 operands in the new reference.  */
    4016        10610 :       if (vr->operands.length () < 2)
    4017              :         {
    4018            0 :           vec<vn_reference_op_s> old = vr->operands;
    4019            0 :           vr->operands.safe_grow_cleared (2, true);
    4020            0 :           if (old == shared_lookup_references)
    4021            0 :             shared_lookup_references = vr->operands;
    4022              :         }
    4023              :       else
    4024        10610 :         vr->operands.truncate (2);
    4025              : 
    4026              :       /* The looked-through reference is a simple MEM_REF.  */
    4027        10610 :       memset (&op, 0, sizeof (op));
    4028        10610 :       op.type = vr->type;
    4029        10610 :       op.opcode = MEM_REF;
    4030        10610 :       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
    4031        10610 :       op.off = at - lhs_offset + rhs_offset;
    4032        10610 :       vr->operands[0] = op;
    4033        10610 :       op.type = TREE_TYPE (rhs);
    4034        10610 :       op.opcode = TREE_CODE (rhs);
    4035        10610 :       op.op0 = rhs;
    4036        10610 :       op.off = -1;
    4037        10610 :       vr->operands[1] = op;
    4038        10610 :       vr->hashcode = vn_reference_compute_hash (vr);
    4039              : 
    4040              :       /* Try folding the new reference to a constant.  */
    4041        10610 :       tree val = fully_constant_vn_reference_p (vr);
    4042        10610 :       if (val)
    4043         2616 :         return data->finish (0, 0, val);
    4044              : 
    4045              :       /* Adjust *ref from the new operands.  */
    4046         7994 :       if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
    4047              :         return (void *)-1;
    4048              :       /* This can happen with bitfields.  */
    4049         7994 :       if (maybe_ne (ref->size, r.size))
    4050              :         return (void *)-1;
    4051         7994 :       *ref = r;
    4052         7994 :       vr->offset = r.offset;
    4053         7994 :       vr->max_size = r.max_size;
    4054              : 
    4055              :       /* Do not update last seen VUSE after translating.  */
    4056         7994 :       data->last_vuse_ptr = NULL;
    4057              :       /* Invalidate the original access path since it now contains
    4058              :          the wrong base.  */
    4059         7994 :       data->orig_ref.ref = NULL_TREE;
    4060              :       /* Use the alias-set of this stmt for recording an eventual result.  */
    4061         7994 :       if (data->first_set == -2)
    4062              :         {
    4063         7612 :           data->first_set = 0;
    4064         7612 :           data->first_base_set = 0;
    4065              :         }
    4066              : 
    4067              :       /* Keep looking for the adjusted *REF / VR pair.  */
    4068         7994 :       return NULL;
    4069              :     }
    4070              : 
    4071              :   /* Bail out and stop walking.  */
    4072              :   return (void *)-1;
    4073              : }
    4074              : 
    4075              : /* Return true if E is a backedge with respect to our CFG walk order.  */
    4076              : 
    4077              : static bool
    4078    118766773 : vn_is_backedge (edge e, void *)
    4079              : {
    4080              :   /* During PRE elimination we no longer have access to this info.  */
    4081    118766773 :   return (!vn_bb_to_rpo
    4082    118766773 :           || vn_bb_to_rpo[e->dest->index] <= vn_bb_to_rpo[e->src->index]);
    4083              : }
    4084              : 
    4085              : /* Return a reference op vector from OP that can be used for
    4086              :    vn_reference_lookup_pieces.  The caller is responsible for releasing
    4087              :    the vector.  */
    4088              : 
    4089              : vec<vn_reference_op_s>
    4090      4692589 : vn_reference_operands_for_lookup (tree op)
    4091              : {
    4092      4692589 :   bool valueized;
    4093      4692589 :   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
    4094              : }
    4095              : 
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */
    4100              : 
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  /* Callers not interested in the hash table entry may pass a NULL
     VNRESULT; redirect it to a local so the code below can rely on
     *VNRESULT being writable.  */
  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  /* Build the lookup key VR1, copying OPERANDS into the shared scratch
     vector and valueizing them in place there.  */
  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length (), true);
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  bool valueized_p;
  valueize_refs_1 (&shared_lookup_references, &valueized_p);
  vr1.operands = shared_lookup_references;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  /* We can pretend there's no extra info fed in since the ao_refs offset
     and max_size are computed only from the VN reference ops.  */
  vr1.offset = 0;
  vr1.max_size = -1;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  /* A reference into constant data may fold directly to a constant.  */
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  /* First try a direct hash table lookup.  */
  vn_reference_lookup_1 (&vr1, vnresult);
  /* If that failed and walking is allowed, walk the virtual use-def
     chain trying to translate the reference through stores.  */
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
			    false);
      vec<vn_reference_op_s> ops_for_ref;
      if (!valueized_p)
	ops_for_ref = vr1.operands;
      else
	{
	  /* For ao_ref_from_mem we have to ensure only available SSA names
	     end up in base and the only convenient way to make this work
	     for PRE is to re-valueize with that in mind.  */
	  ops_for_ref.create (operands.length ());
	  ops_for_ref.quick_grow (operands.length ());
	  memcpy (ops_for_ref.address (),
		  operands.address (),
		  sizeof (vn_reference_op_s)
		  * operands.length ());
	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
	}
      /* The walk requires an ao_ref; without one we cannot disambiguate
	 and thus cannot walk at all.  */
      if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
					 ops_for_ref))
	*vnresult
	  = ((vn_reference_t)
	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
				     vn_reference_lookup_3, vn_is_backedge,
				     vuse_valueize, limit, &data));
      /* Release the temporary vector if we had to re-valueize above.  */
      if (ops_for_ref != shared_lookup_references)
	ops_for_ref.release ();
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      /* If the walk recorded an expected common value that does not
	 match the found result, the lookup is invalid.  */
      if (*vnresult
	  && data.same_val
	  && (!(*vnresult)->result
	      || !operand_equal_p ((*vnresult)->result, data.same_val)))
	{
	  *vnresult = NULL;
	  return NULL_TREE;
	}
    }

  if (*vnresult)
     return (*vnresult)->result;

  return NULL_TREE;
}
    4186              : 
    4187              : /* When OPERANDS is an ADDR_EXPR that can be possibly expressed as a
    4188              :    POINTER_PLUS_EXPR return true and fill in its operands in OPS.  */
    4189              : 
    4190              : bool
    4191      2182406 : vn_pp_nary_for_addr (const vec<vn_reference_op_s>& operands, tree ops[2])
    4192              : {
    4193      4364812 :   gcc_assert (operands[0].opcode == ADDR_EXPR
    4194              :               && operands.last ().opcode == SSA_NAME);
    4195              :   poly_int64 off = 0;
    4196              :   vn_reference_op_t vro;
    4197              :   unsigned i;
    4198      7059134 :   for (i = 1; operands.iterate (i, &vro); ++i)
    4199              :     {
    4200      7059134 :       if (vro->opcode == SSA_NAME)
    4201              :         break;
    4202      4926771 :       else if (known_eq (vro->off, -1))
    4203              :         break;
    4204      4876728 :       off += vro->off;
    4205              :     }
    4206      2182406 :   if (i == operands.length () - 1
    4207      2132363 :       && maybe_ne (off, 0)
    4208              :       /* Make sure we the offset we accumulated in a 64bit int
    4209              :          fits the address computation carried out in target
    4210              :          offset precision.  */
    4211      3585776 :       && (off.coeffs[0]
    4212      1403370 :           == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
    4213              :     {
    4214      1402600 :       gcc_assert (operands[i-1].opcode == MEM_REF);
    4215      1402600 :       ops[0] = operands[i].op0;
    4216      1402600 :       ops[1] = wide_int_to_tree (sizetype, off);
    4217      1402600 :       return true;
    4218              :     }
    4219              :   return false;
    4220              : }
    4221              : 
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE at which the value
   lookup succeeded.  MASK is either NULL_TREE, or can be an INTEGER_CST
   if the result of the load is bitwise anded with MASK and so we are
   only interested in a subset of the bits and can ignore if the other
   bits are uninitialized or not initialized with constants.  When doing
   redundant store removal the caller has to set
   REDUNDANT_STORE_REMOVAL_P.  */
    4234              : 
tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult, bool tbaa_p,
                     tree *last_vuse_ptr, tree mask,
                     bool redundant_store_removal_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  /* Valueize the VUSE and the reference operands.  The shared operand
     vector is reused across lookups, so it must not escape.  */
  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);

  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing
     this before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      tree ops[2];
      if (vn_pp_nary_for_addr (operands, ops))
        {
          /* The address decomposed to ptr + offset; consult the nary
             table instead of the reference table.  */
          tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
                                               TREE_TYPE (op), ops, NULL);
          if (res)
            return res;
          return NULL_TREE;
        }
    }

  vr1.type = TREE_TYPE (op);
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1.set = ao_ref_alias_set (&op_ref);
  vr1.base_set = ao_ref_base_alias_set (&op_ref);
  /* Start with unknown extent; it is refined below when we walk.  */
  vr1.offset = 0;
  vr1.max_size = -1;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  /* With a MASK only a subset of the bits is interesting, so do not
     simplify to a full constant here.  */
  if (mask == NULL_TREE)
    if (tree cst = fully_constant_vn_reference_p (&vr1))
      return cst;

  if (kind != VN_NOWALK && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      auto_vec<vn_reference_op_s> ops_for_ref;
      if (valueized_anything)
        {
          /* Build a private valueized copy of the operands for
             initializing the ao_ref below.  */
          copy_reference_ops_from_ref (op, &ops_for_ref);
          bool tem;
          valueize_refs_1 (&ops_for_ref, &tem, true);
        }
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
                                             vr1.type, ops_for_ref))
        {
          ao_ref_init (&r, op);
          /* Record the extra info we're getting from the full ref.  */
          ao_ref_base (&r);
          vr1.offset = r.offset;
          vr1.max_size = r.max_size;
        }
      vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
                            last_vuse_ptr, kind, tbaa_p, mask,
                            redundant_store_removal_p);

      /* Walk the virtual use-def chain using the vn_reference_lookup_2/3
         callbacks, bounded by LIMIT alias queries.  */
      wvnresult
        = ((vn_reference_t)
           walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
                                   vn_reference_lookup_3, vn_is_backedge,
                                   vuse_valueize, limit, &data));
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
        {
          gcc_assert (mask == NULL_TREE);
          /* If the walk recorded a value in DATA.same_val the found
             result has to agree with it, otherwise fail the lookup.  */
          if (data.same_val
              && (!wvnresult->result
                  || !operand_equal_p (wvnresult->result, data.same_val)))
            return NULL_TREE;
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }
      else if (mask)
        return data.masked_result;

      return NULL_TREE;
    }

  /* Without walking the last VUSE is just the valueized VUSE.  */
  if (last_vuse_ptr)
    *last_vuse_ptr = vr1.vuse;
  if (mask)
    return NULL_TREE;
  return vn_reference_lookup_1 (&vr1, vnresult);
}
    4338              : 
    4339              : /* Lookup CALL in the current hash table and return the entry in
    4340              :    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
    4341              : 
void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
                          vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  /* Valueize the VUSE and the call operands.  */
  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  tree lhs = gimple_call_lhs (call);
  /* For non-SSA return values the reference ops contain the LHS.  */
  vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
              ? TREE_TYPE (lhs) : NULL_TREE);
  vr->punned = false;
  /* Use zero alias sets and unknown extent for calls.  */
  vr->set = 0;
  vr->base_set = 0;
  vr->offset = 0;
  vr->max_size = -1;
  /* Hash the populated *VR and do the actual table lookup.  */
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
    4365              : 
    4366              : /* Insert OP into the current hash table with a value number of RESULT.  */
    4367              : 
static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vec<vn_reference_op_s> operands
    = valueize_shared_reference_ops_from_ref (op, &tem);
  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing this
     before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      tree ops[2];
      if (vn_pp_nary_for_addr (operands, ops))
        {
          /* Record the address as ptr + offset in the nary table,
             matching the lookup path in vn_reference_lookup.  */
          vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
                                    TREE_TYPE (op), ops, result,
                                    VN_INFO (result)->value_id);
          return;
        }
    }

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  /* Keep a private copy of the shared operand vector.  */
  vr1->operands = operands.copy ();
  vr1->type = TREE_TYPE (op);
  vr1->punned = false;
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1->set = ao_ref_alias_set (&op_ref);
  vr1->base_set = ao_ref_base_alias_set (&op_ref);
  /* Specifically use an unknown extent here, we're not doing any lookup
     and assume the caller didn't either (or it went VARYING).  */
  vr1->offset = 0;
  vr1->max_size = -1;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  /* Store RESULT's value number if it has one.  */
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
                                                      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
         when disentangling an irreducible region we may end up visiting
         a use before the corresponding def.  That's a missed optimization
         only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
          && !operand_equal_p ((*slot)->result, vr1->result, 0))
        {
          fprintf (dump_file, "Keeping old value ");
          print_generic_expr (dump_file, (*slot)->result);
          fprintf (dump_file, " because of collision\n");
        }
      /* Release the just allocated entry again.  */
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  /* Link the new entry into the list of inserted references.  */
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
    4445              : 
    4446              : /* Insert a reference by it's pieces into the current hash table with
    4447              :    a value number of RESULT.  Return the resulting reference
    4448              :    structure we created.  */
    4449              : 
vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set,
                            alias_set_type base_set,
                            poly_int64 offset, poly_int64 max_size, tree type,
                            vec<vn_reference_op_s> operands,
                            tree result, unsigned int value_id)

{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  /* The operand vector is stored as-is and valueized in place.  */
  vr1->operands = operands;
  valueize_refs (&vr1->operands);
  vr1->type = type;
  vr1->punned = false;
  vr1->set = set;
  vr1->base_set = base_set;
  vr1->offset = offset;
  vr1->max_size = max_size;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  /* Store RESULT's value number if it has one.  */
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;
  vr1->result_vdef = NULL_TREE;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
                                                      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);

  *slot = vr1;
  /* Link the new entry into the list of inserted references.  */
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
    4491              : 
    4492              : /* Compute and return the hash value for nary operation VBO1.  */
    4493              : 
hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  /* Canonicalize operand order before hashing: swap the first two
     operands of commutative (binary and ternary) operations, and for
     comparisons swap the operands and the comparison code.  Note this
     mutates *VNO1 so canonical variants hash and compare equal.  */
  if (((vno1->length == 2
        && commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
           && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
           && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison  (vno1->opcode);
    }

  /* Hash the (canonicalized) opcode and all operands.  */
  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
    4519              : 
    4520              : /* Compare nary operations VNO1 and VNO2 and return true if they are
    4521              :    equivalent.  */
    4522              : 
    4523              : bool
    4524    961676733 : vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
    4525              : {
    4526    961676733 :   unsigned i;
    4527              : 
    4528    961676733 :   if (vno1->hashcode != vno2->hashcode)
    4529              :     return false;
    4530              : 
    4531     49950514 :   if (vno1->length != vno2->length)
    4532              :     return false;
    4533              : 
    4534     49950514 :   if (vno1->opcode != vno2->opcode
    4535     49950514 :       || !types_compatible_p (vno1->type, vno2->type))
    4536      1155716 :     return false;
    4537              : 
    4538    141023604 :   for (i = 0; i < vno1->length; ++i)
    4539     92326771 :     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
    4540              :       return false;
    4541              : 
    4542              :   /* BIT_INSERT_EXPR has an implict operand as the type precision
    4543              :      of op1.  Need to check to make sure they are the same.  */
    4544     48696833 :   if (vno1->opcode == BIT_INSERT_EXPR
    4545          534 :       && TREE_CODE (vno1->op[1]) == INTEGER_CST
    4546     48696940 :       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
    4547          107 :          != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    4548              :     return false;
    4549              : 
    4550              :   return true;
    4551              : }
    4552              : 
    4553              : /* Initialize VNO from the pieces provided.  */
    4554              : 
    4555              : static void
    4556    187149592 : init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
    4557              :                              enum tree_code code, tree type, tree *ops)
    4558              : {
    4559    187149592 :   vno->opcode = code;
    4560    187149592 :   vno->length = length;
    4561    187149592 :   vno->type = type;
    4562      4525234 :   memcpy (&vno->op[0], ops, sizeof (tree) * length);
    4563            0 : }
    4564              : 
    4565              : /* Return the number of operands for a vn_nary ops structure from STMT.  */
    4566              : 
    4567              : unsigned int
    4568    108606464 : vn_nary_length_from_stmt (gimple *stmt)
    4569              : {
    4570    108606464 :   switch (gimple_assign_rhs_code (stmt))
    4571              :     {
    4572              :     case REALPART_EXPR:
    4573              :     case IMAGPART_EXPR:
    4574              :     case VIEW_CONVERT_EXPR:
    4575              :       return 1;
    4576              : 
    4577       610691 :     case BIT_FIELD_REF:
    4578       610691 :       return 3;
    4579              : 
    4580       509143 :     case CONSTRUCTOR:
    4581       509143 :       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
    4582              : 
    4583    104164308 :     default:
    4584    104164308 :       return gimple_num_ops (stmt) - 1;
    4585              :     }
    4586              : }
    4587              : 
    4588              : /* Initialize VNO from STMT.  */
    4589              : 
void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
{
  unsigned i;

  /* The operand counts set per case below have to agree with
     vn_nary_length_from_stmt.  */
  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      /* Single operand taken from inside the rhs expression.  */
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      /* The three operands of the BIT_FIELD_REF.  */
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      /* One operand per constructor element value.  */
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      /* All RHS operands of the statement.  */
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
        vno->op[i] = gimple_op (stmt, i + 1);
    }
}
    4626              : 
    4627              : /* Compute the hashcode for VNO and look for it in the hash table;
    4628              :    return the resulting value number if it exists in the hash table.
    4629              :    Return NULL_TREE if it does not exist in the hash table or if the
    4630              :    result field of the operation is NULL.  VNRESULT will contain the
    4631              :    vn_nary_op_t from the hashtable if it exists.  */
    4632              : 
static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  /* Valueize SSA name operands in place so the hash/compare is done
     on value numbers.  */
  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  /* For entries holding predicated values return no unconditional
     result here; the entry is still available via *VNRESULT.  */
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
    4653              : 
    4654              : /* Lookup a n-ary operation by its pieces and return the resulting value
    4655              :    number if it exists in the hash table.  Return NULL_TREE if it does
    4656              :    not exist in the hash table or if the result field of the operation
    4657              :    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
    4658              :    if it exists.  */
    4659              : 
    4660              : tree
    4661     74625840 : vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
    4662              :                           tree type, tree *ops, vn_nary_op_t *vnresult)
    4663              : {
    4664     74625840 :   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
    4665              :                                   sizeof_vn_nary_op (length));
    4666     74625840 :   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
    4667     74625840 :   return vn_nary_op_lookup_1 (vno1, vnresult);
    4668              : }
    4669              : 
    4670              : /* Lookup the rhs of STMT in the current hash table, and return the resulting
    4671              :    value number if it exists in the hash table.  Return NULL_TREE if
    4672              :    it does not exist in the hash table.  VNRESULT will contain the
    4673              :    vn_nary_op_t from the hashtable if it exists.  */
    4674              : 
    4675              : tree
    4676     56212656 : vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
    4677              : {
    4678     56212656 :   vn_nary_op_t vno1
    4679     56212656 :     = XALLOCAVAR (struct vn_nary_op_s,
    4680              :                   sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
    4681     56212656 :   init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
    4682     56212656 :   return vn_nary_op_lookup_1 (vno1, vnresult);
    4683              : }
    4684              : 
    4685              : /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
    4686              : 
    4687              : vn_nary_op_t
    4688    169977052 : alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
    4689              : {
    4690    169977052 :   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
    4691              : }
    4692              : 
    4693              : /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
    4694              :    obstack.  */
    4695              : 
    4696              : static vn_nary_op_t
    4697    152867181 : alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
    4698              : {
    4699            0 :   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
    4700              : 
    4701    152867181 :   vno1->value_id = value_id;
    4702    152867181 :   vno1->length = length;
    4703    152867181 :   vno1->predicated_values = 0;
    4704    152867181 :   vno1->u.result = result;
    4705              : 
    4706    152867181 :   return vno1;
    4707              : }
    4708              : 
    4709              : /* Insert VNO into TABLE.  */
    4710              : 
    4711              : static vn_nary_op_t
    4712    157528364 : vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
    4713              : {
    4714    157528364 :   vn_nary_op_s **slot;
    4715              : 
    4716    157528364 :   gcc_assert (! vno->predicated_values
    4717              :               || (! vno->u.values->next
    4718              :                   && vno->u.values->n == 1));
    4719              : 
    4720    460989981 :   for (unsigned i = 0; i < vno->length; ++i)
    4721    303461617 :     if (TREE_CODE (vno->op[i]) == SSA_NAME)
    4722    197801661 :       vno->op[i] = SSA_VAL (vno->op[i]);
    4723              : 
    4724    157528364 :   vno->hashcode = vn_nary_op_compute_hash (vno);
    4725    157528364 :   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
    4726    157528364 :   vno->unwind_to = *slot;
    4727    157528364 :   if (*slot)
    4728              :     {
    4729              :       /* Prefer non-predicated values.
    4730              :          ???  Only if those are constant, otherwise, with constant predicated
    4731              :          value, turn them into predicated values with entry-block validity
    4732              :          (???  but we always find the first valid result currently).  */
    4733     30138965 :       if ((*slot)->predicated_values
    4734     29406687 :           && ! vno->predicated_values)
    4735              :         {
    4736              :           /* ???  We cannot remove *slot from the unwind stack list.
    4737              :              For the moment we deal with this by skipping not found
    4738              :              entries but this isn't ideal ...  */
    4739        83333 :           *slot = vno;
    4740              :           /* ???  Maintain a stack of states we can unwind in
    4741              :              vn_nary_op_s?  But how far do we unwind?  In reality
    4742              :              we need to push change records somewhere...  Or not
    4743              :              unwind vn_nary_op_s and linking them but instead
    4744              :              unwind the results "list", linking that, which also
    4745              :              doesn't move on hashtable resize.  */
    4746              :           /* We can also have a ->unwind_to recording *slot there.
    4747              :              That way we can make u.values a fixed size array with
    4748              :              recording the number of entries but of course we then
    4749              :              have always N copies for each unwind_to-state.  Or we
    4750              :              make sure to only ever append and each unwinding will
    4751              :              pop off one entry (but how to deal with predicated
    4752              :              replaced with non-predicated here?)  */
    4753        83333 :           vno->next = last_inserted_nary;
    4754        83333 :           last_inserted_nary = vno;
    4755        83333 :           return vno;
    4756              :         }
    4757     30055632 :       else if (vno->predicated_values
    4758     30055276 :                && ! (*slot)->predicated_values)
    4759              :         return *slot;
    4760     29323710 :       else if (vno->predicated_values
    4761     29323354 :                && (*slot)->predicated_values)
    4762              :         {
    4763              :           /* ???  Factor this all into a insert_single_predicated_value
    4764              :              routine.  */
    4765     29323354 :           gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
    4766     29323354 :           basic_block vno_bb
    4767     29323354 :             = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
    4768     29323354 :           vn_pval *nval = vno->u.values;
    4769     29323354 :           vn_pval **next = &vno->u.values;
    4770     29323354 :           vn_pval *ins = NULL;
    4771     29323354 :           vn_pval *ins_at = NULL;
    4772              :           /* Find an existing value to append to.  */
    4773     55189936 :           for (vn_pval *val = (*slot)->u.values; val; val = val->next)
    4774              :             {
    4775     30301454 :               if (expressions_equal_p (val->result, nval->result))
    4776              :                 {
    4777              :                   /* Limit the number of places we register a predicate
    4778              :                      as valid.  */
    4779      4434872 :                   if (val->n > 8)
    4780       123768 :                     return *slot;
    4781     10959160 :                   for (unsigned i = 0; i < val->n; ++i)
    4782              :                     {
    4783      6881670 :                       basic_block val_bb
    4784      6881670 :                         = BASIC_BLOCK_FOR_FN (cfun,
    4785              :                                               val->valid_dominated_by_p[i]);
    4786      6881670 :                       if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
    4787              :                         /* Value registered with more generic predicate.  */
    4788       233614 :                         return *slot;
    4789      6648056 :                       else if (flag_checking)
    4790              :                         /* Shouldn't happen, we insert in RPO order.  */
    4791      6648056 :                         gcc_assert (!dominated_by_p (CDI_DOMINATORS,
    4792              :                                                      val_bb, vno_bb));
    4793              :                     }
    4794              :                   /* Append the location.  */
    4795      4077490 :                   ins_at = val;
    4796      4077490 :                   ins = (vn_pval *) obstack_alloc (&vn_tables_obstack,
    4797              :                                                    sizeof (vn_pval)
    4798              :                                                    + val->n * sizeof (int));
    4799      4077490 :                   ins->next = NULL;
    4800      4077490 :                   ins->result = val->result;
    4801      4077490 :                   ins->n = val->n + 1;
    4802      4077490 :                   memcpy (ins->valid_dominated_by_p,
    4803      4077490 :                           val->valid_dominated_by_p,
    4804      4077490 :                           val->n * sizeof (int));
    4805      4077490 :                   ins->valid_dominated_by_p[val->n] = vno_bb->index;
    4806      4077490 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    4807            4 :                     fprintf (dump_file, "Appending predicate to value.\n");
    4808              :                   break;
    4809              :                 }
    4810              :             }
    4811              :           /* Copy the rest of the value chain.  */
    4812     59768025 :           for (vn_pval *val = (*slot)->u.values; val; val = val->next)
    4813              :             {
    4814     30802053 :               if (val == ins_at)
    4815              :                 /* Replace the node we appended to.  */
    4816      4077490 :                 *next = ins;
    4817              :               else
    4818              :                 {
    4819              :                   /* Copy other predicated values.  */
    4820     26724563 :                   *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
    4821              :                                                      sizeof (vn_pval)
    4822              :                                                      + ((val->n-1)
    4823              :                                                         * sizeof (int)));
    4824     26724563 :                   memcpy (*next, val,
    4825     26724563 :                           sizeof (vn_pval) + (val->n-1) * sizeof (int));
    4826     26724563 :                   (*next)->next = NULL;
    4827              :                 }
    4828     30802053 :               next = &(*next)->next;
    4829              :             }
    4830              :           /* Append the value if we didn't find it.  */
    4831     28965972 :           if (!ins_at)
    4832     24888482 :             *next = nval;
    4833     28965972 :           *slot = vno;
    4834     28965972 :           vno->next = last_inserted_nary;
    4835     28965972 :           last_inserted_nary = vno;
    4836     28965972 :           return vno;
    4837              :         }
    4838              : 
    4839              :       /* While we do not want to insert things twice it's awkward to
    4840              :          avoid it in the case where visit_nary_op pattern-matches stuff
    4841              :          and ends up simplifying the replacement to itself.  We then
    4842              :          get two inserts, one from visit_nary_op and one from
    4843              :          vn_nary_build_or_lookup.
    4844              :          So allow inserts with the same value number.  */
    4845          356 :       if ((*slot)->u.result == vno->u.result)
    4846              :         return *slot;
    4847              :     }
    4848              : 
     4849              :   /* ???  There's also optimistic vs. previous committed state merging
    4850              :      that is problematic for the case of unwinding.  */
    4851              : 
    4852              :   /* ???  We should return NULL if we do not use 'vno' and have the
    4853              :      caller release it.  */
    4854    127389399 :   gcc_assert (!*slot);
    4855              : 
    4856    127389399 :   *slot = vno;
    4857    127389399 :   vno->next = last_inserted_nary;
    4858    127389399 :   last_inserted_nary = vno;
    4859    127389399 :   return vno;
    4860              : }
    4861              : 
     4862              : /* Insert an n-ary operation into the current hash table using its
     4863              :    pieces: the operation CODE on TYPE applied to the LENGTH operands
     4864              :    OPS, valued RESULT with value number VALUE_ID.  Return the
                        :    vn_nary_op_t structure we created and put in the hashtable.  */
     4865              : 
     4866              : vn_nary_op_t
     4867       547294 : vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
     4868              :                           tree type, tree *ops,
     4869              :                           tree result, unsigned int value_id)
     4870              : {
     4871       547294 :   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
     4872       547294 :   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
     4873       547294 :   return vn_nary_op_insert_into (vno1, valid_info->nary);
     4874              : }
    4875              : 
     4876              : /* Return whether we can track a predicate valid when PRED_E is executed.
                        :    This guards vn_nary_op_insert_pieces_predicated which records only the
                        :    destination block index of PRED_E for later dominance checks.  */
     4877              : 
     4878              : static bool
     4879    151597993 : can_track_predicate_on_edge (edge pred_e)
     4880              : {
     4881              :   /* ???  As we are currently recording the destination basic-block index in
     4882              :      vn_pval.valid_dominated_by_p and using dominance for the
     4883              :      validity check we cannot track predicates on all edges.  */
     4884    151597993 :   if (single_pred_p (pred_e->dest))
     4885              :     return true;
     4886              :   /* Never record for backedges.  */
     4887     11978165 :   if (pred_e->flags & EDGE_DFS_BACK)
     4888              :     return false;
     4889              :   /* When there's more than one predecessor we cannot track
     4890              :      predicate validity based on the destination block.  The
     4891              :      exception is when all other incoming edges' sources are
     4892              :      dominated by the destination block.  */
     4893     11322326 :   edge_iterator ei;
     4894     11322326 :   edge e;
     4895     19399795 :   FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
     4896     17555885 :     if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
     4897              :       return false;
     4898              :   return true;
     4899              : }
    4900              : 
                        : /* Insert the n-ary operation CODE on TYPE with the LENGTH operands OPS
                        :    into the current hash table, recording RESULT with value number
                        :    VALUE_ID as a predicated value valid only in blocks dominated by the
                        :    destination of PRED_E.  Return the vn_nary_op_t structure put in the
                        :    hashtable (possibly an existing entry merged with this predicate by
                        :    vn_nary_op_insert_into).  */
     4901              : static vn_nary_op_t
     4902    107451224 : vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
     4903              :                                      tree type, tree *ops,
     4904              :                                      tree result, unsigned int value_id,
     4905              :                                      edge pred_e)
     4906              : {
     4907    107451224 :   if (flag_checking)
     4908    107450388 :     gcc_assert (can_track_predicate_on_edge (pred_e));
     4909              : 
     4910        74760 :   if (dump_file && (dump_flags & TDF_DETAILS)
     4911              :       /* ???  Fix dumping, but currently we only get comparisons.  */
     4912    107521954 :       && TREE_CODE_CLASS (code) == tcc_comparison)
     4913              :     {
     4914        70730 :       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
     4915        70730 :                pred_e->dest->index);
     4916        70730 :       print_generic_expr (dump_file, ops[0], TDF_SLIM);
     4917        70730 :       fprintf (dump_file, " %s ", get_tree_code_name (code));
     4918        70730 :       print_generic_expr (dump_file, ops[1], TDF_SLIM);
     4919       105726 :       fprintf (dump_file, " == %s\n",
     4920        70730 :                integer_zerop (result) ? "false" : "true");
     4921              :     }
     4922    107451224 :   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
     4923    107451224 :   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
     4924    107451224 :   vno1->predicated_values = 1;
                        :   /* Start with a single predicated value recorded at PRED_E's
                        :      destination; insertion may append further valid locations to an
                        :      existing matching value chain.  */
     4925    107451224 :   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
     4926              :                                               sizeof (vn_pval));
     4927    107451224 :   vno1->u.values->next = NULL;
     4928    107451224 :   vno1->u.values->result = result;
     4929    107451224 :   vno1->u.values->n = 1;
     4930    107451224 :   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
     4931    107451224 :   return vn_nary_op_insert_into (vno1, valid_info->nary);
     4932              : }
    4933              : 
    4934              : static bool
    4935              : dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
    4936              : 
                        : /* Return the predicated value of VNO that is valid in BB, or NULL_TREE
                        :    if none is.  E, when non-NULL, is the edge through which BB was
                        :    reached for an edge lookup; for backedges dominance is used directly
                        :    instead of consulting edge executability.  */
     4937              : static tree
     4938      1717947 : vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb,
     4939              :                                  edge e = NULL)
     4940              : {
                        :   /* Non-predicated entries simply carry their result.  */
     4941      1717947 :   if (! vno->predicated_values)
     4942            0 :     return vno->u.result;
     4943      3560586 :   for (vn_pval *val = vno->u.values; val; val = val->next)
     4944      5440184 :     for (unsigned i = 0; i < val->n; ++i)
     4945              :       {
     4946      3597545 :         basic_block cand
     4947      3597545 :           = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
     4948              :         /* Do not handle backedge executability optimistically since
     4949              :            when figuring out whether to iterate we do not consider
     4950              :            changed predication.
     4951              :            When asking for predicated values on an edge avoid looking
     4952              :            at edge executability for edges forward in our iteration
     4953              :            as well.  */
     4954      3597545 :         if (e && (e->flags & EDGE_DFS_BACK))
     4955              :           {
     4956        23412 :             if (dominated_by_p (CDI_DOMINATORS, bb, cand))
     4957         7782 :               return val->result;
     4958              :           }
     4959      3574133 :         else if (dominated_by_p_w_unex (bb, cand, false))
     4960       534916 :           return val->result;
     4961              :       }
     4962              :   return NULL_TREE;
     4963              : }
    4964              : 
                        : /* Return the predicated value of VNO valid when edge E is executed,
                        :    or NULL_TREE if there is none.  */
     4965              : static tree
     4966       213301 : vn_nary_op_get_predicated_value (vn_nary_op_t vno, edge e)
     4967              : {
     4968            0 :   return vn_nary_op_get_predicated_value (vno, e->src, e);
     4969              : }
    4970              : 
     4971              : /* Insert the rhs of STMT (which must be a GIMPLE assignment) into the
     4972              :    current hash table with a value number of RESULT.  Return the
                        :    vn_nary_op_t structure put in the hashtable.  */
     4973              : 
     4974              : static vn_nary_op_t
     4975     44868663 : vn_nary_op_insert_stmt (gimple *stmt, tree result)
     4976              : {
     4977     44868663 :   vn_nary_op_t vno1
     4978     44868663 :     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
     4979     44868663 :                         result, VN_INFO (result)->value_id);
     4980     44868663 :   init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
     4981     44868663 :   return vn_nary_op_insert_into (vno1, valid_info->nary);
     4982              : }
    4983              : 
     4984              : /* Compute a hashcode for PHI operation VP1 and return it.  The hash
                        :    must be compatible with vn_phi_eq: anything that function may treat
                        :    as equal (cross-block CSE candidates, backedge and VN_TOP args)
                        :    must not contribute to the hash.  */
     4985              : 
     4986              : static inline hashval_t
     4987     50109398 : vn_phi_compute_hash (vn_phi_t vp1)
     4988              : {
     4989     50109398 :   inchash::hash hstate;
     4990     50109398 :   tree phi1op;
     4991     50109398 :   tree type;
     4992     50109398 :   edge e;
     4993     50109398 :   edge_iterator ei;
     4994              : 
     4995    100218796 :   hstate.add_int (EDGE_COUNT (vp1->block->preds));
     4996     50109398 :   switch (EDGE_COUNT (vp1->block->preds))
     4997              :     {
     4998              :     case 1:
                        :       /* Single-arg PHIs are copies and may be CSEd across blocks,
                        :          so do not hash the block index.  */
     4999              :       break;
     5000     43029138 :     case 2:
     5001              :       /* When this is a PHI node subject to CSE for different blocks
     5002              :          avoid hashing the block index.  */
     5003     43029138 :       if (vp1->cclhs)
     5004              :         break;
     5005              :       /* Fallthru.  */
     5006     33852861 :     default:
     5007     33852861 :       hstate.add_int (vp1->block->index);
     5008              :     }
     5009              : 
     5010              :   /* If all PHI arguments are constants we need to distinguish
     5011              :      the PHI node via its type.  */
     5012     50109398 :   type = vp1->type;
     5013     50109398 :   hstate.merge_hash (vn_hash_type (type));
     5014              : 
     5015    174006526 :   FOR_EACH_EDGE (e, ei, vp1->block->preds)
     5016              :     {
     5017              :       /* Don't hash backedge values; they need to be handled as VN_TOP
     5018              :          for optimistic value-numbering.  */
     5019    123897128 :       if (e->flags & EDGE_DFS_BACK)
     5020     27747312 :         continue;
     5021              : 
     5022     96149816 :       phi1op = vp1->phiargs[e->dest_idx];
     5023     96149816 :       if (phi1op == VN_TOP)
     5024       243914 :         continue;
     5025     95905902 :       inchash::add_expr (phi1op, hstate);
     5026              :     }
     5027              : 
     5028     50109398 :   return hstate.end ();
     5029              : }
    5030              : 
    5031              : 
     5032              : /* Return true if COND1 and COND2 represent the same condition, set
     5033              :    *INVERTED_P if one needs to be inverted to make it the same as
     5034              :    the other.  LHS1/RHS1 and LHS2/RHS2 are the operands to use for
                        :    COND1 and COND2 respectively, as value-numbered by the caller.  */
     5035              : 
     5036              : static bool
     5037      3783701 : cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
     5038              :                     gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
     5039              : {
     5040      3783701 :   enum tree_code code1 = gimple_cond_code (cond1);
     5041      3783701 :   enum tree_code code2 = gimple_cond_code (cond2);
     5042              : 
                        :   /* Normalize COND2's operands/inversion so its code matches CODE1.  */
     5043      3783701 :   *inverted_p = false;
     5044      3783701 :   if (code1 == code2)
     5045              :     ;
     5046       299900 :   else if (code1 == swap_tree_comparison (code2))
     5047              :     std::swap (lhs2, rhs2);
     5048       263844 :   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
     5049       130656 :     *inverted_p = true;
     5050       133188 :   else if (code1 == invert_tree_comparison
     5051       133188 :                       (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
     5052              :     {
     5053        10304 :       std::swap (lhs2, rhs2);
     5054        10304 :       *inverted_p = true;
     5055              :     }
     5056              :   else
     5057              :     return false;
     5058              : 
                        :   /* With codes matching, compare operands, also allowing a swapped
                        :      operand order for commutative comparison codes.  */
     5059      3660817 :   return ((expressions_equal_p (lhs1, lhs2)
     5060       108479 :            && expressions_equal_p (rhs1, rhs2))
     5061      3685042 :           || (commutative_tree_code (code1)
     5062      1800816 :               && expressions_equal_p (lhs1, rhs2)
     5063         2271 :               && expressions_equal_p (rhs1, lhs2)));
     5064              : }
    5065              : 
     5066              : /* Compare two phi entries for equality, ignoring VN_TOP arguments.
                        :    PHIs in different blocks compare equal only for the single-argument
                        :    (copy) case and the two-argument diamond case with an equivalent
                        :    controlling condition.  */
     5067              : 
     5068              : static int
     5069     40768108 : vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
     5070              : {
     5071     40768108 :   if (vp1->hashcode != vp2->hashcode)
     5072              :     return false;
     5073              : 
     5074     12687123 :   if (vp1->block != vp2->block)
     5075              :     {
     5076     11374506 :       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
     5077              :         return false;
     5078              : 
     5079     36625122 :       switch (EDGE_COUNT (vp1->block->preds))
     5080              :         {
     5081              :         case 1:
     5082              :           /* Single-arg PHIs are just copies.  */
     5083              :           break;
     5084              : 
     5085      3791502 :         case 2:
     5086      3791502 :           {
     5087              :             /* Make sure both PHIs are classified as CSEable.  */
     5088      3791502 :             if (! vp1->cclhs || ! vp2->cclhs)
     5089              :               return false;
     5090              : 
     5091              :             /* Rule out backedges into the PHI.  */
     5092      3791502 :             gcc_checking_assert
     5093              :               (vp1->block->loop_father->header != vp1->block
     5094              :                && vp2->block->loop_father->header != vp2->block);
     5095              : 
     5096              :             /* If the PHI nodes do not have compatible types
     5097              :                they are not the same.  */
     5098      3791502 :             if (!types_compatible_p (vp1->type, vp2->type))
     5099              :               return false;
     5100              : 
     5101              :             /* If the immediate dominators end in switch stmts multiple
     5102              :                values may end up in the same PHI arg via intermediate
     5103              :                CFG merges.  */
     5104      3783701 :             basic_block idom1
     5105      3783701 :               = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
     5106      3783701 :             basic_block idom2
     5107      3783701 :               = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
     5108      3783701 :             gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
     5109              :                                  && EDGE_COUNT (idom2->succs) == 2);
     5110              : 
     5111              :             /* Verify the controlling stmt is the same.  */
     5112      7567402 :             gcond *last1 = as_a <gcond *> (*gsi_last_bb (idom1));
     5113      7567402 :             gcond *last2 = as_a <gcond *> (*gsi_last_bb (idom2));
     5114      3783701 :             bool inverted_p;
     5115      3783701 :             if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
     5116      3783701 :                                       last2, vp2->cclhs, vp2->ccrhs,
     5117              :                                       &inverted_p))
     5118              :               return false;
     5119              : 
     5120              :             /* Get at true/false controlled edges into the PHI.  */
     5121        84340 :             edge te1, te2, fe1, fe2;
     5122        84340 :             if (! extract_true_false_controlled_edges (idom1, vp1->block,
     5123              :                                                        &te1, &fe1)
     5124        84340 :                 || ! extract_true_false_controlled_edges (idom2, vp2->block,
     5125              :                                                           &te2, &fe2))
     5126        38262 :               return false;
     5127              : 
     5128              :             /* Swap edges if the second condition is the inversion of the
     5129              :                first.  */
     5130        46078 :             if (inverted_p)
     5131         2028 :               std::swap (te2, fe2);
     5132              : 
     5133              :             /* Since we do not know which edge will be executed we have
     5134              :                to be careful when matching VN_TOP.  Be conservative and
     5135              :                only match VN_TOP == VN_TOP for now, we could allow
     5136              :                VN_TOP on the not prevailing PHI though.  See for example
     5137              :                PR102920.  */
     5138        46078 :             if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
     5139        46078 :                                        vp2->phiargs[te2->dest_idx], false)
     5140        90343 :                 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
     5141        44265 :                                           vp2->phiargs[fe2->dest_idx], false))
     5142         1813 :               return false;
     5143              : 
     5144              :             return true;
     5145              :           }
     5146              : 
     5147              :         default:
     5148              :           return false;
     5149              :         }
     5150              :     }
     5151              : 
     5152              :   /* If the PHI nodes do not have compatible types
     5153              :      they are not the same.  */
     5154      8895621 :   if (!types_compatible_p (vp1->type, vp2->type))
     5155              :     return false;
     5156              : 
     5157              :   /* Any phi in the same block will have its arguments in the
     5158              :      same edge order, because of how we store phi nodes.  */
     5159      8894571 :   unsigned nargs = EDGE_COUNT (vp1->block->preds);
     5160     20638409 :   for (unsigned i = 0; i < nargs; ++i)
     5161              :     {
     5162     16495423 :       tree phi1op = vp1->phiargs[i];
     5163     16495423 :       tree phi2op = vp2->phiargs[i];
     5164     16495423 :       if (phi1op == phi2op)
     5165     11649112 :         continue;
     5166      4846311 :       if (!expressions_equal_p (phi1op, phi2op, false))
     5167              :         return false;
     5168              :     }
     5169              : 
     5170              :   return true;
     5171              : }
    5172              : 
     5173              : /* Lookup PHI in the current hash table, and return the resulting
     5174              :    value number if it exists in the hash table.  Return NULL_TREE if
     5175              :    it does not exist in the hash table.  When BACKEDGES_VARYING_P is
                        :    true, PHI arguments on DFS backedges are not canonicalized to
                        :    their value number.  */
     5176              : 
     5177              : static tree
     5178     27453579 : vn_phi_lookup (gimple *phi, bool backedges_varying_p)
     5179              : {
     5180     27453579 :   vn_phi_s **slot;
     5181     27453579 :   struct vn_phi_s *vp1;
     5182     27453579 :   edge e;
     5183     27453579 :   edge_iterator ei;
     5184              : 
                        :   /* Build a temporary vn_phi_s on the stack, sized for the PHI's
                        :      argument count (one tree is already in the struct).  */
     5185     27453579 :   vp1 = XALLOCAVAR (struct vn_phi_s,
     5186              :                     sizeof (struct vn_phi_s)
     5187              :                     + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
     5188              : 
     5189              :   /* Canonicalize the SSA_NAME's to their value number.  */
     5190     94651074 :   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
     5191              :     {
     5192     67197495 :       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
     5193     67197495 :       if (TREE_CODE (def) == SSA_NAME
     5194     55940685 :           && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
     5195              :         {
     5196     53398687 :           if (!virtual_operand_p (def)
     5197     53398687 :               && ssa_undefined_value_p (def, false))
     5198       135365 :             def = VN_TOP;
     5199              :           else
     5200     53263322 :             def = SSA_VAL (def);
     5201              :         }
     5202     67197495 :       vp1->phiargs[e->dest_idx] = def;
     5203              :     }
     5204     27453579 :   vp1->type = TREE_TYPE (gimple_phi_result (phi));
     5205     27453579 :   vp1->block = gimple_bb (phi);
     5206              :   /* Extract values of the controlling condition.  */
     5207     27453579 :   vp1->cclhs = NULL_TREE;
     5208     27453579 :   vp1->ccrhs = NULL_TREE;
     5209     27453579 :   if (EDGE_COUNT (vp1->block->preds) == 2
     5210     27453579 :       && vp1->block->loop_father->header != vp1->block)
     5211              :     {
     5212      8571837 :       basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
     5213      8571837 :       if (EDGE_COUNT (idom1->succs) == 2)
     5214     17042214 :         if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
     5215              :           {
     5216              :             /* ???  We want to use SSA_VAL here.  But possibly not
     5217              :                allow VN_TOP.  */
     5218      8303953 :             vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
     5219      8303953 :             vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
     5220              :           }
     5221              :     }
     5222     27453579 :   vp1->hashcode = vn_phi_compute_hash (vp1);
     5223     27453579 :   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
     5224     27453579 :   if (!slot)
     5225              :     return NULL_TREE;
     5226      4187251 :   return (*slot)->result;
     5227              : }
    5228              : 
     5229              : /* Insert PHI into the current hash table with a value number of
     5230              :    RESULT.  When BACKEDGES_VARYING_P is true, PHI arguments on DFS
                        :    backedges are not canonicalized to their value number.  Return
                        :    the inserted vn_phi_t.  */
     5231              : 
     5232              : static vn_phi_t
     5233     22655819 : vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
     5234              : {
     5235     22655819 :   vn_phi_s **slot;
                        :   /* Allocate the entry on the VN obstack, sized for the PHI's
                        :      argument count (one tree is already in the struct).  */
     5236     22655819 :   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
     5237              :                                            sizeof (vn_phi_s)
     5238              :                                            + ((gimple_phi_num_args (phi) - 1)
     5239              :                                               * sizeof (tree)));
     5240     22655819 :   edge e;
     5241     22655819 :   edge_iterator ei;
     5242              : 
     5243              :   /* Canonicalize the SSA_NAME's to their value number.  */
     5244     79355452 :   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
     5245              :     {
     5246     56699633 :       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
     5247     56699633 :       if (TREE_CODE (def) == SSA_NAME
     5248     46515616 :           && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
     5249              :         {
     5250     43974033 :           if (!virtual_operand_p (def)
     5251     43974033 :               && ssa_undefined_value_p (def, false))
     5252       108793 :             def = VN_TOP;
     5253              :           else
     5254     43865240 :             def = SSA_VAL (def);
     5255              :         }
     5256     56699633 :       vp1->phiargs[e->dest_idx] = def;
     5257              :     }
     5258     22655819 :   vp1->value_id = VN_INFO (result)->value_id;
     5259     22655819 :   vp1->type = TREE_TYPE (gimple_phi_result (phi));
     5260     22655819 :   vp1->block = gimple_bb (phi);
     5261              :   /* Extract values of the controlling condition.  */
     5262     22655819 :   vp1->cclhs = NULL_TREE;
     5263     22655819 :   vp1->ccrhs = NULL_TREE;
     5264     22655819 :   if (EDGE_COUNT (vp1->block->preds) == 2
     5265     22655819 :       && vp1->block->loop_father->header != vp1->block)
     5266              :     {
     5267      8216917 :       basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
     5268      8216917 :       if (EDGE_COUNT (idom1->succs) == 2)
     5269     16335286 :         if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
     5270              :           {
     5271              :             /* ???  We want to use SSA_VAL here.  But possibly not
     5272              :                allow VN_TOP.  */
     5273      7952584 :             vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
     5274      7952584 :             vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
     5275              :           }
     5276              :     }
     5277     22655819 :   vp1->result = result;
     5278     22655819 :   vp1->hashcode = vn_phi_compute_hash (vp1);
     5279              : 
     5280     22655819 :   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
     5281     22655819 :   gcc_assert (!*slot);
     5282              : 
                        :   /* Chain the new entry for unwinding support.  */
     5283     22655819 :   *slot = vp1;
     5284     22655819 :   vp1->next = last_inserted_phi;
     5285     22655819 :   last_inserted_phi = vp1;
     5286     22655819 :   return vp1;
     5287              : }
    5288              : 
    5289              : 
    5290              : /* Return true if BB1 is dominated by BB2 taking into account edges
    5291              :    that are not executable.  When ALLOW_BACK is false consider not
    5292              :    executable backedges as executable.  */
    5293              : 
    5294              : static bool
    5295     71123701 : dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
    5296              : {
    5297     71123701 :   edge_iterator ei;
    5298     71123701 :   edge e;
    5299              : 
    5300     71123701 :   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5301              :     return true;
    5302              : 
    5303              :   /* Before iterating we'd like to know if there exists a
    5304              :      (executable) path from bb2 to bb1 at all, if not we can
    5305              :      directly return false.  For now simply iterate once.  */
    5306              : 
    5307              :   /* Iterate to the single executable bb1 predecessor.  */
    5308     21316029 :   if (EDGE_COUNT (bb1->preds) > 1)
    5309              :     {
    5310      2937764 :       edge prede = NULL;
    5311      6385131 :       FOR_EACH_EDGE (e, ei, bb1->preds)
    5312      5969829 :         if ((e->flags & EDGE_EXECUTABLE)
    5313       587622 :             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
    5314              :           {
    5315      5460226 :             if (prede)
    5316              :               {
    5317              :                 prede = NULL;
    5318              :                 break;
    5319              :               }
    5320              :             prede = e;
    5321              :           }
    5322      2937764 :       if (prede)
    5323              :         {
    5324       415302 :           bb1 = prede->src;
    5325              : 
    5326              :           /* Re-do the dominance check with changed bb1.  */
    5327       415302 :           if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5328              :             return true;
    5329              :         }
    5330              :     }
    5331              : 
    5332              :   /* Iterate to the single executable bb2 successor.  */
    5333     21075118 :   if (EDGE_COUNT (bb2->succs) > 1)
    5334              :     {
    5335      6332640 :       edge succe = NULL;
    5336     12812530 :       FOR_EACH_EDGE (e, ei, bb2->succs)
    5337     12665544 :         if ((e->flags & EDGE_EXECUTABLE)
    5338       180691 :             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
    5339              :           {
    5340     12484894 :             if (succe)
    5341              :               {
    5342              :                 succe = NULL;
    5343              :                 break;
    5344              :               }
    5345              :             succe = e;
    5346              :           }
    5347      6332640 :       if (succe
    5348              :           /* Limit the number of edges we check, we should bring in
    5349              :              context from the iteration and compute the single
    5350              :              executable incoming edge when visiting a block.  */
    5351      6332640 :           && EDGE_COUNT (succe->dest->preds) < 8)
    5352              :         {
    5353              :           /* Verify the reached block is only reached through succe.
    5354              :              If there is only one edge we can spare us the dominator
    5355              :              check and iterate directly.  */
    5356       113046 :           if (EDGE_COUNT (succe->dest->preds) > 1)
    5357              :             {
    5358        51753 :               FOR_EACH_EDGE (e, ei, succe->dest->preds)
    5359        40328 :                 if (e != succe
    5360        26429 :                     && ((e->flags & EDGE_EXECUTABLE)
    5361        17386 :                         || (!allow_back && (e->flags & EDGE_DFS_BACK))))
    5362              :                   {
    5363              :                     succe = NULL;
    5364              :                     break;
    5365              :                   }
    5366              :             }
    5367       113046 :           if (succe)
    5368              :             {
    5369       103994 :               bb2 = succe->dest;
    5370              : 
    5371              :               /* Re-do the dominance check with changed bb2.  */
    5372       103994 :               if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5373              :                 return true;
    5374              :             }
    5375              :         }
    5376              :     }
    5377              :   /* Iterate to the single successor of bb2 with only a single executable
    5378              :      incoming edge.  */
    5379     14742478 :   else if (EDGE_COUNT (bb2->succs) == 1
    5380     14206038 :            && EDGE_COUNT (single_succ (bb2)->preds) > 1
    5381              :            /* Limit the number of edges we check, we should bring in
    5382              :               context from the iteration and compute the single
    5383              :               executable incoming edge when visiting a block.  */
    5384     28712037 :            && EDGE_COUNT (single_succ (bb2)->preds) < 8)
    5385              :     {
    5386      4969682 :       edge prede = NULL;
    5387     11198895 :       FOR_EACH_EDGE (e, ei, single_succ (bb2)->preds)
    5388     10665311 :         if ((e->flags & EDGE_EXECUTABLE)
    5389      1308613 :             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
    5390              :           {
    5391      9360874 :             if (prede)
    5392              :               {
    5393              :                 prede = NULL;
    5394              :                 break;
    5395              :               }
    5396              :             prede = e;
    5397              :           }
    5398              :       /* We might actually get to a query with BB2 not visited yet when
    5399              :          we're querying for a predicated value.  */
    5400      4969682 :       if (prede && prede->src == bb2)
    5401              :         {
    5402       475647 :           bb2 = prede->dest;
    5403              : 
    5404              :           /* Re-do the dominance check with changed bb2.  */
    5405       475647 :           if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5406              :             return true;
    5407              :         }
    5408              :     }
    5409              : 
    5410              :   /* We could now iterate updating bb1 / bb2.  */
    5411              :   return false;
    5412              : }
    5413              : 
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  Note that value-numbering FROM to itself (TO == FROM)
   is how a name is marked VARYING; several checks below force that
   instead of performing a disallowed lattice transition so that the
   fixpoint iteration is guaranteed to terminate.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  /* Whether CURRVAL is an undefined SSA name resp. an invariant.
     Only computed on the FROM != TO path below; both start false.  */
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ???  Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ???  When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
         preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
         With SCCVN we were simply lucky we iterated the other PHI
         cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
        goto set_and_exit;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  /* At this point TO is either an SSA name that is its own value
     number or a GIMPLE invariant.  */
  gcc_checking_assert (to != NULL_TREE
                       && ((TREE_CODE (to) == SSA_NAME
                            && (to == from || SSA_VAL (to) == to))
                           || is_gimple_min_invariant (to)));

  if (from != to)
    {
      /* Once FROM is VARYING (value-numbered to itself) it stays
         VARYING; never move back up the lattice.  */
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
                        && !virtual_operand_p (currval)
                        && ssa_undefined_value_p (currval, false));
      /* A transition from a non-constant, non-undefined value to a
         constant is disallowed (it could oscillate); force VARYING.  */
      if (currval != VN_TOP
          && !curr_invariant
          && !curr_undefined
          && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-constant) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (constant)\n");
            }
          to = from;
        }
      /* Likewise a transition from a defined value to an undefined
         SSA name is disallowed; force VARYING.  */
      else if (currval != VN_TOP
               && !curr_undefined
               && TREE_CODE (to) == SSA_NAME
               && !virtual_operand_p (to)
               && ssa_undefined_value_p (to, false))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-undefined) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (undefined)\n");
            }
          to = from;
        }
      /* Never value-number to a name occurring in an abnormal PHI;
         such names cannot be freely propagated.  */
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

set_and_exit:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  /* Only record (and report) a change when the new value really
     differs from the current one.  */
  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase were we'd otherwise not terminate iteration.  */
      && !(curr_undefined
           && TREE_CODE (to) == SSA_NAME
           && !virtual_operand_p (to)
           && ssa_undefined_value_p (to, false))
      /* ???  For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && known_eq (coff, toff)))
    {
      if (to != from
          && currval != VN_TOP
          && !curr_undefined
          /* We do not want to allow lattice transitions from one value
             to another since that may lead to not terminating iteration
             (see PR95049).  Since there's no convenient way to check
             for the allowed transition of VAL -> PHI (loop entry value,
             same on two PHIs, to same PHI result) we restrict the check
             to invariants.  */
          && curr_invariant
          && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " forced VARYING");
          to = from;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
    5565              : 
    5566              : /* Set all definitions in STMT to value number to themselves.
    5567              :    Return true if a value number changed. */
    5568              : 
    5569              : static bool
    5570    280944517 : defs_to_varying (gimple *stmt)
    5571              : {
    5572    280944517 :   bool changed = false;
    5573    280944517 :   ssa_op_iter iter;
    5574    280944517 :   def_operand_p defp;
    5575              : 
    5576    310026958 :   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    5577              :     {
    5578     29082441 :       tree def = DEF_FROM_PTR (defp);
    5579     29082441 :       changed |= set_ssa_val_to (def, def);
    5580              :     }
    5581    280944517 :   return changed;
    5582              : }
    5583              : 
    5584              : /* Visit a copy between LHS and RHS, return true if the value number
    5585              :    changed.  */
    5586              : 
    5587              : static bool
    5588      7894493 : visit_copy (tree lhs, tree rhs)
    5589              : {
    5590              :   /* Valueize.  */
    5591      7894493 :   rhs = SSA_VAL (rhs);
    5592              : 
    5593      7894493 :   return set_ssa_val_to (lhs, rhs);
    5594              : }
    5595              : 
    5596              : /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
    5597              :    is the same.  */
    5598              : 
    5599              : static tree
    5600      2457005 : valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
    5601              : {
    5602      2457005 :   if (TREE_CODE (op) == SSA_NAME)
    5603      2151776 :     op = vn_valueize (op);
    5604              : 
    5605              :   /* Either we have the op widened available.  */
    5606      2457005 :   tree ops[3] = {};
    5607      2457005 :   ops[0] = op;
    5608      2457005 :   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
    5609              :                                        wide_type, ops, NULL);
    5610      2457005 :   if (tem)
    5611              :     return tem;
    5612              : 
    5613              :   /* Or the op is truncated from some existing value.  */
    5614      2165804 :   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
    5615              :     {
    5616       549131 :       gimple *def = SSA_NAME_DEF_STMT (op);
    5617       549131 :       if (is_gimple_assign (def)
    5618       549131 :           && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    5619              :         {
    5620       282038 :           tem = gimple_assign_rhs1 (def);
    5621       282038 :           if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
    5622              :             {
    5623       181220 :               if (TREE_CODE (tem) == SSA_NAME)
    5624       181220 :                 tem = vn_valueize (tem);
    5625       181220 :               return tem;
    5626              :             }
    5627              :         }
    5628              :     }
    5629              : 
    5630              :   /* For constants simply extend it.  */
    5631      1984584 :   if (TREE_CODE (op) == INTEGER_CST)
    5632       338296 :     return wide_int_to_tree (wide_type, wi::to_widest (op));
    5633              : 
    5634              :   return NULL_TREE;
    5635              : }
    5636              : 
    5637              : /* Visit a nary operator RHS, value number it, and return true if the
    5638              :    value number of LHS has changed as a result.  */
    5639              : 
    5640              : static bool
    5641     48580511 : visit_nary_op (tree lhs, gassign *stmt)
    5642              : {
    5643     48580511 :   vn_nary_op_t vnresult;
    5644     48580511 :   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
    5645     48580511 :   if (! result && vnresult)
    5646       153338 :     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
    5647     44940361 :   if (result)
    5648      3710155 :     return set_ssa_val_to (lhs, result);
    5649              : 
    5650              :   /* Do some special pattern matching for redundancies of operations
    5651              :      in different types.  */
    5652     44870356 :   enum tree_code code = gimple_assign_rhs_code (stmt);
    5653     44870356 :   tree type = TREE_TYPE (lhs);
    5654     44870356 :   tree rhs1 = gimple_assign_rhs1 (stmt);
    5655     44870356 :   switch (code)
    5656              :     {
    5657      9999252 :     CASE_CONVERT:
    5658              :       /* Match arithmetic done in a different type where we can easily
    5659              :          substitute the result from some earlier sign-changed or widened
    5660              :          operation.  */
    5661      9999252 :       if (INTEGRAL_TYPE_P (type)
    5662      8957997 :           && TREE_CODE (rhs1) == SSA_NAME
    5663              :           /* We only handle sign-changes, zero-extension -> & mask or
    5664              :              sign-extension if we know the inner operation doesn't
    5665              :              overflow.  */
    5666     18722893 :           && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
    5667      5245592 :                 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
    5668      5244794 :                     && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
    5669      7995911 :                && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
    5670      5901714 :               || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
    5671              :         {
    5672      7639073 :           gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
    5673      5484726 :           if (def
    5674      5484726 :               && (gimple_assign_rhs_code (def) == PLUS_EXPR
    5675      4234969 :                   || gimple_assign_rhs_code (def) == MINUS_EXPR
    5676      4098419 :                   || gimple_assign_rhs_code (def) == MULT_EXPR))
    5677              :             {
    5678      1985853 :               tree ops[3] = {};
    5679              :               /* When requiring a sign-extension we cannot model a
    5680              :                  previous truncation with a single op so don't bother.  */
    5681      1985853 :               bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
    5682              :               /* Either we have the op widened available.  */
    5683      1985853 :               ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
    5684              :                                            allow_truncate);
    5685      1985853 :               if (ops[0])
    5686       942304 :                 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
    5687              :                                              allow_truncate);
    5688      1985853 :               if (ops[0] && ops[1])
    5689              :                 {
    5690       339565 :                   ops[0] = vn_nary_op_lookup_pieces
    5691       339565 :                       (2, gimple_assign_rhs_code (def), type, ops, NULL);
    5692              :                   /* We have wider operation available.  */
    5693       339565 :                   if (ops[0]
    5694              :                       /* If the leader is a wrapping operation we can
    5695              :                          insert it for code hoisting w/o introducing
    5696              :                          undefined overflow.  If it is not it has to
    5697              :                          be available.  See PR86554.  */
    5698       339565 :                       && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
    5699         2084 :                           || (rpo_avail && vn_context_bb
    5700         2084 :                               && rpo_avail->eliminate_avail (vn_context_bb,
    5701              :                                                              ops[0]))))
    5702              :                     {
    5703         9629 :                       unsigned lhs_prec = TYPE_PRECISION (type);
    5704         9629 :                       unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
    5705         9629 :                       if (lhs_prec == rhs_prec
    5706         9629 :                           || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
    5707         1755 :                               && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
    5708              :                         {
    5709         9039 :                           gimple_match_op match_op (gimple_match_cond::UNCOND,
    5710         9039 :                                                     NOP_EXPR, type, ops[0]);
    5711         9039 :                           result = vn_nary_build_or_lookup (&match_op);
    5712         9039 :                           if (result)
    5713              :                             {
    5714         9039 :                               bool changed = set_ssa_val_to (lhs, result);
    5715         9039 :                               if (TREE_CODE (result) == SSA_NAME)
    5716         9039 :                                 vn_nary_op_insert_stmt (stmt, result);
    5717         9039 :                               return changed;
    5718              :                             }
    5719              :                         }
    5720              :                       else
    5721              :                         {
    5722          590 :                           tree mask = wide_int_to_tree
    5723          590 :                             (type, wi::mask (rhs_prec, false, lhs_prec));
    5724          590 :                           gimple_match_op match_op (gimple_match_cond::UNCOND,
    5725          590 :                                                     BIT_AND_EXPR,
    5726          590 :                                                     TREE_TYPE (lhs),
    5727          590 :                                                     ops[0], mask);
    5728          590 :                           result = vn_nary_build_or_lookup (&match_op);
    5729          590 :                           if (result)
    5730              :                             {
    5731          590 :                               bool changed = set_ssa_val_to (lhs, result);
    5732          590 :                               if (TREE_CODE (result) == SSA_NAME)
    5733          590 :                                 vn_nary_op_insert_stmt (stmt, result);
    5734          590 :                               return changed;
    5735              :                             }
    5736              :                         }
    5737              :                     }
    5738              :                 }
    5739              :             }
    5740              :         }
    5741              :       break;
    5742      1512467 :     case BIT_AND_EXPR:
    5743      1512467 :       if (INTEGRAL_TYPE_P (type)
    5744      1474368 :           && TREE_CODE (rhs1) == SSA_NAME
    5745      1474368 :           && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
    5746       920895 :           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
    5747       920777 :           && default_vn_walk_kind != VN_NOWALK
    5748              :           && CHAR_BIT == 8
    5749              :           && BITS_PER_UNIT == 8
    5750              :           && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
    5751       920569 :           && TYPE_PRECISION (type) <= vn_walk_cb_data::bufsize * BITS_PER_UNIT
    5752       920567 :           && !integer_all_onesp (gimple_assign_rhs2 (stmt))
    5753      2433034 :           && !integer_zerop (gimple_assign_rhs2 (stmt)))
    5754              :         {
    5755       920567 :           gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
    5756       676560 :           if (ass
    5757       676560 :               && !gimple_has_volatile_ops (ass)
    5758       675114 :               && vn_get_stmt_kind (ass) == VN_REFERENCE)
    5759              :             {
    5760       320213 :               tree last_vuse = gimple_vuse (ass);
    5761       320213 :               tree op = gimple_assign_rhs1 (ass);
    5762       960639 :               tree result = vn_reference_lookup (op, gimple_vuse (ass),
    5763              :                                                  default_vn_walk_kind,
    5764              :                                                  NULL, true, &last_vuse,
    5765              :                                                  gimple_assign_rhs2 (stmt));
    5766       320213 :               if (result
    5767       320653 :                   && useless_type_conversion_p (TREE_TYPE (result),
    5768          440 :                                                 TREE_TYPE (op)))
    5769          440 :                 return set_ssa_val_to (lhs, result);
    5770              :             }
    5771              :         }
    5772              :       break;
    5773       251008 :     case BIT_FIELD_REF:
    5774       251008 :       if (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
    5775              :         {
    5776       250988 :           tree op0 = TREE_OPERAND (rhs1, 0);
    5777       250988 :           gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (op0));
    5778       207625 :           if (ass
    5779       207625 :               && !gimple_has_volatile_ops (ass)
    5780       207542 :               && vn_get_stmt_kind (ass) == VN_REFERENCE)
    5781              :             {
    5782        92102 :               tree last_vuse = gimple_vuse (ass);
    5783        92102 :               tree op = gimple_assign_rhs1 (ass);
    5784              :               /* Avoid building invalid and unexpected refs.  */
    5785        92102 :               if (TREE_CODE (op) != TARGET_MEM_REF
    5786              :                   && TREE_CODE (op) != BIT_FIELD_REF
    5787              :                   && TREE_CODE (op) != REALPART_EXPR
    5788              :                   && TREE_CODE (op) != IMAGPART_EXPR)
    5789              :                 {
    5790        84306 :                   tree op = build3 (BIT_FIELD_REF, TREE_TYPE (rhs1),
    5791              :                                     gimple_assign_rhs1 (ass),
    5792        84306 :                                     TREE_OPERAND (rhs1, 1),
    5793        84306 :                                     TREE_OPERAND (rhs1, 2));
    5794       168612 :                   tree result = vn_reference_lookup (op, gimple_vuse (ass),
    5795              :                                                      default_vn_walk_kind,
    5796              :                                                      NULL, true, &last_vuse);
    5797        84306 :                   if (result
    5798        84306 :                       && useless_type_conversion_p (type, TREE_TYPE (result)))
    5799         1527 :                     return set_ssa_val_to (lhs, result);
    5800        83066 :                   else if (result
    5801          287 :                            && TYPE_SIZE (type)
    5802          287 :                            && TYPE_SIZE (TREE_TYPE (result))
    5803        83353 :                            && operand_equal_p (TYPE_SIZE (type),
    5804          287 :                                                TYPE_SIZE (TREE_TYPE (result))))
    5805              :                     {
    5806          287 :                       gimple_match_op match_op (gimple_match_cond::UNCOND,
    5807          287 :                                                 VIEW_CONVERT_EXPR,
    5808          287 :                                                 type, result);
    5809          287 :                       result = vn_nary_build_or_lookup (&match_op);
    5810          287 :                       if (result)
    5811              :                         {
    5812          287 :                           bool changed = set_ssa_val_to (lhs, result);
    5813          287 :                           if (TREE_CODE (result) == SSA_NAME)
    5814          275 :                             vn_nary_op_insert_stmt (stmt, result);
    5815          287 :                           return changed;
    5816              :                         }
    5817              :                     }
    5818              :                 }
    5819              :             }
    5820              :         }
    5821              :       break;
    5822       328762 :     case TRUNC_DIV_EXPR:
    5823       328762 :       if (TYPE_UNSIGNED (type))
    5824              :         break;
    5825              :       /* Fallthru.  */
    5826      5414757 :     case RDIV_EXPR:
    5827      5414757 :     case MULT_EXPR:
    5828              :       /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
    5829      5414757 :       if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
    5830              :         {
    5831      5413847 :           tree rhs[2];
    5832      5413847 :           rhs[0] = rhs1;
    5833      5413847 :           rhs[1] = gimple_assign_rhs2 (stmt);
    5834     16234652 :           for (unsigned i = 0; i <= 1; ++i)
    5835              :             {
    5836     10826538 :               unsigned j = i == 0 ? 1 : 0;
    5837     10826538 :               tree ops[2];
    5838     10826538 :               gimple_match_op match_op (gimple_match_cond::UNCOND,
    5839     10826538 :                                         NEGATE_EXPR, type, rhs[i]);
    5840     10826538 :               ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
    5841     10826538 :               ops[j] = rhs[j];
    5842     10826538 :               if (ops[i]
    5843     10826538 :                   && (ops[0] = vn_nary_op_lookup_pieces (2, code,
    5844              :                                                          type, ops, NULL)))
    5845              :                 {
    5846         5733 :                   gimple_match_op match_op (gimple_match_cond::UNCOND,
    5847         5733 :                                             NEGATE_EXPR, type, ops[0]);
    5848         5733 :                   result = vn_nary_build_or_lookup_1 (&match_op, true, false);
    5849         5733 :                   if (result)
    5850              :                     {
    5851         5733 :                       bool changed = set_ssa_val_to (lhs, result);
    5852         5733 :                       if (TREE_CODE (result) == SSA_NAME)
    5853         5733 :                         vn_nary_op_insert_stmt (stmt, result);
    5854         5733 :                       return changed;
    5855              :                     }
    5856              :                 }
    5857              :             }
    5858              :         }
    5859              :       break;
    5860       362022 :     case LSHIFT_EXPR:
    5861              :       /* For X << C, use the value number of X * (1 << C).  */
    5862       362022 :       if (INTEGRAL_TYPE_P (type)
    5863       348821 :           && TYPE_OVERFLOW_WRAPS (type)
    5864       547347 :           && !TYPE_SATURATING (type))
    5865              :         {
    5866       185325 :           tree rhs2 = gimple_assign_rhs2 (stmt);
    5867       185325 :           if (TREE_CODE (rhs2) == INTEGER_CST
    5868       107427 :               && tree_fits_uhwi_p (rhs2)
    5869       292752 :               && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
    5870              :             {
    5871       107427 :               wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
    5872       107427 :                                                 TYPE_PRECISION (type));
    5873       214854 :               gimple_match_op match_op (gimple_match_cond::UNCOND,
    5874       107427 :                                         MULT_EXPR, type, rhs1,
    5875       107427 :                                         wide_int_to_tree (type, w));
    5876       107427 :               result = vn_nary_build_or_lookup (&match_op);
    5877       107427 :               if (result)
    5878              :                 {
    5879       107427 :                   bool changed = set_ssa_val_to (lhs, result);
    5880       107427 :                   if (TREE_CODE (result) == SSA_NAME)
    5881       107426 :                     vn_nary_op_insert_stmt (stmt, result);
    5882       107427 :                   return changed;
    5883              :                 }
    5884       107427 :             }
    5885              :         }
    5886              :       break;
    5887              :     default:
    5888              :       break;
    5889              :     }
    5890              : 
    5891     44745600 :   bool changed = set_ssa_val_to (lhs, lhs);
    5892     44745600 :   vn_nary_op_insert_stmt (stmt, lhs);
    5893     44745600 :   return changed;
    5894              : }
    5895              : 
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);
  modref_summary *summary;

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  /* Look the call up in the reference table.  This also initializes VR1,
     which is re-used for the insertion at the end of this function.  */
  vn_reference_lookup_call (stmt, &vnresult, &vr1);

  /* If the lookup did not succeed for pure functions try to use
     modref info to find a candidate to CSE to.  */
  const unsigned accesses_limit = 8;
  if (!vnresult
      && !vdef
      && lhs
      && gimple_vuse (stmt)
      && (((summary = get_modref_function_summary (stmt, NULL))
	   && !summary->global_memory_read
	   && summary->load_accesses < accesses_limit)
	  || gimple_call_flags (stmt) & ECF_CONST))
    {
      /* First search if we can do something useful and build a
	 vector of all loads we have to check.  */
      bool unknown_memory_access = false;
      auto_vec<ao_ref, accesses_limit> accesses;
      unsigned load_accesses = summary ? summary->load_accesses : 0;
      if (!unknown_memory_access)
	/* Add loads done as part of setting up the call arguments.
	   That's also necessary for CONST functions which will
	   not have a modref summary.  */
	for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) != SSA_NAME
		&& !is_gimple_min_invariant (arg))
	      {
		/* Give up when the argument loads would not fit in the
		   budget remaining after reserving room for the modref
		   load accesses added below.  */
		if (accesses.length () >= accesses_limit - load_accesses)
		  {
		    unknown_memory_access = true;
		    break;
		  }
		accesses.quick_grow (accesses.length () + 1);
		ao_ref_init (&accesses.last (), arg);
	      }
	  }
      if (summary && !unknown_memory_access)
	{
	  /* Add loads as analyzed by IPA modref.  */
	  for (auto base_node : summary->loads->bases)
	    if (unknown_memory_access)
	      break;
	    else for (auto ref_node : base_node->refs)
	      if (unknown_memory_access)
		break;
	      else for (auto access_node : ref_node->accesses)
		{
		  accesses.quick_grow (accesses.length () + 1);
		  ao_ref *r = &accesses.last ();
		  if (!access_node.get_ao_ref (stmt, r))
		    {
		      /* Initialize a ref based on the argument and
			 unknown offset if possible.  */
		      tree arg = access_node.get_call_arg (stmt);
		      if (arg && TREE_CODE (arg) == SSA_NAME)
			arg = SSA_VAL (arg);
		      if (arg
			  && TREE_CODE (arg) == ADDR_EXPR
			  && (arg = get_base_address (arg))
			  && DECL_P (arg))
			{
			  ao_ref_init (r, arg);
			  r->ref = NULL_TREE;
			  r->base = arg;
			}
		      else
			{
			  /* Cannot describe this access - stop and
			     disable the optimistic walk below.  */
			  unknown_memory_access = true;
			  break;
			}
		    }
		  r->base_alias_set = base_node->base;
		  r->ref_alias_set = ref_node->ref;
		}
	}

      /* Walk the VUSE->VDEF chain optimistically trying to find an entry
	 for the call in the hashtable.  */
      unsigned limit = (unknown_memory_access
			? 0
			: (param_sccvn_max_alias_queries_per_access
			   / (accesses.length () + 1)));
      tree saved_vuse = vr1.vuse;
      hashval_t saved_hashcode = vr1.hashcode;
      while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
	{
	  /* Incrementally re-hash VR1 with the virtual operand one step
	     up the chain and retry the lookup there.  */
	  vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
	  gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
	  /* ???  We could use fancy stuff like in walk_non_aliased_vuses, but
	     do not bother for now.  */
	  if (is_a <gphi *> (def))
	    break;
	  vr1.vuse = vuse_ssa_val (gimple_vuse (def));
	  vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
	  vn_reference_lookup_1 (&vr1, &vnresult);
	  limit--;
	}

      /* If we found a candidate to CSE to verify it is valid.  */
      if (vnresult && !accesses.is_empty ())
	{
	  /* Re-walk the skipped VDEFs and reject the candidate if any
	     of them may clobber one of the collected accesses.  */
	  tree vuse = vuse_ssa_val (gimple_vuse (stmt));
	  while (vnresult && vuse != vr1.vuse)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (vuse);
	      for (auto &ref : accesses)
		{
		  /* ???  stmt_may_clobber_ref_p_1 does per stmt constant
		     analysis overhead that we might be able to cache.  */
		  if (stmt_may_clobber_ref_p_1 (def, &ref, true))
		    {
		      vnresult = NULL;
		      break;
		    }
		}
	      vuse = vuse_ssa_val (gimple_vuse (def));
	    }
	}
      /* Restore VR1 for the insertion path below which expects the
	 original virtual operand and hash.  */
      vr1.vuse = saved_vuse;
      vr1.hashcode = saved_hashcode;
    }

  if (vnresult)
    {
      /* Found an earlier equivalent call - value number LHS and VDEF
	 to the recorded results.  */
      if (vdef)
	{
	  if (vnresult->result_vdef)
	    changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
	  else if (!lhs && gimple_call_lhs (stmt))
	    /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
	       as the call still acts as a lhs store.  */
	    changed |= set_ssa_val_to (vdef, vdef);
	  else
	    /* If the call was discovered to be pure or const reflect
	       that as far as possible.  */
	    changed |= set_ssa_val_to (vdef,
				       vuse_ssa_val (gimple_vuse (stmt)));
	}

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      /* No earlier call to CSE to - value number LHS/VDEF to themselves
	 and enter the call into the reference table.  */
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered an indirect call's function to
	     one not clobbering memory value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE))
		  /* If stmt has non-SSA_NAME lhs, value number the
		     vdef to itself, as the call still acts as a lhs
		     store.  */
		  && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->punned = vr1.punned;
      vr2->set = vr1.set;
      vr2->offset = vr1.offset;
      vr2->max_size = vr1.max_size;
      vr2->base_set = vr1.base_set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      vr2->value_id = 0;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      /* Chain the new entry for unwinding on table invalidation.  */
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
    6113              : 
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree result;
  vn_reference_t res;

  tree vuse = gimple_vuse (stmt);
  tree last_vuse = vuse;
  /* LAST_VUSE is adjusted by the lookup; the dump below suggests it is
     the virtual operand the walk ended at - TODO confirm against
     vn_reference_lookup.  */
  result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* Constants can be folded through the conversion directly.  */
      if (CONSTANT_CLASS_P (result))
	result = const_unop (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      else
	{
	  /* We will be setting the value number of lhs to the value number
	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	     So first simplify and lookup this expression to see if it
	     is already available.  */
	  gimple_match_op res_op (gimple_match_cond::UNCOND,
				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
	  result = vn_nary_build_or_lookup (&res_op);
	  if (result
	      && TREE_CODE (result) == SSA_NAME
	      && VN_INFO (result)->needs_insertion)
	    /* Track whether this is the canonical expression for different
	       typed loads.  We use that as a stopgap measure for code
	       hoisting when dealing with floating point loads.  */
	    res->punned = true;
	}

      /* When building the conversion fails avoid inserting the reference
	 again.  */
      if (!result)
	return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      /* Nothing to CSE to - value number LHS to itself and record the
	 load so later equivalent loads can be CSEd to it.  Value numbers
	 must be set before the insert since it valueizes in place.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
      if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using extra use virtual operand ");
	      print_generic_expr (dump_file, last_vuse);
	      fprintf (dump_file, "\n");
	    }
	  /* Record the load with the original virtual operand as well so
	     lookups starting from VUSE also find it.  */
	  vn_reference_insert (op, lhs, vuse, NULL_TREE);
	}
    }

  return changed;
}
    6180              : 
    6181              : 
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* Work with the valueized stored value.  */
  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
			   || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if ((vnresult->set != set
	       && ! alias_set_subset_of (set, vnresult->set))
	      || (vnresult->base_set != base_set
		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      /* The store creates a new memory state.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  /* Record the whole store as a MODIFY_EXPR reference too.  */
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (!vnresult)
	    vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
    6283              : 
    6284              : /* Visit and value number PHI, return true if the value number
    6285              :    changed.  When BACKEDGES_VARYING_P is true then assume all
    6286              :    backedge values are varying.  When INSERTED is not NULL then
    6287              :    this is just a ahead query for a possible iteration, set INSERTED
    6288              :    to true if we'd insert into the hashtable.  */
    6289              : 
    6290              : static bool
    6291     34307403 : visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
    6292              : {
    6293     34307403 :   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
    6294     34307403 :   bool seen_undef_visited = false;
    6295     34307403 :   tree backedge_val = NULL_TREE;
    6296     34307403 :   bool seen_non_backedge = false;
    6297     34307403 :   tree sameval_base = NULL_TREE;
    6298     34307403 :   poly_int64 soff, doff;
    6299     34307403 :   unsigned n_executable = 0;
    6300     34307403 :   edge sameval_e = NULL;
    6301              : 
    6302              :   /* TODO: We could check for this in initialization, and replace this
    6303              :      with a gcc_assert.  */
    6304     34307403 :   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    6305        30018 :     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    6306              : 
    6307              :   /* We track whether a PHI was CSEd to avoid excessive iterations
    6308              :      that would be necessary only because the PHI changed arguments
    6309              :      but not value.  */
    6310     34277385 :   if (!inserted)
    6311     26735899 :     gimple_set_plf (phi, GF_PLF_1, false);
    6312              : 
    6313     34277385 :   basic_block bb = gimple_bb (phi);
    6314              : 
    6315              :   /* For the equivalence handling below make sure to first process an
    6316              :      edge with a non-constant.  */
    6317     34277385 :   auto_vec<edge, 2> preds;
    6318     68554770 :   preds.reserve_exact (EDGE_COUNT (bb->preds));
    6319     34277385 :   bool seen_nonconstant = false;
    6320    113200839 :   for (unsigned i = 0; i < EDGE_COUNT (bb->preds); ++i)
    6321              :     {
    6322     78923454 :       edge e = EDGE_PRED (bb, i);
    6323     78923454 :       preds.quick_push (e);
    6324     78923454 :       if (!seen_nonconstant)
    6325              :         {
    6326     41914066 :           tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
    6327     41914066 :           if (TREE_CODE (def) == SSA_NAME)
    6328              :             {
    6329     32565149 :               seen_nonconstant = true;
    6330     32565149 :               if (i != 0)
    6331      5710317 :                 std::swap (preds[0], preds[i]);
    6332              :             }
    6333              :         }
    6334              :     }
    6335              : 
    6336              :   /* See if all non-TOP arguments have the same value.  TOP is
    6337              :      equivalent to everything, so we can ignore it.  */
    6338    144251289 :   for (edge e : preds)
    6339     68122027 :     if (e->flags & EDGE_EXECUTABLE)
    6340              :       {
    6341     63121698 :         tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
    6342              : 
    6343     63121698 :         if (def == PHI_RESULT (phi))
    6344       318925 :           continue;
    6345     62824553 :         ++n_executable;
    6346     62824553 :         bool visited = true;
    6347     62824553 :         if (TREE_CODE (def) == SSA_NAME)
    6348              :           {
    6349     50686536 :             tree val = SSA_VAL (def, &visited);
    6350     50686536 :             if (SSA_NAME_IS_DEFAULT_DEF (def))
    6351      2667270 :               visited = true;
    6352     50686536 :             if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
    6353     48149387 :               def = val;
    6354     50686536 :             if (e->flags & EDGE_DFS_BACK)
    6355     15384958 :               backedge_val = def;
    6356              :           }
    6357     62824553 :         if (!(e->flags & EDGE_DFS_BACK))
    6358     47272901 :           seen_non_backedge = true;
    6359     62824553 :         if (def == VN_TOP)
    6360              :           ;
    6361              :         /* Ignore undefined defs for sameval but record one.  */
    6362     62824553 :         else if (TREE_CODE (def) == SSA_NAME
    6363     47310958 :                  && ! virtual_operand_p (def)
    6364     86997266 :                  && ssa_undefined_value_p (def, false))
    6365              :           {
    6366       225788 :             if (!seen_undef
    6367              :                 /* Avoid having not visited undefined defs if we also have
    6368              :                    a visited one.  */
    6369        28879 :                 || (!seen_undef_visited && visited))
    6370              :               {
    6371       196912 :                 seen_undef = def;
    6372       196912 :                 seen_undef_visited = visited;
    6373              :               }
    6374              :           }
    6375     62598765 :         else if (sameval == VN_TOP)
    6376              :           {
    6377              :             sameval = def;
    6378              :             sameval_e = e;
    6379              :           }
    6380     28368079 :         else if (expressions_equal_p (def, sameval))
    6381              :           sameval_e = NULL;
    6382     44635261 :         else if (virtual_operand_p (def))
    6383              :           {
    6384              :             sameval = NULL_TREE;
    6385     26702893 :             break;
    6386              :           }
    6387              :         else
    6388              :           {
    6389              :             /* We know we're arriving only with invariant addresses here,
    6390              :                try harder comparing them.  We can do some caching here
    6391              :                which we cannot do in expressions_equal_p.  */
    6392     16727555 :             if (TREE_CODE (def) == ADDR_EXPR
    6393       391128 :                 && TREE_CODE (sameval) == ADDR_EXPR
    6394       110906 :                 && sameval_base != (void *)-1)
    6395              :               {
    6396       110906 :                 if (!sameval_base)
    6397       110904 :                   sameval_base = get_addr_base_and_unit_offset
    6398       110904 :                                    (TREE_OPERAND (sameval, 0), &soff);
    6399       110904 :                 if (!sameval_base)
    6400              :                   sameval_base = (tree)(void *)-1;
    6401       110911 :                 else if ((get_addr_base_and_unit_offset
    6402       110906 :                             (TREE_OPERAND (def, 0), &doff) == sameval_base)
    6403       110906 :                          && known_eq (soff, doff))
    6404            5 :                   continue;
    6405              :               }
    6406              :             /* There's also the possibility to use equivalences.  */
    6407     32367683 :             if (!FLOAT_TYPE_P (TREE_TYPE (def))
    6408              :                 /* But only do this if we didn't force any of sameval or
    6409              :                    val to VARYING because of backedge processing rules.  */
    6410     15536312 :                 && (TREE_CODE (sameval) != SSA_NAME
    6411     12260868 :                     || SSA_VAL (sameval) == sameval)
    6412     32263799 :                 && (TREE_CODE (def) != SSA_NAME || SSA_VAL (def) == def))
    6413              :               {
    6414     15536235 :                 vn_nary_op_t vnresult;
    6415     15536235 :                 tree ops[2];
    6416     15536235 :                 ops[0] = def;
    6417     15536235 :                 ops[1] = sameval;
    6418              :                 /* Canonicalize the operands order for eq below. */
    6419     15536235 :                 if (tree_swap_operands_p (ops[0], ops[1]))
    6420      9382293 :                   std::swap (ops[0], ops[1]);
    6421     15536235 :                 tree val = vn_nary_op_lookup_pieces (2, EQ_EXPR,
    6422              :                                                      boolean_type_node,
    6423              :                                                      ops, &vnresult);
    6424     15536235 :                 if (! val && vnresult && vnresult->predicated_values)
    6425              :                   {
    6426       213301 :                     val = vn_nary_op_get_predicated_value (vnresult, e);
    6427       123891 :                     if (val && integer_truep (val)
    6428       235198 :                         && !(sameval_e && (sameval_e->flags & EDGE_DFS_BACK)))
    6429              :                       {
    6430        21775 :                         if (dump_file && (dump_flags & TDF_DETAILS))
    6431              :                           {
    6432            2 :                             fprintf (dump_file, "Predication says ");
    6433            2 :                             print_generic_expr (dump_file, def, TDF_NONE);
    6434            2 :                             fprintf (dump_file, " and ");
    6435            2 :                             print_generic_expr (dump_file, sameval, TDF_NONE);
    6436            2 :                             fprintf (dump_file, " are equal on edge %d -> %d\n",
    6437            2 :                                      e->src->index, e->dest->index);
    6438              :                           }
    6439        21775 :                         continue;
    6440              :                       }
    6441              :                   }
    6442              :               }
    6443              :             sameval = NULL_TREE;
    6444              :             break;
    6445              :           }
    6446              :       }
    6447              : 
    6448              :   /* If the value we want to use is flowing over the backedge and we
    6449              :      should take it as VARYING but it has a non-VARYING value drop to
    6450              :      VARYING.
    6451              :      If we value-number a virtual operand never value-number to the
    6452              :      value from the backedge as that confuses the alias-walking code.
    6453              :      See gcc.dg/torture/pr87176.c.  If the value is the same on a
    6454              :      non-backedge everything is OK though.  */
    6455     34277385 :   bool visited_p;
    6456     34277385 :   if ((backedge_val
    6457     34277385 :        && !seen_non_backedge
    6458         2065 :        && TREE_CODE (backedge_val) == SSA_NAME
    6459         1785 :        && sameval == backedge_val
    6460          341 :        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
    6461           62 :            || SSA_VAL (backedge_val) != backedge_val))
    6462              :       /* Do not value-number a virtual operand to sth not visited though
    6463              :          given that allows us to escape a region in alias walking.  */
    6464     34279171 :       || (sameval
    6465      7574213 :           && TREE_CODE (sameval) == SSA_NAME
    6466      4488661 :           && !SSA_NAME_IS_DEFAULT_DEF (sameval)
    6467      3797349 :           && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
    6468      1902419 :           && (SSA_VAL (sameval, &visited_p), !visited_p)))
    6469              :     /* Note this just drops to VARYING without inserting the PHI into
    6470              :        the hashes.  */
    6471       295782 :     result = PHI_RESULT (phi);
    6472              :   /* If none of the edges was executable keep the value-number at VN_TOP,
    6473              :      if only a single edge is exectuable use its value.  */
    6474     33981603 :   else if (n_executable <= 1)
    6475      6522726 :     result = seen_undef ? seen_undef : sameval;
    6476              :   /* If we saw only undefined values and VN_TOP use one of the
    6477              :      undefined values.  */
    6478     27458877 :   else if (sameval == VN_TOP)
    6479      7097132 :     result = (seen_undef && seen_undef_visited) ? seen_undef : sameval;
    6480              :   /* First see if it is equivalent to a phi node in this block.  We prefer
    6481              :      this as it allows IV elimination - see PRs 66502 and 67167.  */
    6482     27453579 :   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    6483              :     {
    6484      4187251 :       if (!inserted
    6485        67486 :           && TREE_CODE (result) == SSA_NAME
    6486      4254737 :           && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
    6487              :         {
    6488        67486 :           gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
    6489        67486 :           if (dump_file && (dump_flags & TDF_DETAILS))
    6490              :             {
    6491            6 :               fprintf (dump_file, "Marking CSEd to PHI node ");
    6492            6 :               print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
    6493              :                                  0, TDF_SLIM);
    6494            6 :               fprintf (dump_file, "\n");
    6495              :             }
    6496              :         }
    6497              :     }
    6498              :   /* If all values are the same use that, unless we've seen undefined
    6499              :      values as well and the value isn't constant.
    6500              :      CCP/copyprop have the same restriction to not remove uninit warnings.  */
    6501     23266328 :   else if (sameval
    6502     23266328 :            && (! seen_undef || is_gimple_min_invariant (sameval)))
    6503              :     result = sameval;
    6504              :   else
    6505              :     {
    6506     22655819 :       result = PHI_RESULT (phi);
    6507              :       /* Only insert PHIs that are varying, for constant value numbers
    6508              :          we mess up equivalences otherwise as we are only comparing
    6509              :          the immediate controlling predicates.  */
    6510     22655819 :       vn_phi_insert (phi, result, backedges_varying_p);
    6511     22655819 :       if (inserted)
    6512      3270368 :         *inserted = true;
    6513              :     }
    6514              : 
    6515     34277385 :   return set_ssa_val_to (PHI_RESULT (phi), result);
    6516     34277385 : }
    6517              : 
    6518              : /* Try to simplify RHS using equivalences and constant folding.  */
    6519              : 
    6520              : static tree
    6521    125774107 : try_to_simplify (gassign *stmt)
    6522              : {
    6523    125774107 :   enum tree_code code = gimple_assign_rhs_code (stmt);
    6524    125774107 :   tree tem;
    6525              : 
    6526              :   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
    6527              :      in this case, there is no point in doing extra work.  */
    6528    125774107 :   if (code == SSA_NAME)
    6529              :     return NULL_TREE;
    6530              : 
    6531              :   /* First try constant folding based on our current lattice.  */
    6532    111038148 :   mprts_hook = vn_lookup_simplify_result;
    6533    111038148 :   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
    6534    111038148 :   mprts_hook = NULL;
    6535    111038148 :   if (tem
    6536    111038148 :       && (TREE_CODE (tem) == SSA_NAME
    6537     24643085 :           || is_gimple_min_invariant (tem)))
    6538     24724347 :     return tem;
    6539              : 
    6540              :   return NULL_TREE;
    6541              : }
    6542              : 
/* Visit and value number STMT, return true if the value number
   changed.  BACKEDGES_VARYING_P is forwarded to visit_phi for PHI
   statements.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* Dispatch on the statement kind.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  /* Statements with volatile operands cannot be value-numbered;
     force their definitions to VARYING.  */
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies. Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      /* Try to simplify the RHS via the current lattice.  */
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      /* A folded call no longer clobbers memory; value-number
		 its vdef to the incoming vuse.  */
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
	{
	  fn = SSA_VAL (fn);
	  if (TREE_CODE (fn) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
	}
      if ((/* Calls to the same function with the same vuse
	      and the same operands do not necessarily return the same
	      value, unless they're pure or const.  */
	   ((gimple_call_flags (call_stmt) | extra_fnflags)
	    & (ECF_PURE | ECF_CONST))
	   /* If calls have a vdef, subsequent calls won't have
	      the same incoming vuse.  So, if 2 calls with vdef have the
	      same vuse, we know they're not subsequent.
	      We can value number 2 calls to the same function with the
	      same vuse and the same operands which are not subsequent
	      the same, because there is no code in the program that can
	      compare the 2 values...  */
	   || (gimple_vdef (call_stmt)
	       /* ... unless the call returns a pointer which does
		  not alias with anything else.  In which case the
		  information that the values are distinct are encoded
		  in the IL.  */
	       && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
	       /* Only perform the following when being called from PRE
		  which embeds tail merging.  */
	       && default_vn_walk_kind == VN_WALK))
	  /* Do not process .DEFERRED_INIT since that confuses uninit
	     analysis.  */
	  && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  /* All remaining statement kinds get VARYING definitions.  */
  else
    changed = defs_to_varying (stmt);
  /* Common exit for the shortcut paths above.  */
 done:
  return changed;
}
    6746              : 
    6747              : 
    6748              : /* Allocate a value number table.  */
    6749              : 
    6750              : static void
    6751      6100003 : allocate_vn_table (vn_tables_t table, unsigned size)
    6752              : {
    6753      6100003 :   table->phis = new vn_phi_table_type (size);
    6754      6100003 :   table->nary = new vn_nary_op_table_type (size);
    6755      6100003 :   table->references = new vn_reference_table_type (size);
    6756      6100003 : }
    6757              : 
    6758              : /* Free a value number table.  */
    6759              : 
    6760              : static void
    6761      6100003 : free_vn_table (vn_tables_t table)
    6762              : {
    6763              :   /* Walk over elements and release vectors.  */
    6764      6100003 :   vn_reference_iterator_type hir;
    6765      6100003 :   vn_reference_t vr;
    6766    144960621 :   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    6767     69430309 :     vr->operands.release ();
    6768      6100003 :   delete table->phis;
    6769      6100003 :   table->phis = NULL;
    6770      6100003 :   delete table->nary;
    6771      6100003 :   table->nary = NULL;
    6772      6100003 :   delete table->references;
    6773      6100003 :   table->references = NULL;
    6774      6100003 : }
    6775              : 
    6776              : /* Set *ID according to RESULT.  */
    6777              : 
    6778              : static void
    6779     34316419 : set_value_id_for_result (tree result, unsigned int *id)
    6780              : {
    6781     34316419 :   if (result && TREE_CODE (result) == SSA_NAME)
    6782     21395895 :     *id = VN_INFO (result)->value_id;
    6783      9690949 :   else if (result && is_gimple_min_invariant (result))
    6784      3658690 :     *id = get_or_alloc_constant_value_id (result);
    6785              :   else
    6786      9261834 :     *id = get_next_value_id ();
    6787     34316419 : }
    6788              : 
    6789              : /* Set the value ids in the valid hash tables.  */
    6790              : 
    6791              : static void
    6792       960589 : set_hashtable_value_ids (void)
    6793              : {
    6794       960589 :   vn_nary_op_iterator_type hin;
    6795       960589 :   vn_phi_iterator_type hip;
    6796       960589 :   vn_reference_iterator_type hir;
    6797       960589 :   vn_nary_op_t vno;
    6798       960589 :   vn_reference_t vr;
    6799       960589 :   vn_phi_t vp;
    6800              : 
    6801              :   /* Now set the value ids of the things we had put in the hash
    6802              :      table.  */
    6803              : 
    6804     48745281 :   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    6805     23892346 :     if (! vno->predicated_values)
    6806      7736433 :       set_value_id_for_result (vno->u.result, &vno->value_id);
    6807              : 
    6808      8991743 :   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    6809      4015577 :     set_value_id_for_result (vp->result, &vp->value_id);
    6810              : 
    6811     46089407 :   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
    6812              :                                hir)
    6813     22564409 :     set_value_id_for_result (vr->result, &vr->value_id);
    6814       960589 : }
    6815              : 
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  /* next_value_id is the id handed out next (see get_next_value_id),
     so it bounds every id allocated so far.  */
  return next_value_id;
}
    6823              : 
/* Return the maximum constant value id we have ever seen.  */

unsigned int
get_max_constant_value_id (void)
{
  /* Constant ids are negative and allocated counting downwards (see
     get_next_constant_value_id); negating the counter yields their
     maximum magnitude.  */
  return -next_constant_value_id;
}
    6831              : 
    6832              : /* Return the next unique value id.  */
    6833              : 
    6834              : unsigned int
    6835     48823876 : get_next_value_id (void)
    6836              : {
    6837     48823876 :   gcc_checking_assert ((int)next_value_id > 0);
    6838     48823876 :   return next_value_id++;
    6839              : }
    6840              : 
    6841              : /* Return the next unique value id for constants.  */
    6842              : 
    6843              : unsigned int
    6844      2514565 : get_next_constant_value_id (void)
    6845              : {
    6846      2514565 :   gcc_checking_assert (next_constant_value_id < 0);
    6847      2514565 :   return next_constant_value_id--;
    6848              : }
    6849              : 
    6850              : 
    6851              : /* Compare two expressions E1 and E2 and return true if they are equal.
    6852              :    If match_vn_top_optimistically is true then VN_TOP is equal to anything,
    6853              :    otherwise VN_TOP only matches VN_TOP.  */
    6854              : 
    6855              : bool
    6856    245048535 : expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
    6857              : {
    6858              :   /* The obvious case.  */
    6859    245048535 :   if (e1 == e2)
    6860              :     return true;
    6861              : 
    6862              :   /* If either one is VN_TOP consider them equal.  */
    6863     70396671 :   if (match_vn_top_optimistically
    6864     65546762 :       && (e1 == VN_TOP || e2 == VN_TOP))
    6865              :     return true;
    6866              : 
    6867              :   /* If only one of them is null, they cannot be equal.  While in general
    6868              :      this should not happen for operations like TARGET_MEM_REF some
    6869              :      operands are optional and an identity value we could substitute
    6870              :      has differing semantics.  */
    6871     70396671 :   if (!e1 || !e2)
    6872              :     return false;
    6873              : 
    6874              :   /* SSA_NAME compare pointer equal.  */
    6875     70396671 :   if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
    6876              :     return false;
    6877              : 
    6878              :   /* Now perform the actual comparison.  */
    6879     34904511 :   if (TREE_CODE (e1) == TREE_CODE (e2)
    6880     34904511 :       && operand_equal_p (e1, e2, OEP_PURE_SAME))
    6881              :     return true;
    6882              : 
    6883              :   return false;
    6884              : }
    6885              : 
    6886              : 
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  /* For comparisons and unary/binary operations derive the trap
     conditions from the operation's type.  */
  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  /* NaNs only matter with trapping math and without
	     finite-math-only; signaling NaNs have their own flag.  */
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  /* The second operand influences trapping, e.g. for division.  */
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv, honor_nans, honor_snans,
				       rhs2, &handled);
  if (handled && ret)
    return true;

  /* Even when the operation itself is safe one of its operands
     might trap.  */
  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
    6930              : 
/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  /* The outermost operand encodes the kind of access.  */
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
      return true;
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  /* Walk all reference operands; any variable index, out-of-bound
     constant index or trapping base makes the reference possibly
     trapping.  */
  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  /* A variable field offset (op1) may address outside the
	     object.  */
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	  /* A variable index may be out of bounds.  */
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_REF:
	  {
	    /* Non-constant indices may trap.  */
	    if (TREE_CODE (op->op0) != INTEGER_CST)
	      return true;

	    /* Check the constant index against the array domain
	       (mirrors !in_array_bounds).  The next operand describes
	       the array object; its type carries the index domain.
	       NOTE(review): relies on the VN operand layout where the
	       containing object follows the ARRAY_REF operand.  */
	    tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
	    if (!domain_type)
	      return true;

	    /* op1 presumably records the domain minimum as captured
	       when the operands were built — conservatively give up
	       unless both bounds are INTEGER_CSTs.  */
	    tree min = op->op1;
	    tree max = TYPE_MAX_VALUE (domain_type);
	    if (!min
		|| !max
		|| TREE_CODE (min) != INTEGER_CST
		|| TREE_CODE (max) != INTEGER_CST)
	      return true;

	    /* Out-of-bound constant index traps.  */
	    if (tree_int_cst_lt (op->op0, min)
		|| tree_int_cst_lt (max, op->op0))
	      return true;

	    break;
	  }
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	/* The following are the address bases.  */
	case SSA_NAME:
	  /* A pointer base may point anywhere.  */
	  return true;
	case ADDR_EXPR:
	  /* For a decl base defer to whether the addressed object
	     itself may trap.  */
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}
    7005              : 
/* Construct the elimination domwalk walking in DIRECTION.  A non-NULL
   INSERTED_EXPRS_ bitmap means we run after PRE insertion (do_pre) and
   records the SSA names PRE inserted.  */

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  /* Bitmaps of block indices that need EH resp. abnormal-edge cleanup
     after elimination; released in the destructor.  */
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}
    7015              : 
/* Destructor.  Release the cleanup bitmaps allocated by the
   constructor.  */

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
    7021              : 
    7022              : /* Return a leader for OP that is available at the current point of the
    7023              :    eliminate domwalk.  */
    7024              : 
    7025              : tree
    7026    180048254 : eliminate_dom_walker::eliminate_avail (basic_block, tree op)
    7027              : {
    7028    180048254 :   tree valnum = VN_INFO (op)->valnum;
    7029    180048254 :   if (TREE_CODE (valnum) == SSA_NAME)
    7030              :     {
    7031    175100957 :       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
    7032              :         return valnum;
    7033    305121150 :       if (avail.length () > SSA_NAME_VERSION (valnum))
    7034              :         {
    7035    137580261 :           tree av = avail[SSA_NAME_VERSION (valnum)];
    7036              :           /* When PRE discovers a new redundancy there's no way to unite
    7037              :              the value classes so it instead inserts a copy old-val = new-val.
    7038              :              Look through such copies here, providing one more level of
    7039              :              simplification at elimination time.  */
    7040    137580261 :           gassign *ass;
    7041    241656394 :           if (av && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (av))))
    7042     73697925 :             if (gimple_assign_rhs_class (ass) == GIMPLE_SINGLE_RHS)
    7043              :               {
    7044     38906898 :                 tree rhs1 = gimple_assign_rhs1 (ass);
    7045     38906898 :                 if (CONSTANT_CLASS_P (rhs1)
    7046     38906898 :                     || (TREE_CODE (rhs1) == SSA_NAME
    7047         9654 :                         && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
    7048              :                   av = rhs1;
    7049              :               }
    7050    137580261 :           return av;
    7051              :         }
    7052              :     }
    7053      4947297 :   else if (is_gimple_min_invariant (valnum))
    7054              :     return valnum;
    7055              :   return NULL_TREE;
    7056              : }
    7057              : 
    7058              : /* At the current point of the eliminate domwalk make OP available.  */
    7059              : 
    7060              : void
    7061     49864335 : eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
    7062              : {
    7063     49864335 :   tree valnum = VN_INFO (op)->valnum;
    7064     49864335 :   if (TREE_CODE (valnum) == SSA_NAME)
    7065              :     {
    7066     96400194 :       if (avail.length () <= SSA_NAME_VERSION (valnum))
    7067     16705024 :         avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
    7068     49864335 :       tree pushop = op;
    7069     49864335 :       if (avail[SSA_NAME_VERSION (valnum)])
    7070        43815 :         pushop = avail[SSA_NAME_VERSION (valnum)];
    7071     49864335 :       avail_stack.safe_push (pushop);
    7072     49864335 :       avail[SSA_NAME_VERSION (valnum)] = op;
    7073              :     }
    7074     49864335 : }
    7075              : 
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
					gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  /* Only re-materialize a limited set of operations: conversions,
     VIEW_CONVERT_EXPR, negation, BIT_FIELD_REF and BIT_AND with a
     constant mask.  */
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  /* Find the operand we need a leader for; for VIEW_CONVERT_EXPR and
     BIT_FIELD_REF it sits inside the reference tree.  */
  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  /* Re-build the expression on top of the leader into a fresh
     sequence.  */
  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  /* If simplification folded RES to something already defined
     elsewhere (not a fresh name defined by STMTS) we cannot use it.  */
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (bb, res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }
  else
    {
      /* Insertion succeeded; emit the sequence and value-number the
	 new name to VAL.  */
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      vn_ssa_aux_t vn_info = VN_INFO (res);
      vn_info->valnum = val;
      vn_info->visited = true;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
    7163              : 
    7164              : void
    7165    345884618 : eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
    7166              : {
    7167    345884618 :   tree sprime = NULL_TREE;
    7168    345884618 :   gimple *stmt = gsi_stmt (*gsi);
    7169    345884618 :   tree lhs = gimple_get_lhs (stmt);
    7170    118864566 :   if (lhs && TREE_CODE (lhs) == SSA_NAME
    7171    164760672 :       && !gimple_has_volatile_ops (stmt)
    7172              :       /* See PR43491.  Do not replace a global register variable when
    7173              :          it is a the RHS of an assignment.  Do replace local register
    7174              :          variables since gcc does not guarantee a local variable will
    7175              :          be allocated in register.
    7176              :          ???  The fix isn't effective here.  This should instead
    7177              :          be ensured by not value-numbering them the same but treating
    7178              :          them like volatiles?  */
    7179    427202311 :       && !(gimple_assign_single_p (stmt)
    7180     34972210 :            && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
    7181      2459119 :                && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
    7182         4172 :                && is_global_var (gimple_assign_rhs1 (stmt)))))
    7183              :     {
    7184     81317449 :       sprime = eliminate_avail (b, lhs);
    7185     81317449 :       if (!sprime)
    7186              :         {
    7187              :           /* If there is no existing usable leader but SCCVN thinks
    7188              :              it has an expression it wants to use as replacement,
    7189              :              insert that.  */
    7190     68558687 :           tree val = VN_INFO (lhs)->valnum;
    7191     68558687 :           vn_ssa_aux_t vn_info;
    7192     68558687 :           if (val != VN_TOP
    7193     68558687 :               && TREE_CODE (val) == SSA_NAME
    7194     68558687 :               && (vn_info = VN_INFO (val), true)
    7195     68558687 :               && vn_info->needs_insertion
    7196       313799 :               && vn_info->expr != NULL
    7197     68682980 :               && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
    7198        22719 :             eliminate_push_avail (b, sprime);
    7199              :         }
    7200              : 
    7201              :       /* If this now constitutes a copy duplicate points-to
    7202              :          and range info appropriately.  This is especially
    7203              :          important for inserted code.  */
    7204     68558687 :       if (sprime
    7205     12781481 :           && TREE_CODE (sprime) == SSA_NAME)
    7206      8805436 :         maybe_duplicate_ssa_info_at_copy (lhs, sprime);
    7207              : 
    7208              :       /* Inhibit the use of an inserted PHI on a loop header when
    7209              :          the address of the memory reference is a simple induction
    7210              :          variable.  In other cases the vectorizer won't do anything
    7211              :          anyway (either it's loop invariant or a complicated
    7212              :          expression).  */
    7213      8805436 :       if (sprime
    7214     12781481 :           && TREE_CODE (sprime) == SSA_NAME
    7215      8805436 :           && do_pre
    7216       921769 :           && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
    7217       903275 :           && loop_outer (b->loop_father)
    7218       381527 :           && has_zero_uses (sprime)
    7219       187950 :           && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
    7220       187795 :           && gimple_assign_load_p (stmt))
    7221              :         {
    7222       101292 :           gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
    7223       101292 :           basic_block def_bb = gimple_bb (def_stmt);
    7224       101292 :           if (gimple_code (def_stmt) == GIMPLE_PHI
    7225       101292 :               && def_bb->loop_father->header == def_bb)
    7226              :             {
    7227        64745 :               loop_p loop = def_bb->loop_father;
    7228        64745 :               ssa_op_iter iter;
    7229        64745 :               tree op;
    7230        64745 :               bool found = false;
    7231        82010 :               FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    7232              :                 {
    7233        61038 :                   affine_iv iv;
    7234        61038 :                   def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
    7235        61038 :                   if (def_bb
    7236        55298 :                       && flow_bb_inside_loop_p (loop, def_bb)
    7237       111341 :                       && simple_iv (loop, loop, op, &iv, true))
    7238              :                     {
    7239        43773 :                       found = true;
    7240        43773 :                       break;
    7241              :                     }
    7242              :                 }
    7243        20972 :               if (found)
    7244              :                 {
    7245        43773 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    7246              :                     {
    7247            3 :                       fprintf (dump_file, "Not replacing ");
    7248            3 :                       print_gimple_expr (dump_file, stmt, 0);
    7249            3 :                       fprintf (dump_file, " with ");
    7250            3 :                       print_generic_expr (dump_file, sprime);
    7251            3 :                       fprintf (dump_file, " which would add a loop"
    7252              :                                " carried dependence to loop %d\n",
    7253              :                                loop->num);
    7254              :                     }
    7255              :                   /* Don't keep sprime available.  */
    7256        43773 :                   sprime = NULL_TREE;
    7257              :                 }
    7258              :             }
    7259              :         }
    7260              : 
    7261     81317449 :       if (sprime)
    7262              :         {
    7263              :           /* If we can propagate the value computed for LHS into
    7264              :              all uses don't bother doing anything with this stmt.  */
    7265     12737708 :           if (may_propagate_copy (lhs, sprime))
    7266              :             {
    7267              :               /* Mark it for removal.  */
    7268     12735802 :               to_remove.safe_push (stmt);
    7269              : 
    7270              :               /* ???  Don't count copy/constant propagations.  */
    7271     12735802 :               if (gimple_assign_single_p (stmt)
    7272     12735802 :                   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
    7273      4440572 :                       || gimple_assign_rhs1 (stmt) == sprime))
    7274     13551239 :                 return;
    7275              : 
    7276      7710058 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7277              :                 {
    7278        18832 :                   fprintf (dump_file, "Replaced ");
    7279        18832 :                   print_gimple_expr (dump_file, stmt, 0);
    7280        18832 :                   fprintf (dump_file, " with ");
    7281        18832 :                   print_generic_expr (dump_file, sprime);
    7282        18832 :                   fprintf (dump_file, " in all uses of ");
    7283        18832 :                   print_gimple_stmt (dump_file, stmt, 0);
    7284              :                 }
    7285              : 
    7286      7710058 :               eliminations++;
    7287      7710058 :               return;
    7288              :             }
    7289              : 
    7290              :           /* If this is an assignment from our leader (which
    7291              :              happens in the case the value-number is a constant)
    7292              :              then there is nothing to do.  Likewise if we run into
    7293              :              inserted code that needed a conversion because of
    7294              :              our type-agnostic value-numbering of loads.  */
    7295         1906 :           if ((gimple_assign_single_p (stmt)
    7296            1 :                || (is_gimple_assign (stmt)
    7297            1 :                    && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
    7298            0 :                        || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
    7299         1907 :               && sprime == gimple_assign_rhs1 (stmt))
    7300              :             return;
    7301              : 
    7302              :           /* Else replace its RHS.  */
    7303          718 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7304              :             {
    7305            0 :               fprintf (dump_file, "Replaced ");
    7306            0 :               print_gimple_expr (dump_file, stmt, 0);
    7307            0 :               fprintf (dump_file, " with ");
    7308            0 :               print_generic_expr (dump_file, sprime);
    7309            0 :               fprintf (dump_file, " in ");
    7310            0 :               print_gimple_stmt (dump_file, stmt, 0);
    7311              :             }
    7312          718 :           eliminations++;
    7313              : 
    7314          718 :           bool can_make_abnormal_goto = (is_gimple_call (stmt)
    7315          718 :                                          && stmt_can_make_abnormal_goto (stmt));
    7316          718 :           gimple *orig_stmt = stmt;
    7317          718 :           if (!useless_type_conversion_p (TREE_TYPE (lhs),
    7318          718 :                                           TREE_TYPE (sprime)))
    7319              :             {
    7320              :               /* We preserve conversions to but not from function or method
    7321              :                  types.  This asymmetry makes it necessary to re-instantiate
    7322              :                  conversions here.  */
    7323          716 :               if (POINTER_TYPE_P (TREE_TYPE (lhs))
    7324          716 :                   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
    7325          716 :                 sprime = fold_convert (TREE_TYPE (lhs), sprime);
    7326              :               else
    7327            0 :                 gcc_unreachable ();
    7328              :             }
    7329          718 :           tree vdef = gimple_vdef (stmt);
    7330          718 :           tree vuse = gimple_vuse (stmt);
    7331          718 :           propagate_tree_value_into_stmt (gsi, sprime);
    7332          718 :           stmt = gsi_stmt (*gsi);
    7333          718 :           update_stmt (stmt);
    7334              :           /* In case the VDEF on the original stmt was released, value-number
    7335              :              it to the VUSE.  This is to make vuse_ssa_val able to skip
    7336              :              released virtual operands.  */
    7337         1436 :           if (vdef != gimple_vdef (stmt))
    7338              :             {
    7339            0 :               gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
    7340            0 :               VN_INFO (vdef)->valnum = vuse;
    7341              :             }
    7342              : 
    7343              :           /* If we removed EH side-effects from the statement, clean
    7344              :              its EH information.  */
    7345          718 :           if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
    7346              :             {
    7347            0 :               bitmap_set_bit (need_eh_cleanup,
    7348            0 :                               gimple_bb (stmt)->index);
    7349            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7350            0 :                 fprintf (dump_file, "  Removed EH side-effects.\n");
    7351              :             }
    7352              : 
    7353              :           /* Likewise for AB side-effects.  */
    7354          718 :           if (can_make_abnormal_goto
    7355          718 :               && !stmt_can_make_abnormal_goto (stmt))
    7356              :             {
    7357            0 :               bitmap_set_bit (need_ab_cleanup,
    7358            0 :                               gimple_bb (stmt)->index);
    7359            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7360            0 :                 fprintf (dump_file, "  Removed AB side-effects.\n");
    7361              :             }
    7362              : 
    7363          718 :           return;
    7364              :         }
    7365              :     }
    7366              : 
    7367              :   /* If the statement is a scalar store, see if the expression
    7368              :      has the same value number as its rhs.  If so, the store is
    7369              :      dead.  */
    7370    333146910 :   if (gimple_assign_single_p (stmt)
    7371    125324618 :       && !gimple_has_volatile_ops (stmt)
    7372     54717232 :       && !is_gimple_reg (gimple_assign_lhs (stmt))
    7373     28182658 :       && (TREE_CODE (gimple_assign_lhs (stmt)) != VAR_DECL
    7374      2733802 :           || !DECL_HARD_REGISTER (gimple_assign_lhs (stmt)))
    7375    361325559 :       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
    7376     16149733 :           || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    7377              :     {
    7378     25169134 :       tree rhs = gimple_assign_rhs1 (stmt);
    7379     25169134 :       vn_reference_t vnresult;
    7380              :       /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
    7381              :          typed load of a byte known to be 0x11 as 1 so a store of
    7382              :          a boolean 1 is detected as redundant.  Because of this we
    7383              :          have to make sure to lookup with a ref where its size
    7384              :          matches the precision.  */
    7385     25169134 :       tree lookup_lhs = lhs;
    7386     50079664 :       if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
    7387     13170513 :           && (TREE_CODE (lhs) != COMPONENT_REF
    7388      7982015 :               || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
    7389     38117951 :           && !type_has_mode_precision_p (TREE_TYPE (lhs)))
    7390              :         {
    7391       417613 :           if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
    7392       426695 :               && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
    7393              :             lookup_lhs = NULL_TREE;
    7394       410740 :           else if (TREE_CODE (lhs) == COMPONENT_REF
    7395       410740 :                    || TREE_CODE (lhs) == MEM_REF)
    7396              :             {
    7397       285859 :               tree ltype = build_nonstandard_integer_type
    7398       285859 :                                 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
    7399       285859 :                                  TYPE_UNSIGNED (TREE_TYPE (lhs)));
    7400       285859 :               if (TREE_CODE (lhs) == COMPONENT_REF)
    7401              :                 {
    7402       216535 :                   tree foff = component_ref_field_offset (lhs);
    7403       216535 :                   tree f = TREE_OPERAND (lhs, 1);
    7404       216535 :                   if (!poly_int_tree_p (foff))
    7405              :                     lookup_lhs = NULL_TREE;
    7406              :                   else
    7407       433070 :                     lookup_lhs = build3 (BIT_FIELD_REF, ltype,
    7408       216535 :                                          TREE_OPERAND (lhs, 0),
    7409       216535 :                                          TYPE_SIZE (TREE_TYPE (lhs)),
    7410              :                                          bit_from_pos
    7411       216535 :                                            (foff, DECL_FIELD_BIT_OFFSET (f)));
    7412              :                 }
    7413              :               else
    7414        69324 :                 lookup_lhs = build2 (MEM_REF, ltype,
    7415        69324 :                                      TREE_OPERAND (lhs, 0),
    7416        69324 :                                      TREE_OPERAND (lhs, 1));
    7417              :             }
    7418              :           else
    7419              :             lookup_lhs = NULL_TREE;
    7420              :         }
    7421     25037380 :       tree val = NULL_TREE, tem;
    7422     25037380 :       if (lookup_lhs)
    7423     50074760 :         val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
    7424              :                                    VN_WALKREWRITE, &vnresult, false,
    7425              :                                    NULL, NULL_TREE, true);
    7426     25169134 :       if (TREE_CODE (rhs) == SSA_NAME)
    7427     12028916 :         rhs = VN_INFO (rhs)->valnum;
    7428     25169134 :       gassign *ass;
    7429     25169134 :       if (val
    7430     25169134 :           && (operand_equal_p (val, rhs, 0)
    7431              :               /* Due to the bitfield lookups above we can get bit
    7432              :                  interpretations of the same RHS as values here.  Those
    7433              :                  are redundant as well.  */
    7434      3090182 :               || (TREE_CODE (val) == SSA_NAME
    7435      1886444 :                   && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
    7436      1705383 :                   && (tem = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
    7437      1705383 :                   && TREE_CODE (tem) == VIEW_CONVERT_EXPR
    7438         3781 :                   && TREE_OPERAND (tem, 0) == rhs)
    7439      3090172 :               || (TREE_CODE (rhs) == SSA_NAME
    7440     25618372 :                   && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs)))
    7441      1469856 :                   && gimple_assign_rhs1 (ass) == val
    7442       672273 :                   && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (ass))
    7443            9 :                   && tree_nop_conversion_p (TREE_TYPE (rhs), TREE_TYPE (val)))))
    7444              :         {
    7445              :           /* We can only remove the later store if the former aliases
    7446              :              at least all accesses the later one does or if the store
    7447              :              was to readonly memory storing the same value.  */
    7448       241430 :           ao_ref lhs_ref;
    7449       241430 :           ao_ref_init (&lhs_ref, lhs);
    7450       241430 :           alias_set_type set = ao_ref_alias_set (&lhs_ref);
    7451       241430 :           alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
    7452       241430 :           if (! vnresult
    7453       241430 :               || ((vnresult->set == set
    7454        50042 :                    || alias_set_subset_of (set, vnresult->set))
    7455       225307 :                   && (vnresult->base_set == base_set
    7456        21478 :                       || alias_set_subset_of (base_set, vnresult->base_set))))
    7457              :             {
    7458       223035 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7459              :                 {
    7460           17 :                   fprintf (dump_file, "Deleted redundant store ");
    7461           17 :                   print_gimple_stmt (dump_file, stmt, 0);
    7462              :                 }
    7463              : 
    7464              :               /* Queue stmt for removal.  */
    7465       223035 :               to_remove.safe_push (stmt);
    7466       223035 :               return;
    7467              :             }
    7468              :         }
    7469              :     }
    7470              : 
    7471              :   /* If this is a control statement value numbering left edges
    7472              :      unexecuted on force the condition in a way consistent with
    7473              :      that.  */
    7474    332923875 :   if (gcond *cond = dyn_cast <gcond *> (stmt))
    7475              :     {
    7476     18971712 :       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
    7477     18971712 :           ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
    7478              :         {
    7479       590496 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7480              :             {
    7481           15 :               fprintf (dump_file, "Removing unexecutable edge from ");
    7482           15 :               print_gimple_stmt (dump_file, stmt, 0);
    7483              :             }
    7484       590496 :           if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
    7485       590496 :               == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
    7486       230000 :             gimple_cond_make_true (cond);
    7487              :           else
    7488       360496 :             gimple_cond_make_false (cond);
    7489       590496 :           update_stmt (cond);
    7490       590496 :           el_todo |= TODO_cleanup_cfg;
    7491       590496 :           return;
    7492              :         }
    7493              :     }
    7494              : 
    7495    332333379 :   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
    7496    332333379 :   bool was_noreturn = (is_gimple_call (stmt)
    7497    332333379 :                        && gimple_call_noreturn_p (stmt));
    7498    332333379 :   tree vdef = gimple_vdef (stmt);
    7499    332333379 :   tree vuse = gimple_vuse (stmt);
    7500              : 
    7501              :   /* If we didn't replace the whole stmt (or propagate the result
    7502              :      into all uses), replace all uses on this stmt with their
    7503              :      leaders.  */
    7504    332333379 :   bool modified = false;
    7505    332333379 :   use_operand_p use_p;
    7506    332333379 :   ssa_op_iter iter;
    7507    495846587 :   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    7508              :     {
    7509    163513208 :       tree use = USE_FROM_PTR (use_p);
    7510              :       /* ???  The call code above leaves stmt operands un-updated.  */
    7511    163513208 :       if (TREE_CODE (use) != SSA_NAME)
    7512            0 :         continue;
    7513    163513208 :       tree sprime;
    7514    163513208 :       if (SSA_NAME_IS_DEFAULT_DEF (use))
    7515              :         /* ???  For default defs BB shouldn't matter, but we have to
    7516              :            solve the inconsistency between rpo eliminate and
    7517              :            dom eliminate avail valueization first.  */
    7518     26218563 :         sprime = eliminate_avail (b, use);
    7519              :       else
    7520              :         /* Look for sth available at the definition block of the argument.
    7521              :            This avoids inconsistencies between availability there which
    7522              :            decides if the stmt can be removed and availability at the
    7523              :            use site.  The SSA property ensures that things available
    7524              :            at the definition are also available at uses.  */
    7525    137294645 :         sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
    7526    163513208 :       if (sprime && sprime != use
    7527     12460921 :           && may_propagate_copy (use, sprime, true)
    7528              :           /* We substitute into debug stmts to avoid excessive
    7529              :              debug temporaries created by removed stmts, but we need
    7530              :              to avoid doing so for inserted sprimes as we never want
    7531              :              to create debug temporaries for them.  */
    7532    175973413 :           && (!inserted_exprs
    7533      1202551 :               || TREE_CODE (sprime) != SSA_NAME
    7534      1182331 :               || !is_gimple_debug (stmt)
    7535       378864 :               || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
    7536              :         {
    7537     12111137 :           propagate_value (use_p, sprime);
    7538     12111137 :           modified = true;
    7539              :         }
    7540              :     }
    7541              : 
    7542              :   /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
    7543              :      into which is a requirement for the IPA devirt machinery.  */
    7544    332333379 :   gimple *old_stmt = stmt;
    7545    332333379 :   if (modified)
    7546              :     {
    7547              :       /* If a formerly non-invariant ADDR_EXPR is turned into an
    7548              :          invariant one it was on a separate stmt.  */
    7549     11239946 :       if (gimple_assign_single_p (stmt)
    7550     11239946 :           && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    7551       236091 :         recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
    7552     11239946 :       gimple_stmt_iterator prev = *gsi;
    7553     11239946 :       gsi_prev (&prev);
    7554     11239946 :       if (fold_stmt (gsi, follow_all_ssa_edges))
    7555              :         {
    7556              :           /* fold_stmt may have created new stmts inbetween
    7557              :              the previous stmt and the folded stmt.  Mark
    7558              :              all defs created there as varying to not confuse
    7559              :              the SCCVN machinery as we're using that even during
    7560              :              elimination.  */
    7561       979504 :           if (gsi_end_p (prev))
    7562       220074 :             prev = gsi_start_bb (b);
    7563              :           else
    7564       869467 :             gsi_next (&prev);
    7565       979504 :           if (gsi_stmt (prev) != gsi_stmt (*gsi))
    7566        88000 :             do
    7567              :               {
    7568        54831 :                 tree def;
    7569        54831 :                 ssa_op_iter dit;
    7570       105490 :                 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
    7571              :                                            dit, SSA_OP_ALL_DEFS)
    7572              :                     /* As existing DEFs may move between stmts
    7573              :                        only process new ones.  */
    7574        50659 :                     if (! has_VN_INFO (def))
    7575              :                       {
    7576        33067 :                         vn_ssa_aux_t vn_info = VN_INFO (def);
    7577        33067 :                         vn_info->valnum = def;
    7578        33067 :                         vn_info->visited = true;
    7579              :                       }
    7580        54831 :                 if (gsi_stmt (prev) == gsi_stmt (*gsi))
    7581              :                   break;
    7582        33169 :                 gsi_next (&prev);
    7583        33169 :               }
    7584              :             while (1);
    7585              :         }
    7586     11239946 :       stmt = gsi_stmt (*gsi);
    7587              :       /* In case we folded the stmt away schedule the NOP for removal.  */
    7588     11239946 :       if (gimple_nop_p (stmt))
    7589          815 :         to_remove.safe_push (stmt);
    7590              :     }
    7591              : 
    7592              :   /* Visit indirect calls and turn them into direct calls if
    7593              :      possible using the devirtualization machinery.  Do this before
    7594              :      checking for required EH/abnormal/noreturn cleanup as devird
    7595              :      may expose more of those.  */
    7596    332333379 :   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    7597              :     {
    7598     22100659 :       tree fn = gimple_call_fn (call_stmt);
    7599     22100659 :       if (fn
    7600     21367089 :           && flag_devirtualize
    7601     42728797 :           && virtual_method_call_p (fn))
    7602              :         {
    7603       184837 :           tree otr_type = obj_type_ref_class (fn);
    7604       184837 :           unsigned HOST_WIDE_INT otr_tok
    7605       184837 :               = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
    7606       184837 :           tree instance;
    7607       184837 :           ipa_polymorphic_call_context context (current_function_decl,
    7608       184837 :                                                 fn, stmt, &instance);
    7609       184837 :           context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
    7610              :                                     otr_type, stmt, NULL);
    7611       184837 :           bool final;
    7612       184837 :           vec <cgraph_node *> targets
    7613       184837 :               = possible_polymorphic_call_targets (obj_type_ref_class (fn),
    7614              :                                                    otr_tok, context, &final);
    7615       184837 :           if (dump_file)
    7616           22 :             dump_possible_polymorphic_call_targets (dump_file,
    7617              :                                                     obj_type_ref_class (fn),
    7618              :                                                     otr_tok, context);
    7619       185042 :           if (final && targets.length () <= 1 && dbg_cnt (devirt))
    7620              :             {
    7621           64 :               tree fn;
    7622           64 :               if (targets.length () == 1)
    7623           64 :                 fn = targets[0]->decl;
    7624              :               else
    7625            0 :                 fn = builtin_decl_unreachable ();
    7626           64 :               if (dump_enabled_p ())
    7627              :                 {
    7628            9 :                   dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
    7629              :                                    "converting indirect call to "
    7630              :                                    "function %s\n",
    7631            9 :                                    lang_hooks.decl_printable_name (fn, 2));
    7632              :                 }
    7633           64 :               gimple_call_set_fndecl (call_stmt, fn);
    7634              :               /* If changing the call to __builtin_unreachable
    7635              :                  or similar noreturn function, adjust gimple_call_fntype
    7636              :                  too.  */
    7637           64 :               if (gimple_call_noreturn_p (call_stmt)
    7638            0 :                   && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
    7639            0 :                   && TYPE_ARG_TYPES (TREE_TYPE (fn))
    7640           64 :                   && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
    7641            0 :                       == void_type_node))
    7642            0 :                 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
    7643           64 :               maybe_remove_unused_call_args (cfun, call_stmt);
    7644           64 :               modified = true;
    7645              :             }
    7646              :         }
    7647              :     }
    7648              : 
    7649    332333379 :   if (modified)
    7650              :     {
    7651              :       /* When changing a call into a noreturn call, cfg cleanup
    7652              :          is needed to fix up the noreturn call.  */
    7653     11239967 :       if (!was_noreturn
    7654     11239967 :           && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
    7655           56 :         to_fixup.safe_push  (stmt);
    7656              :       /* When changing a condition or switch into one we know what
    7657              :          edge will be executed, schedule a cfg cleanup.  */
    7658     11239967 :       if ((gimple_code (stmt) == GIMPLE_COND
    7659      1500362 :            && (gimple_cond_true_p (as_a <gcond *> (stmt))
    7660      1494738 :                || gimple_cond_false_p (as_a <gcond *> (stmt))))
    7661     12732495 :           || (gimple_code (stmt) == GIMPLE_SWITCH
    7662         8014 :               && TREE_CODE (gimple_switch_index
    7663              :                             (as_a <gswitch *> (stmt))) == INTEGER_CST))
    7664         9619 :         el_todo |= TODO_cleanup_cfg;
    7665              :       /* If we removed EH side-effects from the statement, clean
    7666              :          its EH information.  */
    7667     11239967 :       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    7668              :         {
    7669         1587 :           bitmap_set_bit (need_eh_cleanup,
    7670         1587 :                           gimple_bb (stmt)->index);
    7671         1587 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7672            0 :             fprintf (dump_file, "  Removed EH side-effects.\n");
    7673              :         }
    7674              :       /* Likewise for AB side-effects.  */
    7675     11239967 :       if (can_make_abnormal_goto
    7676     11239967 :           && !stmt_can_make_abnormal_goto (stmt))
    7677              :         {
    7678            0 :           bitmap_set_bit (need_ab_cleanup,
    7679            0 :                           gimple_bb (stmt)->index);
    7680            0 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7681            0 :             fprintf (dump_file, "  Removed AB side-effects.\n");
    7682              :         }
    7683     11239967 :       update_stmt (stmt);
    7684              :       /* In case the VDEF on the original stmt was released, value-number
    7685              :          it to the VUSE.  This is to make vuse_ssa_val able to skip
    7686              :          released virtual operands.  */
    7687     14326021 :       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
    7688         1815 :         VN_INFO (vdef)->valnum = vuse;
    7689              :     }
    7690              : 
    7691              :   /* Make new values available - for fully redundant LHS we
    7692              :      continue with the next stmt above and skip this.
    7693              :      But avoid picking up dead defs.  */
    7694    332333379 :   tree def;
    7695    402234603 :   FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    7696     69901224 :     if (! has_zero_uses (def)
    7697     69901224 :         || (inserted_exprs
    7698       212339 :             && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (def))))
    7699     68518583 :       eliminate_push_avail (b, def);
    7700              : }
    7701              : 
    7702              : /* Perform elimination for the basic-block B during the domwalk.  */
    7703              : 
    7704              : edge
    7705     40979295 : eliminate_dom_walker::before_dom_children (basic_block b)
    7706              : {
    7707              :   /* Mark new bb.  */
    7708     40979295 :   avail_stack.safe_push (NULL_TREE);
    7709              : 
    7710              :   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
    7711     40979295 :   if (!(b->flags & BB_EXECUTABLE))
    7712              :     return NULL;
    7713              : 
    7714     36253322 :   vn_context_bb = b;
    7715              : 
    7716     47734940 :   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    7717              :     {
    7718     11481618 :       gphi *phi = gsi.phi ();
    7719     11481618 :       tree res = PHI_RESULT (phi);
    7720              : 
    7721     22963236 :       if (virtual_operand_p (res))
    7722              :         {
    7723      5269386 :           gsi_next (&gsi);
    7724      5269386 :           continue;
    7725              :         }
    7726              : 
    7727      6212232 :       tree sprime = eliminate_avail (b, res);
    7728      6212232 :       if (sprime
    7729      6212232 :           && sprime != res)
    7730              :         {
    7731       420483 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7732              :             {
    7733           20 :               fprintf (dump_file, "Replaced redundant PHI node defining ");
    7734           20 :               print_generic_expr (dump_file, res);
    7735           20 :               fprintf (dump_file, " with ");
    7736           20 :               print_generic_expr (dump_file, sprime);
    7737           20 :               fprintf (dump_file, "\n");
    7738              :             }
    7739              : 
    7740              :           /* If we inserted this PHI node ourself, it's not an elimination.  */
    7741       420483 :           if (! inserted_exprs
    7742       534352 :               || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
    7743       392656 :             eliminations++;
    7744              : 
    7745              :           /* If we will propagate into all uses don't bother to do
    7746              :              anything.  */
    7747       420483 :           if (may_propagate_copy (res, sprime))
    7748              :             {
    7749              :               /* Mark the PHI for removal.  */
    7750       420483 :               to_remove.safe_push (phi);
    7751       420483 :               gsi_next (&gsi);
    7752       420483 :               continue;
    7753              :             }
    7754              : 
    7755            0 :           remove_phi_node (&gsi, false);
    7756              : 
    7757            0 :           if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
    7758            0 :             sprime = fold_convert (TREE_TYPE (res), sprime);
    7759            0 :           gimple *stmt = gimple_build_assign (res, sprime);
    7760            0 :           gimple_stmt_iterator gsi2 = gsi_after_labels (b);
    7761            0 :           gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
    7762            0 :           continue;
    7763            0 :         }
    7764              : 
    7765      5791749 :       eliminate_push_avail (b, res);
    7766      5791749 :       gsi_next (&gsi);
    7767              :     }
    7768              : 
    7769     72506644 :   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
    7770    276686162 :        !gsi_end_p (gsi);
    7771    240432840 :        gsi_next (&gsi))
    7772    240432840 :     eliminate_stmt (b, &gsi);
    7773              : 
    7774              :   /* Replace destination PHI arguments.  */
    7775     36253322 :   edge_iterator ei;
    7776     36253322 :   edge e;
    7777     85614224 :   FOR_EACH_EDGE (e, ei, b->succs)
    7778     49360902 :     if (e->flags & EDGE_EXECUTABLE)
    7779     48834880 :       for (gphi_iterator gsi = gsi_start_phis (e->dest);
    7780     78347454 :            !gsi_end_p (gsi);
    7781     29512574 :            gsi_next (&gsi))
    7782              :         {
    7783     29512574 :           gphi *phi = gsi.phi ();
    7784     29512574 :           use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
    7785     29512574 :           tree arg = USE_FROM_PTR (use_p);
    7786     48839256 :           if (TREE_CODE (arg) != SSA_NAME
    7787     29512574 :               || virtual_operand_p (arg))
    7788     19326682 :             continue;
    7789     10185892 :           tree sprime = eliminate_avail (b, arg);
    7790     20371784 :           if (sprime && may_propagate_copy (arg, sprime,
    7791     10185892 :                                             !(e->flags & EDGE_ABNORMAL)))
    7792     10173853 :             propagate_value (use_p, sprime);
    7793              :         }
    7794              : 
    7795     36253322 :   vn_context_bb = NULL;
    7796              : 
    7797     36253322 :   return NULL;
    7798              : }
    7799              : 
    7800              : /* Make no longer available leaders no longer available.  */
    7801              : 
    7802              : void
    7803     40979295 : eliminate_dom_walker::after_dom_children (basic_block)
    7804              : {
    7805     40979295 :   tree entry;
    7806     90843630 :   while ((entry = avail_stack.pop ()) != NULL_TREE)
    7807              :     {
    7808     49864335 :       tree valnum = VN_INFO (entry)->valnum;
    7809     49864335 :       tree old = avail[SSA_NAME_VERSION (valnum)];
    7810     49864335 :       if (old == entry)
    7811     49820520 :         avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
    7812              :       else
    7813        43815 :         avail[SSA_NAME_VERSION (valnum)] = entry;
    7814              :     }
    7815     40979295 : }
    7816              : 
/* Remove queued stmts and perform delayed cleanups.
   REGION_P says whether we value-numbered only a region (in which case
   eliminated defs may still have uses outside of it that must be kept
   live as copies).  Returns the accumulated TODO_* flags in el_todo.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought are eliminated.
	 ??? Note we may be confused by uses in dead regions we didn't run
	 elimination on.  Rather than checking individual uses we accept
	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such example).  */
      if (region_p)
	{
	  if (gphi *phi = dyn_cast <gphi *> (stmt))
	    {
	      tree lhs = gimple_phi_result (phi);
	      if (!has_zero_uses (lhs))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Keeping eliminated stmt live "
			     "as copy because of out-of-region uses\n");
		  /* Materialize LHS = leader after the labels of the
		     PHI's block instead of releasing the def.  */
		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		  gimple *copy = gimple_build_assign (lhs, sprime);
		  gimple_stmt_iterator gsi
		    = gsi_after_labels (gimple_bb (stmt));
		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		  do_release_defs = false;
		}
	    }
	  else if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME
		&& !has_zero_uses (lhs))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Keeping eliminated stmt live "
			   "as copy because of out-of-region uses\n");
		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		if (is_gimple_assign (stmt))
		  {
		    /* Turn the assignment into a copy from the leader
		       and keep it; skip the removal code below.  */
		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
		    stmt = gsi_stmt (gsi);
		    update_stmt (stmt);
		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
		    continue;
		  }
		else
		  {
		    /* For non-assignments insert an explicit copy
		       before the stmt and do not release its defs.  */
		    gimple *copy = gimple_build_assign (lhs, sprime);
		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		    do_release_defs = false;
		  }
	      }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, do_release_defs);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  /* gsi_remove returning true signals EH info needs cleanup.  */
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);
	  if (do_release_defs)
	    release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }

  /* Finally purge dead EH and abnormal edges accumulated above.  */
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
    7943              : 
    7944              : /* Eliminate fully redundant computations.  */
    7945              : 
    7946              : unsigned
    7947      4219256 : eliminate_with_rpo_vn (bitmap inserted_exprs)
    7948              : {
    7949      4219256 :   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
    7950              : 
    7951      4219256 :   eliminate_dom_walker *saved_rpo_avail = rpo_avail;
    7952      4219256 :   rpo_avail = &walker;
    7953      4219256 :   walker.walk (cfun->cfg->x_entry_block_ptr);
    7954      4219256 :   rpo_avail = saved_rpo_avail;
    7955              : 
    7956      4219256 :   return walker.eliminate_cleanup ();
    7957      4219256 : }
    7958              : 
    7959              : static unsigned
    7960              : do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
    7961              :              bool iterate, bool eliminate, bool skip_entry_phis,
    7962              :              vn_lookup_kind kind);
    7963              : 
    7964              : void
    7965       960589 : run_rpo_vn (vn_lookup_kind kind)
    7966              : {
    7967       960589 :   do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
    7968              : 
    7969              :   /* ???  Prune requirement of these.  */
    7970       960589 :   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
    7971              : 
    7972              :   /* Initialize the value ids and prune out remaining VN_TOPs
    7973              :      from dead code.  */
    7974       960589 :   tree name;
    7975       960589 :   unsigned i;
    7976     46935564 :   FOR_EACH_SSA_NAME (i, name, cfun)
    7977              :     {
    7978     33423328 :       vn_ssa_aux_t info = VN_INFO (name);
    7979     33423328 :       if (!info->visited
    7980     33344024 :           || info->valnum == VN_TOP)
    7981        79304 :         info->valnum = name;
    7982     33423328 :       if (info->valnum == name)
    7983     32266829 :         info->value_id = get_next_value_id ();
    7984      1156499 :       else if (is_gimple_min_invariant (info->valnum))
    7985        42039 :         info->value_id = get_or_alloc_constant_value_id (info->valnum);
    7986              :     }
    7987              : 
    7988              :   /* Propagate.  */
    7989     46935564 :   FOR_EACH_SSA_NAME (i, name, cfun)
    7990              :     {
    7991     33423328 :       vn_ssa_aux_t info = VN_INFO (name);
    7992     33423328 :       if (TREE_CODE (info->valnum) == SSA_NAME
    7993     33381289 :           && info->valnum != name
    7994     34537788 :           && info->value_id != VN_INFO (info->valnum)->value_id)
    7995      1114460 :         info->value_id = VN_INFO (info->valnum)->value_id;
    7996              :     }
    7997              : 
    7998       960589 :   set_hashtable_value_ids ();
    7999              : 
    8000       960589 :   if (dump_file && (dump_flags & TDF_DETAILS))
    8001              :     {
    8002           14 :       fprintf (dump_file, "Value numbers:\n");
    8003          406 :       FOR_EACH_SSA_NAME (i, name, cfun)
    8004              :         {
    8005          307 :           if (VN_INFO (name)->visited
    8006          307 :               && SSA_VAL (name) != name)
    8007              :             {
    8008           33 :               print_generic_expr (dump_file, name);
    8009           33 :               fprintf (dump_file, " = ");
    8010           33 :               print_generic_expr (dump_file, SSA_VAL (name));
    8011           33 :               fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
    8012              :             }
    8013              :         }
    8014              :     }
    8015       960589 : }
    8016              : 
    8017              : /* Free VN associated data structures.  */
    8018              : 
    8019              : void
    8020      6100003 : free_rpo_vn (void)
    8021              : {
    8022      6100003 :   free_vn_table (valid_info);
    8023      6100003 :   XDELETE (valid_info);
    8024      6100003 :   obstack_free (&vn_tables_obstack, NULL);
    8025      6100003 :   obstack_free (&vn_tables_insert_obstack, NULL);
    8026              : 
    8027      6100003 :   vn_ssa_aux_iterator_type it;
    8028      6100003 :   vn_ssa_aux_t info;
    8029    347391953 :   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    8030    170645975 :     if (info->needs_insertion)
    8031      4112669 :       release_ssa_name (info->name);
    8032      6100003 :   obstack_free (&vn_ssa_aux_obstack, NULL);
    8033      6100003 :   delete vn_ssa_aux_hash;
    8034              : 
    8035      6100003 :   delete constant_to_value_id;
    8036      6100003 :   constant_to_value_id = NULL;
    8037      6100003 : }
    8038              : 
    8039              : /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
    8040              : 
    8041              : static tree
    8042     22962153 : vn_lookup_simplify_result (gimple_match_op *res_op)
    8043              : {
    8044     22962153 :   if (!res_op->code.is_tree_code ())
    8045              :     return NULL_TREE;
    8046     22958979 :   tree *ops = res_op->ops;
    8047     22958979 :   unsigned int length = res_op->num_ops;
    8048     22958979 :   if (res_op->code == CONSTRUCTOR
    8049              :       /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
    8050              :          and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
    8051     22958979 :       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    8052              :     {
    8053         1269 :       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
    8054         1269 :       ops = XALLOCAVEC (tree, length);
    8055         6119 :       for (unsigned i = 0; i < length; ++i)
    8056         4850 :         ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    8057              :     }
    8058     22958979 :   vn_nary_op_t vnresult = NULL;
    8059     22958979 :   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
    8060              :                                        res_op->type, ops, &vnresult);
    8061              :   /* If this is used from expression simplification make sure to
    8062              :      return an available expression.  */
    8063     22958979 :   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    8064      2223862 :     res = rpo_avail->eliminate_avail (vn_context_bb, res);
    8065              :   return res;
    8066              : }
    8067              : 
    8068              : /* Return a leader for OPs value that is valid at BB.  */
    8069              : 
    8070              : tree
    8071    265029282 : rpo_elim::eliminate_avail (basic_block bb, tree op)
    8072              : {
    8073    265029282 :   bool visited;
    8074    265029282 :   tree valnum = SSA_VAL (op, &visited);
    8075              :   /* If we didn't visit OP then it must be defined outside of the
    8076              :      region we process and also dominate it.  So it is available.  */
    8077    265029282 :   if (!visited)
    8078              :     return op;
    8079    262870912 :   if (TREE_CODE (valnum) == SSA_NAME)
    8080              :     {
    8081    248972513 :       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
    8082              :         return valnum;
    8083    242270928 :       vn_ssa_aux_t valnum_info = VN_INFO (valnum);
    8084    242270928 :       vn_avail *av = valnum_info->avail;
    8085    242270928 :       if (!av)
    8086              :         {
    8087              :           /* See above.  But when there's availability info prefer
    8088              :              what we recorded there for example to preserve LC SSA.  */
    8089     83135962 :           if (!valnum_info->visited)
    8090              :             return valnum;
    8091              :           return NULL_TREE;
    8092              :         }
    8093    159134966 :       if (av->location == bb->index)
    8094              :         /* On tramp3d 90% of the cases are here.  */
    8095    105557198 :         return ssa_name (av->leader);
    8096     67549568 :       do
    8097              :         {
    8098     67549568 :           basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
    8099              :           /* ???  During elimination we have to use availability at the
    8100              :              definition site of a use we try to replace.  This
    8101              :              is required to not run into inconsistencies because
    8102              :              of dominated_by_p_w_unex behavior and removing a definition
    8103              :              while not replacing all uses.
    8104              :              ???  We could try to consistently walk dominators
    8105              :              ignoring non-executable regions.  The nearest common
    8106              :              dominator of bb and abb is where we can stop walking.  We
    8107              :              may also be able to "pre-compute" (bits of) the next immediate
    8108              :              (non-)dominator during the RPO walk when marking edges as
    8109              :              executable.  */
    8110     67549568 :           if (dominated_by_p_w_unex (bb, abb, true))
    8111              :             {
    8112     49693351 :               tree leader = ssa_name (av->leader);
    8113              :               /* Prevent eliminations that break loop-closed SSA.  */
    8114     49693351 :               if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
    8115      3153236 :                   && ! SSA_NAME_IS_DEFAULT_DEF (leader)
    8116     52846587 :                   && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
    8117      3153236 :                                                          (leader))->loop_father,
    8118              :                                               bb))
    8119              :                 return NULL_TREE;
    8120     49614536 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8121              :                 {
    8122         3555 :                   print_generic_expr (dump_file, leader);
    8123         3555 :                   fprintf (dump_file, " is available for ");
    8124         3555 :                   print_generic_expr (dump_file, valnum);
    8125         3555 :                   fprintf (dump_file, "\n");
    8126              :                 }
    8127              :               /* On tramp3d 99% of the _remaining_ cases succeed at
    8128              :                  the first entry.  */
    8129     49614536 :               return leader;
    8130              :             }
    8131              :           /* ???  Can we somehow skip to the immediate dominator
    8132              :              RPO index (bb_to_rpo)?  Again, maybe not worth, on
    8133              :              tramp3d the worst number of elements in the vector is 9.  */
    8134     17856217 :           av = av->next;
    8135              :         }
    8136     17856217 :       while (av);
    8137              :       /* While we prefer avail we have to fallback to using the value
    8138              :          directly if defined outside of the region when none of the
    8139              :          available defs suit.  */
    8140      3884417 :       if (!valnum_info->visited)
    8141              :         return valnum;
    8142              :     }
    8143     13898399 :   else if (valnum != VN_TOP)
    8144              :     /* valnum satisfies is_gimple_min_invariant.  */
    8145              :     return valnum;
    8146              :   return NULL_TREE;
    8147              : }
    8148              : 
    8149              : /* Make LEADER a leader for its value at BB.  */
    8150              : 
    8151              : void
    8152     96543168 : rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
    8153              : {
    8154     96543168 :   tree valnum = VN_INFO (leader)->valnum;
    8155     96543168 :   if (valnum == VN_TOP
    8156     96543168 :       || is_gimple_min_invariant (valnum))
    8157            0 :     return;
    8158     96543168 :   if (dump_file && (dump_flags & TDF_DETAILS))
    8159              :     {
    8160       324200 :       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
    8161       324200 :       print_generic_expr (dump_file, leader);
    8162       324200 :       fprintf (dump_file, " for value ");
    8163       324200 :       print_generic_expr (dump_file, valnum);
    8164       324200 :       fprintf (dump_file, "\n");
    8165              :     }
    8166     96543168 :   vn_ssa_aux_t value = VN_INFO (valnum);
    8167     96543168 :   vn_avail *av;
    8168     96543168 :   if (m_avail_freelist)
    8169              :     {
    8170     18680012 :       av = m_avail_freelist;
    8171     18680012 :       m_avail_freelist = m_avail_freelist->next;
    8172              :     }
    8173              :   else
    8174     77863156 :     av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
    8175     96543168 :   av->location = bb->index;
    8176     96543168 :   av->leader = SSA_NAME_VERSION (leader);
    8177     96543168 :   av->next = value->avail;
    8178     96543168 :   av->next_undo = last_pushed_avail;
    8179     96543168 :   last_pushed_avail = value;
    8180     96543168 :   value->avail = av;
    8181              : }
    8182              : 
    8183              : /* Valueization hook for RPO VN plus required state.  */
    8184              : 
    8185              : tree
    8186   2025592278 : rpo_vn_valueize (tree name)
    8187              : {
    8188   2025592278 :   if (TREE_CODE (name) == SSA_NAME)
    8189              :     {
    8190   1979767097 :       vn_ssa_aux_t val = VN_INFO (name);
    8191   1979767097 :       if (val)
    8192              :         {
    8193   1979767097 :           tree tem = val->valnum;
    8194   1979767097 :           if (tem != VN_TOP && tem != name)
    8195              :             {
    8196    106557626 :               if (TREE_CODE (tem) != SSA_NAME)
    8197              :                 return tem;
    8198              :               /* For all values we only valueize to an available leader
    8199              :                  which means we can use SSA name info without restriction.  */
    8200     89809536 :               tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
    8201     89809536 :               if (tem)
    8202              :                 return tem;
    8203              :             }
    8204              :         }
    8205              :     }
    8206              :   return name;
    8207              : }
    8208              : 
    8209              : /* Insert on PRED_E predicates derived from CODE OPS being true besides the
    8210              :    inverted condition.  */
    8211              : 
    8212              : static void
    8213     27384054 : insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
    8214              : {
    8215     27384054 :   switch (code)
    8216              :     {
    8217      1371076 :     case LT_EXPR:
    8218              :       /* a < b -> a {!,<}= b */
    8219      1371076 :       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
    8220              :                                            ops, boolean_true_node, 0, pred_e);
    8221      1371076 :       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
    8222              :                                            ops, boolean_true_node, 0, pred_e);
    8223              :       /* a < b -> ! a {>,=} b */
    8224      1371076 :       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
    8225              :                                            ops, boolean_false_node, 0, pred_e);
    8226      1371076 :       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
    8227              :                                            ops, boolean_false_node, 0, pred_e);
    8228      1371076 :       break;
    8229      3444283 :     case GT_EXPR:
    8230              :       /* a > b -> a {!,>}= b */
    8231      3444283 :       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
    8232              :                                            ops, boolean_true_node, 0, pred_e);
    8233      3444283 :       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
    8234              :                                            ops, boolean_true_node, 0, pred_e);
    8235              :       /* a > b -> ! a {<,=} b */
    8236      3444283 :       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
    8237              :                                            ops, boolean_false_node, 0, pred_e);
    8238      3444283 :       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
    8239              :                                            ops, boolean_false_node, 0, pred_e);
    8240      3444283 :       break;
    8241      9355787 :     case EQ_EXPR:
    8242              :       /* a == b -> ! a {<,>} b */
    8243      9355787 :       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
    8244              :                                            ops, boolean_false_node, 0, pred_e);
    8245      9355787 :       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
    8246              :                                            ops, boolean_false_node, 0, pred_e);
    8247      9355787 :       break;
    8248              :     case LE_EXPR:
    8249              :     case GE_EXPR:
    8250              :     case NE_EXPR:
    8251              :       /* Nothing besides inverted condition.  */
    8252              :       break;
    8253     27384054 :     default:;
    8254              :     }
    8255     27384054 : }
    8256              : 
    8257              : /* Insert on the TRUE_E true and FALSE_E false predicates
    8258              :    derived from LHS CODE RHS.  */
    8259              : 
    8260              : static void
    8261     23339603 : insert_predicates_for_cond (tree_code code, tree lhs, tree rhs,
    8262              :                             edge true_e, edge false_e)
    8263              : {
    8264              :   /* If both edges are null, then there is nothing to be done. */
    8265     23339603 :   if (!true_e && !false_e)
    8266      1317262 :     return;
    8267              : 
    8268              :   /* Canonicalize the comparison if needed, putting
    8269              :      the constant in the rhs.  */
    8270     22025811 :   if (tree_swap_operands_p (lhs, rhs))
    8271              :     {
    8272        16516 :       std::swap (lhs, rhs);
    8273        16516 :       code = swap_tree_comparison (code);
    8274              :     }
    8275              : 
    8276              :   /* If the lhs is not a ssa name, don't record anything. */
    8277     22025811 :   if (TREE_CODE (lhs) != SSA_NAME)
    8278              :     return;
    8279              : 
    8280     22022341 :   tree_code icode = invert_tree_comparison (code, HONOR_NANS (lhs));
    8281     22022341 :   tree ops[2];
    8282     22022341 :   ops[0] = lhs;
    8283     22022341 :   ops[1] = rhs;
    8284     22022341 :   if (true_e)
    8285     17984845 :     vn_nary_op_insert_pieces_predicated (2, code, boolean_type_node, ops,
    8286              :                                          boolean_true_node, 0, true_e);
    8287     22022341 :   if (false_e)
    8288     16930641 :     vn_nary_op_insert_pieces_predicated (2, code, boolean_type_node, ops,
    8289              :                                          boolean_false_node, 0, false_e);
    8290     22022341 :   if (icode != ERROR_MARK)
    8291              :     {
    8292     21775388 :       if (true_e)
    8293     17831630 :         vn_nary_op_insert_pieces_predicated (2, icode, boolean_type_node, ops,
    8294              :                                              boolean_false_node, 0, true_e);
    8295     21775388 :       if (false_e)
    8296     16731098 :         vn_nary_op_insert_pieces_predicated (2, icode, boolean_type_node, ops,
    8297              :                                              boolean_true_node, 0, false_e);
    8298              :     }
    8299              :   /* Relax for non-integers, inverted condition handled
    8300              :      above.  */
    8301     22022341 :   if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    8302              :     {
    8303     17260808 :       if (true_e)
    8304     14169634 :         insert_related_predicates_on_edge (code, ops, true_e);
    8305     17260808 :       if (false_e)
    8306     13214420 :         insert_related_predicates_on_edge (icode, ops, false_e);
    8307              :   }
    8308     22022341 :   if (integer_zerop (rhs)
    8309     22022341 :       && (code == NE_EXPR || code == EQ_EXPR))
    8310              :     {
    8311      9135177 :       gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
    8312              :       /* (A CMP B) != 0 is the same as (A CMP B).
    8313              :          (A CMP B) == 0 is just (A CMP B) with the edges swapped.  */
    8314      9135177 :       if (is_gimple_assign (def_stmt)
    8315      9135177 :           && TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_comparison)
    8316              :           {
    8317       430391 :             tree_code nc = gimple_assign_rhs_code (def_stmt);
    8318       430391 :             tree nlhs = vn_valueize (gimple_assign_rhs1 (def_stmt));
    8319       430391 :             tree nrhs = vn_valueize (gimple_assign_rhs2 (def_stmt));
    8320       430391 :             edge nt = true_e;
    8321       430391 :             edge nf = false_e;
    8322       430391 :             if (code == EQ_EXPR)
    8323       304005 :               std::swap (nt, nf);
    8324       430391 :             if (lhs != nlhs)
    8325       430391 :               insert_predicates_for_cond (nc, nlhs, nrhs, nt, nf);
    8326              :           }
    8327              :       /* (a | b) == 0 ->
    8328              :             on true edge assert: a == 0 & b == 0. */
    8329              :       /* (a | b) != 0 ->
    8330              :             on false edge assert: a == 0 & b == 0. */
    8331      9135177 :       if (is_gimple_assign (def_stmt)
    8332      9135177 :           && gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR)
    8333              :         {
    8334       254422 :           edge e = code == EQ_EXPR ? true_e : false_e;
    8335       254422 :           tree nlhs;
    8336              : 
    8337       254422 :           nlhs = vn_valueize (gimple_assign_rhs1 (def_stmt));
    8338              :           /* A valueization of the `a` might return the old lhs
    8339              :              which is already handled above. */
    8340       254422 :           if (nlhs != lhs)
    8341       254422 :             insert_predicates_for_cond (EQ_EXPR, nlhs, rhs, e, nullptr);
    8342              : 
    8343              :           /* A valueization of the `b` might return the old lhs
    8344              :              which is already handled above. */
    8345       254422 :           nlhs = vn_valueize (gimple_assign_rhs2 (def_stmt));
    8346       254422 :           if (nlhs != lhs)
    8347       254422 :             insert_predicates_for_cond (EQ_EXPR, nlhs, rhs, e, nullptr);
    8348              :         }
    8349              :     }
    8350              : }
    8351              : 
    8352              : /* Main stmt worker for RPO VN, process BB.  */
    8353              : 
    8354              : static unsigned
    8355     61131649 : process_bb (rpo_elim &avail, basic_block bb,
    8356              :             bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
    8357              :             bool do_region, bitmap exit_bbs, bool skip_phis)
    8358              : {
    8359     61131649 :   unsigned todo = 0;
    8360     61131649 :   edge_iterator ei;
    8361     61131649 :   edge e;
    8362              : 
    8363     61131649 :   vn_context_bb = bb;
    8364              : 
    8365              :   /* If we are in loop-closed SSA preserve this state.  This is
    8366              :      relevant when called on regions from outside of FRE/PRE.  */
    8367     61131649 :   bool lc_phi_nodes = false;
    8368     61131649 :   if (!skip_phis
    8369     61131649 :       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    8370      3694119 :     FOR_EACH_EDGE (e, ei, bb->preds)
    8371      2231745 :       if (e->src->loop_father != e->dest->loop_father
    8372      2231745 :           && flow_loop_nested_p (e->dest->loop_father,
    8373              :                                  e->src->loop_father))
    8374              :         {
    8375              :           lc_phi_nodes = true;
    8376              :           break;
    8377              :         }
    8378              : 
    8379              :   /* When we visit a loop header substitute into loop info.  */
    8380     61131649 :   if (!iterate && eliminate && bb->loop_father->header == bb)
    8381              :     {
    8382              :       /* Keep fields in sync with substitute_in_loop_info.  */
    8383       945925 :       if (bb->loop_father->nb_iterations)
    8384       155996 :         bb->loop_father->nb_iterations
    8385       155996 :           = simplify_replace_tree (bb->loop_father->nb_iterations,
    8386              :                                    NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
    8387              :     }
    8388              : 
    8389              :   /* Value-number all defs in the basic-block.  */
    8390     61131649 :   if (!skip_phis)
    8391     87858726 :     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
    8392     26756362 :          gsi_next (&gsi))
    8393              :       {
    8394     26756362 :         gphi *phi = gsi.phi ();
    8395     26756362 :         tree res = PHI_RESULT (phi);
    8396     26756362 :         vn_ssa_aux_t res_info = VN_INFO (res);
    8397     26756362 :         if (!bb_visited)
    8398              :           {
    8399     18902684 :             gcc_assert (!res_info->visited);
    8400     18902684 :             res_info->valnum = VN_TOP;
    8401     18902684 :             res_info->visited = true;
    8402              :           }
    8403              : 
    8404              :         /* When not iterating force backedge values to varying.  */
    8405     26756362 :         visit_stmt (phi, !iterate_phis);
    8406     53512724 :         if (virtual_operand_p (res))
    8407     10578039 :           continue;
    8408              : 
    8409              :         /* Eliminate */
    8410              :         /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
    8411              :            how we handle backedges and availability.
    8412              :            And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
    8413     16178323 :         tree val = res_info->valnum;
    8414     16178323 :         if (res != val && !iterate && eliminate)
    8415              :           {
    8416      1396278 :             if (tree leader = avail.eliminate_avail (bb, res))
    8417              :               {
    8418      1271933 :                 if (leader != res
    8419              :                     /* Preserve loop-closed SSA form.  */
    8420      1271933 :                     && (! lc_phi_nodes
    8421         6548 :                         || is_gimple_min_invariant (leader)))
    8422              :                   {
    8423      1271348 :                     if (dump_file && (dump_flags & TDF_DETAILS))
    8424              :                       {
    8425          203 :                         fprintf (dump_file, "Replaced redundant PHI node "
    8426              :                                  "defining ");
    8427          203 :                         print_generic_expr (dump_file, res);
    8428          203 :                         fprintf (dump_file, " with ");
    8429          203 :                         print_generic_expr (dump_file, leader);
    8430          203 :                         fprintf (dump_file, "\n");
    8431              :                       }
    8432      1271348 :                     avail.eliminations++;
    8433              : 
    8434      1271348 :                     if (may_propagate_copy (res, leader))
    8435              :                       {
    8436              :                         /* Schedule for removal.  */
    8437      1271348 :                         avail.to_remove.safe_push (phi);
    8438      1271348 :                         continue;
    8439              :                       }
    8440              :                     /* ???  Else generate a copy stmt.  */
    8441              :                   }
    8442              :               }
    8443              :           }
    8444              :         /* Only make defs available that not already are.  But make
    8445              :            sure loop-closed SSA PHI node defs are picked up for
    8446              :            downstream uses.  */
    8447     14906975 :         if (lc_phi_nodes
    8448     14906975 :             || res == val
    8449     14906975 :             || ! avail.eliminate_avail (bb, res))
    8450     11418029 :           avail.eliminate_push_avail (bb, res);
    8451              :       }
    8452              : 
    8453              :   /* For empty BBs mark outgoing edges executable.  For non-empty BBs
    8454              :      we do this when processing the last stmt as we have to do this
    8455              :      before elimination which otherwise forces GIMPLE_CONDs to
    8456              :      if (1 != 0) style when seeing non-executable edges.  */
    8457    122263298 :   if (gsi_end_p (gsi_start_bb (bb)))
    8458              :     {
    8459     14029594 :       FOR_EACH_EDGE (e, ei, bb->succs)
    8460              :         {
    8461      7014797 :           if (!(e->flags & EDGE_EXECUTABLE))
    8462              :             {
    8463      4781997 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8464         6167 :                 fprintf (dump_file,
    8465              :                          "marking outgoing edge %d -> %d executable\n",
    8466         6167 :                          e->src->index, e->dest->index);
    8467      4781997 :               e->flags |= EDGE_EXECUTABLE;
    8468      4781997 :               e->dest->flags |= BB_EXECUTABLE;
    8469              :             }
    8470      2232800 :           else if (!(e->dest->flags & BB_EXECUTABLE))
    8471              :             {
    8472            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8473            0 :                 fprintf (dump_file,
    8474              :                          "marking destination block %d reachable\n",
    8475              :                          e->dest->index);
    8476            0 :               e->dest->flags |= BB_EXECUTABLE;
    8477              :             }
    8478              :         }
    8479              :     }
    8480    122263298 :   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
    8481    481325457 :        !gsi_end_p (gsi); gsi_next (&gsi))
    8482              :     {
    8483    420193808 :       ssa_op_iter i;
    8484    420193808 :       tree op;
    8485    420193808 :       if (!bb_visited)
    8486              :         {
    8487    481774227 :           FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
    8488              :             {
    8489    136222074 :               vn_ssa_aux_t op_info = VN_INFO (op);
    8490    136222074 :               gcc_assert (!op_info->visited);
    8491    136222074 :               op_info->valnum = VN_TOP;
    8492    136222074 :               op_info->visited = true;
    8493              :             }
    8494              : 
    8495              :           /* We somehow have to deal with uses that are not defined
    8496              :              in the processed region.  Forcing unvisited uses to
    8497              :              varying here doesn't play well with def-use following during
    8498              :              expression simplification, so we deal with this by checking
    8499              :              the visited flag in SSA_VAL.  */
    8500              :         }
    8501              : 
    8502    420193808 :       visit_stmt (gsi_stmt (gsi));
    8503              : 
    8504    420193808 :       gimple *last = gsi_stmt (gsi);
    8505    420193808 :       e = NULL;
    8506    420193808 :       switch (gimple_code (last))
    8507              :         {
    8508       114763 :         case GIMPLE_SWITCH:
    8509       114763 :           e = find_taken_edge (bb, vn_valueize (gimple_switch_index
    8510       114763 :                                                 (as_a <gswitch *> (last))));
    8511       114763 :           break;
    8512     24583393 :         case GIMPLE_COND:
    8513     24583393 :           {
    8514     24583393 :             tree lhs = vn_valueize (gimple_cond_lhs (last));
    8515     24583393 :             tree rhs = vn_valueize (gimple_cond_rhs (last));
    8516     24583393 :             tree_code cmpcode = gimple_cond_code (last);
    8517              :             /* Canonicalize the comparison if needed, putting
    8518              :                the constant in the rhs.  */
    8519     24583393 :             if (tree_swap_operands_p (lhs, rhs))
    8520              :               {
    8521       832989 :                 std::swap (lhs, rhs);
    8522       832989 :                 cmpcode = swap_tree_comparison (cmpcode);
    8523              :                }
    8524     24583393 :             tree val = gimple_simplify (cmpcode,
    8525              :                                         boolean_type_node, lhs, rhs,
    8526              :                                         NULL, vn_valueize);
     8527              :             /* If the condition didn't simplify see if we have recorded
     8528              :                an expression from edges taken so far.  */
    8529     24583393 :             if (! val || TREE_CODE (val) != INTEGER_CST)
    8530              :               {
    8531     22749186 :                 vn_nary_op_t vnresult;
    8532     22749186 :                 tree ops[2];
    8533     22749186 :                 ops[0] = lhs;
    8534     22749186 :                 ops[1] = rhs;
    8535     22749186 :                 val = vn_nary_op_lookup_pieces (2, cmpcode,
    8536              :                                                 boolean_type_node, ops,
    8537              :                                                 &vnresult);
     8538              :                 /* Got back an SSA name; then try looking up `val != 0`
     8539              :                    as it might have been recorded that way.  */
    8540     22749186 :                 if (val && TREE_CODE (val) == SSA_NAME)
    8541              :                   {
    8542       152101 :                     ops[0] = val;
    8543       152101 :                     ops[1] = build_zero_cst (TREE_TYPE (val));
    8544       152101 :                     val = vn_nary_op_lookup_pieces (2, NE_EXPR,
    8545              :                                                     boolean_type_node, ops,
    8546              :                                                     &vnresult);
    8547              :                   }
    8548              :                 /* Did we get a predicated value?  */
    8549     22749170 :                 if (! val && vnresult && vnresult->predicated_values)
    8550              :                   {
    8551      1351308 :                     val = vn_nary_op_get_predicated_value (vnresult, bb);
    8552      1351308 :                     if (val && dump_file && (dump_flags & TDF_DETAILS))
    8553              :                       {
    8554            2 :                         fprintf (dump_file, "Got predicated value ");
    8555            2 :                         print_generic_expr (dump_file, val, TDF_NONE);
    8556            2 :                         fprintf (dump_file, " for ");
    8557            2 :                         print_gimple_stmt (dump_file, last, TDF_SLIM);
    8558              :                       }
    8559              :                   }
    8560              :               }
    8561     22749186 :             if (val)
    8562      2183025 :               e = find_taken_edge (bb, val);
    8563     24583393 :             if (! e)
    8564              :               {
    8565              :                 /* If we didn't manage to compute the taken edge then
    8566              :                    push predicated expressions for the condition itself
    8567              :                    and related conditions to the hashtables.  This allows
    8568              :                    simplification of redundant conditions which is
    8569              :                    important as early cleanup.  */
    8570     22400368 :                 edge true_e, false_e;
    8571     22400368 :                 extract_true_false_edges_from_block (bb, &true_e, &false_e);
    8572       541303 :                 if ((do_region && bitmap_bit_p (exit_bbs, true_e->dest->index))
    8573     22628274 :                     || !can_track_predicate_on_edge (true_e))
    8574      4925747 :                   true_e = NULL;
    8575       541303 :                 if ((do_region && bitmap_bit_p (exit_bbs, false_e->dest->index))
    8576     22601937 :                     || !can_track_predicate_on_edge (false_e))
    8577      5861639 :                   false_e = NULL;
    8578     22400368 :                 insert_predicates_for_cond (cmpcode, lhs, rhs, true_e, false_e);
    8579              :               }
    8580              :             break;
    8581              :           }
    8582         1394 :         case GIMPLE_GOTO:
    8583         1394 :           e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
    8584         1394 :           break;
    8585              :         default:
    8586              :           e = NULL;
    8587              :         }
    8588    420193808 :       if (e)
    8589              :         {
    8590      2186618 :           todo = TODO_cleanup_cfg;
    8591      2186618 :           if (!(e->flags & EDGE_EXECUTABLE))
    8592              :             {
    8593      1727009 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8594           35 :                 fprintf (dump_file,
    8595              :                          "marking known outgoing %sedge %d -> %d executable\n",
    8596           35 :                          e->flags & EDGE_DFS_BACK ? "back-" : "",
    8597           35 :                          e->src->index, e->dest->index);
    8598      1727009 :               e->flags |= EDGE_EXECUTABLE;
    8599      1727009 :               e->dest->flags |= BB_EXECUTABLE;
    8600              :             }
    8601       459609 :           else if (!(e->dest->flags & BB_EXECUTABLE))
    8602              :             {
    8603        27227 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8604            1 :                 fprintf (dump_file,
    8605              :                          "marking destination block %d reachable\n",
    8606              :                          e->dest->index);
    8607        27227 :               e->dest->flags |= BB_EXECUTABLE;
    8608              :             }
    8609              :         }
    8610    836014380 :       else if (gsi_one_before_end_p (gsi))
    8611              :         {
    8612    127477577 :           FOR_EACH_EDGE (e, ei, bb->succs)
    8613              :             {
    8614     75547343 :               if (!(e->flags & EDGE_EXECUTABLE))
    8615              :                 {
    8616     55506706 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    8617        18460 :                     fprintf (dump_file,
    8618              :                              "marking outgoing edge %d -> %d executable\n",
    8619        18460 :                              e->src->index, e->dest->index);
    8620     55506706 :                   e->flags |= EDGE_EXECUTABLE;
    8621     55506706 :                   e->dest->flags |= BB_EXECUTABLE;
    8622              :                 }
    8623     20040637 :               else if (!(e->dest->flags & BB_EXECUTABLE))
    8624              :                 {
    8625      2493512 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    8626         5991 :                     fprintf (dump_file,
    8627              :                              "marking destination block %d reachable\n",
    8628              :                              e->dest->index);
    8629      2493512 :                   e->dest->flags |= BB_EXECUTABLE;
    8630              :                 }
    8631              :             }
    8632              :         }
    8633              : 
    8634              :       /* Eliminate.  That also pushes to avail.  */
    8635    420193808 :       if (eliminate && ! iterate)
    8636    105451778 :         avail.eliminate_stmt (bb, &gsi);
    8637              :       else
    8638              :         /* If not eliminating, make all not already available defs
    8639              :            available.  But avoid picking up dead defs.  */
    8640    394236317 :         FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
    8641     79494287 :           if (! has_zero_uses (op)
    8642     79494287 :               && ! avail.eliminate_avail (bb, op))
    8643     60656423 :             avail.eliminate_push_avail (bb, op);
    8644              :     }
    8645              : 
    8646              :   /* Eliminate in destination PHI arguments.  Always substitute in dest
    8647              :      PHIs, even for non-executable edges.  This handles region
    8648              :      exits PHIs.  */
    8649     61131649 :   if (!iterate && eliminate)
    8650     32855766 :     FOR_EACH_EDGE (e, ei, bb->succs)
    8651     19569793 :       for (gphi_iterator gsi = gsi_start_phis (e->dest);
    8652     37941245 :            !gsi_end_p (gsi); gsi_next (&gsi))
    8653              :         {
    8654     18371452 :           gphi *phi = gsi.phi ();
    8655     18371452 :           use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
    8656     18371452 :           tree arg = USE_FROM_PTR (use_p);
    8657     27950076 :           if (TREE_CODE (arg) != SSA_NAME
    8658     18371452 :               || virtual_operand_p (arg))
    8659      9578624 :             continue;
    8660      8792828 :           tree sprime;
    8661      8792828 :           if (SSA_NAME_IS_DEFAULT_DEF (arg))
    8662              :             {
    8663       119391 :               sprime = SSA_VAL (arg);
    8664       119391 :               gcc_assert (TREE_CODE (sprime) != SSA_NAME
    8665              :                           || SSA_NAME_IS_DEFAULT_DEF (sprime));
    8666              :             }
    8667              :           else
    8668              :             /* Look for sth available at the definition block of the argument.
    8669              :                This avoids inconsistencies between availability there which
    8670              :                decides if the stmt can be removed and availability at the
    8671              :                use site.  The SSA property ensures that things available
    8672              :                at the definition are also available at uses.  */
    8673      8673437 :             sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
    8674              :                                             arg);
    8675      8792828 :           if (sprime
    8676      8792828 :               && sprime != arg
    8677      8792828 :               && may_propagate_copy (arg, sprime, !(e->flags & EDGE_ABNORMAL)))
    8678      1506596 :             propagate_value (use_p, sprime);
    8679              :         }
    8680              : 
    8681     61131649 :   vn_context_bb = NULL;
    8682     61131649 :   return todo;
    8683              : }
    8684              : 
     8685              : /* Unwind state per basic-block.  */
     8686              : 
     8687              : struct unwind_state
     8688              : {
     8689              :   /* Times this block has been visited.  */
     8690              :   unsigned visited;
     8691              :   /* Whether to handle this as iteration point or whether to treat
     8692              :      incoming backedge PHI values as varying.  */
     8693              :   bool iterate;
     8694              :   /* Maximum RPO index this block is reachable from.  */
     8695              :   int max_rpo;
     8696              :   /* Unwind state.  */
                       :   /* Watermark into vn_tables_obstack; do_unwind frees back to this
                       :      point via obstack_free.  */
     8697              :   void *ob_top;
                       :   /* Heads of the last_inserted_{ref,phi,nary} chains at the time this
                       :      state was recorded; do_unwind removes everything inserted after
                       :      these from the valid_info hash tables.  */
     8698              :   vn_reference_t ref_top;
     8699              :   vn_phi_t phi_top;
     8700              :   vn_nary_op_t nary_top;
                       :   /* Head of the pushed-avail chain (last_pushed_avail->avail) at
                       :      record time; do_unwind pops avail entries down to this.  */
     8701              :   vn_avail *avail_top;
     8702              : };
    8703              : 
     8704              : /* Unwind the RPO VN state for iteration.  */
     8705              : 
     8706              : static void
     8707      1898663 : do_unwind (unwind_state *to, rpo_elim &avail)
     8708              : {
                       :   /* Only blocks marked as iteration points record unwind state.  */
     8709      1898663 :   gcc_assert (to->iterate);
                       :   /* Remove all nary entries inserted since TO was recorded from the
                       :      valid nary hash table, walking the last_inserted_nary chain.  */
     8710     34709947 :   for (; last_inserted_nary != to->nary_top;
     8711     32811284 :        last_inserted_nary = last_inserted_nary->next)
     8712              :     {
     8713     32811284 :       vn_nary_op_t *slot;
     8714     32811284 :       slot = valid_info->nary->find_slot_with_hash
     8715     32811284 :         (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
     8716              :       /* Predication causes the need to restore previous state.  */
     8717     32811284 :       if ((*slot)->unwind_to)
     8718      6611677 :         *slot = (*slot)->unwind_to;
     8719              :       else
     8720     26199607 :         valid_info->nary->clear_slot (slot);
     8721              :     }
                       :   /* Likewise remove PHI entries inserted since TO.  */
     8722      7460976 :   for (; last_inserted_phi != to->phi_top;
     8723      5562313 :        last_inserted_phi = last_inserted_phi->next)
     8724              :     {
     8725      5562313 :       vn_phi_t *slot;
     8726      5562313 :       slot = valid_info->phis->find_slot_with_hash
     8727      5562313 :         (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
     8728      5562313 :       valid_info->phis->clear_slot (slot);
     8729              :     }
                       :   /* And the reference entries, releasing their operand vectors
                       :      before clearing the slots.  */
     8730     15086500 :   for (; last_inserted_ref != to->ref_top;
     8731     13187837 :        last_inserted_ref = last_inserted_ref->next)
     8732              :     {
     8733     13187837 :       vn_reference_t *slot;
     8734     13187837 :       slot = valid_info->references->find_slot_with_hash
     8735     13187837 :         (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
     8736     13187837 :       (*slot)->operands.release ();
     8737     13187837 :       valid_info->references->clear_slot (slot);
     8738              :     }
                       :   /* Release all table memory allocated since TO was recorded.  */
     8739      1898663 :   obstack_free (&vn_tables_obstack, to->ob_top);
     8740              : 
     8741              :   /* Prune [rpo_idx, ] from avail.  */
                       :   /* Pop entries pushed since TO from the last_pushed_avail undo
                       :      chain, returning each vn_avail record to AVAIL's freelist for
                       :      reuse on the next iteration.  */
     8742     20578675 :   for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
     8743              :     {
     8744     18680012 :       vn_ssa_aux_t val = last_pushed_avail;
     8745     18680012 :       vn_avail *av = val->avail;
     8746     18680012 :       val->avail = av->next;
     8747     18680012 :       last_pushed_avail = av->next_undo;
     8748     18680012 :       av->next = avail.m_avail_freelist;
     8749     18680012 :       avail.m_avail_freelist = av;
     8750              :     }
     8751      1898663 : }
    8752              : 
    8753              : /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
    8754              :    If ITERATE is true then treat backedges optimistically as not
    8755              :    executed and iterate.  If ELIMINATE is true then perform
    8756              :    elimination, otherwise leave that to the caller.  If SKIP_ENTRY_PHIS
    8757              :    is true then force PHI nodes in ENTRY->dest to VARYING.  */
    8758              : 
    8759              : static unsigned
    8760      6100003 : do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
    8761              :              bool iterate, bool eliminate, bool skip_entry_phis,
    8762              :              vn_lookup_kind kind)
    8763              : {
    8764      6100003 :   unsigned todo = 0;
    8765      6100003 :   default_vn_walk_kind = kind;
    8766              : 
    8767              :   /* We currently do not support region-based iteration when
    8768              :      elimination is requested.  */
    8769      6100003 :   gcc_assert (!entry || !iterate || !eliminate);
    8770              :   /* When iterating we need loop info up-to-date.  */
    8771      6100003 :   gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
    8772              : 
    8773      6100003 :   bool do_region = entry != NULL;
    8774      6100003 :   if (!do_region)
    8775              :     {
    8776      5414191 :       entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
    8777      5414191 :       exit_bbs = BITMAP_ALLOC (NULL);
    8778      5414191 :       bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    8779              :     }
    8780              : 
    8781              :   /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
    8782              :      re-mark those that are contained in the region.  */
    8783      6100003 :   edge_iterator ei;
    8784      6100003 :   edge e;
    8785     12259886 :   FOR_EACH_EDGE (e, ei, entry->dest->preds)
    8786      6159883 :     e->flags &= ~EDGE_DFS_BACK;
    8787              : 
    8788      6100003 :   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
    8789      6100003 :   auto_vec<std::pair<int, int> > toplevel_scc_extents;
    8790      6100003 :   int n = rev_post_order_and_mark_dfs_back_seme
    8791      7980750 :     (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
    8792              : 
    8793      6100003 :   if (!do_region)
    8794      5414191 :     BITMAP_FREE (exit_bbs);
    8795              : 
    8796              :   /* If there are any non-DFS_BACK edges into entry->dest skip
    8797              :      processing PHI nodes for that block.  This supports
    8798              :      value-numbering loop bodies w/o the actual loop.  */
    8799     12259885 :   FOR_EACH_EDGE (e, ei, entry->dest->preds)
    8800      6159883 :     if (e != entry
    8801        59880 :         && !(e->flags & EDGE_DFS_BACK))
    8802              :       break;
    8803      6100003 :   if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
    8804            0 :     fprintf (dump_file, "Region does not contain all edges into "
    8805              :              "the entry block, skipping its PHIs.\n");
    8806      6100003 :   skip_entry_phis |= e != NULL;
    8807              : 
    8808      6100003 :   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
    8809     56306020 :   for (int i = 0; i < n; ++i)
    8810     50206017 :     bb_to_rpo[rpo[i]] = i;
    8811      6100003 :   vn_bb_to_rpo = bb_to_rpo;
    8812              : 
    8813      6100003 :   unwind_state *rpo_state = XNEWVEC (unwind_state, n);
    8814              : 
    8815      6100003 :   rpo_elim avail (entry->dest);
    8816      6100003 :   rpo_avail = &avail;
    8817              : 
    8818              :   /* Verify we have no extra entries into the region.  */
    8819      6100003 :   if (flag_checking && do_region)
    8820              :     {
    8821       685806 :       auto_bb_flag bb_in_region (fn);
    8822      2076617 :       for (int i = 0; i < n; ++i)
    8823              :         {
    8824      1390811 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8825      1390811 :           bb->flags |= bb_in_region;
    8826              :         }
    8827              :       /* We can't merge the first two loops because we cannot rely
    8828              :          on EDGE_DFS_BACK for edges not within the region.  But if
    8829              :          we decide to always have the bb_in_region flag we can
    8830              :          do the checking during the RPO walk itself (but then it's
    8831              :          also easy to handle MEME conservatively).  */
    8832      2076617 :       for (int i = 0; i < n; ++i)
    8833              :         {
    8834      1390811 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8835      1390811 :           edge e;
    8836      1390811 :           edge_iterator ei;
    8837      3041431 :           FOR_EACH_EDGE (e, ei, bb->preds)
    8838      1650620 :             gcc_assert (e == entry
    8839              :                         || (skip_entry_phis && bb == entry->dest)
    8840              :                         || (e->src->flags & bb_in_region));
    8841              :         }
    8842      2076617 :       for (int i = 0; i < n; ++i)
    8843              :         {
    8844      1390811 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8845      1390811 :           bb->flags &= ~bb_in_region;
    8846              :         }
    8847       685806 :     }
    8848              : 
    8849              :   /* Create the VN state.  For the initial size of the various hashtables
    8850              :      use a heuristic based on region size and number of SSA names.  */
    8851      6100003 :   unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
    8852      6100003 :                           / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
    8853      6100003 :   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
    8854      6100003 :   next_value_id = 1;
    8855      6100003 :   next_constant_value_id = -1;
    8856              : 
    8857      6100003 :   vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
    8858      6100003 :   gcc_obstack_init (&vn_ssa_aux_obstack);
    8859              : 
    8860      6100003 :   gcc_obstack_init (&vn_tables_obstack);
    8861      6100003 :   gcc_obstack_init (&vn_tables_insert_obstack);
    8862      6100003 :   valid_info = XCNEW (struct vn_tables_s);
    8863      6100003 :   allocate_vn_table (valid_info, region_size);
    8864      6100003 :   last_inserted_ref = NULL;
    8865      6100003 :   last_inserted_phi = NULL;
    8866      6100003 :   last_inserted_nary = NULL;
    8867      6100003 :   last_pushed_avail = NULL;
    8868              : 
    8869      6100003 :   vn_valueize = rpo_vn_valueize;
    8870              : 
    8871              :   /* Initialize the unwind state and edge/BB executable state.  */
    8872      6100003 :   unsigned curr_scc = 0;
    8873     56306020 :   for (int i = 0; i < n; ++i)
    8874              :     {
    8875     50206017 :       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8876     50206017 :       rpo_state[i].visited = 0;
    8877     50206017 :       rpo_state[i].max_rpo = i;
    8878     58654187 :       if (!iterate && curr_scc < toplevel_scc_extents.length ())
    8879              :         {
    8880      7038728 :           if (i >= toplevel_scc_extents[curr_scc].first
    8881      7038728 :               && i <= toplevel_scc_extents[curr_scc].second)
    8882      3869154 :             rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
    8883      7038728 :           if (i == toplevel_scc_extents[curr_scc].second)
    8884       731819 :             curr_scc++;
    8885              :         }
    8886     50206017 :       bb->flags &= ~BB_EXECUTABLE;
    8887     50206017 :       bool has_backedges = false;
    8888     50206017 :       edge e;
    8889     50206017 :       edge_iterator ei;
    8890    119179804 :       FOR_EACH_EDGE (e, ei, bb->preds)
    8891              :         {
    8892     68973787 :           if (e->flags & EDGE_DFS_BACK)
    8893      2841577 :             has_backedges = true;
    8894     68973787 :           e->flags &= ~EDGE_EXECUTABLE;
    8895     68973787 :           if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
    8896     68973787 :             continue;
    8897              :         }
    8898     50206017 :       rpo_state[i].iterate = iterate && has_backedges;
    8899              :     }
    8900      6100003 :   entry->flags |= EDGE_EXECUTABLE;
    8901      6100003 :   entry->dest->flags |= BB_EXECUTABLE;
    8902              : 
    8903              :   /* As heuristic to improve compile-time we handle only the N innermost
    8904              :      loops and the outermost one optimistically.  */
    8905      6100003 :   if (iterate)
    8906              :     {
    8907      4219256 :       unsigned max_depth = param_rpo_vn_max_loop_depth;
    8908     14199333 :       for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
    8909      1543958 :         if (loop_depth (loop) > max_depth)
    8910         2092 :           for (unsigned i = 2;
    8911         8970 :                i < loop_depth (loop) - max_depth; ++i)
    8912              :             {
    8913         2092 :               basic_block header = superloop_at_depth (loop, i)->header;
    8914         2092 :               bool non_latch_backedge = false;
    8915         2092 :               edge e;
    8916         2092 :               edge_iterator ei;
    8917         6307 :               FOR_EACH_EDGE (e, ei, header->preds)
    8918         4215 :                 if (e->flags & EDGE_DFS_BACK)
    8919              :                   {
    8920              :                     /* There can be a non-latch backedge into the header
    8921              :                        which is part of an outer irreducible region.  We
    8922              :                        cannot avoid iterating this block then.  */
    8923         2123 :                     if (!dominated_by_p (CDI_DOMINATORS,
    8924         2123 :                                          e->src, e->dest))
    8925              :                       {
    8926           12 :                         if (dump_file && (dump_flags & TDF_DETAILS))
    8927            0 :                           fprintf (dump_file, "non-latch backedge %d -> %d "
    8928              :                                    "forces iteration of loop %d\n",
    8929            0 :                                    e->src->index, e->dest->index, loop->num);
    8930              :                         non_latch_backedge = true;
    8931              :                       }
    8932              :                     else
    8933         2111 :                       e->flags |= EDGE_EXECUTABLE;
    8934              :                   }
    8935         2092 :               rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
    8936      4219256 :             }
    8937              :     }
    8938              : 
    8939      6100003 :   uint64_t nblk = 0;
    8940      6100003 :   int idx = 0;
    8941      4219256 :   if (iterate)
    8942              :     /* Go and process all blocks, iterating as necessary.  */
    8943     48646569 :     do
    8944              :       {
    8945     48646569 :         basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
    8946              : 
    8947              :         /* If the block has incoming backedges remember unwind state.  This
    8948              :            is required even for non-executable blocks since in irreducible
    8949              :            regions we might reach them via the backedge and re-start iterating
    8950              :            from there.
    8951              :            Note we can individually mark blocks with incoming backedges to
    8952              :            not iterate where we then handle PHIs conservatively.  We do that
    8953              :            heuristically to reduce compile-time for degenerate cases.  */
    8954     48646569 :         if (rpo_state[idx].iterate)
    8955              :           {
    8956      4377128 :             rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
    8957      4377128 :             rpo_state[idx].ref_top = last_inserted_ref;
    8958      4377128 :             rpo_state[idx].phi_top = last_inserted_phi;
    8959      4377128 :             rpo_state[idx].nary_top = last_inserted_nary;
    8960      4377128 :             rpo_state[idx].avail_top
    8961      4377128 :               = last_pushed_avail ? last_pushed_avail->avail : NULL;
    8962              :           }
    8963              : 
    8964     48646569 :         if (!(bb->flags & BB_EXECUTABLE))
    8965              :           {
    8966       906647 :             if (dump_file && (dump_flags & TDF_DETAILS))
    8967            2 :               fprintf (dump_file, "Block %d: BB%d found not executable\n",
    8968              :                        idx, bb->index);
    8969       906647 :             idx++;
    8970      2805310 :             continue;
    8971              :           }
    8972              : 
    8973     47739922 :         if (dump_file && (dump_flags & TDF_DETAILS))
    8974          334 :           fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
    8975     47739922 :         nblk++;
    8976     95479844 :         todo |= process_bb (avail, bb,
    8977     47739922 :                             rpo_state[idx].visited != 0,
    8978              :                             rpo_state[idx].iterate,
    8979              :                             iterate, eliminate, do_region, exit_bbs, false);
    8980     47739922 :         rpo_state[idx].visited++;
    8981              : 
    8982              :         /* Verify if changed values flow over executable outgoing backedges
    8983              :            and those change destination PHI values (that's the thing we
    8984              :            can easily verify).  Reduce over all such edges to the farthest
    8985              :            away PHI.  */
    8986     47739922 :         int iterate_to = -1;
    8987     47739922 :         edge_iterator ei;
    8988     47739922 :         edge e;
    8989    114990299 :         FOR_EACH_EDGE (e, ei, bb->succs)
    8990     67250377 :           if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
    8991              :               == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
    8992      4386988 :               && rpo_state[bb_to_rpo[e->dest->index]].iterate)
    8993              :             {
    8994      4384254 :               int destidx = bb_to_rpo[e->dest->index];
    8995      4384254 :               if (!rpo_state[destidx].visited)
    8996              :                 {
    8997          135 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    8998            0 :                     fprintf (dump_file, "Unvisited destination %d\n",
    8999              :                              e->dest->index);
    9000          135 :                   if (iterate_to == -1 || destidx < iterate_to)
    9001          135 :                     iterate_to = destidx;
    9002          135 :                   continue;
    9003              :                 }
    9004      4384119 :               if (dump_file && (dump_flags & TDF_DETAILS))
    9005           53 :                 fprintf (dump_file, "Looking for changed values of backedge"
    9006              :                          " %d->%d destination PHIs\n",
    9007           53 :                          e->src->index, e->dest->index);
    9008      4384119 :               vn_context_bb = e->dest;
    9009      4384119 :               gphi_iterator gsi;
    9010      4384119 :               for (gsi = gsi_start_phis (e->dest);
    9011     10036340 :                    !gsi_end_p (gsi); gsi_next (&gsi))
    9012              :                 {
    9013      7551041 :                   bool inserted = false;
    9014              :                   /* While we'd ideally just iterate on value changes
    9015              :                      we CSE PHIs and do that even across basic-block
    9016              :                      boundaries.  So even hashtable state changes can
    9017              :                      be important (which is roughly equivalent to
    9018              :                      PHI argument value changes).  To not excessively
    9019              :                      iterate because of that we track whether a PHI
    9020              :                      was CSEd to with GF_PLF_1.  */
    9021      7551041 :                   bool phival_changed;
    9022      7551041 :                   if ((phival_changed = visit_phi (gsi.phi (),
    9023              :                                                    &inserted, false))
    9024      8927153 :                       || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
    9025              :                     {
    9026      1898820 :                       if (!phival_changed
    9027      1898820 :                           && dump_file && (dump_flags & TDF_DETAILS))
    9028            0 :                         fprintf (dump_file, "PHI was CSEd and hashtable "
    9029              :                                  "state (changed)\n");
    9030      1898820 :                       if (iterate_to == -1 || destidx < iterate_to)
    9031      1898735 :                         iterate_to = destidx;
    9032      1898820 :                       break;
    9033              :                     }
    9034              :                 }
    9035      4384119 :               vn_context_bb = NULL;
    9036              :             }
    9037     47739922 :         if (iterate_to != -1)
    9038              :           {
    9039      1898663 :             do_unwind (&rpo_state[iterate_to], avail);
    9040      1898663 :             idx = iterate_to;
    9041      1898663 :             if (dump_file && (dump_flags & TDF_DETAILS))
    9042           20 :               fprintf (dump_file, "Iterating to %d BB%d\n",
    9043           20 :                        iterate_to, rpo[iterate_to]);
    9044      1898663 :             continue;
    9045              :           }
    9046              : 
    9047     45841259 :         idx++;
    9048              :       }
    9049     48646569 :     while (idx < n);
    9050              : 
    9051              :   else /* !iterate */
    9052              :     {
    9053              :       /* Process all blocks greedily with a worklist that enforces RPO
    9054              :          processing of reachable blocks.  */
    9055      1880747 :       auto_bitmap worklist;
    9056      1880747 :       bitmap_set_bit (worklist, 0);
    9057     17153221 :       while (!bitmap_empty_p (worklist))
    9058              :         {
    9059     13391727 :           int idx = bitmap_clear_first_set_bit (worklist);
    9060     13391727 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
    9061     13391727 :           gcc_assert ((bb->flags & BB_EXECUTABLE)
    9062              :                       && !rpo_state[idx].visited);
    9063              : 
    9064     13391727 :           if (dump_file && (dump_flags & TDF_DETAILS))
    9065        35027 :             fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
    9066              : 
    9067              :           /* When we run into predecessor edges where we cannot trust its
    9068              :              executable state mark them executable so PHI processing will
    9069              :              be conservative.
    9070              :              ???  Do we need to force arguments flowing over that edge
    9071              :              to be varying or will they even always be?  */
    9072     13391727 :           edge_iterator ei;
    9073     13391727 :           edge e;
    9074     32458263 :           FOR_EACH_EDGE (e, ei, bb->preds)
    9075     19066536 :             if (!(e->flags & EDGE_EXECUTABLE)
    9076      1021514 :                 && (bb == entry->dest
    9077       964834 :                     || (!rpo_state[bb_to_rpo[e->src->index]].visited
    9078       928730 :                         && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
    9079              :                             >= (int)idx))))
    9080              :               {
    9081       962122 :                 if (dump_file && (dump_flags & TDF_DETAILS))
    9082        11243 :                   fprintf (dump_file, "Cannot trust state of predecessor "
    9083              :                            "edge %d -> %d, marking executable\n",
    9084        11243 :                            e->src->index, e->dest->index);
    9085       962122 :                 e->flags |= EDGE_EXECUTABLE;
    9086              :               }
    9087              : 
    9088     13391727 :           nblk++;
    9089     13391727 :           todo |= process_bb (avail, bb, false, false, false, eliminate,
    9090              :                               do_region, exit_bbs,
    9091     13391727 :                               skip_entry_phis && bb == entry->dest);
    9092     13391727 :           rpo_state[idx].visited++;
    9093              : 
    9094     33090676 :           FOR_EACH_EDGE (e, ei, bb->succs)
    9095     19698949 :             if ((e->flags & EDGE_EXECUTABLE)
    9096     19622580 :                 && e->dest->index != EXIT_BLOCK
    9097     18457236 :                 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
    9098     36820024 :                 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
    9099     16164275 :               bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
    9100              :         }
    9101      1880747 :     }
    9102              : 
    9103              :   /* If statistics or dump file active.  */
    9104      6100003 :   int nex = 0;
    9105      6100003 :   unsigned max_visited = 1;
    9106     56306020 :   for (int i = 0; i < n; ++i)
    9107              :     {
    9108     50206017 :       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    9109     50206017 :       if (bb->flags & BB_EXECUTABLE)
    9110     49633501 :         nex++;
    9111     50206017 :       statistics_histogram_event (cfun, "RPO block visited times",
    9112     50206017 :                                   rpo_state[i].visited);
    9113     50206017 :       if (rpo_state[i].visited > max_visited)
    9114              :         max_visited = rpo_state[i].visited;
    9115              :     }
    9116      6100003 :   unsigned nvalues = 0, navail = 0;
    9117    168051773 :   for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
    9118    330003543 :        i != vn_ssa_aux_hash->end (); ++i)
    9119              :     {
    9120    161951770 :       nvalues++;
    9121    161951770 :       vn_avail *av = (*i)->avail;
    9122    239814926 :       while (av)
    9123              :         {
    9124     77863156 :           navail++;
    9125     77863156 :           av = av->next;
    9126              :         }
    9127              :     }
    9128      6100003 :   statistics_counter_event (cfun, "RPO blocks", n);
    9129      6100003 :   statistics_counter_event (cfun, "RPO blocks visited", nblk);
    9130      6100003 :   statistics_counter_event (cfun, "RPO blocks executable", nex);
    9131      6100003 :   statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
    9132      6100003 :   statistics_histogram_event (cfun, "RPO num values", nvalues);
    9133      6100003 :   statistics_histogram_event (cfun, "RPO num avail", navail);
    9134      6100003 :   statistics_histogram_event (cfun, "RPO num lattice",
    9135      6100003 :                               vn_ssa_aux_hash->elements ());
    9136      6100003 :   if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    9137              :     {
    9138        11150 :       fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
    9139              :                " blocks in total discovering %d executable blocks iterating "
    9140              :                "%d.%d times, a block was visited max. %u times\n",
    9141              :                n, nblk, nex,
    9142        11150 :                (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
    9143              :                max_visited);
    9144        11150 :       fprintf (dump_file, "RPO tracked %d values available at %d locations "
    9145              :                "and %" PRIu64 " lattice elements\n",
    9146        11150 :                nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    9147              :     }
    9148              : 
    9149      6100003 :   if (eliminate)
    9150              :     {
    9151              :       /* When !iterate we already performed elimination during the RPO
    9152              :          walk.  */
    9153      5119995 :       if (iterate)
    9154              :         {
    9155              :           /* Elimination for region-based VN needs to be done within the
    9156              :              RPO walk.  */
    9157      3258667 :           gcc_assert (! do_region);
    9158              :           /* Note we can't use avail.walk here because that gets confused
    9159              :              by the existing availability and it will be less efficient
    9160              :              as well.  */
    9161      3258667 :           todo |= eliminate_with_rpo_vn (NULL);
    9162              :         }
    9163              :       else
    9164      1861328 :         todo |= avail.eliminate_cleanup (do_region);
    9165              :     }
    9166              : 
    9167      6100003 :   vn_valueize = NULL;
    9168      6100003 :   rpo_avail = NULL;
    9169      6100003 :   vn_bb_to_rpo = NULL;
    9170              : 
    9171      6100003 :   XDELETEVEC (bb_to_rpo);
    9172      6100003 :   XDELETEVEC (rpo);
    9173      6100003 :   XDELETEVEC (rpo_state);
    9174              : 
    9175      6100003 :   return todo;
    9176      6100003 : }
    9177              : 
    9178              : /* Region-based entry for RPO VN.  Performs value-numbering and elimination
    9179              :    on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
    9180              :    the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
    9181              :    are not considered.
    9182              :    If ITERATE is true then treat backedges optimistically as not
    9183              :    executed and iterate.  If ELIMINATE is true then perform
    9184              :    elimination, otherwise leave that to the caller.
    9185              :    If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
    9186              :    KIND specifies the amount of work done for handling memory operations.  */
    9187              : 
    9188              : unsigned
    9189       705231 : do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
    9190              :            bool iterate, bool eliminate, bool skip_entry_phis,
    9191              :            vn_lookup_kind kind)
    9192              : {
    9193       705231 :   auto_timevar tv (TV_TREE_RPO_VN);
    9194       705231 :   unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
    9195              :                                skip_entry_phis, kind);
    9196       705231 :   free_rpo_vn ();
    9197      1410462 :   return todo;
    9198       705231 : }
    9199              : 
    9200              : 
    9201              : namespace {
    9202              : 
/* Pass descriptor for the GIMPLE FRE (full redundancy elimination) pass;
   the pass itself is implemented by pass_fre below.  */

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
    9215              : 
    9216              : class pass_fre : public gimple_opt_pass
    9217              : {
    9218              : public:
    9219      1439360 :   pass_fre (gcc::context *ctxt)
    9220      2878720 :     : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
    9221              :   {}
    9222              : 
    9223              :   /* opt_pass methods: */
    9224      1151488 :   opt_pass * clone () final override { return new pass_fre (m_ctxt); }
    9225      1439360 :   void set_pass_param (unsigned int n, bool param) final override
    9226              :     {
    9227      1439360 :       gcc_assert (n == 0);
    9228      1439360 :       may_iterate = param;
    9229      1439360 :     }
    9230      4513047 :   bool gate (function *) final override
    9231              :     {
    9232      4513047 :       return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    9233              :     }
    9234              :   unsigned int execute (function *) final override;
    9235              : 
    9236              : private:
    9237              :   bool may_iterate;
    9238              : }; // class pass_fre
    9239              : 
    9240              : unsigned int
    9241      4434183 : pass_fre::execute (function *fun)
    9242              : {
    9243      4434183 :   unsigned todo = 0;
    9244              : 
    9245              :   /* At -O[1g] use the cheap non-iterating mode.  */
    9246      4434183 :   bool iterate_p = may_iterate && (optimize > 1);
    9247      4434183 :   calculate_dominance_info (CDI_DOMINATORS);
    9248      4434183 :   if (iterate_p)
    9249      3258667 :     loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
    9250              : 
    9251      4434183 :   todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
    9252      4434183 :   free_rpo_vn ();
    9253              : 
    9254      4434183 :   if (iterate_p)
    9255      3258667 :     loop_optimizer_finalize ();
    9256              : 
    9257      4434183 :   if (scev_initialized_p ())
    9258        31831 :     scev_reset_htab ();
    9259              : 
    9260              :   /* For late FRE after IVOPTs and unrolling, see if we can
    9261              :      remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
    9262      4434183 :   if (!may_iterate)
    9263       992194 :     todo |= TODO_update_address_taken;
    9264              : 
    9265      4434183 :   return todo;
    9266              : }
    9267              : 
    9268              : } // anon namespace
    9269              : 
/* Factory used by the pass manager to instantiate the FRE pass.  */

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}
    9275              : 
    9276              : #undef BB_EXECUTABLE
        

Generated by: LCOV version 2.4-beta

The LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. The GCC test suite is run with the built compiler.