LCOV - code coverage report
Current view: top level - gcc - tree-ssa-sccvn.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 95.7 % 4606 4409
Test Date: 2026-02-28 14:20:25 Functions: 98.4 % 124 122
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* SCC value numbering for trees
       2              :    Copyright (C) 2006-2026 Free Software Foundation, Inc.
       3              :    Contributed by Daniel Berlin <dan@dberlin.org>
       4              : 
       5              : This file is part of GCC.
       6              : 
       7              : GCC is free software; you can redistribute it and/or modify
       8              : it under the terms of the GNU General Public License as published by
       9              : the Free Software Foundation; either version 3, or (at your option)
      10              : any later version.
      11              : 
      12              : GCC is distributed in the hope that it will be useful,
      13              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      14              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      15              : GNU General Public License for more details.
      16              : 
      17              : You should have received a copy of the GNU General Public License
      18              : along with GCC; see the file COPYING3.  If not see
      19              : <http://www.gnu.org/licenses/>.  */
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "rtl.h"
      26              : #include "tree.h"
      27              : #include "gimple.h"
      28              : #include "ssa.h"
      29              : #include "expmed.h"
      30              : #include "insn-config.h"
      31              : #include "memmodel.h"
      32              : #include "emit-rtl.h"
      33              : #include "cgraph.h"
      34              : #include "gimple-pretty-print.h"
      35              : #include "splay-tree-utils.h"
      36              : #include "alias.h"
      37              : #include "fold-const.h"
      38              : #include "stor-layout.h"
      39              : #include "cfganal.h"
      40              : #include "tree-inline.h"
      41              : #include "internal-fn.h"
      42              : #include "gimple-iterator.h"
      43              : #include "gimple-fold.h"
      44              : #include "tree-eh.h"
      45              : #include "gimplify.h"
      46              : #include "flags.h"
      47              : #include "dojump.h"
      48              : #include "explow.h"
      49              : #include "calls.h"
      50              : #include "varasm.h"
      51              : #include "stmt.h"
      52              : #include "expr.h"
      53              : #include "tree-dfa.h"
      54              : #include "tree-ssa.h"
      55              : #include "dumpfile.h"
      56              : #include "cfgloop.h"
      57              : #include "tree-ssa-propagate.h"
      58              : #include "tree-cfg.h"
      59              : #include "domwalk.h"
      60              : #include "gimple-match.h"
      61              : #include "stringpool.h"
      62              : #include "attribs.h"
      63              : #include "tree-pass.h"
      64              : #include "statistics.h"
      65              : #include "langhooks.h"
      66              : #include "ipa-utils.h"
      67              : #include "dbgcnt.h"
      68              : #include "tree-cfgcleanup.h"
      69              : #include "tree-ssa-loop.h"
      70              : #include "tree-scalar-evolution.h"
      71              : #include "tree-ssa-loop-niter.h"
      72              : #include "builtins.h"
      73              : #include "fold-const-call.h"
      74              : #include "ipa-modref-tree.h"
      75              : #include "ipa-modref.h"
      76              : #include "tree-ssa-sccvn.h"
      77              : #include "alloc-pool.h"
      78              : #include "symbol-summary.h"
      79              : #include "sreal.h"
      80              : #include "ipa-cp.h"
      81              : #include "ipa-prop.h"
      82              : #include "target.h"
      83              : 
      84              : /* This algorithm is based on the SCC algorithm presented by Keith
      85              :    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
      86              :    (http://citeseer.ist.psu.edu/41805.html).  In
      87              :    straight line code, it is equivalent to a regular hash based value
      88              :    numbering that is performed in reverse postorder.
      89              : 
      90              :    For code with cycles, there are two alternatives, both of which
      91              :    require keeping the hashtables separate from the actual list of
      92              :    value numbers for SSA names.
      93              : 
      94              :    1. Iterate value numbering in an RPO walk of the blocks, removing
      95              :    all the entries from the hashtable after each iteration (but
      96              :    keeping the SSA name->value number mapping between iterations).
      97              :    Iterate until it does not change.
      98              : 
      99              :    2. Perform value numbering as part of an SCC walk on the SSA graph,
     100              :    iterating only the cycles in the SSA graph until they do not change
     101              :    (using a separate, optimistic hashtable for value numbering the SCC
     102              :    operands).
     103              : 
     104              :    The second is not just faster in practice (because most SSA graph
     105              :    cycles do not involve all the variables in the graph), it also has
     106              :    some nice properties.
     107              : 
     108              :    One of these nice properties is that when we pop an SCC off the
     109              :    stack, we are guaranteed to have processed all the operands coming from
     110              :    *outside of that SCC*, so we do not need to do anything special to
     111              :    ensure they have value numbers.
     112              : 
     113              :    Another nice property is that the SCC walk is done as part of a DFS
     114              :    of the SSA graph, which makes it easy to perform combining and
     115              :    simplifying operations at the same time.
     116              : 
     117              :    The code below is deliberately written in a way that makes it easy
     118              :    to separate the SCC walk from the other work it does.
     119              : 
     120              :    In order to propagate constants through the code, we track which
     121              :    expressions contain constants, and use those while folding.  In
     122              :    theory, we could also track expressions whose value numbers are
     123              :    replaced, in case we end up folding based on expression
     124              :    identities.
     125              : 
     126              :    In order to value number memory, we assign value numbers to vuses.
     127              :    This enables us to note that, for example, stores to the same
     128              :    address of the same value from the same starting memory states are
     129              :    equivalent.
     130              :    TODO:
     131              : 
     132              :    1. We can iterate only the changing portions of the SCC's, but
     133              :    I have not seen an SCC big enough for this to be a win.
     134              :    2. If you differentiate between phi nodes for loops and phi nodes
     135              :    for if-then-else, you can properly consider phi nodes in different
     136              :    blocks for equivalence.
     137              :    3. We could value number vuses in more cases, particularly, whole
     138              :    structure copies.
     139              : */
     140              : 
     141              : /* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
     142              : #define BB_EXECUTABLE BB_VISITED
     143              : 
     144              : static vn_lookup_kind default_vn_walk_kind;
     145              : 
     146              : /* vn_nary_op hashtable helpers.  */
     147              : 
     148              : struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
     149              : {
     150              :   typedef vn_nary_op_s *compare_type;
     151              :   static inline hashval_t hash (const vn_nary_op_s *);
     152              :   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
     153              : };
     154              : 
     155              : /* Return the computed hashcode for nary operation P1.  */
     156              : 
     157              : inline hashval_t
     158    766021782 : vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
     159              : {
     160    766021782 :   return vno1->hashcode;
     161              : }
     162              : 
     163              : /* Compare nary operations P1 and P2 and return true if they are
     164              :    equivalent.  */
     165              : 
     166              : inline bool
     167    970404614 : vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
     168              : {
     169    970404614 :   return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
     170              : }
     171              : 
     172              : typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
     173              : typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
     174              : 
     175              : 
     176              : /* vn_phi hashtable helpers.  */
     177              : 
     178              : static int
     179              : vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
     180              : 
     181              : struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
     182              : {
     183              :   static inline hashval_t hash (const vn_phi_s *);
     184              :   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
     185              : };
     186              : 
     187              : /* Return the computed hashcode for phi operation P1.  */
     188              : 
     189              : inline hashval_t
     190     25690159 : vn_phi_hasher::hash (const vn_phi_s *vp1)
     191              : {
     192     25690159 :   return vp1->hashcode;
     193              : }
     194              : 
     195              : /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
     196              : 
     197              : inline bool
     198     46504719 : vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
     199              : {
     200     46504719 :   return vp1 == vp2 || vn_phi_eq (vp1, vp2);
     201              : }
     202              : 
     203              : typedef hash_table<vn_phi_hasher> vn_phi_table_type;
     204              : typedef vn_phi_table_type::iterator vn_phi_iterator_type;
     205              : 
     206              : 
     207              : /* Compare two reference operands P1 and P2 for equality.  Return true if
     208              :    they are equal, and false otherwise.  */
     209              : 
     210              : static int
     211     23219551 : vn_reference_op_eq (const void *p1, const void *p2)
     212              : {
     213     23219551 :   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
     214     23219551 :   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
     215              : 
     216     23219551 :   return (vro1->opcode == vro2->opcode
     217              :           /* We do not care for differences in type qualification.  */
     218     23217623 :           && (vro1->type == vro2->type
     219      1121217 :               || (vro1->type && vro2->type
     220      1121217 :                   && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
     221      1121217 :                                          TYPE_MAIN_VARIANT (vro2->type))))
     222     22241238 :           && expressions_equal_p (vro1->op0, vro2->op0)
     223     22210333 :           && expressions_equal_p (vro1->op1, vro2->op1)
     224     22210333 :           && expressions_equal_p (vro1->op2, vro2->op2)
     225     45429884 :           && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
     226              : }
     227              : 
     228              : /* Free a reference operation structure VP.  */
     229              : 
     230              : static inline void
     231            0 : free_reference (vn_reference_s *vr)
     232              : {
     233            0 :   vr->operands.release ();
     234              : }
     235              : 
     236              : 
     237              : /* vn_reference hashtable helpers.  */
     238              : 
     239              : struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
     240              : {
     241              :   static inline hashval_t hash (const vn_reference_s *);
     242              :   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
     243              : };
     244              : 
     245              : /* Return the hashcode for a given reference operation P1.  */
     246              : 
     247              : inline hashval_t
     248   3673973123 : vn_reference_hasher::hash (const vn_reference_s *vr1)
     249              : {
     250   3673973123 :   return vr1->hashcode;
     251              : }
     252              : 
     253              : inline bool
     254   4386639918 : vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
     255              : {
     256   4386639918 :   return v == c || vn_reference_eq (v, c);
     257              : }
     258              : 
     259              : typedef hash_table<vn_reference_hasher> vn_reference_table_type;
     260              : typedef vn_reference_table_type::iterator vn_reference_iterator_type;
     261              : 
     262              : /* Pretty-print OPS to OUTFILE.  */
     263              : 
     264              : void
     265          317 : print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
     266              : {
     267          317 :   vn_reference_op_t vro;
     268          317 :   unsigned int i;
     269          317 :   fprintf (outfile, "{");
     270         1394 :   for (i = 0; ops.iterate (i, &vro); i++)
     271              :     {
     272         1077 :       bool closebrace = false;
     273         1077 :       if (vro->opcode != SSA_NAME
     274          863 :           && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
     275              :         {
     276          863 :           fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
     277          863 :           if (vro->op0 || vro->opcode == CALL_EXPR)
     278              :             {
     279          863 :               fprintf (outfile, "<");
     280          863 :               closebrace = true;
     281              :             }
     282              :         }
     283         1077 :       if (vro->op0 || vro->opcode == CALL_EXPR)
     284              :         {
     285         1077 :           if (!vro->op0)
     286            0 :             fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
     287              :           else
     288         1077 :             print_generic_expr (outfile, vro->op0);
     289         1077 :           if (vro->op1)
     290              :             {
     291          185 :               fprintf (outfile, ",");
     292          185 :               print_generic_expr (outfile, vro->op1);
     293              :             }
     294         1077 :           if (vro->op2)
     295              :             {
     296          185 :               fprintf (outfile, ",");
     297          185 :               print_generic_expr (outfile, vro->op2);
     298              :             }
     299              :         }
     300         1077 :       if (closebrace)
     301          863 :         fprintf (outfile, ">");
     302         1077 :       if (i != ops.length () - 1)
     303          760 :         fprintf (outfile, ",");
     304              :     }
     305          317 :   fprintf (outfile, "}");
     306          317 : }
     307              : 
     308              : DEBUG_FUNCTION void
     309            0 : debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
     310              : {
     311            0 :   print_vn_reference_ops (stderr, ops);
     312            0 :   fputc ('\n', stderr);
     313            0 : }
     314              : 
     315              : /* The set of VN hashtables.  */
     316              : 
     317              : typedef struct vn_tables_s
     318              : {
     319              :   vn_nary_op_table_type *nary;
     320              :   vn_phi_table_type *phis;
     321              :   vn_reference_table_type *references;
     322              : } *vn_tables_t;
     323              : 
     324              : 
     325              : /* vn_constant hashtable helpers.  */
     326              : 
     327              : struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
     328              : {
     329              :   static inline hashval_t hash (const vn_constant_s *);
     330              :   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
     331              : };
     332              : 
     333              : /* Hash table hash function for vn_constant_t.  */
     334              : 
     335              : inline hashval_t
     336     12061027 : vn_constant_hasher::hash (const vn_constant_s *vc1)
     337              : {
     338     12061027 :   return vc1->hashcode;
     339              : }
     340              : 
     341              : /* Hash table equality function for vn_constant_t.  */
     342              : 
     343              : inline bool
     344     14561827 : vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
     345              : {
     346     14561827 :   if (vc1->hashcode != vc2->hashcode)
     347              :     return false;
     348              : 
     349      2192826 :   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
     350              : }
     351              : 
     352              : static hash_table<vn_constant_hasher> *constant_to_value_id;
     353              : 
     354              : 
     355              : /* Obstack we allocate the vn-tables elements from.  */
     356              : static obstack vn_tables_obstack;
     357              : /* Special obstack we never unwind.  */
     358              : static obstack vn_tables_insert_obstack;
     359              : 
     360              : static vn_reference_t last_inserted_ref;
     361              : static vn_phi_t last_inserted_phi;
     362              : static vn_nary_op_t last_inserted_nary;
     363              : static vn_ssa_aux_t last_pushed_avail;
     364              : 
     365              : /* Valid hashtables storing information we have proven to be
     366              :    correct.  */
     367              : static vn_tables_t valid_info;
     368              : 
     369              : /* Global RPO state for access from hooks.  */
     370              : static class eliminate_dom_walker *rpo_avail;
     371              : basic_block vn_context_bb;
     372              : int *vn_bb_to_rpo;
     373              : 
     374              : 
     375              : /* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
     376              :    an SSA name, otherwise just return it.  */
     377              : tree (*vn_valueize) (tree);
     378              : static tree
     379        84166 : vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
     380              : {
     381        84166 :   basic_block saved_vn_context_bb = vn_context_bb;
     382              :   /* Look for sth available at the definition block of the argument.
     383              :      This avoids inconsistencies between availability there which
     384              :      decides if the stmt can be removed and availability at the
     385              :      use site.  The SSA property ensures that things available
     386              :      at the definition are also available at uses.  */
     387        84166 :   if (!SSA_NAME_IS_DEFAULT_DEF (t))
     388        80271 :     vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
     389        84166 :   tree res = vn_valueize (t);
     390        84166 :   vn_context_bb = saved_vn_context_bb;
     391        84166 :   return res;
     392              : }
     393              : 
     394              : 
     395              : /* This represents the top of the VN lattice, which is the universal
     396              :    value.  */
     397              : 
     398              : tree VN_TOP;
     399              : 
     400              : /* Unique counter for our value ids.  */
     401              : 
     402              : static unsigned int next_value_id;
     403              : static int next_constant_value_id;
     404              : 
     405              : 
     406              : /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
     407              :    are allocated on an obstack for locality reasons, and to free them
     408              :    without looping over the vec.  */
     409              : 
     410              : struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
     411              : {
     412              :   typedef vn_ssa_aux_t value_type;
     413              :   typedef tree compare_type;
     414              :   static inline hashval_t hash (const value_type &);
     415              :   static inline bool equal (const value_type &, const compare_type &);
     416              :   static inline void mark_deleted (value_type &) {}
     417              :   static const bool empty_zero_p = true;
     418            0 :   static inline void mark_empty (value_type &e) { e = NULL; }
     419              :   static inline bool is_deleted (value_type &) { return false; }
     420  >13116*10^7 :   static inline bool is_empty (value_type &e) { return e == NULL; }
     421              : };
     422              : 
     423              : hashval_t
     424  43199986490 : vn_ssa_aux_hasher::hash (const value_type &entry)
     425              : {
     426  43199986490 :   return SSA_NAME_VERSION (entry->name);
     427              : }
     428              : 
     429              : bool
     430  49410820494 : vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
     431              : {
     432  49410820494 :   return name == entry->name;
     433              : }
     434              : 
     435              : static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
     436              : typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
     437              : static struct obstack vn_ssa_aux_obstack;
     438              : 
     439              : static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
     440              : static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
     441              :                                             vn_nary_op_table_type *);
     442              : static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
     443              :                                          enum tree_code, tree, tree *);
     444              : static tree vn_lookup_simplify_result (gimple_match_op *);
     445              : static vn_reference_t vn_reference_lookup_or_insert_for_pieces
     446              :           (tree, alias_set_type, alias_set_type, poly_int64, poly_int64, tree,
     447              :            vec<vn_reference_op_s, va_heap>, tree);
     448              : 
     449              : /* Return whether there is value numbering information for a given SSA name.  */
     450              : 
     451              : bool
     452      5166161 : has_VN_INFO (tree name)
     453              : {
     454      5166161 :   return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
     455              : }
     456              : 
     457              : vn_ssa_aux_t
     458   3721813394 : VN_INFO (tree name)
     459              : {
     460   3721813394 :   vn_ssa_aux_t *res
     461   3721813394 :     = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
     462              :                                             INSERT);
     463   3721813394 :   if (*res != NULL)
     464              :     return *res;
     465              : 
     466    170309914 :   vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
     467    170309914 :   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
     468    170309914 :   newinfo->name = name;
     469    170309914 :   newinfo->valnum = VN_TOP;
     470              :   /* We are using the visited flag to handle uses with defs not within the
     471              :      region being value-numbered.  */
     472    170309914 :   newinfo->visited = false;
     473              : 
     474              :   /* Given we create the VN_INFOs on-demand now we have to do initialization
     475              :      different than VN_TOP here.  */
     476    170309914 :   if (SSA_NAME_IS_DEFAULT_DEF (name))
     477      9211730 :     switch (TREE_CODE (SSA_NAME_VAR (name)))
     478              :       {
     479      1651607 :       case VAR_DECL:
     480              :         /* All undefined vars are VARYING.  */
     481      1651607 :         newinfo->valnum = name;
     482      1651607 :         newinfo->visited = true;
     483      1651607 :         break;
     484              : 
     485      7501917 :       case PARM_DECL:
     486              :         /* Parameters are VARYING but we can record a condition
     487              :            if we know it is a non-NULL pointer.  */
     488      7501917 :         newinfo->visited = true;
     489      7501917 :         newinfo->valnum = name;
     490     11509250 :         if (POINTER_TYPE_P (TREE_TYPE (name))
     491      8588126 :             && nonnull_arg_p (SSA_NAME_VAR (name)))
     492              :           {
     493      2272901 :             tree ops[2];
     494      2272901 :             ops[0] = name;
     495      2272901 :             ops[1] = build_int_cst (TREE_TYPE (name), 0);
     496      2272901 :             vn_nary_op_t nary;
     497              :             /* Allocate from non-unwinding stack.  */
     498      2272901 :             nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
     499      2272901 :             init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
     500              :                                          boolean_type_node, ops);
     501      2272901 :             nary->predicated_values = 0;
     502      2272901 :             nary->u.result = boolean_true_node;
     503      2272901 :             vn_nary_op_insert_into (nary, valid_info->nary);
     504      2272901 :             gcc_assert (nary->unwind_to == NULL);
     505              :             /* Also do not link it into the undo chain.  */
     506      2272901 :             last_inserted_nary = nary->next;
     507      2272901 :             nary->next = (vn_nary_op_t)(void *)-1;
     508      2272901 :             nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
     509      2272901 :             init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
     510              :                                          boolean_type_node, ops);
     511      2272901 :             nary->predicated_values = 0;
     512      2272901 :             nary->u.result = boolean_false_node;
     513      2272901 :             vn_nary_op_insert_into (nary, valid_info->nary);
     514      2272901 :             gcc_assert (nary->unwind_to == NULL);
     515      2272901 :             last_inserted_nary = nary->next;
     516      2272901 :             nary->next = (vn_nary_op_t)(void *)-1;
     517      2272901 :             if (dump_file && (dump_flags & TDF_DETAILS))
     518              :               {
     519           38 :                 fprintf (dump_file, "Recording ");
     520           38 :                 print_generic_expr (dump_file, name, TDF_SLIM);
     521           38 :                 fprintf (dump_file, " != 0\n");
     522              :               }
     523              :           }
     524              :         break;
     525              : 
     526        58206 :       case RESULT_DECL:
     527              :         /* If the result is passed by invisible reference the default
     528              :            def is initialized, otherwise it's uninitialized.  Still
     529              :            undefined is varying.  */
     530        58206 :         newinfo->visited = true;
     531        58206 :         newinfo->valnum = name;
     532        58206 :         break;
     533              : 
     534            0 :       default:
     535            0 :         gcc_unreachable ();
     536              :       }
     537              :   return newinfo;
     538              : }
     539              : 
     540              : /* Return the SSA value of X.  */
     541              : 
     542              : inline tree
     543   3380902984 : SSA_VAL (tree x, bool *visited = NULL)
     544              : {
     545   3380902984 :   vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
     546   3380902984 :   if (visited)
     547   1367055872 :     *visited = tem && tem->visited;
     548   3380902984 :   return tem && tem->visited ? tem->valnum : x;
     549              : }
     550              : 
     551              : /* Return the SSA value of the VUSE x, supporting released VDEFs
     552              :    during elimination which will value-number the VDEF to the
     553              :    associated VUSE (but not substitute in the whole lattice).  */
     554              : 
     555              : static inline tree
     556   1246500399 : vuse_ssa_val (tree x)
     557              : {
     558   1246500399 :   if (!x)
     559              :     return NULL_TREE;
     560              : 
     561   1243141797 :   do
     562              :     {
     563   1243141797 :       x = SSA_VAL (x);
     564   1243141797 :       gcc_assert (x != VN_TOP);
     565              :     }
     566   1243141797 :   while (SSA_NAME_IN_FREE_LIST (x));
     567              : 
     568              :   return x;
     569              : }
     570              : 
     571              : /* Similar to the above but used as callback for walk_non_aliased_vuses
     572              :    and thus should stop at unvisited VUSE to not walk across region
     573              :    boundaries.  */
     574              : 
     575              : static tree
     576   1050347465 : vuse_valueize (tree vuse)
     577              : {
     578   1050347465 :   do
     579              :     {
     580   1050347465 :       bool visited;
     581   1050347465 :       vuse = SSA_VAL (vuse, &visited);
     582   1050347465 :       if (!visited)
     583     15696661 :         return NULL_TREE;
     584   1034650804 :       gcc_assert (vuse != VN_TOP);
     585              :     }
     586   1034650804 :   while (SSA_NAME_IN_FREE_LIST (vuse));
     587              :   return vuse;
     588              : }
     589              : 
     590              : 
     591              : /* Return the vn_kind the expression computed by the stmt should be
     592              :    associated with.  */
     593              : 
     594              : enum vn_kind
     595    101537021 : vn_get_stmt_kind (gimple *stmt)
     596              : {
     597    101537021 :   switch (gimple_code (stmt))
     598              :     {
     599              :     case GIMPLE_CALL:
     600              :       return VN_REFERENCE;
     601              :     case GIMPLE_PHI:
     602              :       return VN_PHI;
     603    101537021 :     case GIMPLE_ASSIGN:
     604    101537021 :       {
     605    101537021 :         enum tree_code code = gimple_assign_rhs_code (stmt);
     606    101537021 :         tree rhs1 = gimple_assign_rhs1 (stmt);
     607    101537021 :         switch (get_gimple_rhs_class (code))
     608              :           {
     609              :           case GIMPLE_UNARY_RHS:
     610              :           case GIMPLE_BINARY_RHS:
     611              :           case GIMPLE_TERNARY_RHS:
     612              :             return VN_NARY;
     613     47461455 :           case GIMPLE_SINGLE_RHS:
     614     47461455 :             switch (TREE_CODE_CLASS (code))
     615              :               {
     616     35756749 :               case tcc_reference:
     617              :                 /* VOP-less references can go through unary case.  */
     618     35756749 :                 if ((code == REALPART_EXPR
     619              :                      || code == IMAGPART_EXPR
     620     35756749 :                      || code == VIEW_CONVERT_EXPR
     621     35756749 :                      || code == BIT_FIELD_REF)
     622     35756749 :                     && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
     623       663924 :                         || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
     624      1959240 :                   return VN_NARY;
     625              : 
     626              :                 /* Fallthrough.  */
     627              :               case tcc_declaration:
     628              :                 return VN_REFERENCE;
     629              : 
     630              :               case tcc_constant:
     631              :                 return VN_CONSTANT;
     632              : 
     633      5838712 :               default:
     634      5838712 :                 if (code == ADDR_EXPR)
     635      3184020 :                   return (is_gimple_min_invariant (rhs1)
     636      3184020 :                           ? VN_CONSTANT : VN_REFERENCE);
     637      2654692 :                 else if (code == CONSTRUCTOR)
     638              :                   return VN_NARY;
     639              :                 return VN_NONE;
     640              :               }
     641              :           default:
     642              :             return VN_NONE;
     643              :           }
     644              :       }
     645              :     default:
     646              :       return VN_NONE;
     647              :     }
     648              : }
     649              : 
     650              : /* Lookup a value id for CONSTANT and return it.  If it does not
     651              :    exist returns 0.  */
     652              : 
     653              : unsigned int
     654            0 : get_constant_value_id (tree constant)
     655              : {
     656            0 :   vn_constant_s **slot;
     657            0 :   struct vn_constant_s vc;
     658              : 
     659            0 :   vc.hashcode = vn_hash_constant_with_type (constant);
     660            0 :   vc.constant = constant;
     661            0 :   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
     662            0 :   if (slot)
     663            0 :     return (*slot)->value_id;
     664              :   return 0;
     665              : }
     666              : 
     667              : /* Lookup a value id for CONSTANT, and if it does not exist, create a
     668              :    new one and return it.  If it does exist, return it.  */
     669              : 
     670              : unsigned int
     671     28345718 : get_or_alloc_constant_value_id (tree constant)
     672              : {
     673     28345718 :   vn_constant_s **slot;
     674     28345718 :   struct vn_constant_s vc;
     675     28345718 :   vn_constant_t vcp;
     676              : 
     677              :   /* If the hashtable isn't initialized we're not running from PRE and thus
     678              :      do not need value-ids.  */
     679     28345718 :   if (!constant_to_value_id)
     680              :     return 0;
     681              : 
     682      4688268 :   vc.hashcode = vn_hash_constant_with_type (constant);
     683      4688268 :   vc.constant = constant;
     684      4688268 :   slot = constant_to_value_id->find_slot (&vc, INSERT);
     685      4688268 :   if (*slot)
     686      2174961 :     return (*slot)->value_id;
     687              : 
     688      2513307 :   vcp = XNEW (struct vn_constant_s);
     689      2513307 :   vcp->hashcode = vc.hashcode;
     690      2513307 :   vcp->constant = constant;
     691      2513307 :   vcp->value_id = get_next_constant_value_id ();
     692      2513307 :   *slot = vcp;
     693      2513307 :   return vcp->value_id;
     694              : }
     695              : 
     696              : /* Compute the hash for a reference operand VRO1.  */
     697              : 
     698              : static void
     699    131888362 : vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
     700              : {
     701    131888362 :   hstate.add_int (vro1->opcode);
     702    131888362 :   if (vro1->opcode == CALL_EXPR && !vro1->op0)
     703       537451 :     hstate.add_int (vro1->clique);
     704    131888362 :   if (vro1->op0)
     705    125677051 :     inchash::add_expr (vro1->op0, hstate);
     706    131888362 :   if (vro1->op1)
     707     11468570 :     inchash::add_expr (vro1->op1, hstate);
     708    131888362 :   if (vro1->op2)
     709     13170380 :     inchash::add_expr (vro1->op2, hstate);
     710    131888362 : }
     711              : 
     712              : /* Compute a hash for the reference operation VR1 and return it.  */
     713              : 
     714              : static hashval_t
     715    196120984 : vn_reference_compute_hash (const vn_reference_t vr1)
     716              : {
     717    196120984 :   inchash::hash hstate;
     718    196120984 :   hashval_t result;
     719    196120984 :   int i;
     720    196120984 :   vn_reference_op_t vro;
     721    196120984 :   poly_offset_int off = -1;
     722    196120984 :   bool deref = false;
     723              : 
     724    798575084 :   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
     725              :     {
     726    602454100 :       if (vro->opcode == MEM_REF)
     727              :         deref = true;
     728    416872293 :       else if (vro->opcode != ADDR_EXPR)
     729    292015680 :         deref = false;
     730    602454100 :       if (maybe_ne (vro->off, -1))
     731              :         {
     732    353652182 :           if (known_eq (off, -1))
     733    187861470 :             off = 0;
     734    602454100 :           off += vro->off;
     735              :         }
     736              :       else
     737              :         {
     738    248801918 :           if (maybe_ne (off, -1)
     739    248801918 :               && maybe_ne (off, 0))
     740     99676287 :             hstate.add_poly_hwi (off.force_shwi ());
     741    248801918 :           off = -1;
     742    248801918 :           if (deref
     743    117142737 :               && vro->opcode == ADDR_EXPR)
     744              :             {
     745    116913556 :               if (vro->op0)
     746              :                 {
     747    116913556 :                   tree op = TREE_OPERAND (vro->op0, 0);
     748    116913556 :                   hstate.add_int (TREE_CODE (op));
     749    116913556 :                   inchash::add_expr (op, hstate);
     750              :                 }
     751              :             }
     752              :           else
     753    131888362 :             vn_reference_op_compute_hash (vro, hstate);
     754              :         }
     755              :     }
     756              :   /* Do not hash vr1->offset or vr1->max_size, we want to get collisions
     757              :      to be able to identify compatible results.  */
     758    196120984 :   result = hstate.end ();
     759              :   /* ??? We would ICE later if we hash instead of adding that in. */
     760    196120984 :   if (vr1->vuse)
     761    191357691 :     result += SSA_NAME_VERSION (vr1->vuse);
     762              : 
     763    196120984 :   return result;
     764              : }
     765              : 
     766              : /* Return true if reference operations VR1 and VR2 are equivalent.  This
     767              :    means they have the same set of operands and vuses.  */
     768              : 
     769              : bool
     770   4381761473 : vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
     771              : {
     772   4381761473 :   unsigned i, j;
     773              : 
     774              :   /* Early out if this is not a hash collision.  */
     775   4381761473 :   if (vr1->hashcode != vr2->hashcode)
     776              :     return false;
     777              : 
     778              :   /* The VOP needs to be the same.  */
     779     17797136 :   if (vr1->vuse != vr2->vuse)
     780              :     return false;
     781              : 
     782              :   /* The offset/max_size used for the ao_ref during lookup has to be
     783              :      the same.  */
     784     17796623 :   if (maybe_ne (vr1->offset, vr2->offset)
     785     17796623 :       || maybe_ne (vr1->max_size, vr2->max_size))
     786              :     {
     787              :       /* But nothing known in the prevailing entry is OK to be used.  */
     788      6751095 :       if (maybe_ne (vr1->offset, 0) || known_size_p (vr1->max_size))
     789              :         return false;
     790              :     }
     791              : 
     792              :   /* If the operands are the same we are done.  */
     793     35505132 :   if (vr1->operands == vr2->operands)
     794              :     return true;
     795              : 
     796     16930907 :   if (!vr1->type || !vr2->type)
     797              :     {
     798       552085 :       if (vr1->type != vr2->type)
     799              :         return false;
     800              :     }
     801     16378822 :   else if (vr1->type == vr2->type)
     802              :     ;
     803      2135909 :   else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
     804      2135909 :            || (COMPLETE_TYPE_P (vr1->type)
     805      2135909 :                && !expressions_equal_p (TYPE_SIZE (vr1->type),
     806      2135909 :                                         TYPE_SIZE (vr2->type))))
     807       785216 :     return false;
     808      1350693 :   else if (vr1->operands[0].opcode == CALL_EXPR
     809      1350693 :            && !types_compatible_p (vr1->type, vr2->type))
     810              :     return false;
     811      1350693 :   else if (INTEGRAL_TYPE_P (vr1->type)
     812       549260 :            && INTEGRAL_TYPE_P (vr2->type))
     813              :     {
     814       508111 :       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
     815              :         return false;
     816              :     }
     817       842582 :   else if (INTEGRAL_TYPE_P (vr1->type)
     818       842582 :            && (TYPE_PRECISION (vr1->type)
     819        41149 :                != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
     820              :     return false;
     821       842448 :   else if (INTEGRAL_TYPE_P (vr2->type)
     822       842448 :            && (TYPE_PRECISION (vr2->type)
     823         9558 :                != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
     824              :     return false;
     825         8228 :   else if (VECTOR_BOOLEAN_TYPE_P (vr1->type)
     826       841854 :            && VECTOR_BOOLEAN_TYPE_P (vr2->type))
     827              :     {
     828              :       /* Vector boolean types can have padding, verify we are dealing with
     829              :          the same number of elements, aka the precision of the types.
     830              :          For example, In most architecture the precision_size of vbool*_t
     831              :          types are caculated like below:
     832              :          precision_size = type_size * 8
     833              : 
     834              :          Unfortunately, the RISC-V will adjust the precision_size for the
     835              :          vbool*_t in order to align the ISA as below:
     836              :          type_size      = [1, 1, 1, 1,  2,  4,  8]
     837              :          precision_size = [1, 2, 4, 8, 16, 32, 64]
     838              : 
     839              :          Then the precision_size of RISC-V vbool*_t will not be the multiple
     840              :          of the type_size.  We take care of this case consolidated here.  */
     841            0 :       if (maybe_ne (TYPE_VECTOR_SUBPARTS (vr1->type),
     842            0 :                     TYPE_VECTOR_SUBPARTS (vr2->type)))
     843              :         return false;
     844              :     }
     845       841854 :   else if (TYPE_MODE (vr1->type) != TYPE_MODE (vr2->type)
     846       841854 :            && (!mode_can_transfer_bits (TYPE_MODE (vr1->type))
     847        44598 :                || !mode_can_transfer_bits (TYPE_MODE (vr2->type))))
     848         1043 :     return false;
     849              : 
     850              :   i = 0;
     851              :   j = 0;
     852     19501998 :   do
     853              :     {
     854     19501998 :       poly_offset_int off1 = 0, off2 = 0;
     855     19501998 :       vn_reference_op_t vro1, vro2;
     856     19501998 :       vn_reference_op_s tem1, tem2;
     857     19501998 :       bool deref1 = false, deref2 = false;
     858     19501998 :       bool reverse1 = false, reverse2 = false;
     859     66390860 :       for (; vr1->operands.iterate (i, &vro1); i++)
     860              :         {
     861     46888862 :           if (vro1->opcode == MEM_REF)
     862              :             deref1 = true;
     863              :           /* Do not look through a storage order barrier.  */
     864     32118233 :           else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
     865         6135 :             return false;
     866     46888862 :           reverse1 |= vro1->reverse;
     867     46888862 :           if (known_eq (vro1->off, -1))
     868              :             break;
     869     27386864 :           off1 += vro1->off;
     870              :         }
     871     47021865 :       for (; vr2->operands.iterate (j, &vro2); j++)
     872              :         {
     873     47021865 :           if (vro2->opcode == MEM_REF)
     874              :             deref2 = true;
     875              :           /* Do not look through a storage order barrier.  */
     876     32251226 :           else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
     877              :             return false;
     878     47021865 :           reverse2 |= vro2->reverse;
     879     47021865 :           if (known_eq (vro2->off, -1))
     880              :             break;
     881     27519867 :           off2 += vro2->off;
     882              :         }
     883     19501998 :       if (maybe_ne (off1, off2) || reverse1 != reverse2)
     884              :         return false;
     885     19501860 :       if (deref1 && vro1->opcode == ADDR_EXPR)
     886              :         {
     887      8181082 :           memset (&tem1, 0, sizeof (tem1));
     888      8181082 :           tem1.op0 = TREE_OPERAND (vro1->op0, 0);
     889      8181082 :           tem1.type = TREE_TYPE (tem1.op0);
     890      8181082 :           tem1.opcode = TREE_CODE (tem1.op0);
     891      8181082 :           vro1 = &tem1;
     892      8181082 :           deref1 = false;
     893              :         }
     894     19501860 :       if (deref2 && vro2->opcode == ADDR_EXPR)
     895              :         {
     896      8181092 :           memset (&tem2, 0, sizeof (tem2));
     897      8181092 :           tem2.op0 = TREE_OPERAND (vro2->op0, 0);
     898      8181092 :           tem2.type = TREE_TYPE (tem2.op0);
     899      8181092 :           tem2.opcode = TREE_CODE (tem2.op0);
     900      8181092 :           vro2 = &tem2;
     901      8181092 :           deref2 = false;
     902              :         }
     903     19501860 :       if (deref1 != deref2)
     904              :         return false;
     905     19501860 :       if (!vn_reference_op_eq (vro1, vro2))
     906              :         return false;
     907     19495863 :       ++j;
     908     19495863 :       ++i;
     909              :     }
     910     38991726 :   while (vr1->operands.length () != i
     911     58487589 :          || vr2->operands.length () != j);
     912              : 
     913              :   return true;
     914              : }
     915              : 
     916              : /* Copy the operations present in load/store REF into RESULT, a vector of
     917              :    vn_reference_op_s's.  */
     918              : 
     919              : static void
     920    215826450 : copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
     921              : {
     922              :   /* For non-calls, store the information that makes up the address.  */
     923    215826450 :   tree orig = ref;
     924    748408418 :   while (ref)
     925              :     {
     926    532581968 :       vn_reference_op_s temp;
     927              : 
     928    532581968 :       memset (&temp, 0, sizeof (temp));
     929    532581968 :       temp.type = TREE_TYPE (ref);
     930    532581968 :       temp.opcode = TREE_CODE (ref);
     931    532581968 :       temp.off = -1;
     932              : 
     933    532581968 :       switch (temp.opcode)
     934              :         {
     935     14782560 :         case MODIFY_EXPR:
     936     14782560 :           temp.op0 = TREE_OPERAND (ref, 1);
     937     14782560 :           break;
     938          137 :         case WITH_SIZE_EXPR:
     939          137 :           temp.op0 = TREE_OPERAND (ref, 1);
     940          137 :           temp.off = 0;
     941          137 :           break;
     942    112759565 :         case MEM_REF:
     943              :           /* The base address gets its own vn_reference_op_s structure.  */
     944    112759565 :           temp.op0 = TREE_OPERAND (ref, 1);
     945    112759565 :           if (!mem_ref_offset (ref).to_shwi (&temp.off))
     946            0 :             temp.off = -1;
     947    112759565 :           temp.clique = MR_DEPENDENCE_CLIQUE (ref);
     948    112759565 :           temp.base = MR_DEPENDENCE_BASE (ref);
     949    112759565 :           temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
     950    112759565 :           break;
     951      2534510 :         case TARGET_MEM_REF:
     952              :           /* The base address gets its own vn_reference_op_s structure.  */
     953      2534510 :           temp.op0 = TMR_INDEX (ref);
     954      2534510 :           temp.op1 = TMR_STEP (ref);
     955      2534510 :           temp.op2 = TMR_OFFSET (ref);
     956      2534510 :           temp.clique = MR_DEPENDENCE_CLIQUE (ref);
     957      2534510 :           temp.base = MR_DEPENDENCE_BASE (ref);
     958      2534510 :           result->safe_push (temp);
     959      2534510 :           memset (&temp, 0, sizeof (temp));
     960      2534510 :           temp.type = NULL_TREE;
     961      2534510 :           temp.opcode = ERROR_MARK;
     962      2534510 :           temp.op0 = TMR_INDEX2 (ref);
     963      2534510 :           temp.off = -1;
     964      2534510 :           break;
     965       747133 :         case BIT_FIELD_REF:
     966              :           /* Record bits, position and storage order.  */
     967       747133 :           temp.op0 = TREE_OPERAND (ref, 1);
     968       747133 :           temp.op1 = TREE_OPERAND (ref, 2);
     969      1493556 :           if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
     970          710 :             temp.off = -1;
     971       747133 :           temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
     972       747133 :           break;
     973    141734126 :         case COMPONENT_REF:
     974              :           /* The field decl is enough to unambiguously specify the field,
     975              :              so use its type here.  */
     976    141734126 :           temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
     977    141734126 :           temp.op0 = TREE_OPERAND (ref, 1);
     978    141734126 :           temp.op1 = TREE_OPERAND (ref, 2);
     979    283465901 :           temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
     980    283465636 :                           && TYPE_REVERSE_STORAGE_ORDER
     981              :                                (TREE_TYPE (TREE_OPERAND (ref, 0))));
     982    141734126 :           {
     983    141734126 :             tree this_offset = component_ref_field_offset (ref);
     984    141734126 :             if (this_offset
     985    141734126 :                 && poly_int_tree_p (this_offset))
     986              :               {
     987    141732024 :                 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
     988    141732024 :                 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
     989              :                   {
     990    141210884 :                     poly_offset_int off
     991    141210884 :                       = (wi::to_poly_offset (this_offset)
     992    141210884 :                          + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
     993              :                     /* Prohibit value-numbering zero offset components
     994              :                        of addresses the same before the pass folding
     995              :                        __builtin_object_size had a chance to run.  Likewise
     996              :                        for components of zero size at arbitrary offset.  */
     997    141210884 :                     if (TREE_CODE (orig) != ADDR_EXPR
     998      4733079 :                         || (TYPE_SIZE (temp.type)
     999      4720090 :                             && integer_nonzerop (TYPE_SIZE (temp.type))
    1000      6103742 :                             && maybe_ne (off, 0))
    1001    144093654 :                         || (cfun->curr_properties & PROP_objsz))
    1002    139825101 :                       off.to_shwi (&temp.off);
    1003              :                   }
    1004              :               }
    1005              :           }
    1006              :           break;
    1007     37729462 :         case ARRAY_RANGE_REF:
    1008     37729462 :         case ARRAY_REF:
    1009     37729462 :           {
    1010     37729462 :             tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
    1011              :             /* Record index as operand.  */
    1012     37729462 :             temp.op0 = TREE_OPERAND (ref, 1);
    1013              :             /* Always record lower bounds and element size.  */
    1014     37729462 :             temp.op1 = array_ref_low_bound (ref);
    1015              :             /* But record element size in units of the type alignment.  */
    1016     37729462 :             temp.op2 = TREE_OPERAND (ref, 3);
    1017     37729462 :             temp.align = eltype->type_common.align;
    1018     37729462 :             if (! temp.op2)
    1019     37520742 :               temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
    1020              :                                      size_int (TYPE_ALIGN_UNIT (eltype)));
    1021              :             /* Prohibit value-numbering addresses of one-after-the-last
    1022              :                element ARRAY_REFs the same as addresses of other components
    1023              :                before the pass folding __builtin_object_size had a chance
    1024              :                to run.  */
    1025     37729462 :             bool avoid_oob = true;
    1026     37729462 :             if (TREE_CODE (orig) != ADDR_EXPR
    1027       462867 :                 || cfun->curr_properties & PROP_objsz)
    1028              :               avoid_oob = false;
    1029       220751 :             else if (poly_int_tree_p (temp.op0))
    1030              :               {
    1031        74088 :                 tree ub = array_ref_up_bound (ref);
    1032        74088 :                 if (ub
    1033        72460 :                     && poly_int_tree_p (ub)
    1034              :                     /* ???  The C frontend for T[0] uses [0:] and the
    1035              :                        C++ frontend [0:-1U].  See layout_type for how
    1036              :                        awkward this is.  */
    1037        64267 :                     && !integer_minus_onep (ub)
    1038       146548 :                     && known_le (wi::to_poly_offset (temp.op0),
    1039              :                                  wi::to_poly_offset (ub)))
    1040        63420 :                   avoid_oob = false;
    1041              :               }
    1042     37729462 :             if (poly_int_tree_p (temp.op0)
    1043     21801766 :                 && poly_int_tree_p (temp.op1)
    1044     21801738 :                 && TREE_CODE (temp.op2) == INTEGER_CST
    1045     59471144 :                 && !avoid_oob)
    1046              :               {
    1047     43463738 :                 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
    1048     65195607 :                                         - wi::to_poly_offset (temp.op1))
    1049     43463738 :                                        * wi::to_offset (temp.op2)
    1050     21731869 :                                        * vn_ref_op_align_unit (&temp));
    1051     21731869 :                 off.to_shwi (&temp.off);
    1052              :               }
    1053     37729462 :             temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
    1054     37729462 :                             && TYPE_REVERSE_STORAGE_ORDER
    1055              :                                  (TREE_TYPE (TREE_OPERAND (ref, 0))));
    1056              :           }
    1057     37729462 :           break;
    1058     80292919 :         case VAR_DECL:
    1059     80292919 :           if (DECL_HARD_REGISTER (ref))
    1060              :             {
    1061        20183 :               temp.op0 = ref;
    1062        20183 :               break;
    1063              :             }
    1064              :           /* Fallthru.  */
    1065     83585120 :         case PARM_DECL:
    1066     83585120 :         case CONST_DECL:
    1067     83585120 :         case RESULT_DECL:
    1068              :           /* Canonicalize decls to MEM[&decl] which is what we end up with
    1069              :              when valueizing MEM[ptr] with ptr = &decl.  */
    1070     83585120 :           temp.opcode = MEM_REF;
    1071     83585120 :           temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
    1072     83585120 :           temp.off = 0;
    1073     83585120 :           result->safe_push (temp);
    1074     83585120 :           temp.opcode = ADDR_EXPR;
    1075     83585120 :           temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
    1076     83585120 :           temp.type = TREE_TYPE (temp.op0);
    1077     83585120 :           temp.off = -1;
    1078     83585120 :           break;
    1079     92744671 :         case STRING_CST:
    1080     92744671 :         case INTEGER_CST:
    1081     92744671 :         case POLY_INT_CST:
    1082     92744671 :         case COMPLEX_CST:
    1083     92744671 :         case VECTOR_CST:
    1084     92744671 :         case REAL_CST:
    1085     92744671 :         case FIXED_CST:
    1086     92744671 :         case CONSTRUCTOR:
    1087     92744671 :         case SSA_NAME:
    1088     92744671 :           temp.op0 = ref;
    1089     92744671 :           break;
    1090     43509445 :         case ADDR_EXPR:
    1091     43509445 :           if (is_gimple_min_invariant (ref))
    1092              :             {
    1093     39476476 :               temp.op0 = ref;
    1094     39476476 :               break;
    1095              :             }
    1096              :           break;
    1097              :           /* These are only interesting for their operands, their
    1098              :              existence, and their type.  They will never be the last
    1099              :              ref in the chain of references (IE they require an
    1100              :              operand), so we don't have to put anything
    1101              :              for op* as it will be handled by the iteration  */
    1102       470601 :         case REALPART_EXPR:
    1103       470601 :           temp.off = 0;
    1104       470601 :           break;
    1105      1488489 :         case VIEW_CONVERT_EXPR:
    1106      1488489 :           temp.off = 0;
    1107      1488489 :           temp.reverse = storage_order_barrier_p (ref);
    1108      1488489 :           break;
    1109       475966 :         case IMAGPART_EXPR:
    1110              :           /* This is only interesting for its constant offset.  */
    1111       475966 :           temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
    1112       475966 :           break;
    1113            0 :         default:
    1114            0 :           gcc_unreachable ();
    1115              :         }
    1116    532581968 :       result->safe_push (temp);
    1117              : 
    1118    532581968 :       if (REFERENCE_CLASS_P (ref)
    1119    234642116 :           || TREE_CODE (ref) == MODIFY_EXPR
    1120    219859556 :           || TREE_CODE (ref) == WITH_SIZE_EXPR
    1121    752441387 :           || (TREE_CODE (ref) == ADDR_EXPR
    1122     43509445 :               && !is_gimple_min_invariant (ref)))
    1123    316755518 :         ref = TREE_OPERAND (ref, 0);
    1124              :       else
    1125              :         ref = NULL_TREE;
    1126              :     }
    1127    215826450 : }
    1128              : 
    1129              : /* Build a alias-oracle reference abstraction in *REF from the vn_reference
    1130              :    operands in *OPS, the reference alias set SET and the reference type TYPE.
    1131              :    Return true if something useful was produced.  */
    1132              : 
    1133              : bool
    1134     14344166 : ao_ref_init_from_vn_reference (ao_ref *ref,
    1135              :                                alias_set_type set, alias_set_type base_set,
    1136              :                                tree type, const vec<vn_reference_op_s> &ops)
    1137              : {
    1138     14344166 :   unsigned i;
    1139     14344166 :   tree base = NULL_TREE;
    1140     14344166 :   tree *op0_p = &base;
    1141     14344166 :   poly_offset_int offset = 0;
    1142     14344166 :   poly_offset_int max_size;
    1143     14344166 :   poly_offset_int size = -1;
    1144     14344166 :   tree size_tree = NULL_TREE;
    1145              : 
    1146              :   /* We don't handle calls.  */
    1147     14344166 :   if (!type)
    1148              :     return false;
    1149              : 
    1150     14344166 :   machine_mode mode = TYPE_MODE (type);
    1151     14344166 :   if (mode == BLKmode)
    1152       267188 :     size_tree = TYPE_SIZE (type);
    1153              :   else
    1154     28153956 :     size = GET_MODE_BITSIZE (mode);
    1155     14076978 :   if (size_tree != NULL_TREE
    1156       267188 :       && poly_int_tree_p (size_tree))
    1157       267188 :     size = wi::to_poly_offset (size_tree);
    1158              : 
    1159              :   /* Lower the final access size from the outermost expression.  */
    1160     14344166 :   const_vn_reference_op_t cst_op = &ops[0];
    1161              :   /* Cast away constness for the sake of the const-unsafe
    1162              :      FOR_EACH_VEC_ELT().  */
    1163     14344166 :   vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
    1164     14344166 :   size_tree = NULL_TREE;
    1165     14344166 :   if (op->opcode == COMPONENT_REF)
    1166      4878872 :     size_tree = DECL_SIZE (op->op0);
    1167      9465294 :   else if (op->opcode == BIT_FIELD_REF)
    1168        64151 :     size_tree = op->op0;
    1169      4943023 :   if (size_tree != NULL_TREE
    1170      4943023 :       && poly_int_tree_p (size_tree)
    1171      9886046 :       && (!known_size_p (size)
    1172     14344166 :           || known_lt (wi::to_poly_offset (size_tree), size)))
    1173        44702 :     size = wi::to_poly_offset (size_tree);
    1174              : 
    1175              :   /* Initially, maxsize is the same as the accessed element size.
    1176              :      In the following it will only grow (or become -1).  */
    1177     14344166 :   max_size = size;
    1178              : 
    1179              :   /* Compute cumulative bit-offset for nested component-refs and array-refs,
    1180              :      and find the ultimate containing object.  */
    1181     54984545 :   FOR_EACH_VEC_ELT (ops, i, op)
    1182              :     {
    1183     40787394 :       switch (op->opcode)
    1184              :         {
    1185              :         case CALL_EXPR:
    1186              :           return false;
    1187              : 
    1188              :         /* Record the base objects.  */
    1189     13891613 :         case MEM_REF:
    1190     13891613 :           *op0_p = build2 (MEM_REF, op->type,
    1191              :                            NULL_TREE, op->op0);
    1192     13891613 :           MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
    1193     13891613 :           MR_DEPENDENCE_BASE (*op0_p) = op->base;
    1194     13891613 :           op0_p = &TREE_OPERAND (*op0_p, 0);
    1195     13891613 :           break;
    1196              : 
    1197       305121 :         case TARGET_MEM_REF:
    1198       915363 :           *op0_p = build5 (TARGET_MEM_REF, op->type,
    1199              :                            NULL_TREE, op->op2, op->op0,
    1200       305121 :                            op->op1, ops[i+1].op0);
    1201       305121 :           MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
    1202       305121 :           MR_DEPENDENCE_BASE (*op0_p) = op->base;
    1203       305121 :           op0_p = &TREE_OPERAND (*op0_p, 0);
    1204       305121 :           ++i;
    1205       305121 :           break;
    1206              : 
    1207              :         /* Unwrap some of the wrapped decls.  */
    1208      6645676 :         case ADDR_EXPR:
    1209              :           /* Apart from ADDR_EXPR arguments to MEM_REF.  */
    1210      6645676 :           if (base != NULL_TREE
    1211      6645676 :               && TREE_CODE (base) == MEM_REF
    1212      6609716 :               && op->op0
    1213     13255392 :               && DECL_P (TREE_OPERAND (op->op0, 0)))
    1214              :             {
    1215      6606767 :               const_vn_reference_op_t pop = &ops[i-1];
    1216      6606767 :               base = TREE_OPERAND (op->op0, 0);
    1217      6606767 :               if (known_eq (pop->off, -1))
    1218              :                 {
    1219           52 :                   max_size = -1;
    1220           52 :                   offset = 0;
    1221              :                 }
    1222              :               else
    1223     19820145 :                 offset += poly_offset_int (pop->off) * BITS_PER_UNIT;
    1224              :               op0_p = NULL;
    1225              :               break;
    1226              :             }
    1227              :           /* Fallthru.  */
    1228      7590384 :         case PARM_DECL:
    1229      7590384 :         case CONST_DECL:
    1230      7590384 :         case RESULT_DECL:
    1231              :           /* ???  We shouldn't see these, but un-canonicalize what
    1232              :              copy_reference_ops_from_ref does when visiting MEM_REF.  */
    1233      7590384 :         case VAR_DECL:
    1234              :           /* ???  And for this only have DECL_HARD_REGISTER.  */
    1235      7590384 :         case STRING_CST:
    1236              :           /* This can show up in ARRAY_REF bases.  */
    1237      7590384 :         case INTEGER_CST:
    1238      7590384 :         case SSA_NAME:
    1239      7590384 :           *op0_p = op->op0;
    1240      7590384 :           op0_p = NULL;
    1241      7590384 :           break;
    1242              : 
    1243              :         /* And now the usual component-reference style ops.  */
    1244        64151 :         case BIT_FIELD_REF:
    1245        64151 :           offset += wi::to_poly_offset (op->op1);
    1246        64151 :           break;
    1247              : 
    1248      7976836 :         case COMPONENT_REF:
    1249      7976836 :           {
    1250      7976836 :             tree field = op->op0;
    1251              :             /* We do not have a complete COMPONENT_REF tree here so we
    1252              :                cannot use component_ref_field_offset.  Do the interesting
    1253              :                parts manually.  */
    1254      7976836 :             tree this_offset = DECL_FIELD_OFFSET (field);
    1255              : 
    1256      7976836 :             if (op->op1 || !poly_int_tree_p (this_offset))
    1257          234 :               max_size = -1;
    1258              :             else
    1259              :               {
    1260      7976602 :                 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
    1261      7976602 :                                            << LOG2_BITS_PER_UNIT);
    1262      7976602 :                 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
    1263      7976602 :                 offset += woffset;
    1264              :               }
    1265              :             break;
    1266              :           }
    1267              : 
    1268      2921243 :         case ARRAY_RANGE_REF:
    1269      2921243 :         case ARRAY_REF:
    1270              :           /* Use the recorded constant offset.  */
    1271      2921243 :           if (maybe_eq (op->off, -1))
    1272       995867 :             max_size = -1;
    1273              :           else
    1274      5776128 :             offset += poly_offset_int (op->off) * BITS_PER_UNIT;
    1275              :           break;
    1276              : 
    1277              :         case REALPART_EXPR:
    1278              :           break;
    1279              : 
    1280              :         case IMAGPART_EXPR:
    1281     40640379 :           offset += size;
    1282              :           break;
    1283              : 
    1284              :         case VIEW_CONVERT_EXPR:
    1285              :           break;
    1286              : 
    1287              :         case POLY_INT_CST:
    1288              :         case COMPLEX_CST:
    1289              :         case VECTOR_CST:
    1290              :         case REAL_CST:
    1291              :         case FIXED_CST:
    1292              :         case CONSTRUCTOR:
    1293              :           return false;
    1294              : 
    1295              :         default:
    1296              :           return false;
    1297              :         }
    1298              :     }
    1299              : 
    1300     14197151 :   if (base == NULL_TREE)
    1301              :     return false;
    1302              : 
    1303     14197151 :   ref->ref = NULL_TREE;
    1304     14197151 :   ref->base = base;
    1305     14197151 :   ref->ref_alias_set = set;
    1306     14197151 :   ref->base_alias_set = base_set;
    1307              :   /* We discount volatiles from value-numbering elsewhere.  */
    1308     14197151 :   ref->volatile_p = false;
    1309              : 
    1310     14197151 :   if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    1311              :     {
    1312            0 :       ref->offset = 0;
    1313            0 :       ref->size = -1;
    1314            0 :       ref->max_size = -1;
    1315            0 :       return true;
    1316              :     }
    1317              : 
    1318     14197151 :   if (!offset.to_shwi (&ref->offset))
    1319              :     {
    1320           26 :       ref->offset = 0;
    1321           26 :       ref->max_size = -1;
    1322           26 :       return true;
    1323              :     }
    1324              : 
    1325     14197125 :   if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    1326       852559 :     ref->max_size = -1;
    1327              : 
    1328              :   return true;
    1329              : }
    1330              : 
    1331              : /* Copy the operations present in load/store/call REF into RESULT, a vector of
    1332              :    vn_reference_op_s's.  */
    1333              : 
    1334              : static void
    1335      9105924 : copy_reference_ops_from_call (gcall *call,
    1336              :                               vec<vn_reference_op_s> *result)
    1337              : {
    1338      9105924 :   vn_reference_op_s temp;
    1339      9105924 :   unsigned i;
    1340      9105924 :   tree lhs = gimple_call_lhs (call);
    1341      9105924 :   int lr;
    1342              : 
    1343              :   /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
    1344              :      different.  By adding the lhs here in the vector, we ensure that the
    1345              :      hashcode is different, guaranteeing a different value number.  */
    1346      9105924 :   if (lhs && TREE_CODE (lhs) != SSA_NAME)
    1347              :     {
    1348       439787 :       memset (&temp, 0, sizeof (temp));
    1349       439787 :       temp.opcode = MODIFY_EXPR;
    1350       439787 :       temp.type = TREE_TYPE (lhs);
    1351       439787 :       temp.op0 = lhs;
    1352       439787 :       temp.off = -1;
    1353       439787 :       result->safe_push (temp);
    1354              :     }
    1355              : 
    1356              :   /* Copy the type, opcode, function, static chain and EH region, if any.  */
    1357      9105924 :   memset (&temp, 0, sizeof (temp));
    1358      9105924 :   temp.type = gimple_call_fntype (call);
    1359      9105924 :   temp.opcode = CALL_EXPR;
    1360      9105924 :   temp.op0 = gimple_call_fn (call);
    1361      9105924 :   if (gimple_call_internal_p (call))
    1362       523466 :     temp.clique = gimple_call_internal_fn (call);
    1363      9105924 :   temp.op1 = gimple_call_chain (call);
    1364      9105924 :   if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    1365       600444 :     temp.op2 = size_int (lr);
    1366      9105924 :   temp.off = -1;
    1367      9105924 :   result->safe_push (temp);
    1368              : 
    1369              :   /* Copy the call arguments.  As they can be references as well,
    1370              :      just chain them together.  */
    1371     26816957 :   for (i = 0; i < gimple_call_num_args (call); ++i)
    1372              :     {
    1373     17711033 :       tree callarg = gimple_call_arg (call, i);
    1374     17711033 :       copy_reference_ops_from_ref (callarg, result);
    1375              :     }
    1376      9105924 : }
    1377              : 
    1378              : /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
    1379              :    *I_P to point to the last element of the replacement.  */
    1380              : static bool
    1381    126566625 : vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
    1382              :                             unsigned int *i_p)
    1383              : {
    1384    126566625 :   unsigned int i = *i_p;
    1385    126566625 :   vn_reference_op_t op = &(*ops)[i];
    1386    126566625 :   vn_reference_op_t mem_op = &(*ops)[i - 1];
    1387    126566625 :   tree addr_base;
    1388    126566625 :   poly_int64 addr_offset = 0;
    1389              : 
    1390              :   /* The only thing we have to do is from &OBJ.foo.bar add the offset
    1391              :      from .foo.bar to the preceding MEM_REF offset and replace the
    1392              :      address with &OBJ.  */
    1393    126566625 :   addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
    1394              :                                                &addr_offset, vn_valueize);
    1395    126566625 :   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
    1396    126566625 :   if (addr_base != TREE_OPERAND (op->op0, 0))
    1397              :     {
    1398       661953 :       poly_offset_int off
    1399       661953 :         = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
    1400              :                                   SIGNED)
    1401       661953 :            + addr_offset);
    1402       661953 :       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
    1403       661953 :       op->op0 = build_fold_addr_expr (addr_base);
    1404       661953 :       if (tree_fits_shwi_p (mem_op->op0))
    1405       661884 :         mem_op->off = tree_to_shwi (mem_op->op0);
    1406              :       else
    1407           69 :         mem_op->off = -1;
    1408       661953 :       return true;
    1409              :     }
    1410              :   return false;
    1411              : }
    1412              : 
    1413              : /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
    1414              :    *I_P to point to the last element of the replacement.  */
    1415              : static bool
    1416     85244369 : vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
    1417              :                                      unsigned int *i_p)
    1418              : {
    1419     85244369 :   bool changed = false;
    1420     92302327 :   vn_reference_op_t op;
    1421              : 
    1422     92302327 :   do
    1423              :     {
    1424     92302327 :       unsigned int i = *i_p;
    1425     92302327 :       op = &(*ops)[i];
    1426     92302327 :       vn_reference_op_t mem_op = &(*ops)[i - 1];
    1427     92302327 :       gimple *def_stmt;
    1428     92302327 :       enum tree_code code;
    1429     92302327 :       poly_offset_int off;
    1430              : 
    1431     92302327 :       def_stmt = SSA_NAME_DEF_STMT (op->op0);
    1432     92302327 :       if (!is_gimple_assign (def_stmt))
    1433     85242418 :         return changed;
    1434              : 
    1435     37262960 :       code = gimple_assign_rhs_code (def_stmt);
    1436     37262960 :       if (code != ADDR_EXPR
    1437     37262960 :           && code != POINTER_PLUS_EXPR)
    1438              :         return changed;
    1439              : 
    1440     19543763 :       off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
    1441              : 
    1442              :       /* The only thing we have to do is from &OBJ.foo.bar add the offset
    1443              :          from .foo.bar to the preceding MEM_REF offset and replace the
    1444              :          address with &OBJ.  */
    1445     19543763 :       if (code == ADDR_EXPR)
    1446              :         {
    1447       931073 :           tree addr, addr_base;
    1448       931073 :           poly_int64 addr_offset;
    1449              : 
    1450       931073 :           addr = gimple_assign_rhs1 (def_stmt);
    1451       931073 :           addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
    1452              :                                                        &addr_offset,
    1453              :                                                        vn_valueize);
    1454              :           /* If that didn't work because the address isn't invariant propagate
    1455              :              the reference tree from the address operation in case the current
    1456              :              dereference isn't offsetted.  */
    1457       931073 :           if (!addr_base
    1458       280146 :               && *i_p == ops->length () - 1
    1459       140073 :               && known_eq (off, 0)
    1460              :               /* This makes us disable this transform for PRE where the
    1461              :                  reference ops might be also used for code insertion which
    1462              :                  is invalid.  */
    1463      1016268 :               && default_vn_walk_kind == VN_WALKREWRITE)
    1464              :             {
    1465        85105 :               auto_vec<vn_reference_op_s, 32> tem;
    1466        85105 :               copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
    1467              :               /* Make sure to preserve TBAA info.  The only objects not
    1468              :                  wrapped in MEM_REFs that can have their address taken are
    1469              :                  STRING_CSTs.  */
    1470        85105 :               if (tem.length () >= 2
    1471        85105 :                   && tem[tem.length () - 2].opcode == MEM_REF)
    1472              :                 {
    1473        85090 :                   vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
    1474        85090 :                   new_mem_op->op0
    1475        85090 :                       = wide_int_to_tree (TREE_TYPE (mem_op->op0),
    1476       170180 :                                           wi::to_poly_wide (new_mem_op->op0));
    1477              :                 }
    1478              :               else
    1479           15 :                 gcc_assert (tem.last ().opcode == STRING_CST);
    1480        85105 :               ops->pop ();
    1481        85105 :               ops->pop ();
    1482        85105 :               ops->safe_splice (tem);
    1483        85105 :               --*i_p;
    1484        85105 :               return true;
    1485        85105 :             }
    1486       845968 :           if (!addr_base
    1487       791000 :               || TREE_CODE (addr_base) != MEM_REF
    1488      1635139 :               || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
    1489       787310 :                   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
    1490              :                                                                     0))))
    1491              :             return changed;
    1492              : 
    1493       789171 :           off += addr_offset;
    1494       789171 :           off += mem_ref_offset (addr_base);
    1495       789171 :           op->op0 = TREE_OPERAND (addr_base, 0);
    1496              :         }
    1497              :       else
    1498              :         {
    1499     18612690 :           tree ptr, ptroff;
    1500     18612690 :           ptr = gimple_assign_rhs1 (def_stmt);
    1501     18612690 :           ptroff = gimple_assign_rhs2 (def_stmt);
    1502     18612690 :           if (TREE_CODE (ptr) != SSA_NAME
    1503     16977805 :               || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
    1504              :               /* Make sure to not endlessly recurse.
    1505              :                  See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
    1506              :                  happen when we value-number a PHI to its backedge value.  */
    1507     16976407 :               || SSA_VAL (ptr) == op->op0
    1508     35589097 :               || !poly_int_tree_p (ptroff))
    1509     12341952 :             return changed;
    1510              : 
    1511      6270738 :           off += wi::to_poly_offset (ptroff);
    1512      6270738 :           op->op0 = ptr;
    1513              :         }
    1514              : 
    1515      7059909 :       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
    1516      7059909 :       if (tree_fits_shwi_p (mem_op->op0))
    1517      6763416 :         mem_op->off = tree_to_shwi (mem_op->op0);
    1518              :       else
    1519       296493 :         mem_op->off = -1;
    1520              :       /* ???  Can end up with endless recursion here!?
    1521              :          gcc.c-torture/execute/strcmp-1.c  */
    1522      7059909 :       if (TREE_CODE (op->op0) == SSA_NAME)
    1523      7058048 :         op->op0 = SSA_VAL (op->op0);
    1524      7059909 :       if (TREE_CODE (op->op0) != SSA_NAME)
    1525         1951 :         op->opcode = TREE_CODE (op->op0);
    1526              : 
    1527      7059909 :       changed = true;
    1528              :     }
    1529              :   /* Tail-recurse.  */
    1530      7059909 :   while (TREE_CODE (op->op0) == SSA_NAME);
    1531              : 
    1532              :   /* Fold a remaining *&.  */
    1533         1951 :   if (TREE_CODE (op->op0) == ADDR_EXPR)
    1534          261 :     vn_reference_fold_indirect (ops, i_p);
    1535              : 
    1536              :   return changed;
    1537              : }
    1538              : 
    1539              : /* Optimize the reference REF to a constant if possible or return
    1540              :    NULL_TREE if not.  */
    1541              : 
    1542              : tree
    1543    108275397 : fully_constant_vn_reference_p (vn_reference_t ref)
    1544              : {
    1545    108275397 :   vec<vn_reference_op_s> operands = ref->operands;
    1546    108275397 :   vn_reference_op_t op;
    1547              : 
    1548              :   /* Try to simplify the translated expression if it is
    1549              :      a call to a builtin function with at most two arguments.  */
    1550    108275397 :   op = &operands[0];
    1551    108275397 :   if (op->opcode == CALL_EXPR
    1552        88825 :       && (!op->op0
    1553        81735 :           || (TREE_CODE (op->op0) == ADDR_EXPR
    1554        81735 :               && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
    1555        81735 :               && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
    1556              :                                     BUILT_IN_NORMAL)))
    1557        72020 :       && operands.length () >= 2
    1558    108347373 :       && operands.length () <= 3)
    1559              :     {
    1560        34983 :       vn_reference_op_t arg0, arg1 = NULL;
    1561        34983 :       bool anyconst = false;
    1562        34983 :       arg0 = &operands[1];
    1563        34983 :       if (operands.length () > 2)
    1564         5468 :         arg1 = &operands[2];
    1565        34983 :       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
    1566        34983 :           || (arg0->opcode == ADDR_EXPR
    1567        13392 :               && is_gimple_min_invariant (arg0->op0)))
    1568              :         anyconst = true;
    1569        34983 :       if (arg1
    1570        34983 :           && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
    1571         4013 :               || (arg1->opcode == ADDR_EXPR
    1572          579 :                   && is_gimple_min_invariant (arg1->op0))))
    1573              :         anyconst = true;
    1574        32949 :       if (anyconst)
    1575              :         {
    1576        21411 :           combined_fn fn;
    1577        21411 :           if (op->op0)
    1578        20455 :             fn = as_combined_fn (DECL_FUNCTION_CODE
    1579        20455 :                                         (TREE_OPERAND (op->op0, 0)));
    1580              :           else
    1581          956 :             fn = as_combined_fn ((internal_fn) op->clique);
    1582        21411 :           tree folded;
    1583        21411 :           if (arg1)
    1584         2627 :             folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
    1585              :           else
    1586        18784 :             folded = fold_const_call (fn, ref->type, arg0->op0);
    1587        21411 :           if (folded
    1588        21411 :               && is_gimple_min_invariant (folded))
    1589              :             return folded;
    1590              :         }
    1591              :     }
    1592              : 
    1593              :   /* Simplify reads from constants or constant initializers.  */
    1594    108240414 :   else if (BITS_PER_UNIT == 8
    1595    108240414 :            && ref->type
    1596    108240414 :            && COMPLETE_TYPE_P (ref->type)
    1597    216480786 :            && is_gimple_reg_type (ref->type))
    1598              :     {
    1599    103626838 :       poly_int64 off = 0;
    1600    103626838 :       HOST_WIDE_INT size;
    1601    103626838 :       if (INTEGRAL_TYPE_P (ref->type))
    1602     52674706 :         size = TYPE_PRECISION (ref->type);
    1603     50952132 :       else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
    1604     50952132 :         size = tree_to_shwi (TYPE_SIZE (ref->type));
    1605              :       else
    1606    108275397 :         return NULL_TREE;
    1607    103626838 :       if (size % BITS_PER_UNIT != 0
    1608    101798425 :           || size > MAX_BITSIZE_MODE_ANY_MODE)
    1609              :         return NULL_TREE;
    1610    101797098 :       size /= BITS_PER_UNIT;
    1611    101797098 :       unsigned i;
    1612    188344071 :       for (i = 0; i < operands.length (); ++i)
    1613              :         {
    1614    188344071 :           if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
    1615              :             {
    1616          309 :               ++i;
    1617          309 :               break;
    1618              :             }
    1619    188343762 :           if (operands[i].reverse)
    1620              :             return NULL_TREE;
    1621    188335404 :           if (known_eq (operands[i].off, -1))
    1622              :             return NULL_TREE;
    1623    174672938 :           off += operands[i].off;
    1624    174672938 :           if (operands[i].opcode == MEM_REF)
    1625              :             {
    1626     88125965 :               ++i;
    1627     88125965 :               break;
    1628              :             }
    1629              :         }
    1630     88126274 :       vn_reference_op_t base = &operands[--i];
    1631     88126274 :       tree ctor = error_mark_node;
    1632     88126274 :       tree decl = NULL_TREE;
    1633     88126274 :       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
    1634          309 :         ctor = base->op0;
    1635     88125965 :       else if (base->opcode == MEM_REF
    1636     88125965 :                && base[1].opcode == ADDR_EXPR
    1637    144745541 :                && (VAR_P (TREE_OPERAND (base[1].op0, 0))
    1638      3528767 :                    || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
    1639      3528707 :                    || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
    1640              :         {
    1641     53096330 :           decl = TREE_OPERAND (base[1].op0, 0);
    1642     53096330 :           if (TREE_CODE (decl) == STRING_CST)
    1643              :             ctor = decl;
    1644              :           else
    1645     53090869 :             ctor = ctor_for_folding (decl);
    1646              :         }
    1647     88120813 :       if (ctor == NULL_TREE)
    1648          386 :         return build_zero_cst (ref->type);
    1649     88125888 :       else if (ctor != error_mark_node)
    1650              :         {
    1651        97318 :           HOST_WIDE_INT const_off;
    1652        97318 :           if (decl)
    1653              :             {
    1654       194018 :               tree res = fold_ctor_reference (ref->type, ctor,
    1655        97009 :                                               off * BITS_PER_UNIT,
    1656        97009 :                                               size * BITS_PER_UNIT, decl);
    1657        97009 :               if (res)
    1658              :                 {
    1659        55396 :                   STRIP_USELESS_TYPE_CONVERSION (res);
    1660        55396 :                   if (is_gimple_min_invariant (res))
    1661    108275397 :                     return res;
    1662              :                 }
    1663              :             }
    1664          309 :           else if (off.is_constant (&const_off))
    1665              :             {
    1666          309 :               unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
    1667          309 :               int len = native_encode_expr (ctor, buf, size, const_off);
    1668          309 :               if (len > 0)
    1669          139 :                 return native_interpret_expr (ref->type, buf, len);
    1670              :             }
    1671              :         }
    1672              :     }
    1673              : 
    1674              :   return NULL_TREE;
    1675              : }
    1676              : 
    1677              : /* Return true if OPS contain a storage order barrier.  */
    1678              : 
    1679              : static bool
    1680     58056473 : contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
    1681              : {
    1682     58056473 :   vn_reference_op_t op;
    1683     58056473 :   unsigned i;
    1684              : 
    1685    227262117 :   FOR_EACH_VEC_ELT (ops, i, op)
    1686    169205644 :     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
    1687              :       return true;
    1688              : 
    1689              :   return false;
    1690              : }
    1691              : 
    1692              : /* Return true if OPS represent an access with reverse storage order.  */
    1693              : 
    1694              : static bool
    1695     58064744 : reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
    1696              : {
    1697     58064744 :   unsigned i = 0;
    1698     58064744 :   if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
    1699              :     ++i;
    1700     58064744 :   switch (ops[i].opcode)
    1701              :     {
    1702     55989342 :     case ARRAY_REF:
    1703     55989342 :     case COMPONENT_REF:
    1704     55989342 :     case BIT_FIELD_REF:
    1705     55989342 :     case MEM_REF:
    1706     55989342 :       return ops[i].reverse;
    1707              :     default:
    1708              :       return false;
    1709              :     }
    1710              : }
    1711              : 
    1712              : /* Transform any SSA_NAME's in a vector of vn_reference_op_s
    1713              :    structures into their value numbers.  This is done in-place, and
    1714              :    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
    1715              :    whether any operands were valueized.  */
    1716              : 
    1717              : static void
    1718    219084835 : valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
    1719              :                  bool with_avail = false)
    1720              : {
    1721    219084835 :   *valueized_anything = false;
    1722              : 
    1723    883568994 :   for (unsigned i = 0; i < orig->length (); ++i)
    1724              :     {
    1725    664484159 : re_valueize:
    1726    668294605 :       vn_reference_op_t vro = &(*orig)[i];
    1727    668294605 :       if (vro->opcode == SSA_NAME
    1728    571237649 :           || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
    1729              :         {
    1730    121535236 :           tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
    1731    121535236 :           if (tem != vro->op0)
    1732              :             {
    1733     17683472 :               *valueized_anything = true;
    1734     17683472 :               vro->op0 = tem;
    1735              :             }
    1736              :           /* If it transforms from an SSA_NAME to a constant, update
    1737              :              the opcode.  */
    1738    121535236 :           if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
    1739      2061084 :             vro->opcode = TREE_CODE (vro->op0);
    1740              :         }
    1741    668294605 :       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
    1742              :         {
    1743        26325 :           tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
    1744        26325 :           if (tem != vro->op1)
    1745              :             {
    1746          603 :               *valueized_anything = true;
    1747          603 :               vro->op1 = tem;
    1748              :             }
    1749              :         }
    1750    668294605 :       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
    1751              :         {
    1752       205579 :           tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
    1753       205579 :           if (tem != vro->op2)
    1754              :             {
    1755       119600 :               *valueized_anything = true;
    1756       119600 :               vro->op2 = tem;
    1757              :             }
    1758              :         }
    1759              :       /* If it transforms from an SSA_NAME to an address, fold with
    1760              :          a preceding indirect reference.  */
    1761    668294605 :       if (i > 0
    1762    449130606 :           && vro->op0
    1763    445626783 :           && TREE_CODE (vro->op0) == ADDR_EXPR
    1764    800692659 :           && (*orig)[i - 1].opcode == MEM_REF)
    1765              :         {
    1766    126566364 :           if (vn_reference_fold_indirect (orig, &i))
    1767       661953 :             *valueized_anything = true;
    1768              :         }
    1769    541728241 :       else if (i > 0
    1770    322564242 :                && vro->opcode == SSA_NAME
    1771    636724113 :                && (*orig)[i - 1].opcode == MEM_REF)
    1772              :         {
    1773     85244369 :           if (vn_reference_maybe_forwprop_address (orig, &i))
    1774              :             {
    1775      3810446 :               *valueized_anything = true;
    1776              :               /* Re-valueize the current operand.  */
    1777      3810446 :               goto re_valueize;
    1778              :             }
    1779              :         }
    1780              :       /* If it transforms a non-constant ARRAY_REF into a constant
    1781              :          one, adjust the constant offset.  */
    1782    456483872 :       else if ((vro->opcode == ARRAY_REF
    1783    456483872 :                 || vro->opcode == ARRAY_RANGE_REF)
    1784     39873870 :                && known_eq (vro->off, -1)
    1785     17358086 :                && poly_int_tree_p (vro->op0)
    1786      4674423 :                && poly_int_tree_p (vro->op1)
    1787    461158295 :                && TREE_CODE (vro->op2) == INTEGER_CST)
    1788              :         {
    1789              :             /* Prohibit value-numbering addresses of one-after-the-last
    1790              :                element ARRAY_REFs the same as addresses of other components
    1791              :                before the pass folding __builtin_object_size had a chance
    1792              :                to run.  */
    1793      4540665 :           if (!(cfun->curr_properties & PROP_objsz)
    1794      5774219 :               && (*orig)[0].opcode == ADDR_EXPR)
    1795              :             {
    1796        35294 :               tree dom = TYPE_DOMAIN ((*orig)[i + 1].type);
    1797        53550 :               if (!dom
    1798        35144 :                   || !TYPE_MAX_VALUE (dom)
    1799        25374 :                   || !poly_int_tree_p (TYPE_MAX_VALUE (dom))
    1800        52422 :                   || integer_minus_onep (TYPE_MAX_VALUE (dom)))
    1801        19063 :                 continue;
    1802        17038 :               if (!known_le (wi::to_poly_offset (vro->op0),
    1803              :                              wi::to_poly_offset (TYPE_MAX_VALUE (dom))))
    1804          807 :                 continue;
    1805              :             }
    1806              : 
    1807      9043204 :           poly_offset_int off = ((wi::to_poly_offset (vro->op0)
    1808     13564806 :                                   - wi::to_poly_offset (vro->op1))
    1809      9043204 :                                  * wi::to_offset (vro->op2)
    1810      4521602 :                                  * vn_ref_op_align_unit (vro));
    1811      4521602 :           off.to_shwi (&vro->off);
    1812              :         }
    1813              :     }
    1814    219084835 : }
    1815              : 
    1816              : static void
    1817     14701712 : valueize_refs (vec<vn_reference_op_s> *orig)
    1818              : {
    1819     14701712 :   bool tem;
    1820            0 :   valueize_refs_1 (orig, &tem);
    1821            0 : }
    1822              : 
    1823              : static vec<vn_reference_op_s> shared_lookup_references;
    1824              : 
    1825              : /* Create a vector of vn_reference_op_s structures from REF, a
    1826              :    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
    1827              :    this function.  *VALUEIZED_ANYTHING will specify whether any
    1828              :    operands were valueized.  */
    1829              : 
    1830              : static vec<vn_reference_op_s>
    1831    178205174 : valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
    1832              : {
    1833    178205174 :   if (!ref)
    1834            0 :     return vNULL;
    1835    178205174 :   shared_lookup_references.truncate (0);
    1836    178205174 :   copy_reference_ops_from_ref (ref, &shared_lookup_references);
    1837    178205174 :   valueize_refs_1 (&shared_lookup_references, valueized_anything);
    1838    178205174 :   return shared_lookup_references;
    1839              : }
    1840              : 
    1841              : /* Create a vector of vn_reference_op_s structures from CALL, a
    1842              :    call statement.  The vector is shared among all callers of
    1843              :    this function.  */
    1844              : 
    1845              : static vec<vn_reference_op_s>
    1846      9105924 : valueize_shared_reference_ops_from_call (gcall *call)
    1847              : {
    1848      9105924 :   if (!call)
    1849            0 :     return vNULL;
    1850      9105924 :   shared_lookup_references.truncate (0);
    1851      9105924 :   copy_reference_ops_from_call (call, &shared_lookup_references);
    1852      9105924 :   valueize_refs (&shared_lookup_references);
    1853      9105924 :   return shared_lookup_references;
    1854              : }
    1855              : 
    1856              : /* Lookup a SCCVN reference operation VR in the current hash table.
    1857              :    Returns the resulting value number if it exists in the hash table,
    1858              :    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
    1859              :    vn_reference_t stored in the hashtable if something is found.  */
    1860              : 
    1861              : static tree
    1862     64639042 : vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
    1863              : {
    1864     64639042 :   vn_reference_s **slot;
    1865     64639042 :   hashval_t hash;
    1866              : 
    1867     64639042 :   hash = vr->hashcode;
    1868     64639042 :   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
    1869     64639042 :   if (slot)
    1870              :     {
    1871      8262731 :       if (vnresult)
    1872      8262731 :         *vnresult = (vn_reference_t)*slot;
    1873      8262731 :       return ((vn_reference_t)*slot)->result;
    1874              :     }
    1875              : 
    1876              :   return NULL_TREE;
    1877              : }
    1878              : 
    1879              : 
    1880              : /* Partial definition tracking support.  */
    1881              : 
    1882              : struct pd_range
    1883              : {
    1884              :   HOST_WIDE_INT offset;
    1885              :   HOST_WIDE_INT size;
    1886              :   pd_range *m_children[2];
    1887              : };
    1888              : 
    1889              : struct pd_data
    1890              : {
    1891              :   tree rhs;
    1892              :   HOST_WIDE_INT rhs_off;
    1893              :   HOST_WIDE_INT offset;
    1894              :   HOST_WIDE_INT size;
    1895              : };
    1896              : 
    1897              : /* Context for alias walking.  */
    1898              : 
    1899              : struct vn_walk_cb_data
    1900              : {
    1901     60536419 :   vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
    1902              :                    vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
    1903              :                    bool redundant_store_removal_p_)
    1904     60536419 :     : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
    1905     60536419 :       mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
    1906     60536419 :       vn_walk_kind (vn_walk_kind_),
    1907     60536419 :       tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
    1908    121072838 :       saved_operands (vNULL), first_range (), first_set (-2),
    1909    121072838 :       first_base_set (-2)
    1910              :   {
    1911     60536419 :     if (!last_vuse_ptr)
    1912     28061517 :       last_vuse_ptr = &last_vuse;
    1913     60536419 :     ao_ref_init (&orig_ref, orig_ref_);
    1914     60536419 :     if (mask)
    1915              :       {
    1916       318731 :         wide_int w = wi::to_wide (mask);
    1917       318731 :         unsigned int pos = 0, prec = w.get_precision ();
    1918       318731 :         pd_data pd;
    1919       318731 :         pd.rhs = build_constructor (NULL_TREE, NULL);
    1920       318731 :         pd.rhs_off = 0;
    1921              :         /* When bitwise and with a constant is done on a memory load,
    1922              :            we don't really need all the bits to be defined or defined
    1923              :            to constants, we don't really care what is in the position
    1924              :            corresponding to 0 bits in the mask.
    1925              :            So, push the ranges of those 0 bits in the mask as artificial
    1926              :            zero stores and let the partial def handling code do the
    1927              :            rest.  */
    1928       680577 :         while (pos < prec)
    1929              :           {
    1930       660591 :             int tz = wi::ctz (w);
    1931       660591 :             if (pos + tz > prec)
    1932       298745 :               tz = prec - pos;
    1933       660591 :             if (tz)
    1934              :               {
    1935       502404 :                 if (BYTES_BIG_ENDIAN)
    1936              :                   pd.offset = prec - pos - tz;
    1937              :                 else
    1938       502404 :                   pd.offset = pos;
    1939       502404 :                 pd.size = tz;
    1940       502404 :                 void *r = push_partial_def (pd, 0, 0, 0, prec);
    1941       502404 :                 gcc_assert (r == NULL_TREE);
    1942              :               }
    1943       660591 :             pos += tz;
    1944       660591 :             if (pos == prec)
    1945              :               break;
    1946       361846 :             w = wi::lrshift (w, tz);
    1947       361846 :             tz = wi::ctz (wi::bit_not (w));
    1948       361846 :             if (pos + tz > prec)
    1949            0 :               tz = prec - pos;
    1950       361846 :             pos += tz;
    1951       361846 :             w = wi::lrshift (w, tz);
    1952              :           }
    1953       318731 :       }
    1954     60536419 :   }
    1955              :   ~vn_walk_cb_data ();
    1956              :   void *finish (alias_set_type, alias_set_type, tree);
    1957              :   void *push_partial_def (pd_data pd,
    1958              :                           alias_set_type, alias_set_type, HOST_WIDE_INT,
    1959              :                           HOST_WIDE_INT);
    1960              : 
    1961              :   vn_reference_t vr;
    1962              :   ao_ref orig_ref;
    1963              :   tree *last_vuse_ptr;
    1964              :   tree last_vuse;
    1965              :   tree mask;
    1966              :   tree masked_result;
    1967              :   tree same_val;
    1968              :   vn_lookup_kind vn_walk_kind;
    1969              :   bool tbaa_p;
    1970              :   bool redundant_store_removal_p;
    1971              :   vec<vn_reference_op_s> saved_operands;
    1972              : 
    1973              :   /* The VDEFs of partial defs we come along.  */
    1974              :   auto_vec<pd_data, 2> partial_defs;
    1975              :   /* The first defs range to avoid splay tree setup in most cases.  */
    1976              :   pd_range first_range;
    1977              :   alias_set_type first_set;
    1978              :   alias_set_type first_base_set;
    1979              :   default_splay_tree<pd_range *> known_ranges;
    1980              :   obstack ranges_obstack;
    1981              :   static constexpr HOST_WIDE_INT bufsize = 64;
    1982              : };
    1983              : 
    1984     60536419 : vn_walk_cb_data::~vn_walk_cb_data ()
    1985              : {
    1986     60536419 :   if (known_ranges)
    1987       178459 :     obstack_free (&ranges_obstack, NULL);
    1988     60536419 :   saved_operands.release ();
    1989     60536419 : }
    1990              : 
    1991              : void *
    1992      1453982 : vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
    1993              : {
    1994      1453982 :   if (first_set != -2)
    1995              :     {
    1996       353175 :       set = first_set;
    1997       353175 :       base_set = first_base_set;
    1998              :     }
    1999      1453982 :   if (mask)
    2000              :     {
    2001          438 :       masked_result = val;
    2002          438 :       return (void *) -1;
    2003              :     }
    2004      1453544 :   if (same_val && !operand_equal_p (val, same_val))
    2005              :     return (void *) -1;
    2006      1449760 :   vec<vn_reference_op_s> &operands
    2007      1449760 :     = saved_operands.exists () ? saved_operands : vr->operands;
    2008      1449760 :   return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
    2009              :                                                    vr->offset, vr->max_size,
    2010      1449760 :                                                    vr->type, operands, val);
    2011              : }
    2012              : 
    2013              : /* Push PD to the vector of partial definitions returning a
    2014              :    value when we are ready to combine things with VUSE, SET and MAXSIZEI,
    2015              :    NULL when we want to continue looking for partial defs or -1
    2016              :    on failure.  */
    2017              : 
    2018              : void *
    2019       578711 : vn_walk_cb_data::push_partial_def (pd_data pd,
    2020              :                                    alias_set_type set, alias_set_type base_set,
    2021              :                                    HOST_WIDE_INT offseti,
    2022              :                                    HOST_WIDE_INT maxsizei)
    2023              : {
    2024              :   /* We're using a fixed buffer for encoding so fail early if the object
    2025              :      we want to interpret is bigger.  */
    2026       578711 :   if (maxsizei > bufsize * BITS_PER_UNIT
    2027              :       || CHAR_BIT != 8
    2028              :       || BITS_PER_UNIT != 8
    2029              :       /* Not prepared to handle PDP endian.  */
    2030              :       || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
    2031              :     return (void *)-1;
    2032              : 
    2033              :   /* Turn too large constant stores into non-constant stores.  */
    2034       578640 :   if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
    2035            0 :     pd.rhs = error_mark_node;
    2036              : 
    2037              :   /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
    2038              :      most a partial byte before and/or after the region.  */
    2039       578640 :   if (!CONSTANT_CLASS_P (pd.rhs))
    2040              :     {
    2041       541400 :       if (pd.offset < offseti)
    2042              :         {
    2043         7932 :           HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
    2044         7932 :           gcc_assert (pd.size > o);
    2045         7932 :           pd.size -= o;
    2046         7932 :           pd.offset += o;
    2047              :         }
    2048       541400 :       if (pd.size > maxsizei)
    2049         7095 :         pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
    2050              :     }
    2051              : 
    2052       578640 :   pd.offset -= offseti;
    2053              : 
    2054      1157280 :   bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
    2055       578640 :                         || CONSTANT_CLASS_P (pd.rhs));
    2056       578640 :   pd_range *r;
    2057       578640 :   if (partial_defs.is_empty ())
    2058              :     {
    2059              :       /* If we get a clobber upfront, fail.  */
    2060       369855 :       if (TREE_CLOBBER_P (pd.rhs))
    2061              :         return (void *)-1;
    2062       369498 :       if (!pd_constant_p)
    2063              :         return (void *)-1;
    2064       341079 :       partial_defs.safe_push (pd);
    2065       341079 :       first_range.offset = pd.offset;
    2066       341079 :       first_range.size = pd.size;
    2067       341079 :       first_set = set;
    2068       341079 :       first_base_set = base_set;
    2069       341079 :       last_vuse_ptr = NULL;
    2070       341079 :       r = &first_range;
    2071              :       /* Go check if the first partial definition was a full one in case
    2072              :          the caller didn't optimize for this.  */
    2073              :     }
    2074              :   else
    2075              :     {
    2076       208785 :       if (!known_ranges)
    2077              :         {
    2078              :           /* ???  Optimize the case where the 2nd partial def completes
    2079              :              things.  */
    2080       178459 :           gcc_obstack_init (&ranges_obstack);
    2081       178459 :           known_ranges.insert_max_node (&first_range);
    2082              :         }
    2083              :       /* Lookup the offset and see if we need to merge.  */
    2084       208785 :       int comparison = known_ranges.lookup_le
    2085       421697 :         ([&] (pd_range *r) { return pd.offset < r->offset; },
    2086       189600 :          [&] (pd_range *r) { return pd.offset > r->offset; });
    2087       208785 :       r = known_ranges.root ();
    2088       208785 :       if (comparison >= 0
    2089       208785 :           && ranges_known_overlap_p (r->offset, r->size + 1,
    2090              :                                      pd.offset, pd.size))
    2091              :         {
    2092              :           /* Ignore partial defs already covered.  Here we also drop shadowed
    2093              :              clobbers arriving here at the floor.  */
    2094         5548 :           if (known_subrange_p (pd.offset, pd.size, r->offset, r->size))
    2095              :             return NULL;
    2096         4844 :           r->size = MAX (r->offset + r->size, pd.offset + pd.size) - r->offset;
    2097              :         }
    2098              :       else
    2099              :         {
    2100              :           /* pd.offset wasn't covered yet, insert the range.  */
    2101       203237 :           void *addr = XOBNEW (&ranges_obstack, pd_range);
    2102       203237 :           r = new (addr) pd_range { pd.offset, pd.size, {} };
    2103       203237 :           known_ranges.insert_relative (comparison, r);
    2104              :         }
    2105              :       /* Merge r which now contains pd's range and is a member of the splay
    2106              :          tree with adjacent overlapping ranges.  */
    2107       208081 :       if (known_ranges.splay_next_node ())
    2108        21197 :         do
    2109              :           {
    2110        21197 :             pd_range *rafter = known_ranges.root ();
    2111        21197 :             if (!ranges_known_overlap_p (r->offset, r->size + 1,
    2112        21197 :                                          rafter->offset, rafter->size))
    2113              :               break;
    2114        20845 :             r->size = MAX (r->offset + r->size,
    2115        20845 :                            rafter->offset + rafter->size) - r->offset;
    2116              :           }
    2117        20845 :         while (known_ranges.remove_root_and_splay_next ());
    2118              :       /* If we get a clobber, fail.  */
    2119       208081 :       if (TREE_CLOBBER_P (pd.rhs))
    2120              :         return (void *)-1;
    2121              :       /* Non-constants are OK as long as they are shadowed by a constant.  */
    2122       205971 :       if (!pd_constant_p)
    2123              :         return (void *)-1;
    2124       199819 :       partial_defs.safe_push (pd);
    2125              :     }
    2126              : 
    2127              :   /* Now we have merged pd's range into the range tree.  When we have covered
    2128              :      [offseti, sizei] then the tree will contain exactly one node which has
    2129              :      the desired properties and it will be 'r'.  */
    2130       540898 :   if (!known_subrange_p (0, maxsizei, r->offset, r->size))
    2131              :     /* Continue looking for partial defs.  */
    2132              :     return NULL;
    2133              : 
    2134              :   /* Now simply native encode all partial defs in reverse order.  */
    2135         8311 :   unsigned ndefs = partial_defs.length ();
    2136              :   /* We support up to 512-bit values (for V8DFmode).  */
    2137         8311 :   unsigned char buffer[bufsize + 1];
    2138         8311 :   unsigned char this_buffer[bufsize + 1];
    2139         8311 :   int len;
    2140              : 
    2141         8311 :   memset (buffer, 0, bufsize + 1);
    2142         8311 :   unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
    2143        40591 :   while (!partial_defs.is_empty ())
    2144              :     {
    2145        23969 :       pd_data pd = partial_defs.pop ();
    2146        23969 :       unsigned int amnt;
    2147        23969 :       if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
    2148              :         {
    2149              :           /* Empty CONSTRUCTOR.  */
    2150         1931 :           if (pd.size >= needed_len * BITS_PER_UNIT)
    2151         1931 :             len = needed_len;
    2152              :           else
    2153         1698 :             len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
    2154         1931 :           memset (this_buffer, 0, len);
    2155              :         }
    2156        22038 :       else if (pd.rhs_off >= 0)
    2157              :         {
    2158        44076 :           len = native_encode_expr (pd.rhs, this_buffer, bufsize,
    2159        22038 :                                     (MAX (0, -pd.offset)
    2160        22038 :                                      + pd.rhs_off) / BITS_PER_UNIT);
    2161        22038 :           if (len <= 0
    2162        22038 :               || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
    2163        22038 :                         - MAX (0, -pd.offset) / BITS_PER_UNIT))
    2164              :             {
    2165            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    2166            0 :                 fprintf (dump_file, "Failed to encode %u "
    2167              :                          "partial definitions\n", ndefs);
    2168            0 :               return (void *)-1;
    2169              :             }
    2170              :         }
    2171              :       else /* negative pd.rhs_off indicates we want to chop off first bits */
    2172              :         {
    2173            0 :           if (-pd.rhs_off >= bufsize)
    2174              :             return (void *)-1;
    2175            0 :           len = native_encode_expr (pd.rhs,
    2176            0 :                                     this_buffer + -pd.rhs_off / BITS_PER_UNIT,
    2177            0 :                                     bufsize - -pd.rhs_off / BITS_PER_UNIT,
    2178            0 :                                     MAX (0, -pd.offset) / BITS_PER_UNIT);
    2179            0 :           if (len <= 0
    2180            0 :               || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
    2181            0 :                         - MAX (0, -pd.offset) / BITS_PER_UNIT))
    2182              :             {
    2183            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    2184            0 :                 fprintf (dump_file, "Failed to encode %u "
    2185              :                          "partial definitions\n", ndefs);
    2186            0 :               return (void *)-1;
    2187              :             }
    2188              :         }
    2189              : 
    2190        23969 :       unsigned char *p = buffer;
    2191        23969 :       HOST_WIDE_INT size = pd.size;
    2192        23969 :       if (pd.offset < 0)
    2193          228 :         size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
    2194        23969 :       this_buffer[len] = 0;
    2195        23969 :       if (BYTES_BIG_ENDIAN)
    2196              :         {
    2197              :           /* LSB of this_buffer[len - 1] byte should be at
    2198              :              pd.offset + pd.size - 1 bits in buffer.  */
    2199              :           amnt = ((unsigned HOST_WIDE_INT) pd.offset
    2200              :                   + pd.size) % BITS_PER_UNIT;
    2201              :           if (amnt)
    2202              :             shift_bytes_in_array_right (this_buffer, len + 1, amnt);
    2203              :           unsigned char *q = this_buffer;
    2204              :           unsigned int off = 0;
    2205              :           if (pd.offset >= 0)
    2206              :             {
    2207              :               unsigned int msk;
    2208              :               off = pd.offset / BITS_PER_UNIT;
    2209              :               gcc_assert (off < needed_len);
    2210              :               p = buffer + off;
    2211              :               if (size <= amnt)
    2212              :                 {
    2213              :                   msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
    2214              :                   *p = (*p & ~msk) | (this_buffer[len] & msk);
    2215              :                   size = 0;
    2216              :                 }
    2217              :               else
    2218              :                 {
    2219              :                   if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
    2220              :                     q = (this_buffer + len
    2221              :                          - (ROUND_UP (size - amnt, BITS_PER_UNIT)
    2222              :                             / BITS_PER_UNIT));
    2223              :                   if (pd.offset % BITS_PER_UNIT)
    2224              :                     {
    2225              :                       msk = -1U << (BITS_PER_UNIT
    2226              :                                     - (pd.offset % BITS_PER_UNIT));
    2227              :                       *p = (*p & msk) | (*q & ~msk);
    2228              :                       p++;
    2229              :                       q++;
    2230              :                       off++;
    2231              :                       size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
    2232              :                       gcc_assert (size >= 0);
    2233              :                     }
    2234              :                 }
    2235              :             }
    2236              :           else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
    2237              :             {
    2238              :               q = (this_buffer + len
    2239              :                    - (ROUND_UP (size - amnt, BITS_PER_UNIT)
    2240              :                       / BITS_PER_UNIT));
    2241              :               if (pd.offset % BITS_PER_UNIT)
    2242              :                 {
    2243              :                   q++;
    2244              :                   size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
    2245              :                                            % BITS_PER_UNIT);
    2246              :                   gcc_assert (size >= 0);
    2247              :                 }
    2248              :             }
    2249              :           if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
    2250              :               > needed_len)
    2251              :             size = (needed_len - off) * BITS_PER_UNIT;
    2252              :           memcpy (p, q, size / BITS_PER_UNIT);
    2253              :           if (size % BITS_PER_UNIT)
    2254              :             {
    2255              :               unsigned int msk
    2256              :                 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
    2257              :               p += size / BITS_PER_UNIT;
    2258              :               q += size / BITS_PER_UNIT;
    2259              :               *p = (*q & msk) | (*p & ~msk);
    2260              :             }
    2261              :         }
    2262              :       else
    2263              :         {
    2264        23969 :           if (pd.offset >= 0)
    2265              :             {
    2266              :               /* LSB of this_buffer[0] byte should be at pd.offset bits
    2267              :                  in buffer.  */
    2268        23741 :               unsigned int msk;
    2269        23741 :               size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
    2270        23741 :               amnt = pd.offset % BITS_PER_UNIT;
    2271        23741 :               if (amnt)
    2272         1513 :                 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
    2273        23741 :               unsigned int off = pd.offset / BITS_PER_UNIT;
    2274        23741 :               gcc_assert (off < needed_len);
    2275        23741 :               size = MIN (size,
    2276              :                           (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
    2277        23741 :               p = buffer + off;
    2278        23741 :               if (amnt + size < BITS_PER_UNIT)
    2279              :                 {
    2280              :                   /* Low amnt bits come from *p, then size bits
    2281              :                      from this_buffer[0] and the remaining again from
    2282              :                      *p.  */
    2283         1082 :                   msk = ((1 << size) - 1) << amnt;
    2284         1082 :                   *p = (*p & ~msk) | (this_buffer[0] & msk);
    2285         1082 :                   size = 0;
    2286              :                 }
    2287        22659 :               else if (amnt)
    2288              :                 {
    2289         1121 :                   msk = -1U << amnt;
    2290         1121 :                   *p = (*p & ~msk) | (this_buffer[0] & msk);
    2291         1121 :                   p++;
    2292         1121 :                   size -= (BITS_PER_UNIT - amnt);
    2293              :                 }
    2294              :             }
    2295              :           else
    2296              :             {
    2297          228 :               amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
    2298          228 :               if (amnt)
    2299           16 :                 size -= BITS_PER_UNIT - amnt;
    2300          228 :               size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
    2301          228 :               if (amnt)
    2302           16 :                 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
    2303              :             }
    2304        23969 :           memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
    2305        23969 :           p += size / BITS_PER_UNIT;
    2306        23969 :           if (size % BITS_PER_UNIT)
    2307              :             {
    2308          629 :               unsigned int msk = -1U << (size % BITS_PER_UNIT);
    2309          629 :               *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
    2310          629 :                     & ~msk) | (*p & msk);
    2311              :             }
    2312              :         }
    2313              :     }
    2314              : 
    2315         8311 :   tree type = vr->type;
    2316              :   /* Make sure to interpret in a type that has a range covering the whole
    2317              :      access size.  */
    2318         8311 :   if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
    2319              :     {
    2320           13 :       if (TREE_CODE (vr->type) == BITINT_TYPE
    2321           26 :           && maxsizei > MAX_FIXED_MODE_SIZE)
    2322           13 :         type = build_bitint_type (maxsizei, TYPE_UNSIGNED (type));
    2323              :       else
    2324            0 :         type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
    2325              :     }
    2326         8311 :   tree val;
    2327         8311 :   if (BYTES_BIG_ENDIAN)
    2328              :     {
    2329              :       unsigned sz = needed_len;
    2330              :       if (maxsizei % BITS_PER_UNIT)
    2331              :         shift_bytes_in_array_right (buffer, needed_len,
    2332              :                                     BITS_PER_UNIT
    2333              :                                     - (maxsizei % BITS_PER_UNIT));
    2334              :       if (INTEGRAL_TYPE_P (type))
    2335              :         {
    2336              :           if (TYPE_MODE (type) != BLKmode)
    2337              :             sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
    2338              :           else
    2339              :             sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
    2340              :         }
    2341              :       if (sz > needed_len)
    2342              :         {
    2343              :           memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
    2344              :           val = native_interpret_expr (type, this_buffer, sz);
    2345              :         }
    2346              :       else
    2347              :         val = native_interpret_expr (type, buffer, needed_len);
    2348              :     }
    2349              :   else
    2350         8311 :     val = native_interpret_expr (type, buffer, bufsize);
    2351              :   /* If we chop off bits because the types precision doesn't match the memory
    2352              :      access size this is ok when optimizing reads but not when called from
    2353              :      the DSE code during elimination.  */
    2354         8311 :   if (val && type != vr->type)
    2355              :     {
    2356           13 :       if (! int_fits_type_p (val, vr->type))
    2357              :         val = NULL_TREE;
    2358              :       else
    2359           13 :         val = fold_convert (vr->type, val);
    2360              :     }
    2361              : 
    2362         8307 :   if (val)
    2363              :     {
    2364         8307 :       if (dump_file && (dump_flags & TDF_DETAILS))
    2365            0 :         fprintf (dump_file,
    2366              :                  "Successfully combined %u partial definitions\n", ndefs);
    2367              :       /* We are using the alias-set of the first store we encounter which
    2368              :          should be appropriate here.  */
    2369         8307 :       return finish (first_set, first_base_set, val);
    2370              :     }
    2371              :   else
    2372              :     {
    2373            4 :       if (dump_file && (dump_flags & TDF_DETAILS))
    2374            0 :         fprintf (dump_file,
    2375              :                  "Failed to interpret %u encoded partial definitions\n", ndefs);
    2376            4 :       return (void *)-1;
    2377              :     }
    2378              : }
    2379              : 
    2380              : /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
    2381              :    with the current VUSE and performs the expression lookup.  */
    2382              : 
    2383              : static void *
    2384   1058103611 : vn_reference_lookup_2 (ao_ref *op, tree vuse, void *data_)
    2385              : {
    2386   1058103611 :   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
    2387   1058103611 :   vn_reference_t vr = data->vr;
    2388   1058103611 :   vn_reference_s **slot;
    2389   1058103611 :   hashval_t hash;
    2390              : 
    2391              :   /* If we have partial definitions recorded we have to go through
    2392              :      vn_reference_lookup_3.  */
    2393   2108451076 :   if (!data->partial_defs.is_empty ())
    2394              :     return NULL;
    2395              : 
    2396   1057314572 :   if (data->last_vuse_ptr)
    2397              :     {
    2398   1035713155 :       *data->last_vuse_ptr = vuse;
    2399   1035713155 :       data->last_vuse = vuse;
    2400              :     }
    2401              : 
    2402              :   /* Fixup vuse and hash.  */
    2403   1057314572 :   if (vr->vuse)
    2404   1057314572 :     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
    2405   1057314572 :   vr->vuse = vuse_ssa_val (vuse);
    2406   1057314572 :   if (vr->vuse)
    2407   1057314572 :     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
    2408              : 
    2409   1057314572 :   hash = vr->hashcode;
    2410   1057314572 :   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
    2411   1057314572 :   if (slot)
    2412              :     {
    2413      7754900 :       if ((*slot)->result && data->saved_operands.exists ())
    2414       339399 :         return data->finish (vr->set, vr->base_set, (*slot)->result);
    2415              :       return *slot;
    2416              :     }
    2417              : 
    2418   1049559672 :   if (SSA_NAME_IS_DEFAULT_DEF (vuse))
    2419              :     {
    2420     17657208 :       HOST_WIDE_INT op_offset, op_size;
    2421     17657208 :       tree v = NULL_TREE;
    2422     17657208 :       tree base = ao_ref_base (op);
    2423              : 
    2424     17657208 :       if (base
    2425     17657208 :           && op->offset.is_constant (&op_offset)
    2426     17657208 :           && op->size.is_constant (&op_size)
    2427     17657208 :           && op->max_size_known_p ()
    2428     34874279 :           && known_eq (op->size, op->max_size))
    2429              :         {
    2430     16921336 :           if (TREE_CODE (base) == PARM_DECL)
    2431       675736 :             v = ipcp_get_aggregate_const (cfun, base, false, op_offset,
    2432              :                                           op_size);
    2433     16245600 :           else if (TREE_CODE (base) == MEM_REF
    2434      6685474 :                    && integer_zerop (TREE_OPERAND (base, 1))
    2435      5442889 :                    && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
    2436      5436905 :                    && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
    2437     19875998 :                    && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0)))
    2438              :                        == PARM_DECL))
    2439      3577304 :             v = ipcp_get_aggregate_const (cfun,
    2440      3577304 :                                           SSA_NAME_VAR (TREE_OPERAND (base, 0)),
    2441              :                                           true, op_offset, op_size);
    2442              :         }
    2443      4253040 :       if (v)
    2444         1246 :         return data->finish (vr->set, vr->base_set, v);
    2445              :     }
    2446              : 
    2447              :   return NULL;
    2448              : }
    2449              : 
    2450              : /* Lookup an existing or insert a new vn_reference entry into the
    2451              :    value table for the VUSE, SET, TYPE, OPERANDS reference which
    2452              :    has the value VALUE which is either a constant or an SSA name.  */
    2453              : 
    2454              : static vn_reference_t
    2455      1449760 : vn_reference_lookup_or_insert_for_pieces (tree vuse,
    2456              :                                           alias_set_type set,
    2457              :                                           alias_set_type base_set,
    2458              :                                           poly_int64 offset,
    2459              :                                           poly_int64 max_size,
    2460              :                                           tree type,
    2461              :                                           vec<vn_reference_op_s,
    2462              :                                                 va_heap> operands,
    2463              :                                           tree value)
    2464              : {
    2465      1449760 :   vn_reference_s vr1;
    2466      1449760 :   vn_reference_t result;
    2467      1449760 :   unsigned value_id;
    2468      1449760 :   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
    2469      1449760 :   vr1.operands = operands;
    2470      1449760 :   vr1.type = type;
    2471      1449760 :   vr1.set = set;
    2472      1449760 :   vr1.base_set = base_set;
    2473      1449760 :   vr1.offset = offset;
    2474      1449760 :   vr1.max_size = max_size;
    2475      1449760 :   vr1.hashcode = vn_reference_compute_hash (&vr1);
    2476      1449760 :   if (vn_reference_lookup_1 (&vr1, &result))
    2477         7713 :     return result;
    2478              : 
    2479      1442047 :   if (TREE_CODE (value) == SSA_NAME)
    2480       275728 :     value_id = VN_INFO (value)->value_id;
    2481              :   else
    2482      1166319 :     value_id = get_or_alloc_constant_value_id (value);
    2483      1442047 :   return vn_reference_insert_pieces (vuse, set, base_set, offset, max_size,
    2484      1442047 :                                      type, operands.copy (), value, value_id);
    2485              : }
    2486              : 
    2487              : /* Return a value-number for RCODE OPS... either by looking up an existing
    2488              :    value-number for the possibly simplified result or by inserting the
    2489              :    operation if INSERT is true.  If SIMPLIFY is false, return a value
    2490              :    number for the unsimplified expression.  */
    2491              : 
    2492              : static tree
    2493     18353408 : vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
    2494              :                            bool simplify)
    2495              : {
    2496     18353408 :   tree result = NULL_TREE;
    2497              :   /* We will be creating a value number for
    2498              :        RCODE (OPS...).
    2499              :      So first simplify and lookup this expression to see if it
    2500              :      is already available.  */
    2501              :   /* For simplification valueize.  */
    2502     18353408 :   unsigned i = 0;
    2503     18353408 :   if (simplify)
    2504     42612802 :     for (i = 0; i < res_op->num_ops; ++i)
    2505     24265127 :       if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
    2506              :         {
    2507     15512012 :           tree tem = vn_valueize (res_op->ops[i]);
    2508     15512012 :           if (!tem)
    2509              :             break;
    2510     15512012 :           res_op->ops[i] = tem;
    2511              :         }
    2512              :   /* If valueization of an operand fails (it is not available), skip
    2513              :      simplification.  */
    2514     18353408 :   bool res = false;
    2515     18353408 :   if (i == res_op->num_ops)
    2516              :     {
    2517              :       /* Do not leak not available operands into the simplified expression
    2518              :          when called from PRE context.  */
    2519     18347675 :       if (rpo_avail)
    2520     10983758 :         mprts_hook = vn_lookup_simplify_result;
    2521     18347675 :       res = res_op->resimplify (NULL, vn_valueize);
    2522     18347675 :       mprts_hook = NULL;
    2523              :     }
    2524     31557724 :   gimple *new_stmt = NULL;
    2525     18347675 :   if (res
    2526     18347675 :       && gimple_simplified_result_is_gimple_val (res_op))
    2527              :     {
    2528              :       /* The expression is already available.  */
    2529      5143359 :       result = res_op->ops[0];
    2530              :       /* Valueize it, simplification returns sth in AVAIL only.  */
    2531      5143359 :       if (TREE_CODE (result) == SSA_NAME)
    2532       289482 :         result = SSA_VAL (result);
    2533              :     }
    2534              :   else
    2535              :     {
    2536     13210049 :       tree val = vn_lookup_simplify_result (res_op);
    2537              :       /* ???  In weird cases we can end up with internal-fn calls,
    2538              :          but this isn't expected so throw the result away.  See
    2539              :          PR123040 for an example.  */
    2540     13210049 :       if (!val && insert && res_op->code.is_tree_code ())
    2541              :         {
    2542       135463 :           gimple_seq stmts = NULL;
    2543       135463 :           result = maybe_push_res_to_seq (res_op, &stmts);
    2544       135463 :           if (result)
    2545              :             {
    2546       135457 :               gcc_assert (gimple_seq_singleton_p (stmts));
    2547       135457 :               new_stmt = gimple_seq_first_stmt (stmts);
    2548              :             }
    2549              :         }
    2550              :       else
    2551              :         /* The expression is already available.  */
    2552              :         result = val;
    2553              :     }
    2554       289488 :   if (new_stmt)
    2555              :     {
    2556              :       /* The expression is not yet available, value-number lhs to
    2557              :          the new SSA_NAME we created.  */
    2558              :       /* Initialize value-number information properly.  */
    2559       135457 :       vn_ssa_aux_t result_info = VN_INFO (result);
    2560       135457 :       result_info->valnum = result;
    2561       135457 :       result_info->value_id = get_next_value_id ();
    2562       135457 :       result_info->visited = 1;
    2563       135457 :       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
    2564              :                                           new_stmt);
    2565       135457 :       result_info->needs_insertion = true;
    2566              :       /* ???  PRE phi-translation inserts NARYs without corresponding
    2567              :          SSA name result.  Re-use those but set their result according
    2568              :          to the stmt we just built.  */
    2569       135457 :       vn_nary_op_t nary = NULL;
    2570       135457 :       vn_nary_op_lookup_stmt (new_stmt, &nary);
    2571       135457 :       if (nary)
    2572              :         {
    2573            0 :           gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
    2574            0 :           nary->u.result = gimple_assign_lhs (new_stmt);
    2575              :         }
    2576              :       /* As all "inserted" statements are singleton SCCs, insert
    2577              :          to the valid table.  This is strictly needed to
    2578              :          avoid re-generating new value SSA_NAMEs for the same
    2579              :          expression during SCC iteration over and over (the
    2580              :          optimistic table gets cleared after each iteration).
    2581              :          We do not need to insert into the optimistic table, as
    2582              :          lookups there will fall back to the valid table.  */
    2583              :       else
    2584              :         {
    2585       135457 :           unsigned int length = vn_nary_length_from_stmt (new_stmt);
    2586       135457 :           vn_nary_op_t vno1
    2587       135457 :             = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
    2588       135457 :           vno1->value_id = result_info->value_id;
    2589       135457 :           vno1->length = length;
    2590       135457 :           vno1->predicated_values = 0;
    2591       135457 :           vno1->u.result = result;
    2592       135457 :           init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
    2593       135457 :           vn_nary_op_insert_into (vno1, valid_info->nary);
    2594              :           /* Also do not link it into the undo chain.  */
    2595       135457 :           last_inserted_nary = vno1->next;
    2596       135457 :           vno1->next = (vn_nary_op_t)(void *)-1;
    2597              :         }
    2598       135457 :       if (dump_file && (dump_flags & TDF_DETAILS))
    2599              :         {
    2600          590 :           fprintf (dump_file, "Inserting name ");
    2601          590 :           print_generic_expr (dump_file, result);
    2602          590 :           fprintf (dump_file, " for expression ");
    2603          590 :           print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
    2604          590 :           fprintf (dump_file, "\n");
    2605              :         }
    2606              :     }
    2607     18353408 :   return result;
    2608              : }
    2609              : 
    2610              : /* Return a value-number for RCODE OPS... either by looking up an existing
    2611              :    value-number for the simplified result or by inserting the operation.  */
    2612              : 
    2613              : static tree
    2614       178825 : vn_nary_build_or_lookup (gimple_match_op *res_op)
    2615              : {
    2616            0 :   return vn_nary_build_or_lookup_1 (res_op, true, true);
    2617              : }
    2618              : 
    2619              : /* Try to simplify the expression RCODE OPS... of type TYPE and return
    2620              :    its value if present.  Update NARY with a simplified expression if
    2621              :    it fits.  */
    2622              : 
    2623              : tree
    2624      7358508 : vn_nary_simplify (vn_nary_op_t nary)
    2625              : {
    2626      7358508 :   if (nary->length > gimple_match_op::MAX_NUM_OPS
    2627              :       /* For CONSTRUCTOR the vn_nary_op_t and gimple_match_op representation
    2628              :          does not match.  */
    2629      7358328 :       || nary->opcode == CONSTRUCTOR)
    2630              :     return NULL_TREE;
    2631      7357912 :   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
    2632      7357912 :                       nary->type, nary->length);
    2633      7357912 :   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
    2634      7357912 :   tree res = vn_nary_build_or_lookup_1 (&op, false, true);
    2635              :   /* Do not update *NARY with a simplified result that contains abnormals.
    2636              :      This matches what maybe_push_res_to_seq does when requesting insertion.  */
    2637     19304956 :   for (unsigned i = 0; i < op.num_ops; ++i)
    2638     11947133 :     if (TREE_CODE (op.ops[i]) == SSA_NAME
    2639     11947133 :         && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op.ops[i]))
    2640              :       return res;
    2641      7357823 :   if (op.code.is_tree_code ()
    2642      7357823 :       && op.num_ops <= nary->length
    2643     14714944 :       && (tree_code) op.code != CONSTRUCTOR)
    2644              :     {
    2645      7357120 :       nary->opcode = (tree_code) op.code;
    2646      7357120 :       nary->length = op.num_ops;
    2647     19302747 :       for (unsigned i = 0; i < op.num_ops; ++i)
    2648     11945627 :         nary->op[i] = op.ops[i];
    2649              :     }
    2650              :   return res;
    2651              : }
    2652              : 
    2653              : /* Elimination engine.  */
    2654              : 
    2655              : class eliminate_dom_walker : public dom_walker
    2656              : {
    2657              : public:
    2658              :   eliminate_dom_walker (cdi_direction, bitmap);
    2659              :   ~eliminate_dom_walker ();
    2660              : 
    2661              :   edge before_dom_children (basic_block) final override;
    2662              :   void after_dom_children (basic_block) final override;
    2663              : 
    2664              :   virtual tree eliminate_avail (basic_block, tree op);
    2665              :   virtual void eliminate_push_avail (basic_block, tree op);
    2666              :   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
    2667              : 
    2668              :   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
    2669              : 
    2670              :   unsigned eliminate_cleanup (bool region_p = false);
    2671              : 
    2672              :   bool do_pre;
    2673              :   unsigned int el_todo;
    2674              :   unsigned int eliminations;
    2675              :   unsigned int insertions;
    2676              : 
    2677              :   /* SSA names that had their defs inserted by PRE if do_pre.  */
    2678              :   bitmap inserted_exprs;
    2679              : 
    2680              :   /* Blocks with statements that have had their EH properties changed.  */
    2681              :   bitmap need_eh_cleanup;
    2682              : 
    2683              :   /* Blocks with statements that have had their AB properties changed.  */
    2684              :   bitmap need_ab_cleanup;
    2685              : 
    2686              :   /* Local state for the eliminate domwalk.  */
    2687              :   auto_vec<gimple *> to_remove;
    2688              :   auto_vec<gimple *> to_fixup;
    2689              :   auto_vec<tree> avail;
    2690              :   auto_vec<tree> avail_stack;
    2691              : };
    2692              : 
    2693              : /* Adaptor to the elimination engine using RPO availability.  */
    2694              : 
    2695     12240762 : class rpo_elim : public eliminate_dom_walker
    2696              : {
    2697              : public:
    2698      6120381 :   rpo_elim(basic_block entry_)
    2699     12240762 :     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
    2700     12240762 :       m_avail_freelist (NULL) {}
    2701              : 
    2702              :   tree eliminate_avail (basic_block, tree op) final override;
    2703              : 
    2704              :   void eliminate_push_avail (basic_block, tree) final override;
    2705              : 
    2706              :   basic_block entry;
    2707              :   /* Freelist of avail entries which are allocated from the vn_ssa_aux
    2708              :      obstack.  */
    2709              :   vn_avail *m_avail_freelist;
    2710              : };
    2711              : 
    2712              : /* Return true if BASE1 and BASE2 can be adjusted so they have the
    2713              :    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
    2714              :    Otherwise return false.  */
    2715              : 
    2716              : static bool
    2717      6794113 : adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
    2718              :                                        tree base2, poly_int64 *offset2)
    2719              : {
    2720      6794113 :   poly_int64 soff;
    2721      6794113 :   if (TREE_CODE (base1) == MEM_REF
    2722      3081529 :       && TREE_CODE (base2) == MEM_REF)
    2723              :     {
    2724      2473241 :       if (mem_ref_offset (base1).to_shwi (&soff))
    2725              :         {
    2726      2473241 :           base1 = TREE_OPERAND (base1, 0);
    2727      2473241 :           *offset1 += soff * BITS_PER_UNIT;
    2728              :         }
    2729      2473241 :       if (mem_ref_offset (base2).to_shwi (&soff))
    2730              :         {
    2731      2473241 :           base2 = TREE_OPERAND (base2, 0);
    2732      2473241 :           *offset2 += soff * BITS_PER_UNIT;
    2733              :         }
    2734      2473241 :       return operand_equal_p (base1, base2, 0);
    2735              :     }
    2736      4320872 :   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
    2737              : }
    2738              : 
    2739              : /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
    2740              :    from the statement defining VUSE and if not successful tries to
    2741              :    translate *REFP and VR_ through an aggregate copy at the definition
    2742              :    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
    2743              :    of *REF and *VR.  If only disambiguation was performed then
    2744              :    *DISAMBIGUATE_ONLY is set to true.  */
    2745              : 
    2746              : static void *
    2747     42196290 : vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
    2748              :                        translate_flags *disambiguate_only)
    2749              : {
    2750     42196290 :   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
    2751     42196290 :   vn_reference_t vr = data->vr;
    2752     42196290 :   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
    2753     42196290 :   tree base = ao_ref_base (ref);
    2754     42196290 :   HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
    2755     42196290 :   static vec<vn_reference_op_s> lhs_ops;
    2756     42196290 :   ao_ref lhs_ref;
    2757     42196290 :   bool lhs_ref_ok = false;
    2758     42196290 :   poly_int64 copy_size;
    2759              : 
    2760              :   /* First try to disambiguate after value-replacing in the definitions LHS.  */
    2761     42196290 :   if (is_gimple_assign (def_stmt))
    2762              :     {
    2763     20736837 :       tree lhs = gimple_assign_lhs (def_stmt);
    2764     20736837 :       bool valueized_anything = false;
    2765              :       /* Avoid re-allocation overhead.  */
    2766     20736837 :       lhs_ops.truncate (0);
    2767     20736837 :       basic_block saved_rpo_bb = vn_context_bb;
    2768     20736837 :       vn_context_bb = gimple_bb (def_stmt);
    2769     20736837 :       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
    2770              :         {
    2771     13509890 :           copy_reference_ops_from_ref (lhs, &lhs_ops);
    2772     13509890 :           valueize_refs_1 (&lhs_ops, &valueized_anything, true);
    2773              :         }
    2774     20736837 :       vn_context_bb = saved_rpo_bb;
    2775     20736837 :       ao_ref_init (&lhs_ref, lhs);
    2776     20736837 :       lhs_ref_ok = true;
    2777     20736837 :       if (valueized_anything
    2778      1935873 :           && ao_ref_init_from_vn_reference
    2779      1935873 :                (&lhs_ref, ao_ref_alias_set (&lhs_ref),
    2780      1935873 :                 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
    2781     22672710 :           && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
    2782              :         {
    2783      1630340 :           *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
    2784      8124037 :           return NULL;
    2785              :         }
    2786              : 
    2787              :       /* When the def is a CLOBBER we can optimistically disambiguate
    2788              :          against it since any overlap it would be undefined behavior.
    2789              :          Avoid this for obvious must aliases to save compile-time though.
    2790              :          We also may not do this when the query is used for redundant
    2791              :          store removal.  */
    2792     19106497 :       if (!data->redundant_store_removal_p
    2793     10516432 :           && gimple_clobber_p (def_stmt)
    2794     19628012 :           && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
    2795              :         {
    2796       485276 :           *disambiguate_only = TR_DISAMBIGUATE;
    2797       485276 :           return NULL;
    2798              :         }
    2799              : 
    2800              :       /* Besides valueizing the LHS we can also use access-path based
    2801              :          disambiguation on the original non-valueized ref.  */
    2802     18621221 :       if (!ref->ref
    2803              :           && lhs_ref_ok
    2804      2693873 :           && data->orig_ref.ref)
    2805              :         {
    2806              :           /* We want to use the non-valueized LHS for this, but avoid redundant
    2807              :              work.  */
    2808      1820476 :           ao_ref *lref = &lhs_ref;
    2809      1820476 :           ao_ref lref_alt;
    2810      1820476 :           if (valueized_anything)
    2811              :             {
    2812       122952 :               ao_ref_init (&lref_alt, lhs);
    2813       122952 :               lref = &lref_alt;
    2814              :             }
    2815      1820476 :           if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
    2816              :             {
    2817       240038 :               *disambiguate_only = (valueized_anything
    2818       120019 :                                     ? TR_VALUEIZE_AND_DISAMBIGUATE
    2819              :                                     : TR_DISAMBIGUATE);
    2820       120019 :               return NULL;
    2821              :             }
    2822              :         }
    2823              : 
    2824              :       /* If we reach a clobbering statement try to skip it and see if
    2825              :          we find a VN result with exactly the same value as the
    2826              :          possible clobber.  In this case we can ignore the clobber
    2827              :          and return the found value.  */
    2828     18501202 :       if (!gimple_has_volatile_ops (def_stmt)
    2829     17133307 :           && ((is_gimple_reg_type (TREE_TYPE (lhs))
    2830     12653413 :                && types_compatible_p (TREE_TYPE (lhs), vr->type)
    2831      9762194 :                && !storage_order_barrier_p (lhs)
    2832      9762194 :                && !reverse_storage_order_for_component_p (lhs))
    2833      7371117 :               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == CONSTRUCTOR)
    2834     10812445 :           && (ref->ref || data->orig_ref.ref)
    2835     10388773 :           && !data->mask
    2836     10367300 :           && data->partial_defs.is_empty ()
    2837     10365178 :           && multiple_p (get_object_alignment
    2838              :                            (ref->ref ? ref->ref : data->orig_ref.ref),
    2839              :                            ref->size)
    2840     41330253 :           && multiple_p (get_object_alignment (lhs), ref->size))
    2841              :         {
    2842      9953806 :           HOST_WIDE_INT offset2i, size2i;
    2843      9953806 :           poly_int64 offset = ref->offset;
    2844      9953806 :           poly_int64 maxsize = ref->max_size;
    2845              : 
    2846      9953806 :           gcc_assert (lhs_ref_ok);
    2847      9953806 :           tree base2 = ao_ref_base (&lhs_ref);
    2848      9953806 :           poly_int64 offset2 = lhs_ref.offset;
    2849      9953806 :           poly_int64 size2 = lhs_ref.size;
    2850      9953806 :           poly_int64 maxsize2 = lhs_ref.max_size;
    2851              : 
    2852      9953806 :           tree rhs = gimple_assign_rhs1 (def_stmt);
    2853      9953806 :           if (TREE_CODE (rhs) == CONSTRUCTOR)
    2854      1021612 :             rhs = integer_zero_node;
    2855              :           /* ???  We may not compare to ahead values which might be from
    2856              :              a different loop iteration but only to loop invariants.  Use
    2857              :              CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
    2858              :              The one-hop lookup below doesn't have this issue since there's
    2859              :              a virtual PHI before we ever reach a backedge to cross.
    2860              :              We can skip multiple defs as long as they are from the same
    2861              :              value though.  */
    2862      9953806 :           if (data->same_val
    2863      9953806 :               && !operand_equal_p (data->same_val, rhs))
    2864              :             ;
    2865              :           /* When this is a (partial) must-def, leave it to handling
    2866              :              below in case we are interested in the value.  */
    2867      9664739 :           else if (!(*disambiguate_only > TR_TRANSLATE)
    2868      3315722 :                    && base2
    2869      3315722 :                    && known_eq (maxsize2, size2)
    2870      2317603 :                    && adjust_offsets_for_equal_base_address (base, &offset,
    2871              :                                                              base2, &offset2)
    2872      1131994 :                    && offset2.is_constant (&offset2i)
    2873      1131994 :                    && size2.is_constant (&size2i)
    2874      1131994 :                    && maxsize.is_constant (&maxsizei)
    2875      1131994 :                    && offset.is_constant (&offseti)
    2876     10796733 :                    && ranges_known_overlap_p (offseti, maxsizei, offset2i,
    2877              :                                               size2i))
    2878              :             ;
    2879      8618995 :           else if (CONSTANT_CLASS_P (rhs))
    2880              :             {
    2881      4195465 :               if (dump_file && (dump_flags & TDF_DETAILS))
    2882              :                 {
    2883         1940 :                   fprintf (dump_file,
    2884              :                            "Skipping possible redundant definition ");
    2885         1940 :                   print_gimple_stmt (dump_file, def_stmt, 0);
    2886              :                 }
    2887              :               /* Delay the actual compare of the values to the end of the walk
    2888              :                  but do not update last_vuse from here.  */
    2889      4195465 :               data->last_vuse_ptr = NULL;
    2890      4195465 :               data->same_val = rhs;
    2891      4258062 :               return NULL;
    2892              :             }
    2893              :           else
    2894              :             {
    2895      4423530 :               tree saved_vuse = vr->vuse;
    2896      4423530 :               hashval_t saved_hashcode = vr->hashcode;
    2897      4423530 :               if (vr->vuse)
    2898      4423530 :                 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
    2899      8847060 :               vr->vuse = vuse_ssa_val (gimple_vuse (def_stmt));
    2900      4423530 :               if (vr->vuse)
    2901      4423530 :                 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
    2902      4423530 :               vn_reference_t vnresult = NULL;
    2903              :               /* Do not use vn_reference_lookup_2 since that might perform
    2904              :                  expression hashtable insertion but this lookup crosses
    2905              :                  a possible may-alias making such insertion conditionally
    2906              :                  invalid.  */
    2907      4423530 :               vn_reference_lookup_1 (vr, &vnresult);
    2908              :               /* Need to restore vr->vuse and vr->hashcode.  */
    2909      4423530 :               vr->vuse = saved_vuse;
    2910      4423530 :               vr->hashcode = saved_hashcode;
    2911      4423530 :               if (vnresult)
    2912              :                 {
    2913       236207 :                   if (TREE_CODE (rhs) == SSA_NAME)
    2914       234687 :                     rhs = SSA_VAL (rhs);
    2915       236207 :                   if (vnresult->result
    2916       236207 :                       && operand_equal_p (vnresult->result, rhs, 0))
    2917        62597 :                     return vnresult;
    2918              :                 }
    2919              :             }
    2920              :         }
    2921              :     }
    2922     21459453 :   else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
    2923     19313700 :            && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
    2924     23523305 :            && gimple_call_num_args (def_stmt) <= 4)
    2925              :     {
    2926              :       /* For builtin calls valueize its arguments and call the
    2927              :          alias oracle again.  Valueization may improve points-to
    2928              :          info of pointers and constify size and position arguments.
    2929              :          Originally this was motivated by PR61034 which has
    2930              :          conditional calls to free falsely clobbering ref because
    2931              :          of imprecise points-to info of the argument.  */
    2932              :       tree oldargs[4];
    2933              :       bool valueized_anything = false;
    2934      4915318 :       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
    2935              :         {
    2936      3389279 :           oldargs[i] = gimple_call_arg (def_stmt, i);
    2937      3389279 :           tree val = vn_valueize (oldargs[i]);
    2938      3389279 :           if (val != oldargs[i])
    2939              :             {
    2940       132379 :               gimple_call_set_arg (def_stmt, i, val);
    2941       132379 :               valueized_anything = true;
    2942              :             }
    2943              :         }
    2944      1526039 :       if (valueized_anything)
    2945              :         {
    2946       197592 :           bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
    2947        98796 :                                                ref, data->tbaa_p);
    2948       361451 :           for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
    2949       262655 :             gimple_call_set_arg (def_stmt, i, oldargs[i]);
    2950        98796 :           if (!res)
    2951              :             {
    2952        28999 :               *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
    2953        28999 :               return NULL;
    2954              :             }
    2955              :         }
    2956              :     }
    2957              : 
    2958     35673594 :   if (*disambiguate_only > TR_TRANSLATE)
    2959              :     return (void *)-1;
    2960              : 
    2961              :   /* If we cannot constrain the size of the reference we cannot
    2962              :      test if anything kills it.  */
    2963     23804936 :   if (!ref->max_size_known_p ())
    2964              :     return (void *)-1;
    2965              : 
    2966     23378657 :   poly_int64 offset = ref->offset;
    2967     23378657 :   poly_int64 maxsize = ref->max_size;
    2968              : 
    2969              :   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
    2970              :      from that definition.
    2971              :      1) Memset.  */
    2972     23378657 :   if (is_gimple_reg_type (vr->type)
    2973     23069958 :       && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
    2974     22982253 :           || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
    2975        88247 :       && (integer_zerop (gimple_call_arg (def_stmt, 1))
    2976        31932 :           || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
    2977         8622 :                || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
    2978              :               && CHAR_BIT == 8
    2979              :               && BITS_PER_UNIT == 8
    2980              :               && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
    2981        30737 :               && offset.is_constant (&offseti)
    2982        30737 :               && ref->size.is_constant (&sizei)
    2983        30737 :               && (offseti % BITS_PER_UNIT == 0
    2984           39 :                   || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
    2985        87052 :       && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
    2986        35645 :           || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
    2987        35645 :               && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
    2988     23430627 :       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
    2989        29268 :           || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    2990              :     {
    2991        51929 :       tree base2;
    2992        51929 :       poly_int64 offset2, size2, maxsize2;
    2993        51929 :       bool reverse;
    2994        51929 :       tree ref2 = gimple_call_arg (def_stmt, 0);
    2995        51929 :       if (TREE_CODE (ref2) == SSA_NAME)
    2996              :         {
    2997        29227 :           ref2 = SSA_VAL (ref2);
    2998        29227 :           if (TREE_CODE (ref2) == SSA_NAME
    2999        29227 :               && (TREE_CODE (base) != MEM_REF
    3000        18793 :                   || TREE_OPERAND (base, 0) != ref2))
    3001              :             {
    3002        22906 :               gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
    3003        22906 :               if (gimple_assign_single_p (def_stmt)
    3004        22906 :                   && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
    3005          802 :                 ref2 = gimple_assign_rhs1 (def_stmt);
    3006              :             }
    3007              :         }
    3008        51929 :       if (TREE_CODE (ref2) == ADDR_EXPR)
    3009              :         {
    3010        26489 :           ref2 = TREE_OPERAND (ref2, 0);
    3011        26489 :           base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
    3012              :                                            &reverse);
    3013        26489 :           if (!known_size_p (maxsize2)
    3014        26449 :               || !known_eq (maxsize2, size2)
    3015        52864 :               || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
    3016        55421 :             return (void *)-1;
    3017              :         }
    3018        25440 :       else if (TREE_CODE (ref2) == SSA_NAME)
    3019              :         {
    3020        25440 :           poly_int64 soff;
    3021        25440 :           if (TREE_CODE (base) != MEM_REF
    3022        43650 :               || !(mem_ref_offset (base)
    3023        36417 :                    << LOG2_BITS_PER_UNIT).to_shwi (&soff))
    3024        21492 :             return (void *)-1;
    3025        18207 :           offset += soff;
    3026        18207 :           offset2 = 0;
    3027        18207 :           if (TREE_OPERAND (base, 0) != ref2)
    3028              :             {
    3029        14871 :               gimple *def = SSA_NAME_DEF_STMT (ref2);
    3030        14871 :               if (is_gimple_assign (def)
    3031        13633 :                   && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
    3032        11661 :                   && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
    3033        15513 :                   && poly_int_tree_p (gimple_assign_rhs2 (def)))
    3034              :                 {
    3035          612 :                   tree rhs2 = gimple_assign_rhs2 (def);
    3036          612 :                   if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
    3037              :                                                SIGNED)
    3038          612 :                         << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
    3039              :                     return (void *)-1;
    3040          612 :                   ref2 = gimple_assign_rhs1 (def);
    3041          612 :                   if (TREE_CODE (ref2) == SSA_NAME)
    3042          612 :                     ref2 = SSA_VAL (ref2);
    3043              :                 }
    3044              :               else
    3045              :                 return (void *)-1;
    3046              :             }
    3047              :         }
    3048              :       else
    3049              :         return (void *)-1;
    3050        26560 :       tree len = gimple_call_arg (def_stmt, 2);
    3051        26560 :       HOST_WIDE_INT leni, offset2i;
    3052        26560 :       if (TREE_CODE (len) == SSA_NAME)
    3053          255 :         len = SSA_VAL (len);
    3054              :       /* Sometimes the above trickery is smarter than alias analysis.  Take
    3055              :          advantage of that.  */
    3056        26560 :       if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
    3057        53120 :                                    (wi::to_poly_offset (len)
    3058        26560 :                                     << LOG2_BITS_PER_UNIT)))
    3059              :         return NULL;
    3060        53070 :       if (data->partial_defs.is_empty ()
    3061        26510 :           && known_subrange_p (offset, maxsize, offset2,
    3062        26510 :                                wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
    3063              :         {
    3064        26013 :           tree val;
    3065        26013 :           if (integer_zerop (gimple_call_arg (def_stmt, 1)))
    3066        21190 :             val = build_zero_cst (vr->type);
    3067         4823 :           else if (INTEGRAL_TYPE_P (vr->type)
    3068         3683 :                    && known_eq (ref->size, 8)
    3069         7759 :                    && offseti % BITS_PER_UNIT == 0)
    3070              :             {
    3071         2936 :               gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
    3072         2936 :                                       vr->type, gimple_call_arg (def_stmt, 1));
    3073         2936 :               val = vn_nary_build_or_lookup (&res_op);
    3074         2936 :               if (!val
    3075         2936 :                   || (TREE_CODE (val) == SSA_NAME
    3076          626 :                       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
    3077            0 :                 return (void *)-1;
    3078              :             }
    3079              :           else
    3080              :             {
    3081         1887 :               unsigned buflen
    3082         1887 :                 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
    3083         1887 :               if (INTEGRAL_TYPE_P (vr->type)
    3084         1887 :                   && TYPE_MODE (vr->type) != BLKmode)
    3085         1492 :                 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
    3086         1887 :               unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
    3087         1887 :               memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
    3088              :                       buflen);
    3089         1887 :               if (BYTES_BIG_ENDIAN)
    3090              :                 {
    3091              :                   unsigned int amnt
    3092              :                     = (((unsigned HOST_WIDE_INT) offseti + sizei)
    3093              :                        % BITS_PER_UNIT);
    3094              :                   if (amnt)
    3095              :                     {
    3096              :                       shift_bytes_in_array_right (buf, buflen,
    3097              :                                                   BITS_PER_UNIT - amnt);
    3098              :                       buf++;
    3099              :                       buflen--;
    3100              :                     }
    3101              :                 }
    3102         1887 :               else if (offseti % BITS_PER_UNIT != 0)
    3103              :                 {
    3104            7 :                   unsigned int amnt
    3105              :                     = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
    3106            7 :                                        % BITS_PER_UNIT);
    3107            7 :                   shift_bytes_in_array_left (buf, buflen, amnt);
    3108            7 :                   buf++;
    3109            7 :                   buflen--;
    3110              :                 }
    3111         1887 :               val = native_interpret_expr (vr->type, buf, buflen);
    3112         1887 :               if (!val)
    3113              :                 return (void *)-1;
    3114              :             }
    3115        26013 :           return data->finish (0, 0, val);
    3116              :         }
    3117              :       /* For now handle clearing memory with partial defs.  */
    3118          547 :       else if (known_eq (ref->size, maxsize)
    3119          478 :                && integer_zerop (gimple_call_arg (def_stmt, 1))
    3120          166 :                && tree_fits_poly_int64_p (len)
    3121          162 :                && tree_to_poly_int64 (len).is_constant (&leni)
    3122          162 :                && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
    3123          162 :                && offset.is_constant (&offseti)
    3124          162 :                && offset2.is_constant (&offset2i)
    3125          162 :                && maxsize.is_constant (&maxsizei)
    3126          547 :                && ranges_known_overlap_p (offseti, maxsizei, offset2i,
    3127          547 :                                           leni << LOG2_BITS_PER_UNIT))
    3128              :         {
    3129          162 :           pd_data pd;
    3130          162 :           pd.rhs = build_constructor (NULL_TREE, NULL);
    3131          162 :           pd.rhs_off = 0;
    3132          162 :           pd.offset = offset2i;
    3133          162 :           pd.size = leni << LOG2_BITS_PER_UNIT;
    3134          162 :           return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
    3135              :         }
    3136              :     }
    3137              : 
    3138              :   /* 2) Assignment from an empty CONSTRUCTOR.  */
    3139     23326728 :   else if (is_gimple_reg_type (vr->type)
    3140     23018029 :            && gimple_assign_single_p (def_stmt)
    3141      7619032 :            && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
    3142      1958181 :            && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0
    3143     25284909 :            && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt)))
    3144              :     {
    3145      1958149 :       tree base2;
    3146      1958149 :       poly_int64 offset2, size2, maxsize2;
    3147      1958149 :       HOST_WIDE_INT offset2i, size2i;
    3148      1958149 :       gcc_assert (lhs_ref_ok);
    3149      1958149 :       base2 = ao_ref_base (&lhs_ref);
    3150      1958149 :       offset2 = lhs_ref.offset;
    3151      1958149 :       size2 = lhs_ref.size;
    3152      1958149 :       maxsize2 = lhs_ref.max_size;
    3153      1958149 :       if (known_size_p (maxsize2)
    3154      1958111 :           && known_eq (maxsize2, size2)
    3155      3916214 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3156              :                                                     base2, &offset2))
    3157              :         {
    3158      1931000 :           if (data->partial_defs.is_empty ()
    3159      1927656 :               && known_subrange_p (offset, maxsize, offset2, size2))
    3160              :             {
    3161              :               /* While technically undefined behavior do not optimize
    3162              :                  a full read from a clobber.  */
    3163      1926755 :               if (gimple_clobber_p (def_stmt))
    3164      1930946 :                 return (void *)-1;
    3165       971774 :               tree val = build_zero_cst (vr->type);
    3166       971774 :               return data->finish (ao_ref_alias_set (&lhs_ref),
    3167       971774 :                                    ao_ref_base_alias_set (&lhs_ref), val);
    3168              :             }
    3169         4245 :           else if (known_eq (ref->size, maxsize)
    3170         4191 :                    && maxsize.is_constant (&maxsizei)
    3171         4191 :                    && offset.is_constant (&offseti)
    3172         4191 :                    && offset2.is_constant (&offset2i)
    3173         4191 :                    && size2.is_constant (&size2i)
    3174         4245 :                    && ranges_known_overlap_p (offseti, maxsizei,
    3175              :                                               offset2i, size2i))
    3176              :             {
    3177              :               /* Let clobbers be consumed by the partial-def tracker
    3178              :                  which can choose to ignore them if they are shadowed
    3179              :                  by a later def.  */
    3180         4191 :               pd_data pd;
    3181         4191 :               pd.rhs = gimple_assign_rhs1 (def_stmt);
    3182         4191 :               pd.rhs_off = 0;
    3183         4191 :               pd.offset = offset2i;
    3184         4191 :               pd.size = size2i;
    3185         4191 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3186              :                                              ao_ref_base_alias_set (&lhs_ref),
    3187              :                                              offseti, maxsizei);
    3188              :             }
    3189              :         }
    3190              :     }
    3191              : 
    3192              :   /* 3) Assignment from a constant.  We can use folds native encode/interpret
    3193              :      routines to extract the assigned bits.  */
    3194     21368579 :   else if (known_eq (ref->size, maxsize)
    3195     20835237 :            && is_gimple_reg_type (vr->type)
    3196     20526538 :            && !reverse_storage_order_for_component_p (vr->operands)
    3197     20523781 :            && !contains_storage_order_barrier_p (vr->operands)
    3198     20523781 :            && gimple_assign_single_p (def_stmt)
    3199      5324019 :            && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt))
    3200              :            && CHAR_BIT == 8
    3201              :            && BITS_PER_UNIT == 8
    3202              :            && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
    3203              :            /* native_encode and native_decode operate on arrays of bytes
    3204              :               and so fundamentally need a compile-time size and offset.  */
    3205      5321065 :            && maxsize.is_constant (&maxsizei)
    3206      5321065 :            && offset.is_constant (&offseti)
    3207     26689644 :            && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
    3208      4497995 :                || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
    3209      1852439 :                    && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    3210              :     {
    3211       839361 :       tree lhs = gimple_assign_lhs (def_stmt);
    3212       839361 :       tree base2;
    3213       839361 :       poly_int64 offset2, size2, maxsize2;
    3214       839361 :       HOST_WIDE_INT offset2i, size2i;
    3215       839361 :       bool reverse;
    3216       839361 :       gcc_assert (lhs_ref_ok);
    3217       839361 :       base2 = ao_ref_base (&lhs_ref);
    3218       839361 :       offset2 = lhs_ref.offset;
    3219       839361 :       size2 = lhs_ref.size;
    3220       839361 :       maxsize2 = lhs_ref.max_size;
    3221       839361 :       reverse = reverse_storage_order_for_component_p (lhs);
    3222       839361 :       if (base2
    3223       839361 :           && !reverse
    3224       838533 :           && !storage_order_barrier_p (lhs)
    3225       838533 :           && known_eq (maxsize2, size2)
    3226       809907 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3227              :                                                     base2, &offset2)
    3228        79364 :           && offset.is_constant (&offseti)
    3229        79364 :           && offset2.is_constant (&offset2i)
    3230       839361 :           && size2.is_constant (&size2i))
    3231              :         {
    3232        79364 :           if (data->partial_defs.is_empty ()
    3233        63778 :               && known_subrange_p (offseti, maxsizei, offset2, size2))
    3234              :             {
    3235              :               /* We support up to 512-bit values (for V8DFmode).  */
    3236        42013 :               unsigned char buffer[65];
    3237        42013 :               int len;
    3238              : 
    3239        42013 :               tree rhs = gimple_assign_rhs1 (def_stmt);
    3240        42013 :               if (TREE_CODE (rhs) == SSA_NAME)
    3241         1432 :                 rhs = SSA_VAL (rhs);
    3242        84026 :               len = native_encode_expr (rhs,
    3243              :                                         buffer, sizeof (buffer) - 1,
    3244        42013 :                                         (offseti - offset2i) / BITS_PER_UNIT);
    3245        42013 :               if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
    3246              :                 {
    3247        38991 :                   tree type = vr->type;
    3248        38991 :                   unsigned char *buf = buffer;
    3249        38991 :                   unsigned int amnt = 0;
    3250              :                   /* Make sure to interpret in a type that has a range
    3251              :                      covering the whole access size.  */
    3252        38991 :                   if (INTEGRAL_TYPE_P (vr->type)
    3253        38991 :                       && maxsizei != TYPE_PRECISION (vr->type))
    3254         1744 :                     type = build_nonstandard_integer_type (maxsizei,
    3255          872 :                                                            TYPE_UNSIGNED (type));
    3256        38991 :                   if (BYTES_BIG_ENDIAN)
    3257              :                     {
    3258              :                       /* For big-endian native_encode_expr stored the rhs
    3259              :                          such that the LSB of it is the LSB of buffer[len - 1].
    3260              :                          That bit is stored into memory at position
    3261              :                          offset2 + size2 - 1, i.e. in byte
    3262              :                          base + (offset2 + size2 - 1) / BITS_PER_UNIT.
    3263              :                          E.g. for offset2 1 and size2 14, rhs -1 and memory
    3264              :                          previously cleared that is:
    3265              :                          0        1
    3266              :                          01111111|11111110
    3267              :                          Now, if we want to extract offset 2 and size 12 from
    3268              :                          it using native_interpret_expr (which actually works
    3269              :                          for integral bitfield types in terms of byte size of
    3270              :                          the mode), the native_encode_expr stored the value
    3271              :                          into buffer as
    3272              :                          XX111111|11111111
    3273              :                          and returned len 2 (the X bits are outside of
    3274              :                          precision).
    3275              :                          Let sz be maxsize / BITS_PER_UNIT if not extracting
    3276              :                          a bitfield, and GET_MODE_SIZE otherwise.
    3277              :                          We need to align the LSB of the value we want to
    3278              :                          extract as the LSB of buf[sz - 1].
    3279              :                          The LSB from memory we need to read is at position
    3280              :                          offset + maxsize - 1.  */
    3281              :                       HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
    3282              :                       if (INTEGRAL_TYPE_P (type))
    3283              :                         {
    3284              :                           if (TYPE_MODE (type) != BLKmode)
    3285              :                             sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
    3286              :                           else
    3287              :                             sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
    3288              :                         }
    3289              :                       amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
    3290              :                               - offseti - maxsizei) % BITS_PER_UNIT;
    3291              :                       if (amnt)
    3292              :                         shift_bytes_in_array_right (buffer, len, amnt);
    3293              :                       amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
    3294              :                               - offseti - maxsizei - amnt) / BITS_PER_UNIT;
    3295              :                       if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
    3296              :                         len = 0;
    3297              :                       else
    3298              :                         {
    3299              :                           buf = buffer + len - sz - amnt;
    3300              :                           len -= (buf - buffer);
    3301              :                         }
    3302              :                     }
    3303              :                   else
    3304              :                     {
    3305        38991 :                       amnt = ((unsigned HOST_WIDE_INT) offset2i
    3306        38991 :                               - offseti) % BITS_PER_UNIT;
    3307        38991 :                       if (amnt)
    3308              :                         {
    3309          315 :                           buffer[len] = 0;
    3310          315 :                           shift_bytes_in_array_left (buffer, len + 1, amnt);
    3311          315 :                           buf = buffer + 1;
    3312              :                         }
    3313              :                     }
    3314        38991 :                   tree val = native_interpret_expr (type, buf, len);
    3315              :                   /* If we chop off bits because the types precision doesn't
    3316              :                      match the memory access size this is ok when optimizing
    3317              :                      reads but not when called from the DSE code during
    3318              :                      elimination.  */
    3319        38991 :                   if (val
    3320        38989 :                       && type != vr->type)
    3321              :                     {
    3322          872 :                       if (! int_fits_type_p (val, vr->type))
    3323              :                         val = NULL_TREE;
    3324              :                       else
    3325          872 :                         val = fold_convert (vr->type, val);
    3326              :                     }
    3327              : 
    3328        38989 :                   if (val)
    3329        38989 :                     return data->finish (ao_ref_alias_set (&lhs_ref),
    3330        38989 :                                          ao_ref_base_alias_set (&lhs_ref), val);
    3331              :                 }
    3332              :             }
    3333        37351 :           else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
    3334              :                                            size2i))
    3335              :             {
    3336        37351 :               pd_data pd;
    3337        37351 :               tree rhs = gimple_assign_rhs1 (def_stmt);
    3338        37351 :               if (TREE_CODE (rhs) == SSA_NAME)
    3339         2176 :                 rhs = SSA_VAL (rhs);
    3340        37351 :               pd.rhs = rhs;
    3341        37351 :               pd.rhs_off = 0;
    3342        37351 :               pd.offset = offset2i;
    3343        37351 :               pd.size = size2i;
    3344        37351 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3345              :                                              ao_ref_base_alias_set (&lhs_ref),
    3346              :                                              offseti, maxsizei);
    3347              :             }
    3348              :         }
    3349              :     }
    3350              : 
    3351              :   /* 4) Assignment from an SSA name which definition we may be able
    3352              :      to access pieces from or we can combine to a larger entity.  */
    3353     20529218 :   else if (known_eq (ref->size, maxsize)
    3354     19995876 :            && is_gimple_reg_type (vr->type)
    3355     19687177 :            && !reverse_storage_order_for_component_p (vr->operands)
    3356     19684420 :            && !contains_storage_order_barrier_p (vr->operands)
    3357     19684420 :            && gimple_assign_single_p (def_stmt)
    3358      4484658 :            && !TREE_THIS_VOLATILE (gimple_assign_lhs (def_stmt))
    3359     25010922 :            && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    3360              :     {
    3361      1836148 :       tree lhs = gimple_assign_lhs (def_stmt);
    3362      1836148 :       tree base2;
    3363      1836148 :       poly_int64 offset2, size2, maxsize2;
    3364      1836148 :       HOST_WIDE_INT offset2i, size2i, offseti;
    3365      1836148 :       bool reverse;
    3366      1836148 :       gcc_assert (lhs_ref_ok);
    3367      1836148 :       base2 = ao_ref_base (&lhs_ref);
    3368      1836148 :       offset2 = lhs_ref.offset;
    3369      1836148 :       size2 = lhs_ref.size;
    3370      1836148 :       maxsize2 = lhs_ref.max_size;
    3371      1836148 :       reverse = reverse_storage_order_for_component_p (lhs);
    3372      1836148 :       tree def_rhs = gimple_assign_rhs1 (def_stmt);
    3373      1836148 :       if (!reverse
    3374      1835936 :           && !storage_order_barrier_p (lhs)
    3375      1835936 :           && known_size_p (maxsize2)
    3376      1812205 :           && known_eq (maxsize2, size2)
    3377      3544672 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3378              :                                                     base2, &offset2))
    3379              :         {
    3380        78605 :           if (data->partial_defs.is_empty ()
    3381        72463 :               && known_subrange_p (offset, maxsize, offset2, size2)
    3382              :               /* ???  We can't handle bitfield precision extracts without
    3383              :                  either using an alternate type for the BIT_FIELD_REF and
    3384              :                  then doing a conversion or possibly adjusting the offset
    3385              :                  according to endianness.  */
    3386        48928 :               && (! INTEGRAL_TYPE_P (vr->type)
    3387        35421 :                   || known_eq (ref->size, TYPE_PRECISION (vr->type)))
    3388        91467 :               && multiple_p (ref->size, BITS_PER_UNIT))
    3389              :             {
    3390        44013 :               tree val = NULL_TREE;
    3391        88020 :               if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
    3392        48512 :                   || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
    3393              :                 {
    3394        42854 :                   gimple_match_op op (gimple_match_cond::UNCOND,
    3395        42854 :                                       BIT_FIELD_REF, vr->type,
    3396              :                                       SSA_VAL (def_rhs),
    3397              :                                       bitsize_int (ref->size),
    3398        42854 :                                       bitsize_int (offset - offset2));
    3399        42854 :                   val = vn_nary_build_or_lookup (&op);
    3400              :                 }
    3401         1159 :               else if (known_eq (ref->size, size2))
    3402              :                 {
    3403         1085 :                   gimple_match_op op (gimple_match_cond::UNCOND,
    3404         1085 :                                       VIEW_CONVERT_EXPR, vr->type,
    3405         1085 :                                       SSA_VAL (def_rhs));
    3406         1085 :                   val = vn_nary_build_or_lookup (&op);
    3407              :                 }
    3408        43939 :               if (val
    3409        43939 :                   && (TREE_CODE (val) != SSA_NAME
    3410        43130 :                       || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
    3411        43920 :                 return data->finish (ao_ref_alias_set (&lhs_ref),
    3412        78512 :                                      ao_ref_base_alias_set (&lhs_ref), val);
    3413              :             }
    3414        34592 :           else if (maxsize.is_constant (&maxsizei)
    3415        34592 :                    && offset.is_constant (&offseti)
    3416        34592 :                    && offset2.is_constant (&offset2i)
    3417        34592 :                    && size2.is_constant (&size2i)
    3418        34592 :                    && ranges_known_overlap_p (offset, maxsize, offset2, size2))
    3419              :             {
    3420        34592 :               pd_data pd;
    3421        34592 :               pd.rhs = SSA_VAL (def_rhs);
    3422        34592 :               pd.rhs_off = 0;
    3423        34592 :               pd.offset = offset2i;
    3424        34592 :               pd.size = size2i;
    3425        34592 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3426              :                                              ao_ref_base_alias_set (&lhs_ref),
    3427              :                                              offseti, maxsizei);
    3428              :             }
    3429              :         }
    3430              :     }
    3431              : 
    3432              :   /* 4b) Assignment done via one of the vectorizer internal store
    3433              :      functions where we may be able to access pieces from or we can
    3434              :      combine to a larger entity.  */
    3435     18693070 :   else if (known_eq (ref->size, maxsize)
    3436     18159728 :            && is_gimple_reg_type (vr->type)
    3437     17851029 :            && !reverse_storage_order_for_component_p (vr->operands)
    3438     17848272 :            && !contains_storage_order_barrier_p (vr->operands)
    3439     17848272 :            && is_gimple_call (def_stmt)
    3440     14430534 :            && gimple_call_internal_p (def_stmt)
    3441     18940752 :            && internal_store_fn_p (gimple_call_internal_fn (def_stmt)))
    3442              :     {
    3443           46 :       gcall *call = as_a <gcall *> (def_stmt);
    3444           46 :       internal_fn fn = gimple_call_internal_fn (call);
    3445              : 
    3446           46 :       tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
    3447           46 :       switch (fn)
    3448              :         {
    3449           46 :         case IFN_MASK_STORE:
    3450           46 :           mask = gimple_call_arg (call, internal_fn_mask_index (fn));
    3451           46 :           mask = vn_valueize (mask);
    3452           46 :           if (TREE_CODE (mask) != VECTOR_CST)
    3453           38 :             return (void *)-1;
    3454              :           break;
    3455            0 :         case IFN_LEN_STORE:
    3456            0 :           {
    3457            0 :             int len_index = internal_fn_len_index (fn);
    3458            0 :             len = gimple_call_arg (call, len_index);
    3459            0 :             bias = gimple_call_arg (call, len_index + 1);
    3460            0 :             if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias))
    3461              :               return (void *) -1;
    3462              :             break;
    3463              :           }
    3464              :         default:
    3465              :           return (void *)-1;
    3466              :         }
    3467           14 :       tree def_rhs = gimple_call_arg (call,
    3468           14 :                                       internal_fn_stored_value_index (fn));
    3469           14 :       def_rhs = vn_valueize (def_rhs);
    3470           14 :       if (TREE_CODE (def_rhs) != VECTOR_CST)
    3471              :         return (void *)-1;
    3472              : 
    3473           14 :       ao_ref_init_from_ptr_and_size (&lhs_ref,
    3474              :                                      vn_valueize (gimple_call_arg (call, 0)),
    3475           14 :                                      TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
    3476           14 :       tree base2;
    3477           14 :       poly_int64 offset2, size2, maxsize2;
    3478           14 :       HOST_WIDE_INT offset2i, size2i, offseti;
    3479           14 :       base2 = ao_ref_base (&lhs_ref);
    3480           14 :       offset2 = lhs_ref.offset;
    3481           14 :       size2 = lhs_ref.size;
    3482           14 :       maxsize2 = lhs_ref.max_size;
    3483           14 :       if (known_size_p (maxsize2)
    3484           14 :           && known_eq (maxsize2, size2)
    3485           14 :           && adjust_offsets_for_equal_base_address (base, &offset,
    3486              :                                                     base2, &offset2)
    3487            6 :           && maxsize.is_constant (&maxsizei)
    3488            6 :           && offset.is_constant (&offseti)
    3489            6 :           && offset2.is_constant (&offset2i)
    3490           14 :           && size2.is_constant (&size2i))
    3491              :         {
    3492            6 :           if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2))
    3493              :             /* Poor-mans disambiguation.  */
    3494              :             return NULL;
    3495            6 :           else if (ranges_known_overlap_p (offset, maxsize, offset2, size2))
    3496              :             {
    3497            6 :               pd_data pd;
    3498            6 :               pd.rhs = def_rhs;
    3499            6 :               tree aa = gimple_call_arg (call, 1);
    3500            6 :               alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
    3501            6 :               tree vectype = TREE_TYPE (def_rhs);
    3502            6 :               unsigned HOST_WIDE_INT elsz
    3503            6 :                 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
    3504            6 :               if (mask)
    3505              :                 {
    3506              :                   HOST_WIDE_INT start = 0, length = 0;
    3507              :                   unsigned mask_idx = 0;
    3508           48 :                   do
    3509              :                     {
    3510           48 :                       if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
    3511              :                         {
    3512           24 :                           if (length != 0)
    3513              :                             {
    3514           18 :                               pd.rhs_off = start;
    3515           18 :                               pd.offset = offset2i + start;
    3516           18 :                               pd.size = length;
    3517           18 :                               if (ranges_known_overlap_p
    3518           18 :                                     (offset, maxsize, pd.offset, pd.size))
    3519              :                                 {
    3520            0 :                                   void *res = data->push_partial_def
    3521            0 :                                               (pd, set, set, offseti, maxsizei);
    3522            0 :                                   if (res != NULL)
    3523            6 :                                     return res;
    3524              :                                 }
    3525              :                             }
    3526           24 :                           start = (mask_idx + 1) * elsz;
    3527           24 :                           length = 0;
    3528              :                         }
    3529              :                       else
    3530           24 :                         length += elsz;
    3531           48 :                       mask_idx++;
    3532              :                     }
    3533           48 :                   while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
    3534            6 :                   if (length != 0)
    3535              :                     {
    3536            6 :                       pd.rhs_off = start;
    3537            6 :                       pd.offset = offset2i + start;
    3538            6 :                       pd.size = length;
    3539            6 :                       if (ranges_known_overlap_p (offset, maxsize,
    3540              :                                                   pd.offset, pd.size))
    3541            2 :                         return data->push_partial_def (pd, set, set,
    3542            2 :                                                        offseti, maxsizei);
    3543              :                     }
    3544              :                 }
    3545            0 :               else if (fn == IFN_LEN_STORE)
    3546              :                 {
    3547            0 :                   pd.offset = offset2i;
    3548            0 :                   pd.size = (tree_to_uhwi (len)
    3549            0 :                              + -tree_to_shwi (bias)) * BITS_PER_UNIT;
    3550            0 :                   if (BYTES_BIG_ENDIAN)
    3551              :                     pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
    3552              :                   else
    3553            0 :                     pd.rhs_off = 0;
    3554            0 :                   if (ranges_known_overlap_p (offset, maxsize,
    3555              :                                               pd.offset, pd.size))
    3556            0 :                     return data->push_partial_def (pd, set, set,
    3557            0 :                                                    offseti, maxsizei);
    3558              :                 }
    3559              :               else
    3560            0 :                 gcc_unreachable ();
    3561            4 :               return NULL;
    3562              :             }
    3563              :         }
    3564              :     }
    3565              : 
    3566              :   /* 5) For aggregate copies translate the reference through them if
    3567              :      the copy kills ref.  */
    3568     18693024 :   else if (data->vn_walk_kind == VN_WALKREWRITE
    3569     14851270 :            && gimple_assign_single_p (def_stmt)
    3570      2457867 :            && !gimple_has_volatile_ops (def_stmt)
    3571     21148593 :            && (DECL_P (gimple_assign_rhs1 (def_stmt))
    3572      1978760 :                || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
    3573      1572632 :                || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    3574              :     {
    3575      2236271 :       tree base2;
    3576      2236271 :       int i, j, k;
    3577      2236271 :       auto_vec<vn_reference_op_s> rhs;
    3578      2236271 :       vn_reference_op_t vro;
    3579      2236271 :       ao_ref r;
    3580              : 
    3581      2236271 :       gcc_assert (lhs_ref_ok);
    3582              : 
    3583              :       /* See if the assignment kills REF.  */
    3584      2236271 :       base2 = ao_ref_base (&lhs_ref);
    3585      2236271 :       if (!lhs_ref.max_size_known_p ()
    3586      2235836 :           || (base != base2
    3587        91558 :               && (TREE_CODE (base) != MEM_REF
    3588        78194 :                   || TREE_CODE (base2) != MEM_REF
    3589        63254 :                   || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
    3590        29283 :                   || !tree_int_cst_equal (TREE_OPERAND (base, 1),
    3591        29283 :                                           TREE_OPERAND (base2, 1))))
    3592      4408373 :           || !stmt_kills_ref_p (def_stmt, ref))
    3593       392541 :         return (void *)-1;
    3594              : 
    3595              :       /* Find the common base of ref and the lhs.  lhs_ops already
    3596              :          contains valueized operands for the lhs.  */
    3597      1843730 :       poly_int64 extra_off = 0;
    3598      1843730 :       i = vr->operands.length () - 1;
    3599      1843730 :       j = lhs_ops.length () - 1;
    3600              : 
    3601              :       /* The base should be always equal due to the above check.  */
    3602      1843730 :       if (! vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
    3603              :         return (void *)-1;
    3604      1843468 :       i--, j--;
    3605              : 
    3606              :       /* The 2nd component should always exist and be a MEM_REF.  */
    3607      1843468 :       if (!(i >= 0 && j >= 0))
    3608              :         ;
    3609      1843468 :       else if (vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
    3610       842537 :         i--, j--;
    3611      1000931 :       else if (vr->operands[i].opcode == MEM_REF
    3612       999369 :                && lhs_ops[j].opcode == MEM_REF
    3613       999369 :                && known_ne (lhs_ops[j].off, -1)
    3614      2000300 :                && known_ne (vr->operands[i].off, -1))
    3615              :         {
    3616       999369 :           bool found = false;
    3617              :           /* When we ge a mismatch at a MEM_REF that is not the sole component
    3618              :              try finding a match in one of the outer components and continue
    3619              :              stripping there.  This happens when addresses of components get
    3620              :              forwarded into dereferences.  */
    3621       999369 :           if (i > 0)
    3622              :             {
    3623       104700 :               int temi = i - 1;
    3624       104700 :               poly_int64 tem_extra_off = extra_off + vr->operands[i].off;
    3625       104700 :               while (temi >= 0
    3626       227774 :                      && known_ne (vr->operands[temi].off, -1))
    3627              :                 {
    3628       124579 :                   if (vr->operands[temi].type
    3629       124579 :                       && lhs_ops[j].type
    3630       249158 :                       && (TYPE_MAIN_VARIANT (vr->operands[temi].type)
    3631       124579 :                           == TYPE_MAIN_VARIANT (lhs_ops[j].type)))
    3632              :                     {
    3633         1505 :                       i = temi;
    3634              :                       /* Strip the component that was type matched to
    3635              :                          the MEM_REF.  */
    3636         1505 :                       extra_off = (tem_extra_off
    3637         1505 :                                    + vr->operands[i].off - lhs_ops[j].off);
    3638         1505 :                       i--, j--;
    3639              :                       /* Strip further equal components.  */
    3640         1505 :                       found = true;
    3641         1505 :                       break;
    3642              :                     }
    3643       123074 :                   tem_extra_off += vr->operands[temi].off;
    3644       123074 :                   temi--;
    3645              :                 }
    3646              :             }
    3647       999369 :           if (!found && j > 0)
    3648              :             {
    3649        25546 :               int temj = j - 1;
    3650        25546 :               poly_int64 tem_extra_off = extra_off - lhs_ops[j].off;
    3651        25546 :               while (temj >= 0
    3652        48969 :                      && known_ne (lhs_ops[temj].off, -1))
    3653              :                 {
    3654        27204 :                   if (vr->operands[i].type
    3655        27204 :                       && lhs_ops[temj].type
    3656        54408 :                       && (TYPE_MAIN_VARIANT (vr->operands[i].type)
    3657        27204 :                           == TYPE_MAIN_VARIANT (lhs_ops[temj].type)))
    3658              :                     {
    3659         3781 :                       j = temj;
    3660              :                       /* Strip the component that was type matched to
    3661              :                          the MEM_REF.  */
    3662         3781 :                       extra_off = (tem_extra_off
    3663         3781 :                                    + vr->operands[i].off - lhs_ops[j].off);
    3664         3781 :                       i--, j--;
    3665              :                       /* Strip further equal components.  */
    3666         3781 :                       found = true;
    3667         3781 :                       break;
    3668              :                     }
    3669        23423 :                   tem_extra_off += -lhs_ops[temj].off;
    3670        23423 :                   temj--;
    3671              :                 }
    3672              :             }
    3673              :           /* When we cannot find a common base to reconstruct the full
    3674              :              reference instead try to reduce the lookup to the new
    3675              :              base plus a constant offset.  */
    3676       999369 :           if (!found)
    3677              :             {
    3678              :               while (j >= 0
    3679      2011251 :                      && known_ne (lhs_ops[j].off, -1))
    3680              :                 {
    3681      1017168 :                   extra_off += -lhs_ops[j].off;
    3682      1017168 :                   j--;
    3683              :                 }
    3684       994083 :               if (j != -1)
    3685              :                 return (void *)-1;
    3686              :               while (i >= 0
    3687      2106075 :                      && known_ne (vr->operands[i].off, -1))
    3688              :                 {
    3689              :                   /* Punt if the additional ops contain a storage order
    3690              :                      barrier.  */
    3691      1111992 :                   if (vr->operands[i].opcode == VIEW_CONVERT_EXPR
    3692      1111992 :                       && vr->operands[i].reverse)
    3693              :                     break;
    3694      1111992 :                   extra_off += vr->operands[i].off;
    3695      1111992 :                   i--;
    3696              :                 }
    3697       994083 :               if (i != -1)
    3698              :                 return (void *)-1;
    3699              :               found = true;
    3700              :             }
    3701              :           /* If we did find a match we'd eventually append a MEM_REF
    3702              :              as component.  Don't.  */
    3703              :           if (!found)
    3704              :             return (void *)-1;
    3705              :         }
    3706              :       else
    3707              :         return (void *)-1;
    3708              : 
    3709              :       /* Strip further common components, attempting to consume lhs_ops
    3710              :          in full.  */
    3711      1843554 :       while (j >= 0 && i >= 0
    3712      1843554 :              && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
    3713              :         {
    3714        28465 :           i--;
    3715        28465 :           j--;
    3716              :         }
    3717              : 
    3718              :       /* i now points to the first additional op.
    3719              :          ???  LHS may not be completely contained in VR, one or more
    3720              :          VIEW_CONVERT_EXPRs could be in its way.  We could at least
    3721              :          try handling outermost VIEW_CONVERT_EXPRs.  */
    3722      1815089 :       if (j != -1)
    3723              :         return (void *)-1;
    3724              : 
    3725              :       /* Punt if the additional ops contain a storage order barrier.  */
    3726      2798070 :       for (k = i; k >= 0; k--)
    3727              :         {
    3728       985780 :           vro = &vr->operands[k];
    3729       985780 :           if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
    3730              :             return (void *)-1;
    3731              :         }
    3732              : 
    3733              :       /* Now re-write REF to be based on the rhs of the assignment.  */
    3734      1812290 :       tree rhs1 = gimple_assign_rhs1 (def_stmt);
    3735      1812290 :       copy_reference_ops_from_ref (rhs1, &rhs);
    3736              : 
    3737              :       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
    3738      1812290 :       bool force_no_tbaa = false;
    3739      1812290 :       if (maybe_ne (extra_off, 0))
    3740              :         {
    3741       721380 :           if (rhs.length () < 2)
    3742              :             return (void *)-1;
    3743       721380 :           int ix = rhs.length () - 2;
    3744       721380 :           if (rhs[ix].opcode != MEM_REF
    3745       721380 :               || known_eq (rhs[ix].off, -1))
    3746              :             return (void *)-1;
    3747       721378 :           rhs[ix].off += extra_off;
    3748       721378 :           rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
    3749       721378 :                                          build_int_cst (TREE_TYPE (rhs[ix].op0),
    3750              :                                                         extra_off));
    3751              :           /* When we have offsetted the RHS, reading only parts of it,
    3752              :              we can no longer use the original TBAA type, force alias-set
    3753              :              zero.  */
    3754       721378 :           force_no_tbaa = true;
    3755              :         }
    3756              : 
    3757              :       /* Save the operands since we need to use the original ones for
    3758              :          the hash entry we use.  */
    3759      1812288 :       if (!data->saved_operands.exists ())
    3760      1717070 :         data->saved_operands = vr->operands.copy ();
    3761              : 
    3762              :       /* We need to pre-pend vr->operands[0..i] to rhs.  */
    3763      1812288 :       vec<vn_reference_op_s> old = vr->operands;
    3764      5436864 :       if (i + 1 + rhs.length () > vr->operands.length ())
    3765      1143915 :         vr->operands.safe_grow (i + 1 + rhs.length (), true);
    3766              :       else
    3767       668373 :         vr->operands.truncate (i + 1 + rhs.length ());
    3768      6627118 :       FOR_EACH_VEC_ELT (rhs, j, vro)
    3769      4814830 :         vr->operands[i + 1 + j] = *vro;
    3770      1812288 :       valueize_refs (&vr->operands);
    3771      3624576 :       if (old == shared_lookup_references)
    3772      1812288 :         shared_lookup_references = vr->operands;
    3773      1812288 :       vr->hashcode = vn_reference_compute_hash (vr);
    3774              : 
    3775              :       /* Try folding the new reference to a constant.  */
    3776      1812288 :       tree val = fully_constant_vn_reference_p (vr);
    3777      1812288 :       if (val)
    3778              :         {
    3779        21888 :           if (data->partial_defs.is_empty ())
    3780        21879 :             return data->finish (ao_ref_alias_set (&lhs_ref),
    3781        21879 :                                  ao_ref_base_alias_set (&lhs_ref), val);
    3782              :           /* This is the only interesting case for partial-def handling
    3783              :              coming from targets that like to gimplify init-ctors as
    3784              :              aggregate copies from constant data like aarch64 for
    3785              :              PR83518.  */
    3786            9 :           if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
    3787              :             {
    3788            9 :               pd_data pd;
    3789            9 :               pd.rhs = val;
    3790            9 :               pd.rhs_off = 0;
    3791            9 :               pd.offset = 0;
    3792            9 :               pd.size = maxsizei;
    3793            9 :               return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
    3794              :                                              ao_ref_base_alias_set (&lhs_ref),
    3795              :                                              0, maxsizei);
    3796              :             }
    3797              :         }
    3798              : 
    3799              :       /* Continuing with partial defs isn't easily possible here, we
    3800              :          have to find a full def from further lookups from here.  Probably
    3801              :          not worth the special-casing everywhere.  */
    3802      2220976 :       if (!data->partial_defs.is_empty ())
    3803              :         return (void *)-1;
    3804              : 
    3805              :       /* Adjust *ref from the new operands.  */
    3806      1783807 :       ao_ref rhs1_ref;
    3807      1783807 :       ao_ref_init (&rhs1_ref, rhs1);
    3808      2860697 :       if (!ao_ref_init_from_vn_reference (&r,
    3809              :                                           force_no_tbaa ? 0
    3810      1076890 :                                           : ao_ref_alias_set (&rhs1_ref),
    3811              :                                           force_no_tbaa ? 0
    3812      1076890 :                                           : ao_ref_base_alias_set (&rhs1_ref),
    3813              :                                           vr->type, vr->operands))
    3814              :         return (void *)-1;
    3815              :       /* This can happen with bitfields.  */
    3816      1783807 :       if (maybe_ne (ref->size, r.size))
    3817              :         {
    3818              :           /* If the access lacks some subsetting simply apply that by
    3819              :              shortening it.  That in the end can only be successful
    3820              :              if we can pun the lookup result which in turn requires
    3821              :              exact offsets.  */
    3822         1359 :           if (known_eq (r.size, r.max_size)
    3823         1359 :               && known_lt (ref->size, r.size))
    3824         1359 :             r.size = r.max_size = ref->size;
    3825              :           else
    3826              :             return (void *)-1;
    3827              :         }
    3828      1783807 :       *ref = r;
    3829      1783807 :       vr->offset = r.offset;
    3830      1783807 :       vr->max_size = r.max_size;
    3831              : 
    3832              :       /* Do not update last seen VUSE after translating.  */
    3833      1783807 :       data->last_vuse_ptr = NULL;
    3834              :       /* Invalidate the original access path since it now contains
    3835              :          the wrong base.  */
    3836      1783807 :       data->orig_ref.ref = NULL_TREE;
    3837              :       /* Use the alias-set of this LHS for recording an eventual result.  */
    3838      1783807 :       if (data->first_set == -2)
    3839              :         {
    3840      1690106 :           data->first_set = ao_ref_alias_set (&lhs_ref);
    3841      1690106 :           data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
    3842              :         }
    3843              : 
    3844              :       /* Keep looking for the adjusted *REF / VR pair.  */
    3845      1783807 :       return NULL;
    3846      2236271 :     }
    3847              : 
    3848              :   /* 6) For memcpy copies translate the reference through them if the copy
    3849              :      kills ref.  But we cannot (easily) do this translation if the memcpy is
    3850              :      a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
    3851              :      can modify the storage order of objects (see storage_order_barrier_p).  */
    3852     16456753 :   else if (data->vn_walk_kind == VN_WALKREWRITE
    3853     12614999 :            && is_gimple_reg_type (vr->type)
    3854              :            /* ???  Handle BCOPY as well.  */
    3855     12607134 :            && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
    3856     12538160 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
    3857     12537737 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
    3858     12536551 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
    3859     12536309 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
    3860     12510142 :                || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
    3861        97320 :            && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
    3862        87602 :                || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
    3863        97286 :            && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
    3864        71060 :                || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
    3865        97271 :            && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
    3866        58866 :                || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
    3867        58866 :                    && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
    3868              :                                        &copy_size)))
    3869              :            /* Handling this is more complicated, give up for now.  */
    3870     16497573 :            && data->partial_defs.is_empty ())
    3871              :     {
    3872        40265 :       tree lhs, rhs;
    3873        40265 :       ao_ref r;
    3874        40265 :       poly_int64 rhs_offset, lhs_offset;
    3875        40265 :       vn_reference_op_s op;
    3876        40265 :       poly_uint64 mem_offset;
    3877        40265 :       poly_int64 at, byte_maxsize;
    3878              : 
    3879              :       /* Only handle non-variable, addressable refs.  */
    3880        40265 :       if (maybe_ne (ref->size, maxsize)
    3881        39800 :           || !multiple_p (offset, BITS_PER_UNIT, &at)
    3882        40265 :           || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
    3883          465 :         return (void *)-1;
    3884              : 
    3885              :       /* Extract a pointer base and an offset for the destination.  */
    3886        39800 :       lhs = gimple_call_arg (def_stmt, 0);
    3887        39800 :       lhs_offset = 0;
    3888        39800 :       if (TREE_CODE (lhs) == SSA_NAME)
    3889              :         {
    3890        31597 :           lhs = vn_valueize (lhs);
    3891        31597 :           if (TREE_CODE (lhs) == SSA_NAME)
    3892              :             {
    3893        31282 :               gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
    3894        31282 :               if (gimple_assign_single_p (def_stmt)
    3895        31282 :                   && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
    3896         2381 :                 lhs = gimple_assign_rhs1 (def_stmt);
    3897              :             }
    3898              :         }
    3899        39800 :       if (TREE_CODE (lhs) == ADDR_EXPR)
    3900              :         {
    3901        15133 :           if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
    3902        14836 :               && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
    3903              :             return (void *)-1;
    3904        10759 :           tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
    3905              :                                                     &lhs_offset);
    3906        10759 :           if (!tem)
    3907              :             return (void *)-1;
    3908        10067 :           if (TREE_CODE (tem) == MEM_REF
    3909        10067 :               && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
    3910              :             {
    3911         1689 :               lhs = TREE_OPERAND (tem, 0);
    3912         1689 :               if (TREE_CODE (lhs) == SSA_NAME)
    3913         1689 :                 lhs = vn_valueize (lhs);
    3914         1689 :               lhs_offset += mem_offset;
    3915              :             }
    3916         8378 :           else if (DECL_P (tem))
    3917         8378 :             lhs = build_fold_addr_expr (tem);
    3918              :           else
    3919              :             return (void *)-1;
    3920              :         }
    3921        38968 :       if (TREE_CODE (lhs) != SSA_NAME
    3922         8379 :           && TREE_CODE (lhs) != ADDR_EXPR)
    3923              :         return (void *)-1;
    3924              : 
    3925              :       /* Extract a pointer base and an offset for the source.  */
    3926        38968 :       rhs = gimple_call_arg (def_stmt, 1);
    3927        38968 :       rhs_offset = 0;
    3928        38968 :       if (TREE_CODE (rhs) == SSA_NAME)
    3929        18312 :         rhs = vn_valueize (rhs);
    3930        38968 :       if (TREE_CODE (rhs) == ADDR_EXPR)
    3931              :         {
    3932        32887 :           if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
    3933        22650 :               && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
    3934              :             return (void *)-1;
    3935        22206 :           tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
    3936              :                                                     &rhs_offset);
    3937        22206 :           if (!tem)
    3938              :             return (void *)-1;
    3939        22206 :           if (TREE_CODE (tem) == MEM_REF
    3940        22206 :               && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
    3941              :             {
    3942            0 :               rhs = TREE_OPERAND (tem, 0);
    3943            0 :               rhs_offset += mem_offset;
    3944              :             }
    3945        22206 :           else if (DECL_P (tem)
    3946        16423 :                    || TREE_CODE (tem) == STRING_CST)
    3947        22206 :             rhs = build_fold_addr_expr (tem);
    3948              :           else
    3949              :             return (void *)-1;
    3950              :         }
    3951        38968 :       if (TREE_CODE (rhs) == SSA_NAME)
    3952        16762 :         rhs = SSA_VAL (rhs);
    3953        22206 :       else if (TREE_CODE (rhs) != ADDR_EXPR)
    3954              :         return (void *)-1;
    3955              : 
    3956              :       /* The bases of the destination and the references have to agree.  */
    3957        38968 :       if (TREE_CODE (base) == MEM_REF)
    3958              :         {
    3959        15087 :           if (TREE_OPERAND (base, 0) != lhs
    3960        15087 :               || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
    3961        11535 :             return (void *) -1;
    3962        10747 :           at += mem_offset;
    3963              :         }
    3964        23881 :       else if (!DECL_P (base)
    3965        23018 :                || TREE_CODE (lhs) != ADDR_EXPR
    3966        31077 :                || TREE_OPERAND (lhs, 0) != base)
    3967              :         return (void *)-1;
    3968              : 
    3969              :       /* If the access is completely outside of the memcpy destination
    3970              :          area there is no aliasing.  */
    3971        10747 :       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
    3972              :         return NULL;
    3973              :       /* And the access has to be contained within the memcpy destination.  */
    3974        10714 :       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
    3975              :         return (void *)-1;
    3976              : 
    3977              :       /* Save the operands since we need to use the original ones for
    3978              :          the hash entry we use.  */
    3979        10357 :       if (!data->saved_operands.exists ())
    3980        10007 :         data->saved_operands = vr->operands.copy ();
    3981              : 
    3982              :       /* Make room for 2 operands in the new reference.  */
    3983        10357 :       if (vr->operands.length () < 2)
    3984              :         {
    3985            0 :           vec<vn_reference_op_s> old = vr->operands;
    3986            0 :           vr->operands.safe_grow_cleared (2, true);
    3987            0 :           if (old == shared_lookup_references)
    3988            0 :             shared_lookup_references = vr->operands;
    3989              :         }
    3990              :       else
    3991        10357 :         vr->operands.truncate (2);
    3992              : 
    3993              :       /* The looked-through reference is a simple MEM_REF.  */
    3994        10357 :       memset (&op, 0, sizeof (op));
    3995        10357 :       op.type = vr->type;
    3996        10357 :       op.opcode = MEM_REF;
    3997        10357 :       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
    3998        10357 :       op.off = at - lhs_offset + rhs_offset;
    3999        10357 :       vr->operands[0] = op;
    4000        10357 :       op.type = TREE_TYPE (rhs);
    4001        10357 :       op.opcode = TREE_CODE (rhs);
    4002        10357 :       op.op0 = rhs;
    4003        10357 :       op.off = -1;
    4004        10357 :       vr->operands[1] = op;
    4005        10357 :       vr->hashcode = vn_reference_compute_hash (vr);
    4006              : 
    4007              :       /* Try folding the new reference to a constant.  */
    4008        10357 :       tree val = fully_constant_vn_reference_p (vr);
    4009        10357 :       if (val)
    4010         2455 :         return data->finish (0, 0, val);
    4011              : 
    4012              :       /* Adjust *ref from the new operands.  */
    4013         7902 :       if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
    4014              :         return (void *)-1;
    4015              :       /* This can happen with bitfields.  */
    4016         7902 :       if (maybe_ne (ref->size, r.size))
    4017              :         return (void *)-1;
    4018         7902 :       *ref = r;
    4019         7902 :       vr->offset = r.offset;
    4020         7902 :       vr->max_size = r.max_size;
    4021              : 
    4022              :       /* Do not update last seen VUSE after translating.  */
    4023         7902 :       data->last_vuse_ptr = NULL;
    4024              :       /* Invalidate the original access path since it now contains
    4025              :          the wrong base.  */
    4026         7902 :       data->orig_ref.ref = NULL_TREE;
    4027              :       /* Use the alias-set of this stmt for recording an eventual result.  */
    4028         7902 :       if (data->first_set == -2)
    4029              :         {
    4030         7592 :           data->first_set = 0;
    4031         7592 :           data->first_base_set = 0;
    4032              :         }
    4033              : 
    4034              :       /* Keep looking for the adjusted *REF / VR pair.  */
    4035         7902 :       return NULL;
    4036              :     }
    4037              : 
    4038              :   /* Bail out and stop walking.  */
    4039              :   return (void *)-1;
    4040              : }
    4041              : 
    4042              : /* Return true if E is a backedge with respect to our CFG walk order.  */
    4043              : 
    4044              : static bool
    4045    116761154 : vn_is_backedge (edge e, void *)
    4046              : {
    4047              :   /* During PRE elimination we no longer have access to this info.  */
    4048    116761154 :   return (!vn_bb_to_rpo
    4049    116761154 :           || vn_bb_to_rpo[e->dest->index] <= vn_bb_to_rpo[e->src->index]);
    4050              : }
    4051              : 
    4052              : /* Return a reference op vector from OP that can be used for
    4053              :    vn_reference_lookup_pieces.  The caller is responsible for releasing
    4054              :    the vector.  */
    4055              : 
    4056              : vec<vn_reference_op_s>
    4057      5076875 : vn_reference_operands_for_lookup (tree op)
    4058              : {
    4059      5076875 :   bool valueized;
    4060      5076875 :   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
    4061              : }
    4062              : 
    4063              : /* Lookup a reference operation by it's parts, in the current hash table.
    4064              :    Returns the resulting value number if it exists in the hash table,
    4065              :    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
    4066              :    vn_reference_t stored in the hashtable if something is found.  */
    4067              : 
    4068              : tree
    4069      8080207 : vn_reference_lookup_pieces (tree vuse, alias_set_type set,
    4070              :                             alias_set_type base_set, tree type,
    4071              :                             vec<vn_reference_op_s> operands,
    4072              :                             vn_reference_t *vnresult, vn_lookup_kind kind)
    4073              : {
    4074      8080207 :   struct vn_reference_s vr1;
    4075      8080207 :   vn_reference_t tmp;
    4076      8080207 :   tree cst;
    4077              : 
    4078      8080207 :   if (!vnresult)
    4079            0 :     vnresult = &tmp;
    4080      8080207 :   *vnresult = NULL;
    4081              : 
    4082      8080207 :   vr1.vuse = vuse_ssa_val (vuse);
    4083      8080207 :   shared_lookup_references.truncate (0);
    4084     16160414 :   shared_lookup_references.safe_grow (operands.length (), true);
    4085      8080207 :   memcpy (shared_lookup_references.address (),
    4086      8080207 :           operands.address (),
    4087              :           sizeof (vn_reference_op_s)
    4088      8080207 :           * operands.length ());
    4089      8080207 :   bool valueized_p;
    4090      8080207 :   valueize_refs_1 (&shared_lookup_references, &valueized_p);
    4091      8080207 :   vr1.operands = shared_lookup_references;
    4092      8080207 :   vr1.type = type;
    4093      8080207 :   vr1.set = set;
    4094      8080207 :   vr1.base_set = base_set;
    4095              :   /* We can pretend there's no extra info fed in since the ao_refs offset
    4096              :      and max_size are computed only from the VN reference ops.  */
    4097      8080207 :   vr1.offset = 0;
    4098      8080207 :   vr1.max_size = -1;
    4099      8080207 :   vr1.hashcode = vn_reference_compute_hash (&vr1);
    4100      8080207 :   if ((cst = fully_constant_vn_reference_p (&vr1)))
    4101              :     return cst;
    4102              : 
    4103      8061607 :   vn_reference_lookup_1 (&vr1, vnresult);
    4104      8061607 :   if (!*vnresult
    4105      3162405 :       && kind != VN_NOWALK
    4106      3162405 :       && vr1.vuse)
    4107              :     {
    4108      3136571 :       ao_ref r;
    4109      3136571 :       unsigned limit = param_sccvn_max_alias_queries_per_access;
    4110      3136571 :       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
    4111      3136571 :                             false);
    4112      3136571 :       vec<vn_reference_op_s> ops_for_ref;
    4113      3136571 :       if (!valueized_p)
    4114      3051677 :         ops_for_ref = vr1.operands;
    4115              :       else
    4116              :         {
    4117              :           /* For ao_ref_from_mem we have to ensure only available SSA names
    4118              :              end up in base and the only convenient way to make this work
    4119              :              for PRE is to re-valueize with that in mind.  */
    4120       169788 :           ops_for_ref.create (operands.length ());
    4121       169788 :           ops_for_ref.quick_grow (operands.length ());
    4122        84894 :           memcpy (ops_for_ref.address (),
    4123        84894 :                   operands.address (),
    4124              :                   sizeof (vn_reference_op_s)
    4125        84894 :                   * operands.length ());
    4126        84894 :           valueize_refs_1 (&ops_for_ref, &valueized_p, true);
    4127              :         }
    4128      3136571 :       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
    4129              :                                          ops_for_ref))
    4130      3066770 :         *vnresult
    4131      3066770 :           = ((vn_reference_t)
    4132      3066770 :              walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
    4133              :                                      vn_reference_lookup_3, vn_is_backedge,
    4134              :                                      vuse_valueize, limit, &data));
    4135      6273142 :       if (ops_for_ref != shared_lookup_references)
    4136        84894 :         ops_for_ref.release ();
    4137      6273142 :       gcc_checking_assert (vr1.operands == shared_lookup_references);
    4138      3136571 :       if (*vnresult
    4139       458775 :           && data.same_val
    4140      3136571 :           && (!(*vnresult)->result
    4141            0 :               || !operand_equal_p ((*vnresult)->result, data.same_val)))
    4142              :         {
    4143            0 :           *vnresult = NULL;
    4144            0 :           return NULL_TREE;
    4145              :         }
    4146      3136571 :     }
    4147              : 
    4148      8061607 :   if (*vnresult)
    4149      5357977 :      return (*vnresult)->result;
    4150              : 
    4151              :   return NULL_TREE;
    4152              : }
    4153              : 
    4154              : /* When OPERANDS is an ADDR_EXPR that can be possibly expressed as a
    4155              :    POINTER_PLUS_EXPR return true and fill in its operands in OPS.  */
    4156              : 
    4157              : bool
    4158      2209559 : vn_pp_nary_for_addr (const vec<vn_reference_op_s>& operands, tree ops[2])
    4159              : {
    4160      4419118 :   gcc_assert (operands[0].opcode == ADDR_EXPR
    4161              :               && operands.last ().opcode == SSA_NAME);
    4162              :   poly_int64 off = 0;
    4163              :   vn_reference_op_t vro;
    4164              :   unsigned i;
    4165      7162740 :   for (i = 1; operands.iterate (i, &vro); ++i)
    4166              :     {
    4167      7162740 :       if (vro->opcode == SSA_NAME)
    4168              :         break;
    4169      5002863 :       else if (known_eq (vro->off, -1))
    4170              :         break;
    4171      4953181 :       off += vro->off;
    4172              :     }
    4173      2209559 :   if (i == operands.length () - 1
    4174      2159877 :       && maybe_ne (off, 0)
    4175              :       /* Make sure we the offset we accumulated in a 64bit int
    4176              :          fits the address computation carried out in target
    4177              :          offset precision.  */
    4178      3632052 :       && (off.coeffs[0]
    4179      1422493 :           == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
    4180              :     {
    4181      1421693 :       gcc_assert (operands[i-1].opcode == MEM_REF);
    4182      1421693 :       ops[0] = operands[i].op0;
    4183      1421693 :       ops[1] = wide_int_to_tree (sizetype, off);
    4184      1421693 :       return true;
    4185              :     }
    4186              :   return false;
    4187              : }
    4188              : 
    4189              : /* Lookup OP in the current hash table, and return the resulting value
    4190              :    number if it exists in the hash table.  Return NULL_TREE if it does
    4191              :    not exist in the hash table or if the result field of the structure
    4192              :    was NULL..  VNRESULT will be filled in with the vn_reference_t
    4193              :    stored in the hashtable if one exists.  When TBAA_P is false assume
    4194              :    we are looking up a store and treat it as having alias-set zero.
    4195              :    *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.
    4196              :    MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
    4197              :    load is bitwise anded with MASK and so we are only interested in a subset
    4198              :    of the bits and can ignore if the other bits are uninitialized or
    4199              :    not initialized with constants.  When doing redundant store removal
    4200              :    the caller has to set REDUNDANT_STORE_REMOVAL_P.  */
    4201              : 
    4202              : tree
    4203     99384528 : vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
    4204              :                      vn_reference_t *vnresult, bool tbaa_p,
    4205              :                      tree *last_vuse_ptr, tree mask,
    4206              :                      bool redundant_store_removal_p)
    4207              : {
    4208     99384528 :   vec<vn_reference_op_s> operands;
    4209     99384528 :   struct vn_reference_s vr1;
    4210     99384528 :   bool valueized_anything;
    4211              : 
    4212     99384528 :   if (vnresult)
    4213     98987867 :     *vnresult = NULL;
    4214              : 
    4215     99384528 :   vr1.vuse = vuse_ssa_val (vuse);
    4216    198769056 :   vr1.operands = operands
    4217     99384528 :     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
    4218              : 
    4219              :   /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing
    4220              :      this before the pass folding __builtin_object_size had a chance to run.  */
    4221     99384528 :   if ((cfun->curr_properties & PROP_objsz)
    4222     72147336 :       && operands[0].opcode == ADDR_EXPR
    4223    100496766 :       && operands.last ().opcode == SSA_NAME)
    4224              :     {
    4225      1078225 :       tree ops[2];
    4226      1078225 :       if (vn_pp_nary_for_addr (operands, ops))
    4227              :         {
    4228       693252 :           tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
    4229       693252 :                                                TREE_TYPE (op), ops, NULL);
    4230       693252 :           if (res)
    4231       693252 :             return res;
    4232       693252 :           return NULL_TREE;
    4233              :         }
    4234              :     }
    4235              : 
    4236     98691276 :   vr1.type = TREE_TYPE (op);
    4237     98691276 :   ao_ref op_ref;
    4238     98691276 :   ao_ref_init (&op_ref, op);
    4239     98691276 :   vr1.set = ao_ref_alias_set (&op_ref);
    4240     98691276 :   vr1.base_set = ao_ref_base_alias_set (&op_ref);
    4241     98691276 :   vr1.offset = 0;
    4242     98691276 :   vr1.max_size = -1;
    4243     98691276 :   vr1.hashcode = vn_reference_compute_hash (&vr1);
    4244     98691276 :   if (mask == NULL_TREE)
    4245     98372545 :     if (tree cst = fully_constant_vn_reference_p (&vr1))
    4246              :       return cst;
    4247              : 
    4248     98677950 :   if (kind != VN_NOWALK && vr1.vuse)
    4249              :     {
    4250     57399848 :       vn_reference_t wvnresult;
    4251     57399848 :       ao_ref r;
    4252     57399848 :       unsigned limit = param_sccvn_max_alias_queries_per_access;
    4253     57399848 :       auto_vec<vn_reference_op_s> ops_for_ref;
    4254     57399848 :       if (valueized_anything)
    4255              :         {
    4256      4502958 :           copy_reference_ops_from_ref (op, &ops_for_ref);
    4257      4502958 :           bool tem;
    4258      4502958 :           valueize_refs_1 (&ops_for_ref, &tem, true);
    4259              :         }
    4260              :       /* Make sure to use a valueized reference if we valueized anything.
    4261              :          Otherwise preserve the full reference for advanced TBAA.  */
    4262     57399848 :       if (!valueized_anything
    4263     57399848 :           || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
    4264              :                                              vr1.type, ops_for_ref))
    4265              :         {
    4266     52896890 :           ao_ref_init (&r, op);
    4267              :           /* Record the extra info we're getting from the full ref.  */
    4268     52896890 :           ao_ref_base (&r);
    4269     52896890 :           vr1.offset = r.offset;
    4270     52896890 :           vr1.max_size = r.max_size;
    4271              :         }
    4272     57399848 :       vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
    4273              :                             last_vuse_ptr, kind, tbaa_p, mask,
    4274    110296738 :                             redundant_store_removal_p);
    4275              : 
    4276     57399848 :       wvnresult
    4277              :         = ((vn_reference_t)
    4278     57399848 :            walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
    4279              :                                    vn_reference_lookup_3, vn_is_backedge,
    4280              :                                    vuse_valueize, limit, &data));
    4281    114799696 :       gcc_checking_assert (vr1.operands == shared_lookup_references);
    4282     57399848 :       if (wvnresult)
    4283              :         {
    4284      8464514 :           gcc_assert (mask == NULL_TREE);
    4285      8464514 :           if (data.same_val
    4286      8464514 :               && (!wvnresult->result
    4287        67640 :                   || !operand_equal_p (wvnresult->result, data.same_val)))
    4288        47720 :             return NULL_TREE;
    4289      8416794 :           if (vnresult)
    4290      8415606 :             *vnresult = wvnresult;
    4291      8416794 :           return wvnresult->result;
    4292              :         }
    4293     48935334 :       else if (mask)
    4294       318731 :         return data.masked_result;
    4295              : 
    4296              :       return NULL_TREE;
    4297     57399848 :     }
    4298              : 
    4299     41278102 :   if (last_vuse_ptr)
    4300      1422786 :     *last_vuse_ptr = vr1.vuse;
    4301     41278102 :   if (mask)
    4302              :     return NULL_TREE;
    4303     41278102 :   return vn_reference_lookup_1 (&vr1, vnresult);
    4304              : }
    4305              : 
    4306              : /* Lookup CALL in the current hash table and return the entry in
    4307              :    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
    4308              : 
    4309              : void
    4310      9105924 : vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
    4311              :                           vn_reference_t vr)
    4312              : {
    4313      9105924 :   if (vnresult)
    4314      9105924 :     *vnresult = NULL;
    4315              : 
    4316      9105924 :   tree vuse = gimple_vuse (call);
    4317              : 
    4318      9105924 :   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
    4319      9105924 :   vr->operands = valueize_shared_reference_ops_from_call (call);
    4320      9105924 :   tree lhs = gimple_call_lhs (call);
    4321              :   /* For non-SSA return values the referece ops contain the LHS.  */
    4322      4967480 :   vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
    4323     13633617 :               ? TREE_TYPE (lhs) : NULL_TREE);
    4324      9105924 :   vr->punned = false;
    4325      9105924 :   vr->set = 0;
    4326      9105924 :   vr->base_set = 0;
    4327      9105924 :   vr->offset = 0;
    4328      9105924 :   vr->max_size = -1;
    4329      9105924 :   vr->hashcode = vn_reference_compute_hash (vr);
    4330      9105924 :   vn_reference_lookup_1 (vr, vnresult);
    4331      9105924 : }
    4332              : 
    4333              : /* Insert OP into the current hash table with a value number of RESULT.  */
    4334              : 
    4335              : static void
    4336     73743771 : vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
    4337              : {
    4338     73743771 :   vn_reference_s **slot;
    4339     73743771 :   vn_reference_t vr1;
    4340     73743771 :   bool tem;
    4341              : 
    4342     73743771 :   vec<vn_reference_op_s> operands
    4343     73743771 :     = valueize_shared_reference_ops_from_ref (op, &tem);
    4344              :   /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing this
    4345              :      before the pass folding __builtin_object_size had a chance to run.  */
    4346     73743771 :   if ((cfun->curr_properties & PROP_objsz)
    4347     55378162 :       && operands[0].opcode == ADDR_EXPR
    4348     74648179 :       && operands.last ().opcode == SSA_NAME)
    4349              :     {
    4350       873034 :       tree ops[2];
    4351       873034 :       if (vn_pp_nary_for_addr (operands, ops))
    4352              :         {
    4353       556099 :           vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
    4354       556099 :                                     TREE_TYPE (op), ops, result,
    4355       556099 :                                     VN_INFO (result)->value_id);
    4356       556099 :           return;
    4357              :         }
    4358              :     }
    4359              : 
    4360     73187672 :   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
    4361     73187672 :   if (TREE_CODE (result) == SSA_NAME)
    4362     50537004 :     vr1->value_id = VN_INFO (result)->value_id;
    4363              :   else
    4364     22650668 :     vr1->value_id = get_or_alloc_constant_value_id (result);
    4365     73187672 :   vr1->vuse = vuse_ssa_val (vuse);
    4366     73187672 :   vr1->operands = operands.copy ();
    4367     73187672 :   vr1->type = TREE_TYPE (op);
    4368     73187672 :   vr1->punned = false;
    4369     73187672 :   ao_ref op_ref;
    4370     73187672 :   ao_ref_init (&op_ref, op);
    4371     73187672 :   vr1->set = ao_ref_alias_set (&op_ref);
    4372     73187672 :   vr1->base_set = ao_ref_base_alias_set (&op_ref);
    4373              :   /* Specifically use an unknown extent here, we're not doing any lookup
    4374              :      and assume the caller didn't either (or it went VARYING).  */
    4375     73187672 :   vr1->offset = 0;
    4376     73187672 :   vr1->max_size = -1;
    4377     73187672 :   vr1->hashcode = vn_reference_compute_hash (vr1);
    4378     73187672 :   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
    4379     73187672 :   vr1->result_vdef = vdef;
    4380              : 
    4381     73187672 :   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
    4382              :                                                       INSERT);
    4383              : 
    4384              :   /* Because IL walking on reference lookup can end up visiting
    4385              :      a def that is only to be visited later in iteration order
    4386              :      when we are about to make an irreducible region reducible
    4387              :      the def can be effectively processed and its ref being inserted
    4388              :      by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
    4389              :      but save a lookup if we deal with already inserted refs here.  */
    4390     73187672 :   if (*slot)
    4391              :     {
    4392              :       /* We cannot assert that we have the same value either because
    4393              :          when disentangling an irreducible region we may end up visiting
    4394              :          a use before the corresponding def.  That's a missed optimization
    4395              :          only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
    4396            0 :       if (dump_file && (dump_flags & TDF_DETAILS)
    4397            0 :           && !operand_equal_p ((*slot)->result, vr1->result, 0))
    4398              :         {
    4399            0 :           fprintf (dump_file, "Keeping old value ");
    4400            0 :           print_generic_expr (dump_file, (*slot)->result);
    4401            0 :           fprintf (dump_file, " because of collision\n");
    4402              :         }
    4403            0 :       free_reference (vr1);
    4404            0 :       obstack_free (&vn_tables_obstack, vr1);
    4405            0 :       return;
    4406              :     }
    4407              : 
    4408     73187672 :   *slot = vr1;
    4409     73187672 :   vr1->next = last_inserted_ref;
    4410     73187672 :   last_inserted_ref = vr1;
    4411              : }
    4412              : 
    4413              : /* Insert a reference by it's pieces into the current hash table with
    4414              :    a value number of RESULT.  Return the resulting reference
    4415              :    structure we created.  */
    4416              : 
    4417              : vn_reference_t
    4418      3783500 : vn_reference_insert_pieces (tree vuse, alias_set_type set,
    4419              :                             alias_set_type base_set,
    4420              :                             poly_int64 offset, poly_int64 max_size, tree type,
    4421              :                             vec<vn_reference_op_s> operands,
    4422              :                             tree result, unsigned int value_id)
    4423              : 
    4424              : {
    4425      3783500 :   vn_reference_s **slot;
    4426      3783500 :   vn_reference_t vr1;
    4427              : 
    4428      3783500 :   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
    4429      3783500 :   vr1->value_id = value_id;
    4430      3783500 :   vr1->vuse = vuse_ssa_val (vuse);
    4431      3783500 :   vr1->operands = operands;
    4432      3783500 :   valueize_refs (&vr1->operands);
    4433      3783500 :   vr1->type = type;
    4434      3783500 :   vr1->punned = false;
    4435      3783500 :   vr1->set = set;
    4436      3783500 :   vr1->base_set = base_set;
    4437      3783500 :   vr1->offset = offset;
    4438      3783500 :   vr1->max_size = max_size;
    4439      3783500 :   vr1->hashcode = vn_reference_compute_hash (vr1);
    4440      3783500 :   if (result && TREE_CODE (result) == SSA_NAME)
    4441       275728 :     result = SSA_VAL (result);
    4442      3783500 :   vr1->result = result;
    4443      3783500 :   vr1->result_vdef = NULL_TREE;
    4444              : 
    4445      3783500 :   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
    4446              :                                                       INSERT);
    4447              : 
    4448              :   /* At this point we should have all the things inserted that we have
    4449              :      seen before, and we should never try inserting something that
    4450              :      already exists.  */
    4451      3783500 :   gcc_assert (!*slot);
    4452              : 
    4453      3783500 :   *slot = vr1;
    4454      3783500 :   vr1->next = last_inserted_ref;
    4455      3783500 :   last_inserted_ref = vr1;
    4456      3783500 :   return vr1;
    4457              : }
    4458              : 
    4459              : /* Compute and return the hash value for nary operation VBO1.  */
    4460              : 
    4461              : hashval_t
    4462    301296487 : vn_nary_op_compute_hash (const vn_nary_op_t vno1)
    4463              : {
    4464    301296487 :   inchash::hash hstate;
    4465    301296487 :   unsigned i;
    4466              : 
    4467    301296487 :   if (((vno1->length == 2
    4468    254140731 :         && commutative_tree_code (vno1->opcode))
    4469    137870574 :        || (vno1->length == 3
    4470      1437662 :            && commutative_ternary_tree_code (vno1->opcode)))
    4471    464724385 :       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    4472      2381363 :     std::swap (vno1->op[0], vno1->op[1]);
    4473    298915124 :   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
    4474    298915124 :            && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    4475              :     {
    4476       473835 :       std::swap (vno1->op[0], vno1->op[1]);
    4477       473835 :       vno1->opcode = swap_tree_comparison  (vno1->opcode);
    4478              :     }
    4479              : 
    4480    301296487 :   hstate.add_int (vno1->opcode);
    4481    860308218 :   for (i = 0; i < vno1->length; ++i)
    4482    559011731 :     inchash::add_expr (vno1->op[i], hstate);
    4483              : 
    4484    301296487 :   return hstate.end ();
    4485              : }
    4486              : 
    4487              : /* Compare nary operations VNO1 and VNO2 and return true if they are
    4488              :    equivalent.  */
    4489              : 
    4490              : bool
    4491    959280686 : vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
    4492              : {
    4493    959280686 :   unsigned i;
    4494              : 
    4495    959280686 :   if (vno1->hashcode != vno2->hashcode)
    4496              :     return false;
    4497              : 
    4498     49771916 :   if (vno1->length != vno2->length)
    4499              :     return false;
    4500              : 
    4501     49771916 :   if (vno1->opcode != vno2->opcode
    4502     49771916 :       || !types_compatible_p (vno1->type, vno2->type))
    4503      1141017 :     return false;
    4504              : 
    4505    140535831 :   for (i = 0; i < vno1->length; ++i)
    4506     92002973 :     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
    4507              :       return false;
    4508              : 
    4509              :   /* BIT_INSERT_EXPR has an implict operand as the type precision
    4510              :      of op1.  Need to check to make sure they are the same.  */
    4511     48532858 :   if (vno1->opcode == BIT_INSERT_EXPR
    4512          532 :       && TREE_CODE (vno1->op[1]) == INTEGER_CST
    4513     48532965 :       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
    4514          107 :          != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    4515              :     return false;
    4516              : 
    4517              :   return true;
    4518              : }
    4519              : 
    4520              : /* Initialize VNO from the pieces provided.  */
    4521              : 
    4522              : static void
    4523    187055916 : init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
    4524              :                              enum tree_code code, tree type, tree *ops)
    4525              : {
    4526    187055916 :   vno->opcode = code;
    4527    187055916 :   vno->length = length;
    4528    187055916 :   vno->type = type;
    4529      4545802 :   memcpy (&vno->op[0], ops, sizeof (tree) * length);
    4530            0 : }
    4531              : 
    4532              : /* Return the number of operands for a vn_nary ops structure from STMT.  */
    4533              : 
    4534              : unsigned int
    4535    108286645 : vn_nary_length_from_stmt (gimple *stmt)
    4536              : {
    4537    108286645 :   switch (gimple_assign_rhs_code (stmt))
    4538              :     {
    4539              :     case REALPART_EXPR:
    4540              :     case IMAGPART_EXPR:
    4541              :     case VIEW_CONVERT_EXPR:
    4542              :       return 1;
    4543              : 
    4544       538390 :     case BIT_FIELD_REF:
    4545       538390 :       return 3;
    4546              : 
    4547       495424 :     case CONSTRUCTOR:
    4548       495424 :       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
    4549              : 
    4550    103964393 :     default:
    4551    103964393 :       return gimple_num_ops (stmt) - 1;
    4552              :     }
    4553              : }
    4554              : 
    4555              : /* Initialize VNO from STMT.  */
    4556              : 
    4557              : void
    4558    108286645 : init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
    4559              : {
    4560    108286645 :   unsigned i;
    4561              : 
    4562    108286645 :   vno->opcode = gimple_assign_rhs_code (stmt);
    4563    108286645 :   vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
    4564    108286645 :   switch (vno->opcode)
    4565              :     {
    4566      3288438 :     case REALPART_EXPR:
    4567      3288438 :     case IMAGPART_EXPR:
    4568      3288438 :     case VIEW_CONVERT_EXPR:
    4569      3288438 :       vno->length = 1;
    4570      3288438 :       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
    4571      3288438 :       break;
    4572              : 
    4573       538390 :     case BIT_FIELD_REF:
    4574       538390 :       vno->length = 3;
    4575       538390 :       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
    4576       538390 :       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
    4577       538390 :       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
    4578       538390 :       break;
    4579              : 
    4580       495424 :     case CONSTRUCTOR:
    4581       495424 :       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
    4582      1951861 :       for (i = 0; i < vno->length; ++i)
    4583      1456437 :         vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
    4584              :       break;
    4585              : 
    4586    103964393 :     default:
    4587    103964393 :       gcc_checking_assert (!gimple_assign_single_p (stmt));
    4588    103964393 :       vno->length = gimple_num_ops (stmt) - 1;
    4589    284853200 :       for (i = 0; i < vno->length; ++i)
    4590    180888807 :         vno->op[i] = gimple_op (stmt, i + 1);
    4591              :     }
    4592    108286645 : }
    4593              : 
    4594              : /* Compute the hashcode for VNO and look for it in the hash table;
    4595              :    return the resulting value number if it exists in the hash table.
    4596              :    Return NULL_TREE if it does not exist in the hash table or if the
    4597              :    result field of the operation is NULL.  VNRESULT will contain the
    4598              :    vn_nary_op_t from the hashtable if it exists.  */
    4599              : 
    4600              : static tree
    4601    130454873 : vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
    4602              : {
    4603    130454873 :   vn_nary_op_s **slot;
    4604              : 
    4605    130454873 :   if (vnresult)
    4606    123314030 :     *vnresult = NULL;
    4607              : 
    4608    362809904 :   for (unsigned i = 0; i < vno->length; ++i)
    4609    232355031 :     if (TREE_CODE (vno->op[i]) == SSA_NAME)
    4610    164587958 :       vno->op[i] = SSA_VAL (vno->op[i]);
    4611              : 
    4612    130454873 :   vno->hashcode = vn_nary_op_compute_hash (vno);
    4613    130454873 :   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
    4614    130454873 :   if (!slot)
    4615              :     return NULL_TREE;
    4616     17458510 :   if (vnresult)
    4617     17017718 :     *vnresult = *slot;
    4618     17458510 :   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
    4619              : }
    4620              : 
    4621              : /* Lookup a n-ary operation by its pieces and return the resulting value
    4622              :    number if it exists in the hash table.  Return NULL_TREE if it does
    4623              :    not exist in the hash table or if the result field of the operation
    4624              :    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
    4625              :    if it exists.  */
    4626              : 
    4627              : tree
    4628     74416847 : vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
    4629              :                           tree type, tree *ops, vn_nary_op_t *vnresult)
    4630              : {
    4631     74416847 :   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
    4632              :                                   sizeof_vn_nary_op (length));
    4633     74416847 :   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
    4634     74416847 :   return vn_nary_op_lookup_1 (vno1, vnresult);
    4635              : }
    4636              : 
    4637              : /* Lookup the rhs of STMT in the current hash table, and return the resulting
    4638              :    value number if it exists in the hash table.  Return NULL_TREE if
    4639              :    it does not exist in the hash table.  VNRESULT will contain the
    4640              :    vn_nary_op_t from the hashtable if it exists.  */
    4641              : 
    4642              : tree
    4643     56038026 : vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
    4644              : {
    4645     56038026 :   vn_nary_op_t vno1
    4646     56038026 :     = XALLOCAVAR (struct vn_nary_op_s,
    4647              :                   sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
    4648     56038026 :   init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
    4649     56038026 :   return vn_nary_op_lookup_1 (vno1, vnresult);
    4650              : }
    4651              : 
    4652              : /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
    4653              : 
    4654              : vn_nary_op_t
    4655    169880768 : alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
    4656              : {
    4657    169880768 :   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
    4658              : }
    4659              : 
    4660              : /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
    4661              :    obstack.  */
    4662              : 
    4663              : static vn_nary_op_t
    4664    152834374 : alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
    4665              : {
    4666            0 :   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
    4667              : 
    4668    152834374 :   vno1->value_id = value_id;
    4669    152834374 :   vno1->length = length;
    4670    152834374 :   vno1->predicated_values = 0;
    4671    152834374 :   vno1->u.result = result;
    4672              : 
    4673    152834374 :   return vno1;
    4674              : }
    4675              : 
    4676              : /* Insert VNO into TABLE.  */
    4677              : 
    4678              : static vn_nary_op_t
    4679    157515633 : vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
    4680              : {
    4681    157515633 :   vn_nary_op_s **slot;
    4682              : 
    4683    157515633 :   gcc_assert (! vno->predicated_values
    4684              :               || (! vno->u.values->next
    4685              :                   && vno->u.values->n == 1));
    4686              : 
    4687    460963598 :   for (unsigned i = 0; i < vno->length; ++i)
    4688    303447965 :     if (TREE_CODE (vno->op[i]) == SSA_NAME)
    4689    197914333 :       vno->op[i] = SSA_VAL (vno->op[i]);
    4690              : 
    4691    157515633 :   vno->hashcode = vn_nary_op_compute_hash (vno);
    4692    157515633 :   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
    4693    157515633 :   vno->unwind_to = *slot;
    4694    157515633 :   if (*slot)
    4695              :     {
    4696              :       /* Prefer non-predicated values.
    4697              :          ???  Only if those are constant, otherwise, with constant predicated
    4698              :          value, turn them into predicated values with entry-block validity
    4699              :          (???  but we always find the first valid result currently).  */
    4700     30113502 :       if ((*slot)->predicated_values
    4701     29396755 :           && ! vno->predicated_values)
    4702              :         {
    4703              :           /* ???  We cannot remove *slot from the unwind stack list.
    4704              :              For the moment we deal with this by skipping not found
    4705              :              entries but this isn't ideal ...  */
    4706        83589 :           *slot = vno;
    4707              :           /* ???  Maintain a stack of states we can unwind in
    4708              :              vn_nary_op_s?  But how far do we unwind?  In reality
    4709              :              we need to push change records somewhere...  Or not
    4710              :              unwind vn_nary_op_s and linking them but instead
    4711              :              unwind the results "list", linking that, which also
    4712              :              doesn't move on hashtable resize.  */
    4713              :           /* We can also have a ->unwind_to recording *slot there.
    4714              :              That way we can make u.values a fixed size array with
    4715              :              recording the number of entries but of course we then
    4716              :              have always N copies for each unwind_to-state.  Or we
    4717              :              make sure to only ever append and each unwinding will
    4718              :              pop off one entry (but how to deal with predicated
    4719              :              replaced with non-predicated here?)  */
    4720        83589 :           vno->next = last_inserted_nary;
    4721        83589 :           last_inserted_nary = vno;
    4722        83589 :           return vno;
    4723              :         }
    4724     30029913 :       else if (vno->predicated_values
    4725     30029557 :                && ! (*slot)->predicated_values)
    4726              :         return *slot;
    4727     29313522 :       else if (vno->predicated_values
    4728     29313166 :                && (*slot)->predicated_values)
    4729              :         {
    4730              :           /* ???  Factor this all into a insert_single_predicated_value
    4731              :              routine.  */
    4732     29313166 :           gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
    4733     29313166 :           basic_block vno_bb
    4734     29313166 :             = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
    4735     29313166 :           vn_pval *nval = vno->u.values;
    4736     29313166 :           vn_pval **next = &vno->u.values;
    4737     29313166 :           vn_pval *ins = NULL;
    4738     29313166 :           vn_pval *ins_at = NULL;
    4739              :           /* Find an existing value to append to.  */
    4740     55223339 :           for (vn_pval *val = (*slot)->u.values; val; val = val->next)
    4741              :             {
    4742     30291139 :               if (expressions_equal_p (val->result, nval->result))
    4743              :                 {
    4744              :                   /* Limit the number of places we register a predicate
    4745              :                      as valid.  */
    4746      4380966 :                   if (val->n > 8)
    4747       115396 :                     return *slot;
    4748     10771328 :                   for (unsigned i = 0; i < val->n; ++i)
    4749              :                     {
    4750      6738697 :                       basic_block val_bb
    4751      6738697 :                         = BASIC_BLOCK_FOR_FN (cfun,
    4752              :                                               val->valid_dominated_by_p[i]);
    4753      6738697 :                       if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
    4754              :                         /* Value registered with more generic predicate.  */
    4755       232939 :                         return *slot;
    4756      6505758 :                       else if (flag_checking)
    4757              :                         /* Shouldn't happen, we insert in RPO order.  */
    4758      6505758 :                         gcc_assert (!dominated_by_p (CDI_DOMINATORS,
    4759              :                                                      val_bb, vno_bb));
    4760              :                     }
    4761              :                   /* Append the location.  */
    4762      4032631 :                   ins_at = val;
    4763      4032631 :                   ins = (vn_pval *) obstack_alloc (&vn_tables_obstack,
    4764              :                                                    sizeof (vn_pval)
    4765              :                                                    + val->n * sizeof (int));
    4766      4032631 :                   ins->next = NULL;
    4767      4032631 :                   ins->result = val->result;
    4768      4032631 :                   ins->n = val->n + 1;
    4769      4032631 :                   memcpy (ins->valid_dominated_by_p,
    4770      4032631 :                           val->valid_dominated_by_p,
    4771      4032631 :                           val->n * sizeof (int));
    4772      4032631 :                   ins->valid_dominated_by_p[val->n] = vno_bb->index;
    4773      4032631 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    4774            4 :                     fprintf (dump_file, "Appending predicate to value.\n");
    4775              :                   break;
    4776              :                 }
    4777              :             }
    4778              :           /* Copy the rest of the value chain.  */
    4779     59764982 :           for (vn_pval *val = (*slot)->u.values; val; val = val->next)
    4780              :             {
    4781     30800151 :               if (val == ins_at)
    4782              :                 /* Replace the node we appended to.  */
    4783      4032631 :                 *next = ins;
    4784              :               else
    4785              :                 {
    4786              :                   /* Copy other predicated values.  */
    4787     26767520 :                   *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
    4788              :                                                      sizeof (vn_pval)
    4789              :                                                      + ((val->n-1)
    4790              :                                                         * sizeof (int)));
    4791     26767520 :                   memcpy (*next, val,
    4792     26767520 :                           sizeof (vn_pval) + (val->n-1) * sizeof (int));
    4793     26767520 :                   (*next)->next = NULL;
    4794              :                 }
    4795     30800151 :               next = &(*next)->next;
    4796              :             }
    4797              :           /* Append the value if we didn't find it.  */
    4798     28964831 :           if (!ins_at)
    4799     24932200 :             *next = nval;
    4800     28964831 :           *slot = vno;
    4801     28964831 :           vno->next = last_inserted_nary;
    4802     28964831 :           last_inserted_nary = vno;
    4803     28964831 :           return vno;
    4804              :         }
    4805              : 
    4806              :       /* While we do not want to insert things twice it's awkward to
    4807              :          avoid it in the case where visit_nary_op pattern-matches stuff
    4808              :          and ends up simplifying the replacement to itself.  We then
    4809              :          get two inserts, one from visit_nary_op and one from
    4810              :          vn_nary_build_or_lookup.
    4811              :          So allow inserts with the same value number.  */
    4812          356 :       if ((*slot)->u.result == vno->u.result)
    4813              :         return *slot;
    4814              :     }
    4815              : 
    4816              :   /* ???  There's also optimistic vs. previous commited state merging
    4817              :      that is problematic for the case of unwinding.  */
    4818              : 
    4819              :   /* ???  We should return NULL if we do not use 'vno' and have the
    4820              :      caller release it.  */
    4821    127402131 :   gcc_assert (!*slot);
    4822              : 
    4823    127402131 :   *slot = vno;
    4824    127402131 :   vno->next = last_inserted_nary;
    4825    127402131 :   last_inserted_nary = vno;
    4826    127402131 :   return vno;
    4827              : }
    4828              : 
    4829              : /* Insert a n-ary operation into the current hash table using it's
    4830              :    pieces.  Return the vn_nary_op_t structure we created and put in
    4831              :    the hashtable.  */
    4832              : 
    4833              : vn_nary_op_t
    4834       556099 : vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
    4835              :                           tree type, tree *ops,
    4836              :                           tree result, unsigned int value_id)
    4837              : {
    4838       556099 :   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
    4839       556099 :   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
    4840       556099 :   return vn_nary_op_insert_into (vno1, valid_info->nary);
    4841              : }
    4842              : 
    4843              : /* Return whether we can track a predicate valid when PRED_E is executed.  */
    4844              : 
    4845              : static bool
    4846    151697345 : can_track_predicate_on_edge (edge pred_e)
    4847              : {
    4848              :   /* ???  As we are currently recording the destination basic-block index in
    4849              :      vn_pval.valid_dominated_by_p and using dominance for the
    4850              :      validity check we cannot track predicates on all edges.  */
    4851    151697345 :   if (single_pred_p (pred_e->dest))
    4852              :     return true;
    4853              :   /* Never record for backedges.  */
    4854     11966333 :   if (pred_e->flags & EDGE_DFS_BACK)
    4855              :     return false;
    4856              :   /* When there's more than one predecessor we cannot track
    4857              :      predicate validity based on the destination block.  The
    4858              :      exception is when all other incoming edges sources are
    4859              :      dominated by the destination block.  */
    4860     11309165 :   edge_iterator ei;
    4861     11309165 :   edge e;
    4862     19416774 :   FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
    4863     17566759 :     if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
    4864              :       return false;
    4865              :   return true;
    4866              : }
    4867              : 
    4868              : static vn_nary_op_t
    4869    107537168 : vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
    4870              :                                      tree type, tree *ops,
    4871              :                                      tree result, unsigned int value_id,
    4872              :                                      edge pred_e)
    4873              : {
    4874    107537168 :   if (flag_checking)
    4875    107536332 :     gcc_assert (can_track_predicate_on_edge (pred_e));
    4876              : 
    4877        74818 :   if (dump_file && (dump_flags & TDF_DETAILS)
    4878              :       /* ???  Fix dumping, but currently we only get comparisons.  */
    4879    107607956 :       && TREE_CODE_CLASS (code) == tcc_comparison)
    4880              :     {
    4881        70788 :       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
    4882        70788 :                pred_e->dest->index);
    4883        70788 :       print_generic_expr (dump_file, ops[0], TDF_SLIM);
    4884        70788 :       fprintf (dump_file, " %s ", get_tree_code_name (code));
    4885        70788 :       print_generic_expr (dump_file, ops[1], TDF_SLIM);
    4886       105813 :       fprintf (dump_file, " == %s\n",
    4887        70788 :                integer_zerop (result) ? "false" : "true");
    4888              :     }
    4889    107537168 :   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
    4890    107537168 :   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
    4891    107537168 :   vno1->predicated_values = 1;
    4892    107537168 :   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
    4893              :                                               sizeof (vn_pval));
    4894    107537168 :   vno1->u.values->next = NULL;
    4895    107537168 :   vno1->u.values->result = result;
    4896    107537168 :   vno1->u.values->n = 1;
    4897    107537168 :   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
    4898    107537168 :   return vn_nary_op_insert_into (vno1, valid_info->nary);
    4899              : }
    4900              : 
    4901              : static bool
    4902              : dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
    4903              : 
    4904              : static tree
    4905      1704836 : vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb,
    4906              :                                  edge e = NULL)
    4907              : {
    4908      1704836 :   if (! vno->predicated_values)
    4909            0 :     return vno->u.result;
    4910      3532371 :   for (vn_pval *val = vno->u.values; val; val = val->next)
    4911      5373826 :     for (unsigned i = 0; i < val->n; ++i)
    4912              :       {
    4913      3546291 :         basic_block cand
    4914      3546291 :           = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
    4915              :         /* Do not handle backedge executability optimistically since
    4916              :            when figuring out whether to iterate we do not consider
    4917              :            changed predication.
    4918              :            When asking for predicated values on an edge avoid looking
    4919              :            at edge executability for edges forward in our iteration
    4920              :            as well.  */
    4921      3546291 :         if (e && (e->flags & EDGE_DFS_BACK))
    4922              :           {
    4923        23412 :             if (dominated_by_p (CDI_DOMINATORS, bb, cand))
    4924         7782 :               return val->result;
    4925              :           }
    4926      3522879 :         else if (dominated_by_p_w_unex (bb, cand, false))
    4927       535496 :           return val->result;
    4928              :       }
    4929              :   return NULL_TREE;
    4930              : }
    4931              : 
    4932              : static tree
    4933       211187 : vn_nary_op_get_predicated_value (vn_nary_op_t vno, edge e)
    4934              : {
    4935            0 :   return vn_nary_op_get_predicated_value (vno, e->src, e);
    4936              : }
    4937              : 
    4938              : /* Insert the rhs of STMT into the current hash table with a value number of
    4939              :    RESULT.  */
    4940              : 
    4941              : static vn_nary_op_t
    4942     44741107 : vn_nary_op_insert_stmt (gimple *stmt, tree result)
    4943              : {
    4944     44741107 :   vn_nary_op_t vno1
    4945     44741107 :     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
    4946     44741107 :                         result, VN_INFO (result)->value_id);
    4947     44741107 :   init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
    4948     44741107 :   return vn_nary_op_insert_into (vno1, valid_info->nary);
    4949              : }
    4950              : 
    4951              : /* Compute a hashcode for PHI operation VP1 and return it.  */
    4952              : 
    4953              : static inline hashval_t
    4954     50236387 : vn_phi_compute_hash (vn_phi_t vp1)
    4955              : {
    4956     50236387 :   inchash::hash hstate;
    4957     50236387 :   tree phi1op;
    4958     50236387 :   tree type;
    4959     50236387 :   edge e;
    4960     50236387 :   edge_iterator ei;
    4961              : 
    4962    100472774 :   hstate.add_int (EDGE_COUNT (vp1->block->preds));
    4963     50236387 :   switch (EDGE_COUNT (vp1->block->preds))
    4964              :     {
    4965              :     case 1:
    4966              :       break;
    4967     43147863 :     case 2:
    4968              :       /* When this is a PHI node subject to CSE for different blocks
    4969              :          avoid hashing the block index.  */
    4970     43147863 :       if (vp1->cclhs)
    4971              :         break;
    4972              :       /* Fallthru.  */
    4973     33932552 :     default:
    4974     33932552 :       hstate.add_int (vp1->block->index);
    4975              :     }
    4976              : 
    4977              :   /* If all PHI arguments are constants we need to distinguish
    4978              :      the PHI node via its type.  */
    4979     50236387 :   type = vp1->type;
    4980     50236387 :   hstate.merge_hash (vn_hash_type (type));
    4981              : 
    4982    174639721 :   FOR_EACH_EDGE (e, ei, vp1->block->preds)
    4983              :     {
    4984              :       /* Don't hash backedge values they need to be handled as VN_TOP
    4985              :          for optimistic value-numbering.  */
    4986    124403334 :       if (e->flags & EDGE_DFS_BACK)
    4987     27816150 :         continue;
    4988              : 
    4989     96587184 :       phi1op = vp1->phiargs[e->dest_idx];
    4990     96587184 :       if (phi1op == VN_TOP)
    4991       243952 :         continue;
    4992     96343232 :       inchash::add_expr (phi1op, hstate);
    4993              :     }
    4994              : 
    4995     50236387 :   return hstate.end ();
    4996              : }
    4997              : 
    4998              : 
    4999              : /* Return true if COND1 and COND2 represent the same condition, set
    5000              :    *INVERTED_P if one needs to be inverted to make it the same as
    5001              :    the other.  */
    5002              : 
    5003              : static bool
    5004      3761061 : cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
    5005              :                     gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
    5006              : {
    5007      3761061 :   enum tree_code code1 = gimple_cond_code (cond1);
    5008      3761061 :   enum tree_code code2 = gimple_cond_code (cond2);
    5009              : 
    5010      3761061 :   *inverted_p = false;
    5011      3761061 :   if (code1 == code2)
    5012              :     ;
    5013       299098 :   else if (code1 == swap_tree_comparison (code2))
    5014              :     std::swap (lhs2, rhs2);
    5015       263042 :   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    5016       130574 :     *inverted_p = true;
    5017       132468 :   else if (code1 == invert_tree_comparison
    5018       132468 :                       (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    5019              :     {
    5020        10288 :       std::swap (lhs2, rhs2);
    5021        10288 :       *inverted_p = true;
    5022              :     }
    5023              :   else
    5024              :     return false;
    5025              : 
    5026      3638881 :   return ((expressions_equal_p (lhs1, lhs2)
    5027       108276 :            && expressions_equal_p (rhs1, rhs2))
    5028      3663076 :           || (commutative_tree_code (code1)
    5029      1781069 :               && expressions_equal_p (lhs1, rhs2)
    5030         2324 :               && expressions_equal_p (rhs1, lhs2)));
    5031              : }
    5032              : 
    5033              : /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
    5034              : 
    5035              : static int
    5036     40925369 : vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
    5037              : {
    5038     40925369 :   if (vp1->hashcode != vp2->hashcode)
    5039              :     return false;
    5040              : 
    5041     12683665 :   if (vp1->block != vp2->block)
    5042              :     {
    5043     11306586 :       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
    5044              :         return false;
    5045              : 
    5046     36764569 :       switch (EDGE_COUNT (vp1->block->preds))
    5047              :         {
    5048              :         case 1:
    5049              :           /* Single-arg PHIs are just copies.  */
    5050              :           break;
    5051              : 
    5052      3768862 :         case 2:
    5053      3768862 :           {
    5054              :             /* Make sure both PHIs are classified as CSEable.  */
    5055      3768862 :             if (! vp1->cclhs || ! vp2->cclhs)
    5056              :               return false;
    5057              : 
    5058              :             /* Rule out backedges into the PHI.  */
    5059      3768862 :             gcc_checking_assert
    5060              :               (vp1->block->loop_father->header != vp1->block
    5061              :                && vp2->block->loop_father->header != vp2->block);
    5062              : 
    5063              :             /* If the PHI nodes do not have compatible types
    5064              :                they are not the same.  */
    5065      3768862 :             if (!types_compatible_p (vp1->type, vp2->type))
    5066              :               return false;
    5067              : 
    5068              :             /* If the immediate dominator end in switch stmts multiple
    5069              :                values may end up in the same PHI arg via intermediate
    5070              :                CFG merges.  */
    5071      3761061 :             basic_block idom1
    5072      3761061 :               = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
    5073      3761061 :             basic_block idom2
    5074      3761061 :               = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
    5075      3761061 :             gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
    5076              :                                  && EDGE_COUNT (idom2->succs) == 2);
    5077              : 
    5078              :             /* Verify the controlling stmt is the same.  */
    5079      7522122 :             gcond *last1 = as_a <gcond *> (*gsi_last_bb (idom1));
    5080      7522122 :             gcond *last2 = as_a <gcond *> (*gsi_last_bb (idom2));
    5081      3761061 :             bool inverted_p;
    5082      3761061 :             if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
    5083      3761061 :                                       last2, vp2->cclhs, vp2->ccrhs,
    5084              :                                       &inverted_p))
    5085              :               return false;
    5086              : 
    5087              :             /* Get at true/false controlled edges into the PHI.  */
    5088        84173 :             edge te1, te2, fe1, fe2;
    5089        84173 :             if (! extract_true_false_controlled_edges (idom1, vp1->block,
    5090              :                                                        &te1, &fe1)
    5091        84173 :                 || ! extract_true_false_controlled_edges (idom2, vp2->block,
    5092              :                                                           &te2, &fe2))
    5093        38277 :               return false;
    5094              : 
    5095              :             /* Swap edges if the second condition is the inverted of the
    5096              :                first.  */
    5097        45896 :             if (inverted_p)
    5098         2028 :               std::swap (te2, fe2);
    5099              : 
    5100              :             /* Since we do not know which edge will be executed we have
    5101              :                to be careful when matching VN_TOP.  Be conservative and
    5102              :                only match VN_TOP == VN_TOP for now, we could allow
    5103              :                VN_TOP on the not prevailing PHI though.  See for example
    5104              :                PR102920.  */
    5105        45896 :             if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
    5106        45896 :                                        vp2->phiargs[te2->dest_idx], false)
    5107        89979 :                 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
    5108        44083 :                                           vp2->phiargs[fe2->dest_idx], false))
    5109         1813 :               return false;
    5110              : 
    5111              :             return true;
    5112              :           }
    5113              : 
    5114              :         default:
    5115              :           return false;
    5116              :         }
    5117              :     }
    5118              : 
    5119              :   /* If the PHI nodes do not have compatible types
    5120              :      they are not the same.  */
    5121      8914803 :   if (!types_compatible_p (vp1->type, vp2->type))
    5122              :     return false;
    5123              : 
    5124              :   /* Any phi in the same block will have it's arguments in the
    5125              :      same edge order, because of how we store phi nodes.  */
    5126      8913753 :   unsigned nargs = EDGE_COUNT (vp1->block->preds);
    5127     20691464 :   for (unsigned i = 0; i < nargs; ++i)
    5128              :     {
    5129     16530664 :       tree phi1op = vp1->phiargs[i];
    5130     16530664 :       tree phi2op = vp2->phiargs[i];
    5131     16530664 :       if (phi1op == phi2op)
    5132     11682984 :         continue;
    5133      4847680 :       if (!expressions_equal_p (phi1op, phi2op, false))
    5134              :         return false;
    5135              :     }
    5136              : 
    5137              :   return true;
    5138              : }
    5139              : 
    5140              : /* Lookup PHI in the current hash table, and return the resulting
    5141              :    value number if it exists in the hash table.  Return NULL_TREE if
    5142              :    it does not exist in the hash table. */
    5143              : 
    5144              : static tree
    5145     27526629 : vn_phi_lookup (gimple *phi, bool backedges_varying_p)
    5146              : {
    5147     27526629 :   vn_phi_s **slot;
    5148     27526629 :   struct vn_phi_s *vp1;
    5149     27526629 :   edge e;
    5150     27526629 :   edge_iterator ei;
    5151              : 
    5152     27526629 :   vp1 = XALLOCAVAR (struct vn_phi_s,
    5153              :                     sizeof (struct vn_phi_s)
    5154              :                     + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
    5155              : 
    5156              :   /* Canonicalize the SSA_NAME's to their value number.  */
    5157     94995775 :   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    5158              :     {
    5159     67469146 :       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
    5160     67469146 :       if (TREE_CODE (def) == SSA_NAME
    5161     56231232 :           && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
    5162              :         {
    5163     53683401 :           if (!virtual_operand_p (def)
    5164     53683401 :               && ssa_undefined_value_p (def, false))
    5165       135361 :             def = VN_TOP;
    5166              :           else
    5167     53548040 :             def = SSA_VAL (def);
    5168              :         }
    5169     67469146 :       vp1->phiargs[e->dest_idx] = def;
    5170              :     }
    5171     27526629 :   vp1->type = TREE_TYPE (gimple_phi_result (phi));
    5172     27526629 :   vp1->block = gimple_bb (phi);
    5173              :   /* Extract values of the controlling condition.  */
    5174     27526629 :   vp1->cclhs = NULL_TREE;
    5175     27526629 :   vp1->ccrhs = NULL_TREE;
    5176     27526629 :   if (EDGE_COUNT (vp1->block->preds) == 2
    5177     27526629 :       && vp1->block->loop_father->header != vp1->block)
    5178              :     {
    5179      8596850 :       basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
    5180      8596850 :       if (EDGE_COUNT (idom1->succs) == 2)
    5181     17092498 :         if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
    5182              :           {
    5183              :             /* ???  We want to use SSA_VAL here.  But possibly not
    5184              :                allow VN_TOP.  */
    5185      8327818 :             vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
    5186      8327818 :             vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
    5187              :           }
    5188              :     }
    5189     27526629 :   vp1->hashcode = vn_phi_compute_hash (vp1);
    5190     27526629 :   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
    5191     27526629 :   if (!slot)
    5192              :     return NULL_TREE;
    5193      4204883 :   return (*slot)->result;
    5194              : }
    5195              : 
    5196              : /* Insert PHI into the current hash table with a value number of
    5197              :    RESULT.  */
    5198              : 
    5199              : static vn_phi_t
    5200     22709758 : vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
    5201              : {
    5202     22709758 :   vn_phi_s **slot;
    5203     22709758 :   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
    5204              :                                            sizeof (vn_phi_s)
    5205              :                                            + ((gimple_phi_num_args (phi) - 1)
    5206              :                                               * sizeof (tree)));
    5207     22709758 :   edge e;
    5208     22709758 :   edge_iterator ei;
    5209              : 
    5210              :   /* Canonicalize the SSA_NAME's to their value number.  */
    5211     79643946 :   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    5212              :     {
    5213     56934188 :       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
    5214     56934188 :       if (TREE_CODE (def) == SSA_NAME
    5215     46767613 :           && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
    5216              :         {
    5217     44220194 :           if (!virtual_operand_p (def)
    5218     44220194 :               && ssa_undefined_value_p (def, false))
    5219       108835 :             def = VN_TOP;
    5220              :           else
    5221     44111359 :             def = SSA_VAL (def);
    5222              :         }
    5223     56934188 :       vp1->phiargs[e->dest_idx] = def;
    5224              :     }
    5225     22709758 :   vp1->value_id = VN_INFO (result)->value_id;
    5226     22709758 :   vp1->type = TREE_TYPE (gimple_phi_result (phi));
    5227     22709758 :   vp1->block = gimple_bb (phi);
    5228              :   /* Extract values of the controlling condition.  */
    5229     22709758 :   vp1->cclhs = NULL_TREE;
    5230     22709758 :   vp1->ccrhs = NULL_TREE;
    5231     22709758 :   if (EDGE_COUNT (vp1->block->preds) == 2
    5232     22709758 :       && vp1->block->loop_father->header != vp1->block)
    5233              :     {
    5234      8241407 :       basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
    5235      8241407 :       if (EDGE_COUNT (idom1->succs) == 2)
    5236     16384518 :         if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
    5237              :           {
    5238              :             /* ???  We want to use SSA_VAL here.  But possibly not
    5239              :                allow VN_TOP.  */
    5240      7976017 :             vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
    5241      7976017 :             vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
    5242              :           }
    5243              :     }
    5244     22709758 :   vp1->result = result;
    5245     22709758 :   vp1->hashcode = vn_phi_compute_hash (vp1);
    5246              : 
    5247     22709758 :   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
    5248     22709758 :   gcc_assert (!*slot);
    5249              : 
    5250     22709758 :   *slot = vp1;
    5251     22709758 :   vp1->next = last_inserted_phi;
    5252     22709758 :   last_inserted_phi = vp1;
    5253     22709758 :   return vp1;
    5254              : }
    5255              : 
    5256              : 
    5257              : /* Return true if BB1 is dominated by BB2 taking into account edges
    5258              :    that are not executable.  When ALLOW_BACK is false consider not
    5259              :    executable backedges as executable.  */
    5260              : 
    5261              : static bool
    5262     70561964 : dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
    5263              : {
    5264     70561964 :   edge_iterator ei;
    5265     70561964 :   edge e;
    5266              : 
    5267     70561964 :   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5268              :     return true;
    5269              : 
    5270              :   /* Before iterating we'd like to know if there exists a
    5271              :      (executable) path from bb2 to bb1 at all, if not we can
    5272              :      directly return false.  For now simply iterate once.  */
    5273              : 
    5274              :   /* Iterate to the single executable bb1 predecessor.  */
    5275     21078142 :   if (EDGE_COUNT (bb1->preds) > 1)
    5276              :     {
    5277      2904923 :       edge prede = NULL;
    5278      6303558 :       FOR_EACH_EDGE (e, ei, bb1->preds)
    5279      5899500 :         if ((e->flags & EDGE_EXECUTABLE)
    5280       571729 :             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
    5281              :           {
    5282      5405788 :             if (prede)
    5283              :               {
    5284              :                 prede = NULL;
    5285              :                 break;
    5286              :               }
    5287              :             prede = e;
    5288              :           }
    5289      2904923 :       if (prede)
    5290              :         {
    5291       404058 :           bb1 = prede->src;
    5292              : 
    5293              :           /* Re-do the dominance check with changed bb1.  */
    5294       404058 :           if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5295              :             return true;
    5296              :         }
    5297              :     }
    5298              : 
    5299              :   /* Iterate to the single executable bb2 successor.  */
    5300     20844070 :   if (EDGE_COUNT (bb2->succs) > 1)
    5301              :     {
    5302      6093715 :       edge succe = NULL;
    5303     12334333 :       FOR_EACH_EDGE (e, ei, bb2->succs)
    5304     12187682 :         if ((e->flags & EDGE_EXECUTABLE)
    5305       180287 :             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
    5306              :           {
    5307     12007436 :             if (succe)
    5308              :               {
    5309              :                 succe = NULL;
    5310              :                 break;
    5311              :               }
    5312              :             succe = e;
    5313              :           }
    5314      6093715 :       if (succe
    5315              :           /* Limit the number of edges we check, we should bring in
    5316              :              context from the iteration and compute the single
    5317              :              executable incoming edge when visiting a block.  */
    5318      6093715 :           && EDGE_COUNT (succe->dest->preds) < 8)
    5319              :         {
    5320              :           /* Verify the reached block is only reached through succe.
    5321              :              If there is only one edge we can spare us the dominator
    5322              :              check and iterate directly.  */
    5323       112768 :           if (EDGE_COUNT (succe->dest->preds) > 1)
    5324              :             {
    5325        51897 :               FOR_EACH_EDGE (e, ei, succe->dest->preds)
    5326        40433 :                 if (e != succe
    5327        26485 :                     && ((e->flags & EDGE_EXECUTABLE)
    5328        17444 :                         || (!allow_back && (e->flags & EDGE_DFS_BACK))))
    5329              :                   {
    5330              :                     succe = NULL;
    5331              :                     break;
    5332              :                   }
    5333              :             }
    5334       112768 :           if (succe)
    5335              :             {
    5336       103718 :               bb2 = succe->dest;
    5337              : 
    5338              :               /* Re-do the dominance check with changed bb2.  */
    5339       103718 :               if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5340              :                 return true;
    5341              :             }
    5342              :         }
    5343              :     }
    5344              :   /* Iterate to the single successor of bb2 with only a single executable
    5345              :      incoming edge.  */
    5346     14750355 :   else if (EDGE_COUNT (bb2->succs) == 1
    5347     14211661 :            && EDGE_COUNT (single_succ (bb2)->preds) > 1
    5348              :            /* Limit the number of edges we check, we should bring in
    5349              :               context from the iteration and compute the single
    5350              :               executable incoming edge when visiting a block.  */
    5351     28726622 :            && EDGE_COUNT (single_succ (bb2)->preds) < 8)
    5352              :     {
    5353      4958794 :       edge prede = NULL;
    5354     11167551 :       FOR_EACH_EDGE (e, ei, single_succ (bb2)->preds)
    5355     10642360 :         if ((e->flags & EDGE_EXECUTABLE)
    5356      1298919 :             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
    5357              :           {
    5358      9347622 :             if (prede)
    5359              :               {
    5360              :                 prede = NULL;
    5361              :                 break;
    5362              :               }
    5363              :             prede = e;
    5364              :           }
    5365              :       /* We might actually get to a query with BB2 not visited yet when
    5366              :          we're querying for a predicated value.  */
    5367      4958794 :       if (prede && prede->src == bb2)
    5368              :         {
    5369       467326 :           bb2 = prede->dest;
    5370              : 
    5371              :           /* Re-do the dominance check with changed bb2.  */
    5372       467326 :           if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    5373              :             return true;
    5374              :         }
    5375              :     }
    5376              : 
    5377              :   /* We could now iterate updating bb1 / bb2.  */
    5378              :   return false;
    5379              : }
    5380              : 
    5381              : /* Set the value number of FROM to TO, return true if it has changed
    5382              :    as a result.  */
    5383              : 
    5384              : static inline bool
    5385    202891527 : set_ssa_val_to (tree from, tree to)
    5386              : {
    5387    202891527 :   vn_ssa_aux_t from_info = VN_INFO (from);
    5388    202891527 :   tree currval = from_info->valnum; // SSA_VAL (from)
    5389    202891527 :   poly_int64 toff, coff;
    5390    202891527 :   bool curr_undefined = false;
    5391    202891527 :   bool curr_invariant = false;
    5392              : 
    5393              :   /* The only thing we allow as value numbers are ssa_names
    5394              :      and invariants.  So assert that here.  We don't allow VN_TOP
    5395              :      as visiting a stmt should produce a value-number other than
    5396              :      that.
    5397              :      ???  Still VN_TOP can happen for unreachable code, so force
    5398              :      it to varying in that case.  Not all code is prepared to
    5399              :      get VN_TOP on valueization.  */
    5400    202891527 :   if (to == VN_TOP)
    5401              :     {
    5402              :       /* ???  When iterating and visiting PHI <undef, backedge-value>
    5403              :          for the first time we rightfully get VN_TOP and we need to
    5404              :          preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
    5405              :          With SCCVN we were simply lucky we iterated the other PHI
    5406              :          cycles first and thus visited the backedge-value DEF.  */
    5407            0 :       if (currval == VN_TOP)
    5408            0 :         goto set_and_exit;
    5409            0 :       if (dump_file && (dump_flags & TDF_DETAILS))
    5410            0 :         fprintf (dump_file, "Forcing value number to varying on "
    5411              :                  "receiving VN_TOP\n");
    5412              :       to = from;
    5413              :     }
    5414              : 
    5415    202891527 :   gcc_checking_assert (to != NULL_TREE
    5416              :                        && ((TREE_CODE (to) == SSA_NAME
    5417              :                             && (to == from || SSA_VAL (to) == to))
    5418              :                            || is_gimple_min_invariant (to)));
    5419              : 
    5420    202891527 :   if (from != to)
    5421              :     {
    5422     32195416 :       if (currval == from)
    5423              :         {
    5424        13762 :           if (dump_file && (dump_flags & TDF_DETAILS))
    5425              :             {
    5426            0 :               fprintf (dump_file, "Not changing value number of ");
    5427            0 :               print_generic_expr (dump_file, from);
    5428            0 :               fprintf (dump_file, " from VARYING to ");
    5429            0 :               print_generic_expr (dump_file, to);
    5430            0 :               fprintf (dump_file, "\n");
    5431              :             }
    5432        13762 :           return false;
    5433              :         }
    5434     32181654 :       curr_invariant = is_gimple_min_invariant (currval);
    5435     64363308 :       curr_undefined = (TREE_CODE (currval) == SSA_NAME
    5436      3831448 :                         && !virtual_operand_p (currval)
    5437     35790363 :                         && ssa_undefined_value_p (currval, false));
    5438     32181654 :       if (currval != VN_TOP
    5439              :           && !curr_invariant
    5440      5312656 :           && !curr_undefined
    5441     36000215 :           && is_gimple_min_invariant (to))
    5442              :         {
    5443          220 :           if (dump_file && (dump_flags & TDF_DETAILS))
    5444              :             {
    5445            0 :               fprintf (dump_file, "Forcing VARYING instead of changing "
    5446              :                        "value number of ");
    5447            0 :               print_generic_expr (dump_file, from);
    5448            0 :               fprintf (dump_file, " from ");
    5449            0 :               print_generic_expr (dump_file, currval);
    5450            0 :               fprintf (dump_file, " (non-constant) to ");
    5451            0 :               print_generic_expr (dump_file, to);
    5452            0 :               fprintf (dump_file, " (constant)\n");
    5453              :             }
    5454              :           to = from;
    5455              :         }
    5456     32181434 :       else if (currval != VN_TOP
    5457      5312436 :                && !curr_undefined
    5458      5299549 :                && TREE_CODE (to) == SSA_NAME
    5459      4474566 :                && !virtual_operand_p (to)
    5460     36433261 :                && ssa_undefined_value_p (to, false))
    5461              :         {
    5462            6 :           if (dump_file && (dump_flags & TDF_DETAILS))
    5463              :             {
    5464            0 :               fprintf (dump_file, "Forcing VARYING instead of changing "
    5465              :                        "value number of ");
    5466            0 :               print_generic_expr (dump_file, from);
    5467            0 :               fprintf (dump_file, " from ");
    5468            0 :               print_generic_expr (dump_file, currval);
    5469            0 :               fprintf (dump_file, " (non-undefined) to ");
    5470            0 :               print_generic_expr (dump_file, to);
    5471            0 :               fprintf (dump_file, " (undefined)\n");
    5472              :             }
    5473              :           to = from;
    5474              :         }
    5475     32181428 :       else if (TREE_CODE (to) == SSA_NAME
    5476     32181428 :                && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    5477              :         to = from;
    5478              :     }
    5479              : 
    5480    170696111 : set_and_exit:
    5481    202877765 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5482              :     {
    5483       398519 :       fprintf (dump_file, "Setting value number of ");
    5484       398519 :       print_generic_expr (dump_file, from);
    5485       398519 :       fprintf (dump_file, " to ");
    5486       398519 :       print_generic_expr (dump_file, to);
    5487              :     }
    5488              : 
    5489    202877765 :   if (currval != to
    5490    165222868 :       && !operand_equal_p (currval, to, 0)
    5491              :       /* Different undefined SSA names are not actually different.  See
    5492              :          PR82320 for a testcase were we'd otherwise not terminate iteration.  */
    5493    165154093 :       && !(curr_undefined
    5494         3359 :            && TREE_CODE (to) == SSA_NAME
    5495          574 :            && !virtual_operand_p (to)
    5496          574 :            && ssa_undefined_value_p (to, false))
    5497              :       /* ???  For addresses involving volatile objects or types operand_equal_p
    5498              :          does not reliably detect ADDR_EXPRs as equal.  We know we are only
    5499              :          getting invariant gimple addresses here, so can use
    5500              :          get_addr_base_and_unit_offset to do this comparison.  */
    5501    368031252 :       && !(TREE_CODE (currval) == ADDR_EXPR
    5502       459277 :            && TREE_CODE (to) == ADDR_EXPR
    5503           12 :            && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
    5504            6 :                == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
    5505            6 :            && known_eq (coff, toff)))
    5506              :     {
    5507    165153481 :       if (to != from
    5508     27864899 :           && currval != VN_TOP
    5509       999372 :           && !curr_undefined
    5510              :           /* We do not want to allow lattice transitions from one value
    5511              :              to another since that may lead to not terminating iteration
    5512              :              (see PR95049).  Since there's no convenient way to check
    5513              :              for the allowed transition of VAL -> PHI (loop entry value,
    5514              :              same on two PHIs, to same PHI result) we restrict the check
    5515              :              to invariants.  */
    5516       999372 :           && curr_invariant
    5517    165809700 :           && is_gimple_min_invariant (to))
    5518              :         {
    5519            0 :           if (dump_file && (dump_flags & TDF_DETAILS))
    5520            0 :             fprintf (dump_file, " forced VARYING");
    5521              :           to = from;
    5522              :         }
    5523    165153481 :       if (dump_file && (dump_flags & TDF_DETAILS))
    5524       398203 :         fprintf (dump_file, " (changed)\n");
    5525    165153481 :       from_info->valnum = to;
    5526    165153481 :       return true;
    5527              :     }
    5528     37724284 :   if (dump_file && (dump_flags & TDF_DETAILS))
    5529          316 :     fprintf (dump_file, "\n");
    5530              :   return false;
    5531              : }
    5532              : 
    5533              : /* Set all definitions in STMT to value number to themselves.
    5534              :    Return true if a value number changed. */
    5535              : 
    5536              : static bool
    5537    282033320 : defs_to_varying (gimple *stmt)
    5538              : {
    5539    282033320 :   bool changed = false;
    5540    282033320 :   ssa_op_iter iter;
    5541    282033320 :   def_operand_p defp;
    5542              : 
    5543    311107158 :   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    5544              :     {
    5545     29073838 :       tree def = DEF_FROM_PTR (defp);
    5546     29073838 :       changed |= set_ssa_val_to (def, def);
    5547              :     }
    5548    282033320 :   return changed;
    5549              : }
    5550              : 
    5551              : /* Visit a copy between LHS and RHS, return true if the value number
    5552              :    changed.  */
    5553              : 
    5554              : static bool
    5555      7862429 : visit_copy (tree lhs, tree rhs)
    5556              : {
    5557              :   /* Valueize.  */
    5558      7862429 :   rhs = SSA_VAL (rhs);
    5559              : 
    5560      7862429 :   return set_ssa_val_to (lhs, rhs);
    5561              : }
    5562              : 
    5563              : /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
    5564              :    is the same.  */
    5565              : 
    5566              : static tree
    5567      2440965 : valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
    5568              : {
    5569      2440965 :   if (TREE_CODE (op) == SSA_NAME)
    5570      2139581 :     op = vn_valueize (op);
    5571              : 
    5572              :   /* Either we have the op widened available.  */
    5573      2440965 :   tree ops[3] = {};
    5574      2440965 :   ops[0] = op;
    5575      2440965 :   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
    5576              :                                        wide_type, ops, NULL);
    5577      2440965 :   if (tem)
    5578              :     return tem;
    5579              : 
    5580              :   /* Or the op is truncated from some existing value.  */
    5581      2153253 :   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
    5582              :     {
    5583       545373 :       gimple *def = SSA_NAME_DEF_STMT (op);
    5584       545373 :       if (is_gimple_assign (def)
    5585       545373 :           && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    5586              :         {
    5587       278732 :           tem = gimple_assign_rhs1 (def);
    5588       278732 :           if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
    5589              :             {
    5590       180007 :               if (TREE_CODE (tem) == SSA_NAME)
    5591       180007 :                 tem = vn_valueize (tem);
    5592       180007 :               return tem;
    5593              :             }
    5594              :         }
    5595              :     }
    5596              : 
    5597              :   /* For constants simply extend it.  */
    5598      1973246 :   if (TREE_CODE (op) == INTEGER_CST)
    5599       334429 :     return wide_int_to_tree (wide_type, wi::to_widest (op));
    5600              : 
    5601              :   return NULL_TREE;
    5602              : }
    5603              : 
    5604              : /* Visit a nary operator RHS, value number it, and return true if the
    5605              :    value number of LHS has changed as a result.  */
    5606              : 
    5607              : static bool
    5608     48424009 : visit_nary_op (tree lhs, gassign *stmt)
    5609              : {
    5610     48424009 :   vn_nary_op_t vnresult;
    5611     48424009 :   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
    5612     48424009 :   if (! result && vnresult)
    5613       153543 :     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
    5614     44812321 :   if (result)
    5615      3681642 :     return set_ssa_val_to (lhs, result);
    5616              : 
    5617              :   /* Do some special pattern matching for redundancies of operations
    5618              :      in different types.  */
    5619     44742367 :   enum tree_code code = gimple_assign_rhs_code (stmt);
    5620     44742367 :   tree type = TREE_TYPE (lhs);
    5621     44742367 :   tree rhs1 = gimple_assign_rhs1 (stmt);
    5622     44742367 :   switch (code)
    5623              :     {
    5624      9960876 :     CASE_CONVERT:
    5625              :       /* Match arithmetic done in a different type where we can easily
    5626              :          substitute the result from some earlier sign-changed or widened
    5627              :          operation.  */
    5628      9960876 :       if (INTEGRAL_TYPE_P (type)
    5629      8919176 :           && TREE_CODE (rhs1) == SSA_NAME
    5630              :           /* We only handle sign-changes, zero-extension -> & mask or
    5631              :              sign-extension if we know the inner operation doesn't
    5632              :              overflow.  */
    5633     18646313 :           && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
    5634      5230778 :                 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
    5635      5229992 :                     && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
    5636      7958640 :                && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
    5637      5884719 :               || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
    5638              :         {
    5639      7617333 :           gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
    5640      5469283 :           if (def
    5641      5469283 :               && (gimple_assign_rhs_code (def) == PLUS_EXPR
    5642      4224598 :                   || gimple_assign_rhs_code (def) == MINUS_EXPR
    5643      4090904 :                   || gimple_assign_rhs_code (def) == MULT_EXPR))
    5644              :             {
    5645      1975197 :               tree ops[3] = {};
    5646              :               /* When requiring a sign-extension we cannot model a
    5647              :                  previous truncation with a single op so don't bother.  */
    5648      1975197 :               bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
    5649              :               /* Either we have the op widened available.  */
    5650      1975197 :               ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
    5651              :                                            allow_truncate);
    5652      1975197 :               if (ops[0])
    5653       931536 :                 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
    5654              :                                              allow_truncate);
    5655      1975197 :               if (ops[0] && ops[1])
    5656              :                 {
    5657       336380 :                   ops[0] = vn_nary_op_lookup_pieces
    5658       336380 :                       (2, gimple_assign_rhs_code (def), type, ops, NULL);
    5659              :                   /* We have wider operation available.  */
    5660       336380 :                   if (ops[0]
    5661              :                       /* If the leader is a wrapping operation we can
    5662              :                          insert it for code hoisting w/o introducing
    5663              :                          undefined overflow.  If it is not it has to
    5664              :                          be available.  See PR86554.  */
    5665       336380 :                       && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
    5666         2080 :                           || (rpo_avail && vn_context_bb
    5667         2080 :                               && rpo_avail->eliminate_avail (vn_context_bb,
    5668              :                                                              ops[0]))))
    5669              :                     {
    5670         9571 :                       unsigned lhs_prec = TYPE_PRECISION (type);
    5671         9571 :                       unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
    5672         9571 :                       if (lhs_prec == rhs_prec
    5673         9571 :                           || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
    5674         1755 :                               && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
    5675              :                         {
    5676         8981 :                           gimple_match_op match_op (gimple_match_cond::UNCOND,
    5677         8981 :                                                     NOP_EXPR, type, ops[0]);
    5678         8981 :                           result = vn_nary_build_or_lookup (&match_op);
    5679         8981 :                           if (result)
    5680              :                             {
    5681         8981 :                               bool changed = set_ssa_val_to (lhs, result);
    5682         8981 :                               if (TREE_CODE (result) == SSA_NAME)
    5683         8981 :                                 vn_nary_op_insert_stmt (stmt, result);
    5684         8981 :                               return changed;
    5685              :                             }
    5686              :                         }
    5687              :                       else
    5688              :                         {
    5689          590 :                           tree mask = wide_int_to_tree
    5690          590 :                             (type, wi::mask (rhs_prec, false, lhs_prec));
    5691          590 :                           gimple_match_op match_op (gimple_match_cond::UNCOND,
    5692          590 :                                                     BIT_AND_EXPR,
    5693          590 :                                                     TREE_TYPE (lhs),
    5694          590 :                                                     ops[0], mask);
    5695          590 :                           result = vn_nary_build_or_lookup (&match_op);
    5696          590 :                           if (result)
    5697              :                             {
    5698          590 :                               bool changed = set_ssa_val_to (lhs, result);
    5699          590 :                               if (TREE_CODE (result) == SSA_NAME)
    5700          590 :                                 vn_nary_op_insert_stmt (stmt, result);
    5701          590 :                               return changed;
    5702              :                             }
    5703              :                         }
    5704              :                     }
    5705              :                 }
    5706              :             }
    5707              :         }
    5708              :       break;
    5709      1503934 :     case BIT_AND_EXPR:
    5710      1503934 :       if (INTEGRAL_TYPE_P (type)
    5711      1467873 :           && TREE_CODE (rhs1) == SSA_NAME
    5712      1467873 :           && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
    5713       914400 :           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
    5714       914282 :           && default_vn_walk_kind != VN_NOWALK
    5715              :           && CHAR_BIT == 8
    5716              :           && BITS_PER_UNIT == 8
    5717              :           && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
    5718       914074 :           && TYPE_PRECISION (type) <= vn_walk_cb_data::bufsize * BITS_PER_UNIT
    5719       914072 :           && !integer_all_onesp (gimple_assign_rhs2 (stmt))
    5720      2418006 :           && !integer_zerop (gimple_assign_rhs2 (stmt)))
    5721              :         {
    5722       914072 :           gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
    5723       671552 :           if (ass
    5724       671552 :               && !gimple_has_volatile_ops (ass)
    5725       670106 :               && vn_get_stmt_kind (ass) == VN_REFERENCE)
    5726              :             {
    5727       318731 :               tree last_vuse = gimple_vuse (ass);
    5728       318731 :               tree op = gimple_assign_rhs1 (ass);
    5729       956193 :               tree result = vn_reference_lookup (op, gimple_vuse (ass),
    5730              :                                                  default_vn_walk_kind,
    5731              :                                                  NULL, true, &last_vuse,
    5732              :                                                  gimple_assign_rhs2 (stmt));
    5733       318731 :               if (result
    5734       319169 :                   && useless_type_conversion_p (TREE_TYPE (result),
    5735          438 :                                                 TREE_TYPE (op)))
    5736          438 :                 return set_ssa_val_to (lhs, result);
    5737              :             }
    5738              :         }
    5739              :       break;
    5740       223545 :     case BIT_FIELD_REF:
    5741       223545 :       if (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
    5742              :         {
    5743       223525 :           tree op0 = TREE_OPERAND (rhs1, 0);
    5744       223525 :           gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (op0));
    5745       182254 :           if (ass
    5746       182254 :               && !gimple_has_volatile_ops (ass)
    5747       182171 :               && vn_get_stmt_kind (ass) == VN_REFERENCE)
    5748              :             {
    5749        85250 :               tree last_vuse = gimple_vuse (ass);
    5750        85250 :               tree op = gimple_assign_rhs1 (ass);
    5751              :               /* Avoid building invalid and unexpected refs.  */
    5752        85250 :               if (TREE_CODE (op) != TARGET_MEM_REF
    5753              :                   && TREE_CODE (op) != BIT_FIELD_REF
    5754              :                   && TREE_CODE (op) != REALPART_EXPR
    5755              :                   && TREE_CODE (op) != IMAGPART_EXPR)
    5756              :                 {
    5757        77930 :                   tree op = build3 (BIT_FIELD_REF, TREE_TYPE (rhs1),
    5758              :                                     gimple_assign_rhs1 (ass),
    5759        77930 :                                     TREE_OPERAND (rhs1, 1),
    5760        77930 :                                     TREE_OPERAND (rhs1, 2));
    5761       155860 :                   tree result = vn_reference_lookup (op, gimple_vuse (ass),
    5762              :                                                      default_vn_walk_kind,
    5763              :                                                      NULL, true, &last_vuse);
    5764        77930 :                   if (result
    5765        77930 :                       && useless_type_conversion_p (type, TREE_TYPE (result)))
    5766         1188 :                     return set_ssa_val_to (lhs, result);
    5767        77121 :                   else if (result
    5768          379 :                            && TYPE_SIZE (type)
    5769          379 :                            && TYPE_SIZE (TREE_TYPE (result))
    5770        77500 :                            && operand_equal_p (TYPE_SIZE (type),
    5771          379 :                                                TYPE_SIZE (TREE_TYPE (result))))
    5772              :                     {
    5773          379 :                       gimple_match_op match_op (gimple_match_cond::UNCOND,
    5774          379 :                                                 VIEW_CONVERT_EXPR,
    5775          379 :                                                 type, result);
    5776          379 :                       result = vn_nary_build_or_lookup (&match_op);
    5777          379 :                       if (result)
    5778              :                         {
    5779          379 :                           bool changed = set_ssa_val_to (lhs, result);
    5780          379 :                           if (TREE_CODE (result) == SSA_NAME)
    5781          367 :                             vn_nary_op_insert_stmt (stmt, result);
    5782          379 :                           return changed;
    5783              :                         }
    5784              :                     }
    5785              :                 }
    5786              :             }
    5787              :         }
    5788              :       break;
    5789       325767 :     case TRUNC_DIV_EXPR:
    5790       325767 :       if (TYPE_UNSIGNED (type))
    5791              :         break;
    5792              :       /* Fallthru.  */
    5793      5406956 :     case RDIV_EXPR:
    5794      5406956 :     case MULT_EXPR:
    5795              :       /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
    5796      5406956 :       if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
    5797              :         {
    5798      5406046 :           tree rhs[2];
    5799      5406046 :           rhs[0] = rhs1;
    5800      5406046 :           rhs[1] = gimple_assign_rhs2 (stmt);
    5801     16211251 :           for (unsigned i = 0; i <= 1; ++i)
    5802              :             {
    5803     10810938 :               unsigned j = i == 0 ? 1 : 0;
    5804     10810938 :               tree ops[2];
    5805     10810938 :               gimple_match_op match_op (gimple_match_cond::UNCOND,
    5806     10810938 :                                         NEGATE_EXPR, type, rhs[i]);
    5807     10810938 :               ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
    5808     10810938 :               ops[j] = rhs[j];
    5809     10810938 :               if (ops[i]
    5810     10810938 :                   && (ops[0] = vn_nary_op_lookup_pieces (2, code,
    5811              :                                                          type, ops, NULL)))
    5812              :                 {
    5813         5733 :                   gimple_match_op match_op (gimple_match_cond::UNCOND,
    5814         5733 :                                             NEGATE_EXPR, type, ops[0]);
    5815         5733 :                   result = vn_nary_build_or_lookup_1 (&match_op, true, false);
    5816         5733 :                   if (result)
    5817              :                     {
    5818         5733 :                       bool changed = set_ssa_val_to (lhs, result);
    5819         5733 :                       if (TREE_CODE (result) == SSA_NAME)
    5820         5733 :                         vn_nary_op_insert_stmt (stmt, result);
    5821         5733 :                       return changed;
    5822              :                     }
    5823              :                 }
    5824              :             }
    5825              :         }
    5826              :       break;
    5827       359359 :     case LSHIFT_EXPR:
    5828              :       /* For X << C, use the value number of X * (1 << C).  */
    5829       359359 :       if (INTEGRAL_TYPE_P (type)
    5830       347101 :           && TYPE_OVERFLOW_WRAPS (type)
    5831       544441 :           && !TYPE_SATURATING (type))
    5832              :         {
    5833       185082 :           tree rhs2 = gimple_assign_rhs2 (stmt);
    5834       185082 :           if (TREE_CODE (rhs2) == INTEGER_CST
    5835       107385 :               && tree_fits_uhwi_p (rhs2)
    5836       292467 :               && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
    5837              :             {
    5838       107385 :               wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
    5839       107385 :                                                 TYPE_PRECISION (type));
    5840       214770 :               gimple_match_op match_op (gimple_match_cond::UNCOND,
    5841       107385 :                                         MULT_EXPR, type, rhs1,
    5842       107385 :                                         wide_int_to_tree (type, w));
    5843       107385 :               result = vn_nary_build_or_lookup (&match_op);
    5844       107385 :               if (result)
    5845              :                 {
    5846       107385 :                   bool changed = set_ssa_val_to (lhs, result);
    5847       107385 :                   if (TREE_CODE (result) == SSA_NAME)
    5848       107384 :                     vn_nary_op_insert_stmt (stmt, result);
    5849       107385 :                   return changed;
    5850              :                 }
    5851       107385 :             }
    5852              :         }
    5853              :       break;
    5854              :     default:
    5855              :       break;
    5856              :     }
    5857              : 
    5858     44618052 :   bool changed = set_ssa_val_to (lhs, lhs);
    5859     44618052 :   vn_nary_op_insert_stmt (stmt, lhs);
    5860     44618052 :   return changed;
    5861              : }
    5862              : 
    5863              : /* Visit a call STMT storing into LHS.  Return true if the value number
    5864              :    of the LHS has changed as a result.  */
    5865              : 
    5866              : static bool
    5867      8554530 : visit_reference_op_call (tree lhs, gcall *stmt)
    5868              : {
    5869      8554530 :   bool changed = false;
    5870      8554530 :   struct vn_reference_s vr1;
    5871      8554530 :   vn_reference_t vnresult = NULL;
    5872      8554530 :   tree vdef = gimple_vdef (stmt);
    5873      8554530 :   modref_summary *summary;
    5874              : 
    5875              :   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
    5876      8554530 :   if (lhs && TREE_CODE (lhs) != SSA_NAME)
    5877      4555765 :     lhs = NULL_TREE;
    5878              : 
    5879      8554530 :   vn_reference_lookup_call (stmt, &vnresult, &vr1);
    5880              : 
    5881              :   /* If the lookup did not succeed for pure functions try to use
    5882              :      modref info to find a candidate to CSE to.  */
    5883      8554530 :   const unsigned accesses_limit = 8;
    5884      8554530 :   if (!vnresult
    5885      7915680 :       && !vdef
    5886      7915680 :       && lhs
    5887      2785783 :       && gimple_vuse (stmt)
    5888     10113929 :       && (((summary = get_modref_function_summary (stmt, NULL))
    5889       211324 :            && !summary->global_memory_read
    5890        82061 :            && summary->load_accesses < accesses_limit)
    5891      1477472 :           || gimple_call_flags (stmt) & ECF_CONST))
    5892              :     {
    5893              :       /* First search if we can do someting useful and build a
    5894              :          vector of all loads we have to check.  */
    5895        82663 :       bool unknown_memory_access = false;
    5896        82663 :       auto_vec<ao_ref, accesses_limit> accesses;
    5897        82663 :       unsigned load_accesses = summary ? summary->load_accesses : 0;
    5898        82663 :       if (!unknown_memory_access)
    5899              :         /* Add loads done as part of setting up the call arguments.
    5900              :            That's also necessary for CONST functions which will
    5901              :            not have a modref summary.  */
    5902       242547 :         for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
    5903              :           {
    5904       159892 :             tree arg = gimple_call_arg (stmt, i);
    5905       159892 :             if (TREE_CODE (arg) != SSA_NAME
    5906       159892 :                 && !is_gimple_min_invariant (arg))
    5907              :               {
    5908        61754 :                 if (accesses.length () >= accesses_limit - load_accesses)
    5909              :                   {
    5910              :                     unknown_memory_access = true;
    5911              :                     break;
    5912              :                   }
    5913        30869 :                 accesses.quick_grow (accesses.length () + 1);
    5914        30869 :                 ao_ref_init (&accesses.last (), arg);
    5915              :               }
    5916              :           }
    5917        82663 :       if (summary && !unknown_memory_access)
    5918              :         {
    5919              :           /* Add loads as analyzed by IPA modref.  */
    5920       281110 :           for (auto base_node : summary->loads->bases)
    5921        69933 :             if (unknown_memory_access)
    5922              :               break;
    5923       284999 :             else for (auto ref_node : base_node->refs)
    5924        76043 :               if (unknown_memory_access)
    5925              :                 break;
    5926       310901 :               else for (auto access_node : ref_node->accesses)
    5927              :                 {
    5928       201268 :                   accesses.quick_grow (accesses.length () + 1);
    5929       100634 :                   ao_ref *r = &accesses.last ();
    5930       100634 :                   if (!access_node.get_ao_ref (stmt, r))
    5931              :                     {
    5932              :                       /* Initialize a ref based on the argument and
    5933              :                          unknown offset if possible.  */
    5934        17826 :                       tree arg = access_node.get_call_arg (stmt);
    5935        17826 :                       if (arg && TREE_CODE (arg) == SSA_NAME)
    5936         4240 :                         arg = SSA_VAL (arg);
    5937         4240 :                       if (arg
    5938        17816 :                           && TREE_CODE (arg) == ADDR_EXPR
    5939        13652 :                           && (arg = get_base_address (arg))
    5940        17892 :                           && DECL_P (arg))
    5941              :                         {
    5942            0 :                           ao_ref_init (r, arg);
    5943            0 :                           r->ref = NULL_TREE;
    5944            0 :                           r->base = arg;
    5945              :                         }
    5946              :                       else
    5947              :                         {
    5948              :                           unknown_memory_access = true;
    5949              :                           break;
    5950              :                         }
    5951              :                     }
    5952        82808 :                   r->base_alias_set = base_node->base;
    5953        82808 :                   r->ref_alias_set = ref_node->ref;
    5954              :                 }
    5955              :         }
    5956              : 
    5957              :       /* Walk the VUSE->VDEF chain optimistically trying to find an entry
    5958              :          for the call in the hashtable.  */
    5959        82663 :       unsigned limit = (unknown_memory_access
    5960        82663 :                         ? 0
    5961        64829 :                         : (param_sccvn_max_alias_queries_per_access
    5962        64829 :                            / (accesses.length () + 1)));
    5963        82663 :       tree saved_vuse = vr1.vuse;
    5964        82663 :       hashval_t saved_hashcode = vr1.hashcode;
    5965       402782 :       while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
    5966              :         {
    5967       340918 :           vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
    5968       340918 :           gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
    5969              :           /* ???  We could use fancy stuff like in walk_non_aliased_vuses, but
    5970              :              do not bother for now.  */
    5971       340918 :           if (is_a <gphi *> (def))
    5972              :             break;
    5973       640238 :           vr1.vuse = vuse_ssa_val (gimple_vuse (def));
    5974       320119 :           vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
    5975       320119 :           vn_reference_lookup_1 (&vr1, &vnresult);
    5976       320119 :           limit--;
    5977              :         }
    5978              : 
    5979              :       /* If we found a candidate to CSE to verify it is valid.  */
    5980        82663 :       if (vnresult && !accesses.is_empty ())
    5981              :         {
    5982         1917 :           tree vuse = vuse_ssa_val (gimple_vuse (stmt));
    5983         7132 :           while (vnresult && vuse != vr1.vuse)
    5984              :             {
    5985         3298 :               gimple *def = SSA_NAME_DEF_STMT (vuse);
    5986        17349 :               for (auto &ref : accesses)
    5987              :                 {
    5988              :                   /* ???  stmt_may_clobber_ref_p_1 does per stmt constant
    5989              :                      analysis overhead that we might be able to cache.  */
    5990         9198 :                   if (stmt_may_clobber_ref_p_1 (def, &ref, true))
    5991              :                     {
    5992         1743 :                       vnresult = NULL;
    5993         1743 :                       break;
    5994              :                     }
    5995              :                 }
    5996         6596 :               vuse = vuse_ssa_val (gimple_vuse (def));
    5997              :             }
    5998              :         }
    5999        82663 :       vr1.vuse = saved_vuse;
    6000        82663 :       vr1.hashcode = saved_hashcode;
    6001        82663 :     }
    6002              : 
    6003      8554530 :   if (vnresult)
    6004              :     {
    6005       639052 :       if (vdef)
    6006              :         {
    6007       172984 :           if (vnresult->result_vdef)
    6008       172984 :             changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
    6009            0 :           else if (!lhs && gimple_call_lhs (stmt))
    6010              :             /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
    6011              :                as the call still acts as a lhs store.  */
    6012            0 :             changed |= set_ssa_val_to (vdef, vdef);
    6013              :           else
    6014              :             /* If the call was discovered to be pure or const reflect
    6015              :                that as far as possible.  */
    6016            0 :             changed |= set_ssa_val_to (vdef,
    6017              :                                        vuse_ssa_val (gimple_vuse (stmt)));
    6018              :         }
    6019              : 
    6020       639052 :       if (!vnresult->result && lhs)
    6021            0 :         vnresult->result = lhs;
    6022              : 
    6023       639052 :       if (vnresult->result && lhs)
    6024        95855 :         changed |= set_ssa_val_to (lhs, vnresult->result);
    6025              :     }
    6026              :   else
    6027              :     {
    6028      7915478 :       vn_reference_t vr2;
    6029      7915478 :       vn_reference_s **slot;
    6030      7915478 :       tree vdef_val = vdef;
    6031      7915478 :       if (vdef)
    6032              :         {
    6033              :           /* If we value numbered an indirect functions function to
    6034              :              one not clobbering memory value number its VDEF to its
    6035              :              VUSE.  */
    6036      4804192 :           tree fn = gimple_call_fn (stmt);
    6037      4804192 :           if (fn && TREE_CODE (fn) == SSA_NAME)
    6038              :             {
    6039       129542 :               fn = SSA_VAL (fn);
    6040       129542 :               if (TREE_CODE (fn) == ADDR_EXPR
    6041         1715 :                   && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
    6042         1715 :                   && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
    6043         1715 :                       & (ECF_CONST | ECF_PURE))
    6044              :                   /* If stmt has non-SSA_NAME lhs, value number the
    6045              :                      vdef to itself, as the call still acts as a lhs
    6046              :                      store.  */
    6047       130661 :                   && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
    6048         2112 :                 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
    6049              :             }
    6050      4804192 :           changed |= set_ssa_val_to (vdef, vdef_val);
    6051              :         }
    6052      7915478 :       if (lhs)
    6053      3902910 :         changed |= set_ssa_val_to (lhs, lhs);
    6054      7915478 :       vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
    6055      7915478 :       vr2->vuse = vr1.vuse;
    6056              :       /* As we are not walking the virtual operand chain we know the
    6057              :          shared_lookup_references are still original so we can re-use
    6058              :          them here.  */
    6059      7915478 :       vr2->operands = vr1.operands.copy ();
    6060      7915478 :       vr2->type = vr1.type;
    6061      7915478 :       vr2->punned = vr1.punned;
    6062      7915478 :       vr2->set = vr1.set;
    6063      7915478 :       vr2->offset = vr1.offset;
    6064      7915478 :       vr2->max_size = vr1.max_size;
    6065      7915478 :       vr2->base_set = vr1.base_set;
    6066      7915478 :       vr2->hashcode = vr1.hashcode;
    6067      7915478 :       vr2->result = lhs;
    6068      7915478 :       vr2->result_vdef = vdef_val;
    6069      7915478 :       vr2->value_id = 0;
    6070      7915478 :       slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
    6071              :                                                           INSERT);
    6072      7915478 :       gcc_assert (!*slot);
    6073      7915478 :       *slot = vr2;
    6074      7915478 :       vr2->next = last_inserted_ref;
    6075      7915478 :       last_inserted_ref = vr2;
    6076              :     }
    6077              : 
    6078      8554530 :   return changed;
    6079              : }
    6080              : 
    6081              : /* Visit a load from a reference operator RHS, part of STMT, value number it,
    6082              :    and return true if the value number of the LHS has changed as a result.  */
    6083              : 
    6084              : static bool
    6085     34207280 : visit_reference_op_load (tree lhs, tree op, gimple *stmt)
    6086              : {
    6087     34207280 :   bool changed = false;
    6088     34207280 :   tree result;
    6089     34207280 :   vn_reference_t res;
    6090              : 
    6091     34207280 :   tree vuse = gimple_vuse (stmt);
    6092     34207280 :   tree last_vuse = vuse;
    6093     34207280 :   result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
    6094              : 
    6095              :   /* We handle type-punning through unions by value-numbering based
    6096              :      on offset and size of the access.  Be prepared to handle a
    6097              :      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
    6098     34207280 :   if (result
    6099     34207280 :       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    6100              :     {
    6101        18807 :       if (CONSTANT_CLASS_P (result))
    6102         4192 :         result = const_unop (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
    6103              :       else
    6104              :         {
    6105              :           /* We will be setting the value number of lhs to the value number
    6106              :              of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
    6107              :              So first simplify and lookup this expression to see if it
    6108              :              is already available.  */
    6109        14615 :           gimple_match_op res_op (gimple_match_cond::UNCOND,
    6110        14615 :                                   VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
    6111        14615 :           result = vn_nary_build_or_lookup (&res_op);
    6112        14615 :           if (result
    6113        14609 :               && TREE_CODE (result) == SSA_NAME
    6114        27539 :               && VN_INFO (result)->needs_insertion)
    6115              :             /* Track whether this is the canonical expression for different
    6116              :                typed loads.  We use that as a stopgap measure for code
    6117              :                hoisting when dealing with floating point loads.  */
    6118        11712 :             res->punned = true;
    6119              :         }
    6120              : 
    6121              :       /* When building the conversion fails avoid inserting the reference
    6122              :          again.  */
    6123        18807 :       if (!result)
    6124            6 :         return set_ssa_val_to (lhs, lhs);
    6125              :     }
    6126              : 
    6127     34188473 :   if (result)
    6128      5466673 :     changed = set_ssa_val_to (lhs, result);
    6129              :   else
    6130              :     {
    6131     28740601 :       changed = set_ssa_val_to (lhs, lhs);
    6132     28740601 :       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    6133     28740601 :       if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
    6134              :         {
    6135      8754443 :           if (dump_file && (dump_flags & TDF_DETAILS))
    6136              :             {
    6137        15208 :               fprintf (dump_file, "Using extra use virtual operand ");
    6138        15208 :               print_generic_expr (dump_file, last_vuse);
    6139        15208 :               fprintf (dump_file, "\n");
    6140              :             }
    6141      8754443 :           vn_reference_insert (op, lhs, vuse, NULL_TREE);
    6142              :         }
    6143              :     }
    6144              : 
    6145              :   return changed;
    6146              : }
    6147              : 
    6148              : 
    6149              : /* Visit a store to a reference operator LHS, part of STMT, value number it,
    6150              :    and return true if the value number of the LHS has changed as a result.  */
    6151              : 
    6152              : static bool
    6153     32442788 : visit_reference_op_store (tree lhs, tree op, gimple *stmt)
    6154              : {
    6155     32442788 :   bool changed = false;
    6156     32442788 :   vn_reference_t vnresult = NULL;
    6157     32442788 :   tree assign;
    6158     32442788 :   bool resultsame = false;
    6159     32442788 :   tree vuse = gimple_vuse (stmt);
    6160     32442788 :   tree vdef = gimple_vdef (stmt);
    6161              : 
    6162     32442788 :   if (TREE_CODE (op) == SSA_NAME)
    6163     14668714 :     op = SSA_VAL (op);
    6164              : 
    6165              :   /* First we want to lookup using the *vuses* from the store and see
    6166              :      if there the last store to this location with the same address
    6167              :      had the same value.
    6168              : 
    6169              :      The vuses represent the memory state before the store.  If the
    6170              :      memory state, address, and value of the store is the same as the
    6171              :      last store to this location, then this store will produce the
    6172              :      same memory state as that store.
    6173              : 
    6174              :      In this case the vdef versions for this store are value numbered to those
    6175              :      vuse versions, since they represent the same memory state after
    6176              :      this store.
    6177              : 
    6178              :      Otherwise, the vdefs for the store are used when inserting into
    6179              :      the table, since the store generates a new memory state.  */
    6180              : 
    6181     32442788 :   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
    6182     32442788 :   if (vnresult
    6183      1665880 :       && vnresult->result)
    6184              :     {
    6185      1665880 :       tree result = vnresult->result;
    6186      1665880 :       gcc_checking_assert (TREE_CODE (result) != SSA_NAME
    6187              :                            || result == SSA_VAL (result));
    6188      1665880 :       resultsame = expressions_equal_p (result, op);
    6189      1665880 :       if (resultsame)
    6190              :         {
    6191              :           /* If the TBAA state isn't compatible for downstream reads
    6192              :              we cannot value-number the VDEFs the same.  */
    6193        51517 :           ao_ref lhs_ref;
    6194        51517 :           ao_ref_init (&lhs_ref, lhs);
    6195        51517 :           alias_set_type set = ao_ref_alias_set (&lhs_ref);
    6196        51517 :           alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
    6197        51517 :           if ((vnresult->set != set
    6198          850 :                && ! alias_set_subset_of (set, vnresult->set))
    6199        52043 :               || (vnresult->base_set != base_set
    6200         6303 :                   && ! alias_set_subset_of (base_set, vnresult->base_set)))
    6201          846 :             resultsame = false;
    6202              :         }
    6203              :     }
    6204              : 
    6205          846 :   if (!resultsame)
    6206              :     {
    6207     32392117 :       if (dump_file && (dump_flags & TDF_DETAILS))
    6208              :         {
    6209        20440 :           fprintf (dump_file, "No store match\n");
    6210        20440 :           fprintf (dump_file, "Value numbering store ");
    6211        20440 :           print_generic_expr (dump_file, lhs);
    6212        20440 :           fprintf (dump_file, " to ");
    6213        20440 :           print_generic_expr (dump_file, op);
    6214        20440 :           fprintf (dump_file, "\n");
    6215              :         }
    6216              :       /* Have to set value numbers before insert, since insert is
    6217              :          going to valueize the references in-place.  */
    6218     32392117 :       if (vdef)
    6219     32392117 :         changed |= set_ssa_val_to (vdef, vdef);
    6220              : 
    6221              :       /* Do not insert structure copies into the tables.  */
    6222     32392117 :       if (is_gimple_min_invariant (op)
    6223     32392117 :           || is_gimple_reg (op))
    6224     28878868 :         vn_reference_insert (lhs, op, vdef, NULL);
    6225              : 
    6226              :       /* Only perform the following when being called from PRE
    6227              :          which embeds tail merging.  */
    6228     32392117 :       if (default_vn_walk_kind == VN_WALK)
    6229              :         {
    6230      7412701 :           assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
    6231      7412701 :           vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
    6232      7412701 :           if (!vnresult)
    6233      7369859 :             vn_reference_insert (assign, lhs, vuse, vdef);
    6234              :         }
    6235              :     }
    6236              :   else
    6237              :     {
    6238              :       /* We had a match, so value number the vdef to have the value
    6239              :          number of the vuse it came from.  */
    6240              : 
    6241        50671 :       if (dump_file && (dump_flags & TDF_DETAILS))
    6242            9 :         fprintf (dump_file, "Store matched earlier value, "
    6243              :                  "value numbering store vdefs to matching vuses.\n");
    6244              : 
    6245        50671 :       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    6246              :     }
    6247              : 
    6248     32442788 :   return changed;
    6249              : }
    6250              : 
    6251              : /* Visit and value number PHI, return true if the value number
    6252              :    changed.  When BACKEDGES_VARYING_P is true then assume all
    6253              :    backedge values are varying.  When INSERTED is not NULL then
    6254              :    this is just a ahead query for a possible iteration, set INSERTED
    6255              :    to true if we'd insert into the hashtable.  */
    6256              : 
    6257              : static bool
    6258     34398750 : visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
    6259              : {
    6260     34398750 :   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
    6261     34398750 :   bool seen_undef_visited = false;
    6262     34398750 :   tree backedge_val = NULL_TREE;
    6263     34398750 :   bool seen_non_backedge = false;
    6264     34398750 :   tree sameval_base = NULL_TREE;
    6265     34398750 :   poly_int64 soff, doff;
    6266     34398750 :   unsigned n_executable = 0;
    6267     34398750 :   edge sameval_e = NULL;
    6268              : 
    6269              :   /* TODO: We could check for this in initialization, and replace this
    6270              :      with a gcc_assert.  */
    6271     34398750 :   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    6272        30009 :     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    6273              : 
    6274              :   /* We track whether a PHI was CSEd to avoid excessive iterations
    6275              :      that would be necessary only because the PHI changed arguments
    6276              :      but not value.  */
    6277     34368741 :   if (!inserted)
    6278     26806569 :     gimple_set_plf (phi, GF_PLF_1, false);
    6279              : 
    6280     34368741 :   basic_block bb = gimple_bb (phi);
    6281              : 
    6282              :   /* For the equivalence handling below make sure to first process an
    6283              :      edge with a non-constant.  */
    6284     34368741 :   auto_vec<edge, 2> preds;
    6285     68737482 :   preds.reserve_exact (EDGE_COUNT (bb->preds));
    6286     34368741 :   bool seen_nonconstant = false;
    6287    113593562 :   for (unsigned i = 0; i < EDGE_COUNT (bb->preds); ++i)
    6288              :     {
    6289     79224821 :       edge e = EDGE_PRED (bb, i);
    6290     79224821 :       preds.quick_push (e);
    6291     79224821 :       if (!seen_nonconstant)
    6292              :         {
    6293     41987184 :           tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
    6294     41987184 :           if (TREE_CODE (def) == SSA_NAME)
    6295              :             {
    6296     32661111 :               seen_nonconstant = true;
    6297     32661111 :               if (i != 0)
    6298      5700227 :                 std::swap (preds[0], preds[i]);
    6299              :             }
    6300              :         }
    6301              :     }
    6302              : 
    6303              :   /* See if all non-TOP arguments have the same value.  TOP is
    6304              :      equivalent to everything, so we can ignore it.  */
    6305    144633381 :   for (edge e : preds)
    6306     68301630 :     if (e->flags & EDGE_EXECUTABLE)
    6307              :       {
    6308     63291060 :         tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
    6309              : 
    6310     63291060 :         if (def == PHI_RESULT (phi))
    6311       318601 :           continue;
    6312     62994051 :         ++n_executable;
    6313     62994051 :         bool visited = true;
    6314     62994051 :         if (TREE_CODE (def) == SSA_NAME)
    6315              :           {
    6316     50871179 :             tree val = SSA_VAL (def, &visited);
    6317     50871179 :             if (SSA_NAME_IS_DEFAULT_DEF (def))
    6318      2668440 :               visited = true;
    6319     50871179 :             if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
    6320     48328202 :               def = val;
    6321     50871179 :             if (e->flags & EDGE_DFS_BACK)
    6322     15428444 :               backedge_val = def;
    6323              :           }
    6324     62994051 :         if (!(e->flags & EDGE_DFS_BACK))
    6325     47398920 :           seen_non_backedge = true;
    6326     62994051 :         if (def == VN_TOP)
    6327              :           ;
    6328              :         /* Ignore undefined defs for sameval but record one.  */
    6329     62994051 :         else if (TREE_CODE (def) == SSA_NAME
    6330     47501378 :                  && ! virtual_operand_p (def)
    6331     87271329 :                  && ssa_undefined_value_p (def, false))
    6332              :           {
    6333       225740 :             if (!seen_undef
    6334              :                 /* Avoid having not visited undefined defs if we also have
    6335              :                    a visited one.  */
    6336        28879 :                 || (!seen_undef_visited && visited))
    6337              :               {
    6338       196864 :                 seen_undef = def;
    6339       196864 :                 seen_undef_visited = visited;
    6340              :               }
    6341              :           }
    6342     62768311 :         else if (sameval == VN_TOP)
    6343              :           {
    6344              :             sameval = def;
    6345              :             sameval_e = e;
    6346              :           }
    6347     28446231 :         else if (expressions_equal_p (def, sameval))
    6348              :           sameval_e = NULL;
    6349     44785289 :         else if (virtual_operand_p (def))
    6350              :           {
    6351              :             sameval = NULL_TREE;
    6352     26774472 :             break;
    6353              :           }
    6354              :         else
    6355              :           {
    6356              :             /* We know we're arriving only with invariant addresses here,
    6357              :                try harder comparing them.  We can do some caching here
    6358              :                which we cannot do in expressions_equal_p.  */
    6359     16760791 :             if (TREE_CODE (def) == ADDR_EXPR
    6360       388970 :                 && TREE_CODE (sameval) == ADDR_EXPR
    6361       108822 :                 && sameval_base != (void *)-1)
    6362              :               {
    6363       108822 :                 if (!sameval_base)
    6364       108820 :                   sameval_base = get_addr_base_and_unit_offset
    6365       108820 :                                    (TREE_OPERAND (sameval, 0), &soff);
    6366       108820 :                 if (!sameval_base)
    6367              :                   sameval_base = (tree)(void *)-1;
    6368       108827 :                 else if ((get_addr_base_and_unit_offset
    6369       108822 :                             (TREE_OPERAND (def, 0), &doff) == sameval_base)
    6370       108822 :                          && known_eq (soff, doff))
    6371            5 :                   continue;
    6372              :               }
    6373              :             /* There's also the possibility to use equivalences.  */
    6374     32434199 :             if (!FLOAT_TYPE_P (TREE_TYPE (def))
    6375              :                 /* But only do this if we didn't force any of sameval or
    6376              :                    val to VARYING because of backedge processing rules.  */
    6377     15569988 :                 && (TREE_CODE (sameval) != SSA_NAME
    6378     12304333 :                     || SSA_VAL (sameval) == sameval)
    6379     32330711 :                 && (TREE_CODE (def) != SSA_NAME || SSA_VAL (def) == def))
    6380              :               {
    6381     15569911 :                 vn_nary_op_t vnresult;
    6382     15569911 :                 tree ops[2];
    6383     15569911 :                 ops[0] = def;
    6384     15569911 :                 ops[1] = sameval;
    6385              :                 /* Canonicalize the operands order for eq below. */
    6386     15569911 :                 if (tree_swap_operands_p (ops[0], ops[1]))
    6387      9398816 :                   std::swap (ops[0], ops[1]);
    6388     15569911 :                 tree val = vn_nary_op_lookup_pieces (2, EQ_EXPR,
    6389              :                                                      boolean_type_node,
    6390              :                                                      ops, &vnresult);
    6391     15569911 :                 if (! val && vnresult && vnresult->predicated_values)
    6392              :                   {
    6393       211187 :                     val = vn_nary_op_get_predicated_value (vnresult, e);
    6394       123851 :                     if (val && integer_truep (val)
    6395       232896 :                         && !(sameval_e && (sameval_e->flags & EDGE_DFS_BACK)))
    6396              :                       {
    6397        21587 :                         if (dump_file && (dump_flags & TDF_DETAILS))
    6398              :                           {
    6399            2 :                             fprintf (dump_file, "Predication says ");
    6400            2 :                             print_generic_expr (dump_file, def, TDF_NONE);
    6401            2 :                             fprintf (dump_file, " and ");
    6402            2 :                             print_generic_expr (dump_file, sameval, TDF_NONE);
    6403            2 :                             fprintf (dump_file, " are equal on edge %d -> %d\n",
    6404            2 :                                      e->src->index, e->dest->index);
    6405              :                           }
    6406        21587 :                         continue;
    6407              :                       }
    6408              :                   }
    6409              :               }
    6410              :             sameval = NULL_TREE;
    6411              :             break;
    6412              :           }
    6413              :       }
    6414              : 
    6415              :   /* If the value we want to use is flowing over the backedge and we
    6416              :      should take it as VARYING but it has a non-VARYING value drop to
    6417              :      VARYING.
    6418              :      If we value-number a virtual operand never value-number to the
    6419              :      value from the backedge as that confuses the alias-walking code.
    6420              :      See gcc.dg/torture/pr87176.c.  If the value is the same on a
    6421              :      non-backedge everything is OK though.  */
    6422     34368741 :   bool visited_p;
    6423     34368741 :   if ((backedge_val
    6424     34368741 :        && !seen_non_backedge
    6425         2065 :        && TREE_CODE (backedge_val) == SSA_NAME
    6426         1785 :        && sameval == backedge_val
    6427          341 :        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
    6428           62 :            || SSA_VAL (backedge_val) != backedge_val))
    6429              :       /* Do not value-number a virtual operand to sth not visited though
    6430              :          given that allows us to escape a region in alias walking.  */
    6431     34370527 :       || (sameval
    6432      7593990 :           && TREE_CODE (sameval) == SSA_NAME
    6433      4509681 :           && !SSA_NAME_IS_DEFAULT_DEF (sameval)
    6434      3818049 :           && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
    6435      1908215 :           && (SSA_VAL (sameval, &visited_p), !visited_p)))
    6436              :     /* Note this just drops to VARYING without inserting the PHI into
    6437              :        the hashes.  */
    6438       297000 :     result = PHI_RESULT (phi);
    6439              :   /* If none of the edges was executable keep the value-number at VN_TOP,
    6440              :      if only a single edge is exectuable use its value.  */
    6441     34071741 :   else if (n_executable <= 1)
    6442      6539814 :     result = seen_undef ? seen_undef : sameval;
    6443              :   /* If we saw only undefined values and VN_TOP use one of the
    6444              :      undefined values.  */
    6445     27531927 :   else if (sameval == VN_TOP)
    6446      7115737 :     result = (seen_undef && seen_undef_visited) ? seen_undef : sameval;
    6447              :   /* First see if it is equivalent to a phi node in this block.  We prefer
    6448              :      this as it allows IV elimination - see PRs 66502 and 67167.  */
    6449     27526629 :   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    6450              :     {
    6451      4204883 :       if (!inserted
    6452        67153 :           && TREE_CODE (result) == SSA_NAME
    6453      4272036 :           && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
    6454              :         {
    6455        67153 :           gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
    6456        67153 :           if (dump_file && (dump_flags & TDF_DETAILS))
    6457              :             {
    6458            6 :               fprintf (dump_file, "Marking CSEd to PHI node ");
    6459            6 :               print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
    6460              :                                  0, TDF_SLIM);
    6461            6 :               fprintf (dump_file, "\n");
    6462              :             }
    6463              :         }
    6464              :     }
    6465              :   /* If all values are the same use that, unless we've seen undefined
    6466              :      values as well and the value isn't constant.
    6467              :      CCP/copyprop have the same restriction to not remove uninit warnings.  */
    6468     23321746 :   else if (sameval
    6469     23321746 :            && (! seen_undef || is_gimple_min_invariant (sameval)))
    6470              :     result = sameval;
    6471              :   else
    6472              :     {
    6473     22709758 :       result = PHI_RESULT (phi);
    6474              :       /* Only insert PHIs that are varying, for constant value numbers
    6475              :          we mess up equivalences otherwise as we are only comparing
    6476              :          the immediate controlling predicates.  */
    6477     22709758 :       vn_phi_insert (phi, result, backedges_varying_p);
    6478     22709758 :       if (inserted)
    6479      3273100 :         *inserted = true;
    6480              :     }
    6481              : 
    6482     34368741 :   return set_ssa_val_to (PHI_RESULT (phi), result);
    6483     34368741 : }
    6484              : 
    6485              : /* Try to simplify RHS using equivalences and constant folding.  */
    6486              : 
    6487              : static tree
    6488    125545662 : try_to_simplify (gassign *stmt)
    6489              : {
    6490    125545662 :   enum tree_code code = gimple_assign_rhs_code (stmt);
    6491    125545662 :   tree tem;
    6492              : 
    6493              :   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
    6494              :      in this case, there is no point in doing extra work.  */
    6495    125545662 :   if (code == SSA_NAME)
    6496              :     return NULL_TREE;
    6497              : 
    6498              :   /* First try constant folding based on our current lattice.  */
    6499    110876695 :   mprts_hook = vn_lookup_simplify_result;
    6500    110876695 :   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
    6501    110876695 :   mprts_hook = NULL;
    6502    110876695 :   if (tem
    6503    110876695 :       && (TREE_CODE (tem) == SSA_NAME
    6504     24566911 :           || is_gimple_min_invariant (tem)))
    6505     24637139 :     return tem;
    6506              : 
    6507              :   return NULL_TREE;
    6508              : }
    6509              : 
    6510              : /* Visit and value number STMT, return true if the value number
    6511              :    changed.  */
    6512              : 
    6513              : static bool
    6514    447857134 : visit_stmt (gimple *stmt, bool backedges_varying_p = false)
    6515              : {
    6516    447857134 :   bool changed = false;
    6517              : 
    6518    447857134 :   if (dump_file && (dump_flags & TDF_DETAILS))
    6519              :     {
    6520       410913 :       fprintf (dump_file, "Value numbering stmt = ");
    6521       410913 :       print_gimple_stmt (dump_file, stmt, 0);
    6522              :     }
    6523              : 
    6524    447857134 :   if (gimple_code (stmt) == GIMPLE_PHI)
    6525     26827026 :     changed = visit_phi (stmt, NULL, backedges_varying_p);
    6526    590247883 :   else if (gimple_has_volatile_ops (stmt))
    6527      8524957 :     changed = defs_to_varying (stmt);
    6528    412505151 :   else if (gassign *ass = dyn_cast <gassign *> (stmt))
    6529              :     {
    6530    130481529 :       enum tree_code code = gimple_assign_rhs_code (ass);
    6531    130481529 :       tree lhs = gimple_assign_lhs (ass);
    6532    130481529 :       tree rhs1 = gimple_assign_rhs1 (ass);
    6533    130481529 :       tree simplified;
    6534              : 
    6535              :       /* Shortcut for copies. Simplifying copies is pointless,
    6536              :          since we copy the expression and value they represent.  */
    6537    130481529 :       if (code == SSA_NAME
    6538     19604834 :           && TREE_CODE (lhs) == SSA_NAME)
    6539              :         {
    6540      4935867 :           changed = visit_copy (lhs, rhs1);
    6541      4935867 :           goto done;
    6542              :         }
    6543    125545662 :       simplified = try_to_simplify (ass);
    6544    125545662 :       if (simplified)
    6545              :         {
    6546     24637139 :           if (dump_file && (dump_flags & TDF_DETAILS))
    6547              :             {
    6548        14548 :               fprintf (dump_file, "RHS ");
    6549        14548 :               print_gimple_expr (dump_file, ass, 0);
    6550        14548 :               fprintf (dump_file, " simplified to ");
    6551        14548 :               print_generic_expr (dump_file, simplified);
    6552        14548 :               fprintf (dump_file, "\n");
    6553              :             }
    6554              :         }
    6555              :       /* Setting value numbers to constants will occasionally
    6556              :          screw up phi congruence because constants are not
    6557              :          uniquely associated with a single ssa name that can be
    6558              :          looked up.  */
    6559     24637139 :       if (simplified
    6560     24637139 :           && is_gimple_min_invariant (simplified)
    6561     21710870 :           && TREE_CODE (lhs) == SSA_NAME)
    6562              :         {
    6563      7450025 :           changed = set_ssa_val_to (lhs, simplified);
    6564      7450025 :           goto done;
    6565              :         }
    6566    118095637 :       else if (simplified
    6567     17187114 :                && TREE_CODE (simplified) == SSA_NAME
    6568      2926269 :                && TREE_CODE (lhs) == SSA_NAME)
    6569              :         {
    6570      2926269 :           changed = visit_copy (lhs, simplified);
    6571      2926269 :           goto done;
    6572              :         }
    6573              : 
    6574    115169368 :       if ((TREE_CODE (lhs) == SSA_NAME
    6575              :            /* We can substitute SSA_NAMEs that are live over
    6576              :               abnormal edges with their constant value.  */
    6577     82726353 :            && !(gimple_assign_copy_p (ass)
    6578           26 :                 && is_gimple_min_invariant (rhs1))
    6579     82726327 :            && !(simplified
    6580            0 :                 && is_gimple_min_invariant (simplified))
    6581     82726327 :            && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    6582              :           /* Stores or copies from SSA_NAMEs that are live over
    6583              :              abnormal edges are a problem.  */
    6584    197894411 :           || (code == SSA_NAME
    6585     14668967 :               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
    6586         1537 :         changed = defs_to_varying (ass);
    6587    115167831 :       else if (REFERENCE_CLASS_P (lhs)
    6588    115167831 :                || DECL_P (lhs))
    6589     32442788 :         changed = visit_reference_op_store (lhs, rhs1, ass);
    6590     82725043 :       else if (TREE_CODE (lhs) == SSA_NAME)
    6591              :         {
    6592     82725043 :           if ((gimple_assign_copy_p (ass)
    6593           26 :                && is_gimple_min_invariant (rhs1))
    6594     82725069 :               || (simplified
    6595            0 :                   && is_gimple_min_invariant (simplified)))
    6596              :             {
    6597            0 :               if (simplified)
    6598            0 :                 changed = set_ssa_val_to (lhs, simplified);
    6599              :               else
    6600            0 :                 changed = set_ssa_val_to (lhs, rhs1);
    6601              :             }
    6602              :           else
    6603              :             {
    6604              :               /* Visit the original statement.  */
    6605     82725043 :               switch (vn_get_stmt_kind (ass))
    6606              :                 {
    6607     48424009 :                 case VN_NARY:
    6608     48424009 :                   changed = visit_nary_op (lhs, ass);
    6609     48424009 :                   break;
    6610     34207280 :                 case VN_REFERENCE:
    6611     34207280 :                   changed = visit_reference_op_load (lhs, rhs1, ass);
    6612     34207280 :                   break;
    6613        93754 :                 default:
    6614        93754 :                   changed = defs_to_varying (ass);
    6615        93754 :                   break;
    6616              :                 }
    6617              :             }
    6618              :         }
    6619              :       else
    6620            0 :         changed = defs_to_varying (ass);
    6621              :     }
    6622    282023622 :   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    6623              :     {
    6624     24552875 :       tree lhs = gimple_call_lhs (call_stmt);
    6625     24552875 :       if (lhs && TREE_CODE (lhs) == SSA_NAME)
    6626              :         {
    6627              :           /* Try constant folding based on our current lattice.  */
    6628      8283262 :           tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
    6629              :                                                             vn_valueize);
    6630      8283262 :           if (simplified)
    6631              :             {
    6632        61955 :               if (dump_file && (dump_flags & TDF_DETAILS))
    6633              :                 {
    6634            1 :                   fprintf (dump_file, "call ");
    6635            1 :                   print_gimple_expr (dump_file, call_stmt, 0);
    6636            1 :                   fprintf (dump_file, " simplified to ");
    6637            1 :                   print_generic_expr (dump_file, simplified);
    6638            1 :                   fprintf (dump_file, "\n");
    6639              :                 }
    6640              :             }
    6641              :           /* Setting value numbers to constants will occasionally
    6642              :              screw up phi congruence because constants are not
    6643              :              uniquely associated with a single ssa name that can be
    6644              :              looked up.  */
    6645        61955 :           if (simplified
    6646        61955 :               && is_gimple_min_invariant (simplified))
    6647              :             {
    6648        55727 :               changed = set_ssa_val_to (lhs, simplified);
    6649       111454 :               if (gimple_vdef (call_stmt))
    6650          740 :                 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
    6651              :                                            SSA_VAL (gimple_vuse (call_stmt)));
    6652        55727 :               goto done;
    6653              :             }
    6654      8227535 :           else if (simplified
    6655         6228 :                    && TREE_CODE (simplified) == SSA_NAME)
    6656              :             {
    6657          293 :               changed = visit_copy (lhs, simplified);
    6658          586 :               if (gimple_vdef (call_stmt))
    6659            0 :                 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
    6660              :                                            SSA_VAL (gimple_vuse (call_stmt)));
    6661          293 :               goto done;
    6662              :             }
    6663      8227242 :           else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    6664              :             {
    6665          381 :               changed = defs_to_varying (call_stmt);
    6666          381 :               goto done;
    6667              :             }
    6668              :         }
    6669              : 
    6670              :       /* Pick up flags from a devirtualization target.  */
    6671     24496474 :       tree fn = gimple_call_fn (stmt);
    6672     24496474 :       int extra_fnflags = 0;
    6673     24496474 :       if (fn && TREE_CODE (fn) == SSA_NAME)
    6674              :         {
    6675       537727 :           fn = SSA_VAL (fn);
    6676       537727 :           if (TREE_CODE (fn) == ADDR_EXPR
    6677       537727 :               && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
    6678         4813 :             extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
    6679              :         }
    6680     24496474 :       if ((/* Calls to the same function with the same vuse
    6681              :               and the same operands do not necessarily return the same
    6682              :               value, unless they're pure or const.  */
    6683     24496474 :            ((gimple_call_flags (call_stmt) | extra_fnflags)
    6684     24496474 :             & (ECF_PURE | ECF_CONST))
    6685              :            /* If calls have a vdef, subsequent calls won't have
    6686              :               the same incoming vuse.  So, if 2 calls with vdef have the
    6687              :               same vuse, we know they're not subsequent.
    6688              :               We can value number 2 calls to the same function with the
    6689              :               same vuse and the same operands which are not subsequent
    6690              :               the same, because there is no code in the program that can
    6691              :               compare the 2 values...  */
    6692     20669982 :            || (gimple_vdef (call_stmt)
    6693              :                /* ... unless the call returns a pointer which does
    6694              :                   not alias with anything else.  In which case the
    6695              :                   information that the values are distinct are encoded
    6696              :                   in the IL.  */
    6697     20635125 :                && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
    6698              :                /* Only perform the following when being called from PRE
    6699              :                   which embeds tail merging.  */
    6700     20088969 :                && default_vn_walk_kind == VN_WALK))
    6701              :           /* Do not process .DEFERRED_INIT since that confuses uninit
    6702              :              analysis.  */
    6703     29409974 :           && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
    6704      8554530 :         changed = visit_reference_op_call (lhs, call_stmt);
    6705              :       else
    6706     15941944 :         changed = defs_to_varying (call_stmt);
    6707              :     }
    6708              :   else
    6709    257470747 :     changed = defs_to_varying (stmt);
    6710    447857134 :  done:
    6711    447857134 :   return changed;
    6712              : }
    6713              : 
    6714              : 
    6715              : /* Allocate a value number table.  */
    6716              : 
    6717              : static void
    6718      6120381 : allocate_vn_table (vn_tables_t table, unsigned size)
    6719              : {
    6720      6120381 :   table->phis = new vn_phi_table_type (size);
    6721      6120381 :   table->nary = new vn_nary_op_table_type (size);
    6722      6120381 :   table->references = new vn_reference_table_type (size);
    6723      6120381 : }
    6724              : 
    6725              : /* Free a value number table.  */
    6726              : 
    6727              : static void
    6728      6120381 : free_vn_table (vn_tables_t table)
    6729              : {
    6730              :   /* Walk over elements and release vectors.  */
    6731      6120381 :   vn_reference_iterator_type hir;
    6732      6120381 :   vn_reference_t vr;
    6733    149436851 :   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    6734     71658235 :     vr->operands.release ();
    6735      6120381 :   delete table->phis;
    6736      6120381 :   table->phis = NULL;
    6737      6120381 :   delete table->nary;
    6738      6120381 :   table->nary = NULL;
    6739      6120381 :   delete table->references;
    6740      6120381 :   table->references = NULL;
    6741      6120381 : }
    6742              : 
    6743              : /* Set *ID according to RESULT.  */
    6744              : 
    6745              : static void
    6746     34295376 : set_value_id_for_result (tree result, unsigned int *id)
    6747              : {
    6748     34295376 :   if (result && TREE_CODE (result) == SSA_NAME)
    6749     21395600 :     *id = VN_INFO (result)->value_id;
    6750      9662843 :   else if (result && is_gimple_min_invariant (result))
    6751      3655987 :     *id = get_or_alloc_constant_value_id (result);
    6752              :   else
    6753      9243789 :     *id = get_next_value_id ();
    6754     34295376 : }
    6755              : 
    6756              : /* Set the value ids in the valid hash tables.  */
    6757              : 
    6758              : static void
    6759       964218 : set_hashtable_value_ids (void)
    6760              : {
    6761       964218 :   vn_nary_op_iterator_type hin;
    6762       964218 :   vn_phi_iterator_type hip;
    6763       964218 :   vn_reference_iterator_type hir;
    6764       964218 :   vn_nary_op_t vno;
    6765       964218 :   vn_reference_t vr;
    6766       964218 :   vn_phi_t vp;
    6767              : 
    6768              :   /* Now set the value ids of the things we had put in the hash
    6769              :      table.  */
    6770              : 
    6771     48807034 :   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    6772     23921408 :     if (! vno->predicated_values)
    6773      7725511 :       set_value_id_for_result (vno->u.result, &vno->value_id);
    6774              : 
    6775      9033910 :   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    6776      4034846 :     set_value_id_for_result (vp->result, &vp->value_id);
    6777              : 
    6778     46034256 :   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
    6779              :                                hir)
    6780     22535019 :     set_value_id_for_result (vr->result, &vr->value_id);
    6781       964218 : }
    6782              : 
    6783              : /* Return the maximum value id we have ever seen.  */
    6784              : 
    6785              : unsigned int
    6786      1928436 : get_max_value_id (void)
    6787              : {
    6788      1928436 :   return next_value_id;
    6789              : }
    6790              : 
    6791              : /* Return the maximum constant value id we have ever seen.  */
    6792              : 
    6793              : unsigned int
    6794      1928436 : get_max_constant_value_id (void)
    6795              : {
    6796      1928436 :   return -next_constant_value_id;
    6797              : }
    6798              : 
    6799              : /* Return the next unique value id.  */
    6800              : 
    6801              : unsigned int
    6802     48610629 : get_next_value_id (void)
    6803              : {
    6804     48610629 :   gcc_checking_assert ((int)next_value_id > 0);
    6805     48610629 :   return next_value_id++;
    6806              : }
    6807              : 
    6808              : /* Return the next unique value id for constants.  */
    6809              : 
    6810              : unsigned int
    6811      2513307 : get_next_constant_value_id (void)
    6812              : {
    6813      2513307 :   gcc_checking_assert (next_constant_value_id < 0);
    6814      2513307 :   return next_constant_value_id--;
    6815              : }
    6816              : 
    6817              : 
    6818              : /* Compare two expressions E1 and E2 and return true if they are equal.
    6819              :    If match_vn_top_optimistically is true then VN_TOP is equal to anything,
    6820              :    otherwise VN_TOP only matches VN_TOP.  */
    6821              : 
    6822              : bool
    6823    238528087 : expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
    6824              : {
    6825              :   /* The obvious case.  */
    6826    238528087 :   if (e1 == e2)
    6827              :     return true;
    6828              : 
    6829              :   /* If either one is VN_TOP consider them equal.  */
    6830     70207211 :   if (match_vn_top_optimistically
    6831     65355930 :       && (e1 == VN_TOP || e2 == VN_TOP))
    6832              :     return true;
    6833              : 
    6834              :   /* If only one of them is null, they cannot be equal.  While in general
    6835              :      this should not happen for operations like TARGET_MEM_REF some
    6836              :      operands are optional and an identity value we could substitute
    6837              :      has differing semantics.  */
    6838     70207211 :   if (!e1 || !e2)
    6839              :     return false;
    6840              : 
    6841              :   /* SSA_NAME compare pointer equal.  */
    6842     70207211 :   if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
    6843              :     return false;
    6844              : 
    6845              :   /* Now perform the actual comparison.  */
    6846     34678873 :   if (TREE_CODE (e1) == TREE_CODE (e2)
    6847     34678873 :       && operand_equal_p (e1, e2, OEP_PURE_SAME))
    6848              :     return true;
    6849              : 
    6850              :   return false;
    6851              : }
    6852              : 
    6853              : 
    6854              : /* Return true if the nary operation NARY may trap.  This is a copy
    6855              :    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
    6856              : 
    6857              : bool
    6858      5612174 : vn_nary_may_trap (vn_nary_op_t nary)
    6859              : {
    6860      5612174 :   tree type;
    6861      5612174 :   tree rhs2 = NULL_TREE;
    6862      5612174 :   bool honor_nans = false;
    6863      5612174 :   bool honor_snans = false;
    6864      5612174 :   bool fp_operation = false;
    6865      5612174 :   bool honor_trapv = false;
    6866      5612174 :   bool handled, ret;
    6867      5612174 :   unsigned i;
    6868              : 
    6869      5612174 :   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
    6870              :       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
    6871      5612174 :       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    6872              :     {
    6873      5524073 :       type = nary->type;
    6874      5524073 :       fp_operation = FLOAT_TYPE_P (type);
    6875      5524073 :       if (fp_operation)
    6876              :         {
    6877       119236 :           honor_nans = flag_trapping_math && !flag_finite_math_only;
    6878       119236 :           honor_snans = flag_signaling_nans != 0;
    6879              :         }
    6880      5404837 :       else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
    6881              :         honor_trapv = true;
    6882              :     }
    6883      5612174 :   if (nary->length >= 2)
    6884      2237132 :     rhs2 = nary->op[1];
    6885      5612174 :   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
    6886              :                                        honor_trapv, honor_nans, honor_snans,
    6887              :                                        rhs2, &handled);
    6888      5612174 :   if (handled && ret)
    6889              :     return true;
    6890              : 
    6891     13139511 :   for (i = 0; i < nary->length; ++i)
    6892      7644998 :     if (tree_could_trap_p (nary->op[i]))
    6893              :       return true;
    6894              : 
    6895              :   return false;
    6896              : }
    6897              : 
    6898              : /* Return true if the reference operation REF may trap.  */
    6899              : 
    6900              : bool
    6901      1872128 : vn_reference_may_trap (vn_reference_t ref)
    6902              : {
    6903      1872128 :   switch (ref->operands[0].opcode)
    6904              :     {
    6905              :     case MODIFY_EXPR:
    6906              :     case CALL_EXPR:
    6907              :       /* We do not handle calls.  */
    6908              :       return true;
    6909              :     case ADDR_EXPR:
    6910              :       /* And toplevel address computations never trap.  */
    6911              :       return false;
    6912              :     default:;
    6913              :     }
    6914              : 
    6915              :   vn_reference_op_t op;
    6916              :   unsigned i;
    6917      4951384 :   FOR_EACH_VEC_ELT (ref->operands, i, op)
    6918              :     {
    6919      4951171 :       switch (op->opcode)
    6920              :         {
    6921              :         case WITH_SIZE_EXPR:
    6922              :         case TARGET_MEM_REF:
    6923              :           /* Always variable.  */
    6924              :           return true;
    6925      1206172 :         case COMPONENT_REF:
    6926      1206172 :           if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
    6927              :             return true;
    6928              :           break;
    6929            0 :         case ARRAY_RANGE_REF:
    6930            0 :           if (TREE_CODE (op->op0) == SSA_NAME)
    6931              :             return true;
    6932              :           break;
    6933       241254 :         case ARRAY_REF:
    6934       241254 :           {
    6935       241254 :             if (TREE_CODE (op->op0) != INTEGER_CST)
    6936              :               return true;
    6937              : 
    6938              :             /* !in_array_bounds   */
    6939       211993 :             tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
    6940       211993 :             if (!domain_type)
    6941              :               return true;
    6942              : 
    6943       211860 :             tree min = op->op1;
    6944       211860 :             tree max = TYPE_MAX_VALUE (domain_type);
    6945       211860 :             if (!min
    6946       211860 :                 || !max
    6947       198381 :                 || TREE_CODE (min) != INTEGER_CST
    6948       198381 :                 || TREE_CODE (max) != INTEGER_CST)
    6949              :               return true;
    6950              : 
    6951       195532 :             if (tree_int_cst_lt (op->op0, min)
    6952       195532 :                 || tree_int_cst_lt (max, op->op0))
    6953          653 :               return true;
    6954              : 
    6955              :             break;
    6956              :           }
    6957              :         case MEM_REF:
    6958              :           /* Nothing interesting in itself, the base is separate.  */
    6959              :           break;
    6960              :         /* The following are the address bases.  */
    6961              :         case SSA_NAME:
    6962              :           return true;
    6963      1265665 :         case ADDR_EXPR:
    6964      1265665 :           if (op->op0)
    6965      1265665 :             return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
    6966              :           return false;
    6967      3168106 :         default:;
    6968              :         }
    6969              :     }
    6970              :   return false;
    6971              : }
    6972              : 
    6973     10355792 : eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
    6974     10355792 :                                             bitmap inserted_exprs_)
    6975     10355792 :   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    6976     10355792 :     el_todo (0), eliminations (0), insertions (0),
    6977     10355792 :     inserted_exprs (inserted_exprs_)
    6978              : {
    6979     10355792 :   need_eh_cleanup = BITMAP_ALLOC (NULL);
    6980     10355792 :   need_ab_cleanup = BITMAP_ALLOC (NULL);
    6981     10355792 : }
    6982              : 
    6983     10355792 : eliminate_dom_walker::~eliminate_dom_walker ()
    6984              : {
    6985     10355792 :   BITMAP_FREE (need_eh_cleanup);
    6986     10355792 :   BITMAP_FREE (need_ab_cleanup);
    6987     10355792 : }
    6988              : 
    6989              : /* Return a leader for OP that is available at the current point of the
    6990              :    eliminate domwalk.  */
    6991              : 
    6992              : tree
    6993    179915213 : eliminate_dom_walker::eliminate_avail (basic_block, tree op)
    6994              : {
    6995    179915213 :   tree valnum = VN_INFO (op)->valnum;
    6996    179915213 :   if (TREE_CODE (valnum) == SSA_NAME)
    6997              :     {
    6998    175014105 :       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
    6999              :         return valnum;
    7000    304854685 :       if (avail.length () > SSA_NAME_VERSION (valnum))
    7001              :         {
    7002    137456859 :           tree av = avail[SSA_NAME_VERSION (valnum)];
    7003              :           /* When PRE discovers a new redundancy there's no way to unite
    7004              :              the value classes so it instead inserts a copy old-val = new-val.
    7005              :              Look through such copies here, providing one more level of
    7006              :              simplification at elimination time.  */
    7007    137456859 :           gassign *ass;
    7008    241517128 :           if (av && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (av))))
    7009     73561850 :             if (gimple_assign_rhs_class (ass) == GIMPLE_SINGLE_RHS)
    7010              :               {
    7011     38925949 :                 tree rhs1 = gimple_assign_rhs1 (ass);
    7012     38925949 :                 if (CONSTANT_CLASS_P (rhs1)
    7013     38925949 :                     || (TREE_CODE (rhs1) == SSA_NAME
    7014         9313 :                         && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
    7015              :                   av = rhs1;
    7016              :               }
    7017    137456859 :           return av;
    7018              :         }
    7019              :     }
    7020      4901108 :   else if (is_gimple_min_invariant (valnum))
    7021              :     return valnum;
    7022              :   return NULL_TREE;
    7023              : }
    7024              : 
    7025              : /* At the current point of the eliminate domwalk make OP available.  */
    7026              : 
    7027              : void
    7028     49747596 : eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
    7029              : {
    7030     49747596 :   tree valnum = VN_INFO (op)->valnum;
    7031     49747596 :   if (TREE_CODE (valnum) == SSA_NAME)
    7032              :     {
    7033     96156759 :       if (avail.length () <= SSA_NAME_VERSION (valnum))
    7034     16700036 :         avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
    7035     49747596 :       tree pushop = op;
    7036     49747596 :       if (avail[SSA_NAME_VERSION (valnum)])
    7037        43130 :         pushop = avail[SSA_NAME_VERSION (valnum)];
    7038     49747596 :       avail_stack.safe_push (pushop);
    7039     49747596 :       avail[SSA_NAME_VERSION (valnum)] = op;
    7040              :     }
    7041     49747596 : }
    7042              : 
    7043              : /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
    7044              :    the leader for the expression if insertion was successful.  */
    7045              : 
    7046              : tree
    7047       123914 : eliminate_dom_walker::eliminate_insert (basic_block bb,
    7048              :                                         gimple_stmt_iterator *gsi, tree val)
    7049              : {
    7050              :   /* We can insert a sequence with a single assignment only.  */
    7051       123914 :   gimple_seq stmts = VN_INFO (val)->expr;
    7052       123914 :   if (!gimple_seq_singleton_p (stmts))
    7053              :     return NULL_TREE;
    7054       225574 :   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
    7055       123914 :   if (!stmt
    7056       123914 :       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
    7057              :           && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
    7058              :           && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
    7059              :           && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
    7060              :           && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
    7061           75 :               || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    7062              :     return NULL_TREE;
    7063              : 
    7064        32680 :   tree op = gimple_assign_rhs1 (stmt);
    7065        32680 :   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
    7066        32680 :       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    7067        19371 :     op = TREE_OPERAND (op, 0);
    7068        32680 :   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
    7069        32634 :   if (!leader)
    7070              :     return NULL_TREE;
    7071              : 
    7072        22258 :   tree res;
    7073        22258 :   stmts = NULL;
    7074        40735 :   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    7075        33060 :     res = gimple_build (&stmts, BIT_FIELD_REF,
    7076        16530 :                         TREE_TYPE (val), leader,
    7077        16530 :                         TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
    7078        16530 :                         TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
    7079         5728 :   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    7080          150 :     res = gimple_build (&stmts, BIT_AND_EXPR,
    7081           75 :                         TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
    7082              :   else
    7083         5653 :     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
    7084         5653 :                         TREE_TYPE (val), leader);
    7085        22258 :   if (TREE_CODE (res) != SSA_NAME
    7086        22257 :       || SSA_NAME_IS_DEFAULT_DEF (res)
    7087        44515 :       || gimple_bb (SSA_NAME_DEF_STMT (res)))
    7088              :     {
    7089            4 :       gimple_seq_discard (stmts);
    7090              : 
    7091              :       /* During propagation we have to treat SSA info conservatively
    7092              :          and thus we can end up simplifying the inserted expression
    7093              :          at elimination time to sth not defined in stmts.  */
    7094              :       /* But then this is a redundancy we failed to detect.  Which means
    7095              :          res now has two values.  That doesn't play well with how
    7096              :          we track availability here, so give up.  */
    7097            4 :       if (dump_file && (dump_flags & TDF_DETAILS))
    7098              :         {
    7099            0 :           if (TREE_CODE (res) == SSA_NAME)
    7100            0 :             res = eliminate_avail (bb, res);
    7101            0 :           if (res)
    7102              :             {
    7103            0 :               fprintf (dump_file, "Failed to insert expression for value ");
    7104            0 :               print_generic_expr (dump_file, val);
    7105            0 :               fprintf (dump_file, " which is really fully redundant to ");
    7106            0 :               print_generic_expr (dump_file, res);
    7107            0 :               fprintf (dump_file, "\n");
    7108              :             }
    7109              :         }
    7110              : 
    7111            4 :       return NULL_TREE;
    7112              :     }
    7113              :   else
    7114              :     {
    7115        22254 :       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
    7116        22254 :       vn_ssa_aux_t vn_info = VN_INFO (res);
    7117        22254 :       vn_info->valnum = val;
    7118        22254 :       vn_info->visited = true;
    7119              :     }
    7120              : 
    7121        22254 :   insertions++;
    7122        22254 :   if (dump_file && (dump_flags & TDF_DETAILS))
    7123              :     {
    7124          501 :       fprintf (dump_file, "Inserted ");
    7125          501 :       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    7126              :     }
    7127              : 
    7128              :   return res;
    7129              : }
    7130              : 
    7131              : void
    7132    345991450 : eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
    7133              : {
    7134    345991450 :   tree sprime = NULL_TREE;
    7135    345991450 :   gimple *stmt = gsi_stmt (*gsi);
    7136    345991450 :   tree lhs = gimple_get_lhs (stmt);
    7137    118514802 :   if (lhs && TREE_CODE (lhs) == SSA_NAME
    7138    164367352 :       && !gimple_has_volatile_ops (stmt)
    7139              :       /* See PR43491.  Do not replace a global register variable when
    7140              :          it is a the RHS of an assignment.  Do replace local register
    7141              :          variables since gcc does not guarantee a local variable will
    7142              :          be allocated in register.
    7143              :          ???  The fix isn't effective here.  This should instead
    7144              :          be ensured by not value-numbering them the same but treating
    7145              :          them like volatiles?  */
    7146    427113692 :       && !(gimple_assign_single_p (stmt)
    7147     34910156 :            && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
    7148      2442148 :                && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
    7149         4172 :                && is_global_var (gimple_assign_rhs1 (stmt)))))
    7150              :     {
    7151     81121998 :       sprime = eliminate_avail (b, lhs);
    7152     81121998 :       if (!sprime)
    7153              :         {
    7154              :           /* If there is no existing usable leader but SCCVN thinks
    7155              :              it has an expression it wants to use as replacement,
    7156              :              insert that.  */
    7157     68421708 :           tree val = VN_INFO (lhs)->valnum;
    7158     68421708 :           vn_ssa_aux_t vn_info;
    7159     68421708 :           if (val != VN_TOP
    7160     68421708 :               && TREE_CODE (val) == SSA_NAME
    7161     68421708 :               && (vn_info = VN_INFO (val), true)
    7162     68421708 :               && vn_info->needs_insertion
    7163       312146 :               && vn_info->expr != NULL
    7164     68545622 :               && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
    7165        22254 :             eliminate_push_avail (b, sprime);
    7166              :         }
    7167              : 
    7168              :       /* If this now constitutes a copy duplicate points-to
    7169              :          and range info appropriately.  This is especially
    7170              :          important for inserted code.  */
    7171     68421708 :       if (sprime
    7172     12722544 :           && TREE_CODE (sprime) == SSA_NAME)
    7173      8774640 :         maybe_duplicate_ssa_info_at_copy (lhs, sprime);
    7174              : 
    7175              :       /* Inhibit the use of an inserted PHI on a loop header when
    7176              :          the address of the memory reference is a simple induction
    7177              :          variable.  In other cases the vectorizer won't do anything
    7178              :          anyway (either it's loop invariant or a complicated
    7179              :          expression).  */
    7180      8774640 :       if (sprime
    7181     12722544 :           && TREE_CODE (sprime) == SSA_NAME
    7182      8774640 :           && do_pre
    7183       914455 :           && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
    7184       896013 :           && loop_outer (b->loop_father)
    7185       378416 :           && has_zero_uses (sprime)
    7186       186722 :           && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
    7187       186568 :           && gimple_assign_load_p (stmt))
    7188              :         {
    7189        99963 :           gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
    7190        99963 :           basic_block def_bb = gimple_bb (def_stmt);
    7191        99963 :           if (gimple_code (def_stmt) == GIMPLE_PHI
    7192        99963 :               && def_bb->loop_father->header == def_bb)
    7193              :             {
    7194        63688 :               loop_p loop = def_bb->loop_father;
    7195        63688 :               ssa_op_iter iter;
    7196        63688 :               tree op;
    7197        63688 :               bool found = false;
    7198        80636 :               FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    7199              :                 {
    7200        60036 :                   affine_iv iv;
    7201        60036 :                   def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
    7202        60036 :                   if (def_bb
    7203        54539 :                       && flow_bb_inside_loop_p (loop, def_bb)
    7204       109602 :                       && simple_iv (loop, loop, op, &iv, true))
    7205              :                     {
    7206        43088 :                       found = true;
    7207        43088 :                       break;
    7208              :                     }
    7209              :                 }
    7210        20600 :               if (found)
    7211              :                 {
    7212        43088 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    7213              :                     {
    7214            3 :                       fprintf (dump_file, "Not replacing ");
    7215            3 :                       print_gimple_expr (dump_file, stmt, 0);
    7216            3 :                       fprintf (dump_file, " with ");
    7217            3 :                       print_generic_expr (dump_file, sprime);
    7218            3 :                       fprintf (dump_file, " which would add a loop"
    7219              :                                " carried dependence to loop %d\n",
    7220              :                                loop->num);
    7221              :                     }
    7222              :                   /* Don't keep sprime available.  */
    7223        43088 :                   sprime = NULL_TREE;
    7224              :                 }
    7225              :             }
    7226              :         }
    7227              : 
    7228     81121998 :       if (sprime)
    7229              :         {
    7230              :           /* If we can propagate the value computed for LHS into
    7231              :              all uses don't bother doing anything with this stmt.  */
    7232     12679456 :           if (may_propagate_copy (lhs, sprime))
    7233              :             {
    7234              :               /* Mark it for removal.  */
    7235     12677550 :               to_remove.safe_push (stmt);
    7236              : 
    7237              :               /* ???  Don't count copy/constant propagations.  */
    7238     12677550 :               if (gimple_assign_single_p (stmt)
    7239     12677550 :                   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
    7240      4423322 :                       || gimple_assign_rhs1 (stmt) == sprime))
    7241     13481687 :                 return;
    7242              : 
    7243      7687754 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7244              :                 {
    7245        18954 :                   fprintf (dump_file, "Replaced ");
    7246        18954 :                   print_gimple_expr (dump_file, stmt, 0);
    7247        18954 :                   fprintf (dump_file, " with ");
    7248        18954 :                   print_generic_expr (dump_file, sprime);
    7249        18954 :                   fprintf (dump_file, " in all uses of ");
    7250        18954 :                   print_gimple_stmt (dump_file, stmt, 0);
    7251              :                 }
    7252              : 
    7253      7687754 :               eliminations++;
    7254      7687754 :               return;
    7255              :             }
    7256              : 
    7257              :           /* If this is an assignment from our leader (which
    7258              :              happens in the case the value-number is a constant)
    7259              :              then there is nothing to do.  Likewise if we run into
    7260              :              inserted code that needed a conversion because of
    7261              :              our type-agnostic value-numbering of loads.  */
    7262         1906 :           if ((gimple_assign_single_p (stmt)
    7263            1 :                || (is_gimple_assign (stmt)
    7264            1 :                    && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
    7265            0 :                        || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
    7266         1907 :               && sprime == gimple_assign_rhs1 (stmt))
    7267              :             return;
    7268              : 
    7269              :           /* Else replace its RHS.  */
    7270          718 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7271              :             {
    7272            0 :               fprintf (dump_file, "Replaced ");
    7273            0 :               print_gimple_expr (dump_file, stmt, 0);
    7274            0 :               fprintf (dump_file, " with ");
    7275            0 :               print_generic_expr (dump_file, sprime);
    7276            0 :               fprintf (dump_file, " in ");
    7277            0 :               print_gimple_stmt (dump_file, stmt, 0);
    7278              :             }
    7279          718 :           eliminations++;
    7280              : 
    7281          718 :           bool can_make_abnormal_goto = (is_gimple_call (stmt)
    7282          718 :                                          && stmt_can_make_abnormal_goto (stmt));
    7283          718 :           gimple *orig_stmt = stmt;
    7284          718 :           if (!useless_type_conversion_p (TREE_TYPE (lhs),
    7285          718 :                                           TREE_TYPE (sprime)))
    7286              :             {
    7287              :               /* We preserve conversions to but not from function or method
    7288              :                  types.  This asymmetry makes it necessary to re-instantiate
    7289              :                  conversions here.  */
    7290          716 :               if (POINTER_TYPE_P (TREE_TYPE (lhs))
    7291          716 :                   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
    7292          716 :                 sprime = fold_convert (TREE_TYPE (lhs), sprime);
    7293              :               else
    7294            0 :                 gcc_unreachable ();
    7295              :             }
    7296          718 :           tree vdef = gimple_vdef (stmt);
    7297          718 :           tree vuse = gimple_vuse (stmt);
    7298          718 :           propagate_tree_value_into_stmt (gsi, sprime);
    7299          718 :           stmt = gsi_stmt (*gsi);
    7300          718 :           update_stmt (stmt);
    7301              :           /* In case the VDEF on the original stmt was released, value-number
    7302              :              it to the VUSE.  This is to make vuse_ssa_val able to skip
    7303              :              released virtual operands.  */
    7304         1436 :           if (vdef != gimple_vdef (stmt))
    7305              :             {
    7306            0 :               gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
    7307            0 :               VN_INFO (vdef)->valnum = vuse;
    7308              :             }
    7309              : 
    7310              :           /* If we removed EH side-effects from the statement, clean
    7311              :              its EH information.  */
    7312          718 :           if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
    7313              :             {
    7314            0 :               bitmap_set_bit (need_eh_cleanup,
    7315            0 :                               gimple_bb (stmt)->index);
    7316            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7317            0 :                 fprintf (dump_file, "  Removed EH side-effects.\n");
    7318              :             }
    7319              : 
    7320              :           /* Likewise for AB side-effects.  */
    7321          718 :           if (can_make_abnormal_goto
    7322          718 :               && !stmt_can_make_abnormal_goto (stmt))
    7323              :             {
    7324            0 :               bitmap_set_bit (need_ab_cleanup,
    7325            0 :                               gimple_bb (stmt)->index);
    7326            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7327            0 :                 fprintf (dump_file, "  Removed AB side-effects.\n");
    7328              :             }
    7329              : 
    7330          718 :           return;
    7331              :         }
    7332              :     }
    7333              : 
    7334              :   /* If the statement is a scalar store, see if the expression
    7335              :      has the same value number as its rhs.  If so, the store is
    7336              :      dead.  */
    7337    333311994 :   if (gimple_assign_single_p (stmt)
    7338    125002188 :       && !gimple_has_volatile_ops (stmt)
    7339     54579577 :       && !is_gimple_reg (gimple_assign_lhs (stmt))
    7340     28063680 :       && (TREE_CODE (gimple_assign_lhs (stmt)) != VAR_DECL
    7341      2707606 :           || !DECL_HARD_REGISTER (gimple_assign_lhs (stmt)))
    7342    361371665 :       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
    7343     16094429 :           || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    7344              :     {
    7345     25056461 :       tree rhs = gimple_assign_rhs1 (stmt);
    7346     25056461 :       vn_reference_t vnresult;
    7347              :       /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
    7348              :          typed load of a byte known to be 0x11 as 1 so a store of
    7349              :          a boolean 1 is detected as redundant.  Because of this we
    7350              :          have to make sure to lookup with a ref where its size
    7351              :          matches the precision.  */
    7352     25056461 :       tree lookup_lhs = lhs;
    7353     49851669 :       if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
    7354     13089696 :           && (TREE_CODE (lhs) != COMPONENT_REF
    7355      7957450 :               || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
    7356     37927067 :           && !type_has_mode_precision_p (TREE_TYPE (lhs)))
    7357              :         {
    7358       418114 :           if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
    7359       427196 :               && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
    7360              :             lookup_lhs = NULL_TREE;
    7361       411241 :           else if (TREE_CODE (lhs) == COMPONENT_REF
    7362       411241 :                    || TREE_CODE (lhs) == MEM_REF)
    7363              :             {
    7364       286751 :               tree ltype = build_nonstandard_integer_type
    7365       286751 :                                 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
    7366       286751 :                                  TYPE_UNSIGNED (TREE_TYPE (lhs)));
    7367       286751 :               if (TREE_CODE (lhs) == COMPONENT_REF)
    7368              :                 {
    7369       217237 :                   tree foff = component_ref_field_offset (lhs);
    7370       217237 :                   tree f = TREE_OPERAND (lhs, 1);
    7371       217237 :                   if (!poly_int_tree_p (foff))
    7372              :                     lookup_lhs = NULL_TREE;
    7373              :                   else
    7374       434474 :                     lookup_lhs = build3 (BIT_FIELD_REF, ltype,
    7375       217237 :                                          TREE_OPERAND (lhs, 0),
    7376       217237 :                                          TYPE_SIZE (TREE_TYPE (lhs)),
    7377              :                                          bit_from_pos
    7378       217237 :                                            (foff, DECL_FIELD_BIT_OFFSET (f)));
    7379              :                 }
    7380              :               else
    7381        69514 :                 lookup_lhs = build2 (MEM_REF, ltype,
    7382        69514 :                                      TREE_OPERAND (lhs, 0),
    7383        69514 :                                      TREE_OPERAND (lhs, 1));
    7384              :             }
    7385              :           else
    7386              :             lookup_lhs = NULL_TREE;
    7387              :         }
    7388     24925098 :       tree val = NULL_TREE, tem;
    7389     24925098 :       if (lookup_lhs)
    7390     49850196 :         val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
    7391              :                                    VN_WALKREWRITE, &vnresult, false,
    7392              :                                    NULL, NULL_TREE, true);
    7393     25056461 :       if (TREE_CODE (rhs) == SSA_NAME)
    7394     11965242 :         rhs = VN_INFO (rhs)->valnum;
    7395     25056461 :       gassign *ass;
    7396     25056461 :       if (val
    7397     25056461 :           && (operand_equal_p (val, rhs, 0)
    7398              :               /* Due to the bitfield lookups above we can get bit
    7399              :                  interpretations of the same RHS as values here.  Those
    7400              :                  are redundant as well.  */
    7401      3087519 :               || (TREE_CODE (val) == SSA_NAME
    7402      1887583 :                   && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
    7403      1706664 :                   && (tem = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
    7404      1706664 :                   && TREE_CODE (tem) == VIEW_CONVERT_EXPR
    7405         3745 :                   && TREE_OPERAND (tem, 0) == rhs)
    7406      3087509 :               || (TREE_CODE (rhs) == SSA_NAME
    7407     25510518 :                   && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs)))
    7408      1465016 :                   && gimple_assign_rhs1 (ass) == val
    7409       671836 :                   && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (ass))
    7410            9 :                   && tree_nop_conversion_p (TREE_TYPE (rhs), TREE_TYPE (val)))))
    7411              :         {
    7412              :           /* We can only remove the later store if the former aliases
    7413              :              at least all accesses the later one does or if the store
    7414              :              was to readonly memory storing the same value.  */
    7415       235766 :           ao_ref lhs_ref;
    7416       235766 :           ao_ref_init (&lhs_ref, lhs);
    7417       235766 :           alias_set_type set = ao_ref_alias_set (&lhs_ref);
    7418       235766 :           alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
    7419       235766 :           if (! vnresult
    7420       235766 :               || ((vnresult->set == set
    7421        48433 :                    || alias_set_subset_of (set, vnresult->set))
    7422       219987 :                   && (vnresult->base_set == base_set
    7423        21224 :                       || alias_set_subset_of (base_set, vnresult->base_set))))
    7424              :             {
    7425       217779 :               if (dump_file && (dump_flags & TDF_DETAILS))
    7426              :                 {
    7427           17 :                   fprintf (dump_file, "Deleted redundant store ");
    7428           17 :                   print_gimple_stmt (dump_file, stmt, 0);
    7429              :                 }
    7430              : 
    7431              :               /* Queue stmt for removal.  */
    7432       217779 :               to_remove.safe_push (stmt);
    7433       217779 :               return;
    7434              :             }
    7435              :         }
    7436              :     }
    7437              : 
    7438              :   /* If this is a control statement value numbering left edges
    7439              :      unexecuted on force the condition in a way consistent with
    7440              :      that.  */
    7441    333094215 :   if (gcond *cond = dyn_cast <gcond *> (stmt))
    7442              :     {
    7443     18953361 :       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
    7444     18953361 :           ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
    7445              :         {
    7446       584452 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7447              :             {
    7448           15 :               fprintf (dump_file, "Removing unexecutable edge from ");
    7449           15 :               print_gimple_stmt (dump_file, stmt, 0);
    7450              :             }
    7451       584452 :           if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
    7452       584452 :               == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
    7453       223978 :             gimple_cond_make_true (cond);
    7454              :           else
    7455       360474 :             gimple_cond_make_false (cond);
    7456       584452 :           update_stmt (cond);
    7457       584452 :           el_todo |= TODO_cleanup_cfg;
    7458       584452 :           return;
    7459              :         }
    7460              :     }
    7461              : 
    7462    332509763 :   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
    7463    332509763 :   bool was_noreturn = (is_gimple_call (stmt)
    7464    332509763 :                        && gimple_call_noreturn_p (stmt));
    7465    332509763 :   tree vdef = gimple_vdef (stmt);
    7466    332509763 :   tree vuse = gimple_vuse (stmt);
    7467              : 
    7468              :   /* If we didn't replace the whole stmt (or propagate the result
    7469              :      into all uses), replace all uses on this stmt with their
    7470              :      leaders.  */
    7471    332509763 :   bool modified = false;
    7472    332509763 :   use_operand_p use_p;
    7473    332509763 :   ssa_op_iter iter;
    7474    496069678 :   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    7475              :     {
    7476    163559915 :       tree use = USE_FROM_PTR (use_p);
    7477              :       /* ???  The call code above leaves stmt operands un-updated.  */
    7478    163559915 :       if (TREE_CODE (use) != SSA_NAME)
    7479            0 :         continue;
    7480    163559915 :       tree sprime;
    7481    163559915 :       if (SSA_NAME_IS_DEFAULT_DEF (use))
    7482              :         /* ???  For default defs BB shouldn't matter, but we have to
    7483              :            solve the inconsistency between rpo eliminate and
    7484              :            dom eliminate avail valueization first.  */
    7485     26263875 :         sprime = eliminate_avail (b, use);
    7486              :       else
    7487              :         /* Look for sth available at the definition block of the argument.
    7488              :            This avoids inconsistencies between availability there which
    7489              :            decides if the stmt can be removed and availability at the
    7490              :            use site.  The SSA property ensures that things available
    7491              :            at the definition are also available at uses.  */
    7492    137296040 :         sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
    7493    163559915 :       if (sprime && sprime != use
    7494     12380327 :           && may_propagate_copy (use, sprime, true)
    7495              :           /* We substitute into debug stmts to avoid excessive
    7496              :              debug temporaries created by removed stmts, but we need
    7497              :              to avoid doing so for inserted sprimes as we never want
    7498              :              to create debug temporaries for them.  */
    7499    175939526 :           && (!inserted_exprs
    7500      1191107 :               || TREE_CODE (sprime) != SSA_NAME
    7501      1170914 :               || !is_gimple_debug (stmt)
    7502       374917 :               || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
    7503              :         {
    7504     12033734 :           propagate_value (use_p, sprime);
    7505     12033734 :           modified = true;
    7506              :         }
    7507              :     }
    7508              : 
    7509              :   /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
    7510              :      into which is a requirement for the IPA devirt machinery.  */
    7511    332509763 :   gimple *old_stmt = stmt;
    7512    332509763 :   if (modified)
    7513              :     {
    7514              :       /* If a formerly non-invariant ADDR_EXPR is turned into an
    7515              :          invariant one it was on a separate stmt.  */
    7516     11170122 :       if (gimple_assign_single_p (stmt)
    7517     11170122 :           && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    7518       233355 :         recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
    7519     11170122 :       gimple_stmt_iterator prev = *gsi;
    7520     11170122 :       gsi_prev (&prev);
    7521     11170122 :       if (fold_stmt (gsi, follow_all_ssa_edges))
    7522              :         {
    7523              :           /* fold_stmt may have created new stmts inbetween
    7524              :              the previous stmt and the folded stmt.  Mark
    7525              :              all defs created there as varying to not confuse
    7526              :              the SCCVN machinery as we're using that even during
    7527              :              elimination.  */
    7528       975412 :           if (gsi_end_p (prev))
    7529       219624 :             prev = gsi_start_bb (b);
    7530              :           else
    7531       865600 :             gsi_next (&prev);
    7532       975412 :           if (gsi_stmt (prev) != gsi_stmt (*gsi))
    7533        88060 :             do
    7534              :               {
    7535        54874 :                 tree def;
    7536        54874 :                 ssa_op_iter dit;
    7537       105367 :                 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
    7538              :                                            dit, SSA_OP_ALL_DEFS)
    7539              :                     /* As existing DEFs may move between stmts
    7540              :                        only process new ones.  */
    7541        50493 :                     if (! has_VN_INFO (def))
    7542              :                       {
    7543        33084 :                         vn_ssa_aux_t vn_info = VN_INFO (def);
    7544        33084 :                         vn_info->valnum = def;
    7545        33084 :                         vn_info->visited = true;
    7546              :                       }
    7547        54874 :                 if (gsi_stmt (prev) == gsi_stmt (*gsi))
    7548              :                   break;
    7549        33186 :                 gsi_next (&prev);
    7550        33186 :               }
    7551              :             while (1);
    7552              :         }
    7553     11170122 :       stmt = gsi_stmt (*gsi);
    7554              :       /* In case we folded the stmt away schedule the NOP for removal.  */
    7555     11170122 :       if (gimple_nop_p (stmt))
    7556          815 :         to_remove.safe_push (stmt);
    7557              :     }
    7558              : 
    7559              :   /* Visit indirect calls and turn them into direct calls if
    7560              :      possible using the devirtualization machinery.  Do this before
    7561              :      checking for required EH/abnormal/noreturn cleanup as devird
    7562              :      may expose more of those.  */
    7563    332509763 :   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    7564              :     {
    7565     22112190 :       tree fn = gimple_call_fn (call_stmt);
    7566     22112190 :       if (fn
    7567     21392008 :           && flag_devirtualize
    7568     42766369 :           && virtual_method_call_p (fn))
    7569              :         {
    7570       192891 :           tree otr_type = obj_type_ref_class (fn);
    7571       192891 :           unsigned HOST_WIDE_INT otr_tok
    7572       192891 :               = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
    7573       192891 :           tree instance;
    7574       192891 :           ipa_polymorphic_call_context context (current_function_decl,
    7575       192891 :                                                 fn, stmt, &instance);
    7576       192891 :           context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
    7577              :                                     otr_type, stmt, NULL);
    7578       192891 :           bool final;
    7579       192891 :           vec <cgraph_node *> targets
    7580       192891 :               = possible_polymorphic_call_targets (obj_type_ref_class (fn),
    7581              :                                                    otr_tok, context, &final);
    7582       192891 :           if (dump_file)
    7583           22 :             dump_possible_polymorphic_call_targets (dump_file,
    7584              :                                                     obj_type_ref_class (fn),
    7585              :                                                     otr_tok, context);
    7586       193096 :           if (final && targets.length () <= 1 && dbg_cnt (devirt))
    7587              :             {
    7588           64 :               tree fn;
    7589           64 :               if (targets.length () == 1)
    7590           64 :                 fn = targets[0]->decl;
    7591              :               else
    7592            0 :                 fn = builtin_decl_unreachable ();
    7593           64 :               if (dump_enabled_p ())
    7594              :                 {
    7595            9 :                   dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
    7596              :                                    "converting indirect call to "
    7597              :                                    "function %s\n",
    7598            9 :                                    lang_hooks.decl_printable_name (fn, 2));
    7599              :                 }
    7600           64 :               gimple_call_set_fndecl (call_stmt, fn);
    7601              :               /* If changing the call to __builtin_unreachable
    7602              :                  or similar noreturn function, adjust gimple_call_fntype
    7603              :                  too.  */
    7604           64 :               if (gimple_call_noreturn_p (call_stmt)
    7605            0 :                   && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
    7606            0 :                   && TYPE_ARG_TYPES (TREE_TYPE (fn))
    7607           64 :                   && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
    7608            0 :                       == void_type_node))
    7609            0 :                 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
    7610           64 :               maybe_remove_unused_call_args (cfun, call_stmt);
    7611           64 :               modified = true;
    7612              :             }
    7613              :         }
    7614              :     }
    7615              : 
    7616    332509763 :   if (modified)
    7617              :     {
    7618              :       /* When changing a call into a noreturn call, cfg cleanup
    7619              :          is needed to fix up the noreturn call.  */
    7620     11170143 :       if (!was_noreturn
    7621     11170143 :           && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
    7622           56 :         to_fixup.safe_push  (stmt);
    7623              :       /* When changing a condition or switch into one we know what
    7624              :          edge will be executed, schedule a cfg cleanup.  */
    7625     11170143 :       if ((gimple_code (stmt) == GIMPLE_COND
    7626      1496745 :            && (gimple_cond_true_p (as_a <gcond *> (stmt))
    7627      1491181 :                || gimple_cond_false_p (as_a <gcond *> (stmt))))
    7628     12659164 :           || (gimple_code (stmt) == GIMPLE_SWITCH
    7629         7944 :               && TREE_CODE (gimple_switch_index
    7630              :                             (as_a <gswitch *> (stmt))) == INTEGER_CST))
    7631         9509 :         el_todo |= TODO_cleanup_cfg;
    7632              :       /* If we removed EH side-effects from the statement, clean
    7633              :          its EH information.  */
    7634     11170143 :       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    7635              :         {
    7636         1611 :           bitmap_set_bit (need_eh_cleanup,
    7637         1611 :                           gimple_bb (stmt)->index);
    7638         1611 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7639            0 :             fprintf (dump_file, "  Removed EH side-effects.\n");
    7640              :         }
    7641              :       /* Likewise for AB side-effects.  */
    7642     11170143 :       if (can_make_abnormal_goto
    7643     11170143 :           && !stmt_can_make_abnormal_goto (stmt))
    7644              :         {
    7645            0 :           bitmap_set_bit (need_ab_cleanup,
    7646            0 :                           gimple_bb (stmt)->index);
    7647            0 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7648            0 :             fprintf (dump_file, "  Removed AB side-effects.\n");
    7649              :         }
    7650     11170143 :       update_stmt (stmt);
    7651              :       /* In case the VDEF on the original stmt was released, value-number
    7652              :          it to the VUSE.  This is to make vuse_ssa_val able to skip
    7653              :          released virtual operands.  */
    7654     14251098 :       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
    7655         1830 :         VN_INFO (vdef)->valnum = vuse;
    7656              :     }
    7657              : 
    7658              :   /* Make new values available - for fully redundant LHS we
    7659              :      continue with the next stmt above and skip this.
    7660              :      But avoid picking up dead defs.  */
    7661    332509763 :   tree def;
    7662    402270457 :   FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    7663     69760694 :     if (! has_zero_uses (def)
    7664     69760694 :         || (inserted_exprs
    7665       212172 :             && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (def))))
    7666     68387799 :       eliminate_push_avail (b, def);
    7667              : }
    7668              : 
    7669              : /* Perform elimination for the basic-block B during the domwalk.  */
    7670              : 
    7671              : edge
    7672     41013135 : eliminate_dom_walker::before_dom_children (basic_block b)
    7673              : {
    7674              :   /* Mark new bb.  */
    7675     41013135 :   avail_stack.safe_push (NULL_TREE);
    7676              : 
    7677              :   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
    7678     41013135 :   if (!(b->flags & BB_EXECUTABLE))
    7679              :     return NULL;
    7680              : 
    7681     36276277 :   vn_context_bb = b;
    7682              : 
    7683     47776913 :   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    7684              :     {
    7685     11500636 :       gphi *phi = gsi.phi ();
    7686     11500636 :       tree res = PHI_RESULT (phi);
    7687              : 
    7688     23001272 :       if (virtual_operand_p (res))
    7689              :         {
    7690      5288192 :           gsi_next (&gsi);
    7691      5288192 :           continue;
    7692              :         }
    7693              : 
    7694      6212444 :       tree sprime = eliminate_avail (b, res);
    7695      6212444 :       if (sprime
    7696      6212444 :           && sprime != res)
    7697              :         {
    7698       420620 :           if (dump_file && (dump_flags & TDF_DETAILS))
    7699              :             {
    7700           22 :               fprintf (dump_file, "Replaced redundant PHI node defining ");
    7701           22 :               print_generic_expr (dump_file, res);
    7702           22 :               fprintf (dump_file, " with ");
    7703           22 :               print_generic_expr (dump_file, sprime);
    7704           22 :               fprintf (dump_file, "\n");
    7705              :             }
    7706              : 
    7707              :           /* If we inserted this PHI node ourself, it's not an elimination.  */
    7708       420620 :           if (! inserted_exprs
    7709       534527 :               || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
    7710       393300 :             eliminations++;
    7711              : 
    7712              :           /* If we will propagate into all uses don't bother to do
    7713              :              anything.  */
    7714       420620 :           if (may_propagate_copy (res, sprime))
    7715              :             {
    7716              :               /* Mark the PHI for removal.  */
    7717       420620 :               to_remove.safe_push (phi);
    7718       420620 :               gsi_next (&gsi);
    7719       420620 :               continue;
    7720              :             }
    7721              : 
    7722            0 :           remove_phi_node (&gsi, false);
    7723              : 
    7724            0 :           if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
    7725            0 :             sprime = fold_convert (TREE_TYPE (res), sprime);
    7726            0 :           gimple *stmt = gimple_build_assign (res, sprime);
    7727            0 :           gimple_stmt_iterator gsi2 = gsi_after_labels (b);
    7728            0 :           gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
    7729            0 :           continue;
    7730            0 :         }
    7731              : 
    7732      5791824 :       eliminate_push_avail (b, res);
    7733      5791824 :       gsi_next (&gsi);
    7734              :     }
    7735              : 
    7736     72552554 :   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
    7737    276482871 :        !gsi_end_p (gsi);
    7738    240206594 :        gsi_next (&gsi))
    7739    240206594 :     eliminate_stmt (b, &gsi);
    7740              : 
    7741              :   /* Replace destination PHI arguments.  */
    7742     36276277 :   edge_iterator ei;
    7743     36276277 :   edge e;
    7744     85628435 :   FOR_EACH_EDGE (e, ei, b->succs)
    7745     49352158 :     if (e->flags & EDGE_EXECUTABLE)
    7746     48831850 :       for (gphi_iterator gsi = gsi_start_phis (e->dest);
    7747     78445572 :            !gsi_end_p (gsi);
    7748     29613722 :            gsi_next (&gsi))
    7749              :         {
    7750     29613722 :           gphi *phi = gsi.phi ();
    7751     29613722 :           use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
    7752     29613722 :           tree arg = USE_FROM_PTR (use_p);
    7753     48975626 :           if (TREE_CODE (arg) != SSA_NAME
    7754     29613722 :               || virtual_operand_p (arg))
    7755     19361904 :             continue;
    7756     10251818 :           tree sprime = eliminate_avail (b, arg);
    7757     20503636 :           if (sprime && may_propagate_copy (arg, sprime,
    7758     10251818 :                                             !(e->flags & EDGE_ABNORMAL)))
    7759     10239779 :             propagate_value (use_p, sprime);
    7760              :         }
    7761              : 
    7762     36276277 :   vn_context_bb = NULL;
    7763              : 
    7764     36276277 :   return NULL;
    7765              : }
    7766              : 
    7767              : /* Make no longer available leaders no longer available.  */
    7768              : 
    7769              : void
    7770     41013135 : eliminate_dom_walker::after_dom_children (basic_block)
    7771              : {
    7772     41013135 :   tree entry;
    7773     90760731 :   while ((entry = avail_stack.pop ()) != NULL_TREE)
    7774              :     {
    7775     49747596 :       tree valnum = VN_INFO (entry)->valnum;
    7776     49747596 :       tree old = avail[SSA_NAME_VERSION (valnum)];
    7777     49747596 :       if (old == entry)
    7778     49704466 :         avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
    7779              :       else
    7780        43130 :         avail[SSA_NAME_VERSION (valnum)] = entry;
    7781              :     }
    7782     41013135 : }
    7783              : 
    7784              : /* Remove queued stmts and perform delayed cleanups.  */
    7785              : 
    7786              : unsigned
    7787      6100962 : eliminate_dom_walker::eliminate_cleanup (bool region_p)
    7788              : {
    7789      6100962 :   statistics_counter_event (cfun, "Eliminated", eliminations);
    7790      6100962 :   statistics_counter_event (cfun, "Insertions", insertions);
    7791              : 
    7792              :   /* We cannot remove stmts during BB walk, especially not release SSA
    7793              :      names there as this confuses the VN machinery.  The stmts ending
    7794              :      up in to_remove are either stores or simple copies.
    7795              :      Remove stmts in reverse order to make debug stmt creation possible.  */
    7796     32893457 :   while (!to_remove.is_empty ())
    7797              :     {
    7798     14590515 :       bool do_release_defs = true;
    7799     14590515 :       gimple *stmt = to_remove.pop ();
    7800              : 
    7801              :       /* When we are value-numbering a region we do not require exit PHIs to
    7802              :          be present so we have to make sure to deal with uses outside of the
    7803              :          region of stmts that we thought are eliminated.
    7804              :          ??? Note we may be confused by uses in dead regions we didn't run
    7805              :          elimination on.  Rather than checking individual uses we accept
    7806              :          dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
    7807              :          contains such example).  */
    7808     14590515 :       if (region_p)
    7809              :         {
    7810      1712577 :           if (gphi *phi = dyn_cast <gphi *> (stmt))
    7811              :             {
    7812      1103701 :               tree lhs = gimple_phi_result (phi);
    7813      1103701 :               if (!has_zero_uses (lhs))
    7814              :                 {
    7815        22614 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    7816            3 :                     fprintf (dump_file, "Keeping eliminated stmt live "
    7817              :                              "as copy because of out-of-region uses\n");
    7818        22614 :                   tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
    7819        22614 :                   gimple *copy = gimple_build_assign (lhs, sprime);
    7820        22614 :                   gimple_stmt_iterator gsi
    7821        22614 :                     = gsi_after_labels (gimple_bb (stmt));
    7822        22614 :                   gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
    7823        22614 :                   do_release_defs = false;
    7824              :                 }
    7825              :             }
    7826       608876 :           else if (tree lhs = gimple_get_lhs (stmt))
    7827       608876 :             if (TREE_CODE (lhs) == SSA_NAME
    7828       608876 :                 && !has_zero_uses (lhs))
    7829              :               {
    7830         1685 :                 if (dump_file && (dump_flags & TDF_DETAILS))
    7831            0 :                   fprintf (dump_file, "Keeping eliminated stmt live "
    7832              :                            "as copy because of out-of-region uses\n");
    7833         1685 :                 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
    7834         1685 :                 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    7835         1685 :                 if (is_gimple_assign (stmt))
    7836              :                   {
    7837         1685 :                     gimple_assign_set_rhs_from_tree (&gsi, sprime);
    7838         1685 :                     stmt = gsi_stmt (gsi);
    7839         1685 :                     update_stmt (stmt);
    7840         1685 :                     if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    7841            0 :                       bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
    7842         1685 :                     continue;
    7843              :                   }
    7844              :                 else
    7845              :                   {
    7846            0 :                     gimple *copy = gimple_build_assign (lhs, sprime);
    7847            0 :                     gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
    7848            0 :                     do_release_defs = false;
    7849              :                   }
    7850              :               }
    7851              :         }
    7852              : 
    7853     14588830 :       if (dump_file && (dump_flags & TDF_DETAILS))
    7854              :         {
    7855        21994 :           fprintf (dump_file, "Removing dead stmt ");
    7856        21994 :           print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
    7857              :         }
    7858              : 
    7859     14588830 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
    7860     14588830 :       if (gimple_code (stmt) == GIMPLE_PHI)
    7861      1694371 :         remove_phi_node (&gsi, do_release_defs);
    7862              :       else
    7863              :         {
    7864     12894459 :           basic_block bb = gimple_bb (stmt);
    7865     12894459 :           unlink_stmt_vdef (stmt);
    7866     12894459 :           if (gsi_remove (&gsi, true))
    7867        26542 :             bitmap_set_bit (need_eh_cleanup, bb->index);
    7868     12894459 :           if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
    7869            2 :             bitmap_set_bit (need_ab_cleanup, bb->index);
    7870     12894459 :           if (do_release_defs)
    7871     12894459 :             release_defs (stmt);
    7872              :         }
    7873              : 
    7874              :       /* Removing a stmt may expose a forwarder block.  */
    7875     14588830 :       el_todo |= TODO_cleanup_cfg;
    7876              :     }
    7877              : 
    7878              :   /* Fixup stmts that became noreturn calls.  This may require splitting
    7879              :      blocks and thus isn't possible during the dominator walk.  Do this
    7880              :      in reverse order so we don't inadvertedly remove a stmt we want to
    7881              :      fixup by visiting a dominating now noreturn call first.  */
    7882      6101018 :   while (!to_fixup.is_empty ())
    7883              :     {
    7884           56 :       gimple *stmt = to_fixup.pop ();
    7885              : 
    7886           56 :       if (dump_file && (dump_flags & TDF_DETAILS))
    7887              :         {
    7888            0 :           fprintf (dump_file, "Fixing up noreturn call ");
    7889            0 :           print_gimple_stmt (dump_file, stmt, 0);
    7890              :         }
    7891              : 
    7892           56 :       if (fixup_noreturn_call (stmt))
    7893           56 :         el_todo |= TODO_cleanup_cfg;
    7894              :     }
    7895              : 
    7896      6100962 :   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
    7897      6100962 :   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
    7898              : 
    7899      6100962 :   if (do_eh_cleanup)
    7900        10624 :     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
    7901              : 
    7902      6100962 :   if (do_ab_cleanup)
    7903            2 :     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
    7904              : 
    7905      6100962 :   if (do_eh_cleanup || do_ab_cleanup)
    7906        10626 :     el_todo |= TODO_cleanup_cfg;
    7907              : 
    7908      6100962 :   return el_todo;
    7909              : }
    7910              : 
    7911              : /* Eliminate fully redundant computations.  */
    7912              : 
    7913              : unsigned
    7914      4235411 : eliminate_with_rpo_vn (bitmap inserted_exprs)
    7915              : {
    7916      4235411 :   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
    7917              : 
    7918      4235411 :   eliminate_dom_walker *saved_rpo_avail = rpo_avail;
    7919      4235411 :   rpo_avail = &walker;
    7920      4235411 :   walker.walk (cfun->cfg->x_entry_block_ptr);
    7921      4235411 :   rpo_avail = saved_rpo_avail;
    7922              : 
    7923      4235411 :   return walker.eliminate_cleanup ();
    7924      4235411 : }
    7925              : 
    7926              : static unsigned
    7927              : do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
    7928              :              bool iterate, bool eliminate, bool skip_entry_phis,
    7929              :              vn_lookup_kind kind);
    7930              : 
    7931              : void
    7932       964218 : run_rpo_vn (vn_lookup_kind kind)
    7933              : {
    7934       964218 :   do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
    7935              : 
    7936              :   /* ???  Prune requirement of these.  */
    7937       964218 :   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
    7938              : 
    7939              :   /* Initialize the value ids and prune out remaining VN_TOPs
    7940              :      from dead code.  */
    7941       964218 :   tree name;
    7942       964218 :   unsigned i;
    7943     46918496 :   FOR_EACH_SSA_NAME (i, name, cfun)
    7944              :     {
    7945     33413712 :       vn_ssa_aux_t info = VN_INFO (name);
    7946     33413712 :       if (!info->visited
    7947     33337886 :           || info->valnum == VN_TOP)
    7948        75826 :         info->valnum = name;
    7949     33413712 :       if (info->valnum == name)
    7950     32255871 :         info->value_id = get_next_value_id ();
    7951      1157841 :       else if (is_gimple_min_invariant (info->valnum))
    7952        42008 :         info->value_id = get_or_alloc_constant_value_id (info->valnum);
    7953              :     }
    7954              : 
    7955              :   /* Propagate.  */
    7956     46918496 :   FOR_EACH_SSA_NAME (i, name, cfun)
    7957              :     {
    7958     33413712 :       vn_ssa_aux_t info = VN_INFO (name);
    7959     33413712 :       if (TREE_CODE (info->valnum) == SSA_NAME
    7960     33371704 :           && info->valnum != name
    7961     34529545 :           && info->value_id != VN_INFO (info->valnum)->value_id)
    7962      1115833 :         info->value_id = VN_INFO (info->valnum)->value_id;
    7963              :     }
    7964              : 
    7965       964218 :   set_hashtable_value_ids ();
    7966              : 
    7967       964218 :   if (dump_file && (dump_flags & TDF_DETAILS))
    7968              :     {
    7969           14 :       fprintf (dump_file, "Value numbers:\n");
    7970          406 :       FOR_EACH_SSA_NAME (i, name, cfun)
    7971              :         {
    7972          307 :           if (VN_INFO (name)->visited
    7973          307 :               && SSA_VAL (name) != name)
    7974              :             {
    7975           33 :               print_generic_expr (dump_file, name);
    7976           33 :               fprintf (dump_file, " = ");
    7977           33 :               print_generic_expr (dump_file, SSA_VAL (name));
    7978           33 :               fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
    7979              :             }
    7980              :         }
    7981              :     }
    7982       964218 : }
    7983              : 
    7984              : /* Free VN associated data structures.  */
    7985              : 
    7986              : void
    7987      6120381 : free_rpo_vn (void)
    7988              : {
    7989      6120381 :   free_vn_table (valid_info);
    7990      6120381 :   XDELETE (valid_info);
    7991      6120381 :   obstack_free (&vn_tables_obstack, NULL);
    7992      6120381 :   obstack_free (&vn_tables_insert_obstack, NULL);
    7993              : 
    7994      6120381 :   vn_ssa_aux_iterator_type it;
    7995      6120381 :   vn_ssa_aux_t info;
    7996    346740209 :   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    7997    170309914 :     if (info->needs_insertion)
    7998      4024424 :       release_ssa_name (info->name);
    7999      6120381 :   obstack_free (&vn_ssa_aux_obstack, NULL);
    8000      6120381 :   delete vn_ssa_aux_hash;
    8001              : 
    8002      6120381 :   delete constant_to_value_id;
    8003      6120381 :   constant_to_value_id = NULL;
    8004      6120381 : }
    8005              : 
    8006              : /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
    8007              : 
    8008              : static tree
    8009     22831674 : vn_lookup_simplify_result (gimple_match_op *res_op)
    8010              : {
    8011     22831674 :   if (!res_op->code.is_tree_code ())
    8012              :     return NULL_TREE;
    8013     22828500 :   tree *ops = res_op->ops;
    8014     22828500 :   unsigned int length = res_op->num_ops;
    8015     22828500 :   if (res_op->code == CONSTRUCTOR
    8016              :       /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
    8017              :          and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
    8018     22828500 :       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    8019              :     {
    8020         1269 :       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
    8021         1269 :       ops = XALLOCAVEC (tree, length);
    8022         6119 :       for (unsigned i = 0; i < length; ++i)
    8023         4850 :         ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    8024              :     }
    8025     22828500 :   vn_nary_op_t vnresult = NULL;
    8026     22828500 :   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
    8027              :                                        res_op->type, ops, &vnresult);
    8028              :   /* If this is used from expression simplification make sure to
    8029              :      return an available expression.  */
    8030     22828500 :   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    8031      2207284 :     res = rpo_avail->eliminate_avail (vn_context_bb, res);
    8032              :   return res;
    8033              : }
    8034              : 
    8035              : /* Return a leader for OPs value that is valid at BB.  */
    8036              : 
    8037              : tree
    8038    263929013 : rpo_elim::eliminate_avail (basic_block bb, tree op)
    8039              : {
    8040    263929013 :   bool visited;
    8041    263929013 :   tree valnum = SSA_VAL (op, &visited);
    8042              :   /* If we didn't visit OP then it must be defined outside of the
    8043              :      region we process and also dominate it.  So it is available.  */
    8044    263929013 :   if (!visited)
    8045              :     return op;
    8046    261782990 :   if (TREE_CODE (valnum) == SSA_NAME)
    8047              :     {
    8048    247927429 :       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
    8049              :         return valnum;
    8050    241221210 :       vn_ssa_aux_t valnum_info = VN_INFO (valnum);
    8051    241221210 :       vn_avail *av = valnum_info->avail;
    8052    241221210 :       if (!av)
    8053              :         {
    8054              :           /* See above.  But when there's availability info prefer
    8055              :              what we recorded there for example to preserve LC SSA.  */
    8056     83081113 :           if (!valnum_info->visited)
    8057              :             return valnum;
    8058              :           return NULL_TREE;
    8059              :         }
    8060    158140097 :       if (av->location == bb->index)
    8061              :         /* On tramp3d 90% of the cases are here.  */
    8062    104931772 :         return ssa_name (av->leader);
    8063     67039085 :       do
    8064              :         {
    8065     67039085 :           basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
    8066              :           /* ???  During elimination we have to use availability at the
    8067              :              definition site of a use we try to replace.  This
    8068              :              is required to not run into inconsistencies because
    8069              :              of dominated_by_p_w_unex behavior and removing a definition
    8070              :              while not replacing all uses.
    8071              :              ???  We could try to consistently walk dominators
    8072              :              ignoring non-executable regions.  The nearest common
    8073              :              dominator of bb and abb is where we can stop walking.  We
    8074              :              may also be able to "pre-compute" (bits of) the next immediate
    8075              :              (non-)dominator during the RPO walk when marking edges as
    8076              :              executable.  */
    8077     67039085 :           if (dominated_by_p_w_unex (bb, abb, true))
    8078              :             {
    8079     49353615 :               tree leader = ssa_name (av->leader);
    8080              :               /* Prevent eliminations that break loop-closed SSA.  */
    8081     49353615 :               if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
    8082      3155514 :                   && ! SSA_NAME_IS_DEFAULT_DEF (leader)
    8083     52509129 :                   && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
    8084      3155514 :                                                          (leader))->loop_father,
    8085              :                                               bb))
    8086              :                 return NULL_TREE;
    8087     49274651 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8088              :                 {
    8089         3555 :                   print_generic_expr (dump_file, leader);
    8090         3555 :                   fprintf (dump_file, " is available for ");
    8091         3555 :                   print_generic_expr (dump_file, valnum);
    8092         3555 :                   fprintf (dump_file, "\n");
    8093              :                 }
    8094              :               /* On tramp3d 99% of the _remaining_ cases succeed at
    8095              :                  the first enty.  */
    8096     49274651 :               return leader;
    8097              :             }
    8098              :           /* ???  Can we somehow skip to the immediate dominator
    8099              :              RPO index (bb_to_rpo)?  Again, maybe not worth, on
    8100              :              tramp3d the worst number of elements in the vector is 9.  */
    8101     17685470 :           av = av->next;
    8102              :         }
    8103     17685470 :       while (av);
    8104              :       /* While we prefer avail we have to fallback to using the value
    8105              :          directly if defined outside of the region when none of the
    8106              :          available defs suit.  */
    8107      3854710 :       if (!valnum_info->visited)
    8108              :         return valnum;
    8109              :     }
    8110     13855561 :   else if (valnum != VN_TOP)
    8111              :     /* valnum is is_gimple_min_invariant.  */
    8112              :     return valnum;
    8113              :   return NULL_TREE;
    8114              : }
    8115              : 
    8116              : /* Make LEADER a leader for its value at BB.  */
    8117              : 
    8118              : void
    8119     96479740 : rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
    8120              : {
    8121     96479740 :   tree valnum = VN_INFO (leader)->valnum;
    8122     96479740 :   if (valnum == VN_TOP
    8123     96479740 :       || is_gimple_min_invariant (valnum))
    8124            0 :     return;
    8125     96479740 :   if (dump_file && (dump_flags & TDF_DETAILS))
    8126              :     {
    8127       324739 :       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
    8128       324739 :       print_generic_expr (dump_file, leader);
    8129       324739 :       fprintf (dump_file, " for value ");
    8130       324739 :       print_generic_expr (dump_file, valnum);
    8131       324739 :       fprintf (dump_file, "\n");
    8132              :     }
    8133     96479740 :   vn_ssa_aux_t value = VN_INFO (valnum);
    8134     96479740 :   vn_avail *av;
    8135     96479740 :   if (m_avail_freelist)
    8136              :     {
    8137     18733842 :       av = m_avail_freelist;
    8138     18733842 :       m_avail_freelist = m_avail_freelist->next;
    8139              :     }
    8140              :   else
    8141     77745898 :     av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
    8142     96479740 :   av->location = bb->index;
    8143     96479740 :   av->leader = SSA_NAME_VERSION (leader);
    8144     96479740 :   av->next = value->avail;
    8145     96479740 :   av->next_undo = last_pushed_avail;
    8146     96479740 :   last_pushed_avail = value;
    8147     96479740 :   value->avail = av;
    8148              : }
    8149              : 
    8150              : /* Valueization hook for RPO VN plus required state.  */
    8151              : 
    8152              : tree
    8153   2023743151 : rpo_vn_valueize (tree name)
    8154              : {
    8155   2023743151 :   if (TREE_CODE (name) == SSA_NAME)
    8156              :     {
    8157   1977998250 :       vn_ssa_aux_t val = VN_INFO (name);
    8158   1977998250 :       if (val)
    8159              :         {
    8160   1977998250 :           tree tem = val->valnum;
    8161   1977998250 :           if (tem != VN_TOP && tem != name)
    8162              :             {
    8163    105400209 :               if (TREE_CODE (tem) != SSA_NAME)
    8164              :                 return tem;
    8165              :               /* For all values we only valueize to an available leader
    8166              :                  which means we can use SSA name info without restriction.  */
    8167     88695558 :               tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
    8168     88695558 :               if (tem)
    8169              :                 return tem;
    8170              :             }
    8171              :         }
    8172              :     }
    8173              :   return name;
    8174              : }
    8175              : 
    8176              : /* Insert on PRED_E predicates derived from CODE OPS being true besides the
    8177              :    inverted condition.  */
    8178              : 
    8179              : static void
    8180     27353680 : insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
    8181              : {
    8182     27353680 :   switch (code)
    8183              :     {
    8184      1377639 :     case LT_EXPR:
    8185              :       /* a < b -> a {!,<}= b */
    8186      1377639 :       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
    8187              :                                            ops, boolean_true_node, 0, pred_e);
    8188      1377639 :       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
    8189              :                                            ops, boolean_true_node, 0, pred_e);
    8190              :       /* a < b -> ! a {>,=} b */
    8191      1377639 :       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
    8192              :                                            ops, boolean_false_node, 0, pred_e);
    8193      1377639 :       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
    8194              :                                            ops, boolean_false_node, 0, pred_e);
    8195      1377639 :       break;
    8196      3455529 :     case GT_EXPR:
    8197              :       /* a > b -> a {!,>}= b */
    8198      3455529 :       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
    8199              :                                            ops, boolean_true_node, 0, pred_e);
    8200      3455529 :       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
    8201              :                                            ops, boolean_true_node, 0, pred_e);
    8202              :       /* a > b -> ! a {<,=} b */
    8203      3455529 :       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
    8204              :                                            ops, boolean_false_node, 0, pred_e);
    8205      3455529 :       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
    8206              :                                            ops, boolean_false_node, 0, pred_e);
    8207      3455529 :       break;
    8208      9332020 :     case EQ_EXPR:
    8209              :       /* a == b -> ! a {<,>} b */
    8210      9332020 :       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
    8211              :                                            ops, boolean_false_node, 0, pred_e);
    8212      9332020 :       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
    8213              :                                            ops, boolean_false_node, 0, pred_e);
    8214      9332020 :       break;
    8215              :     case LE_EXPR:
    8216              :     case GE_EXPR:
    8217              :     case NE_EXPR:
    8218              :       /* Nothing besides inverted condition.  */
    8219              :       break;
    8220     27353680 :     default:;
    8221              :     }
    8222     27353680 : }
    8223              : 
    8224              : /* Insert on the TRUE_E true and FALSE_E false predicates
    8225              :    derived from LHS CODE RHS.  */
    8226              : 
    8227              : static void
    8228     23348053 : insert_predicates_for_cond (tree_code code, tree lhs, tree rhs,
    8229              :                             edge true_e, edge false_e)
    8230              : {
    8231              :   /* If both edges are null, then there is nothing to be done. */
    8232     23348053 :   if (!true_e && !false_e)
    8233      1316188 :     return;
    8234              : 
    8235              :   /* Canonicalize the comparison if needed, putting
    8236              :      the constant in the rhs.  */
    8237     22035335 :   if (tree_swap_operands_p (lhs, rhs))
    8238              :     {
    8239        16503 :       std::swap (lhs, rhs);
    8240        16503 :       code = swap_tree_comparison (code);
    8241              :     }
    8242              : 
    8243              :   /* If the lhs is not a ssa name, don't record anything. */
    8244     22035335 :   if (TREE_CODE (lhs) != SSA_NAME)
    8245              :     return;
    8246              : 
    8247     22031865 :   tree_code icode = invert_tree_comparison (code, HONOR_NANS (lhs));
    8248     22031865 :   tree ops[2];
    8249     22031865 :   ops[0] = lhs;
    8250     22031865 :   ops[1] = rhs;
    8251     22031865 :   if (true_e)
    8252     17982297 :     vn_nary_op_insert_pieces_predicated (2, code, boolean_type_node, ops,
    8253              :                                          boolean_true_node, 0, true_e);
    8254     22031865 :   if (false_e)
    8255     16964488 :     vn_nary_op_insert_pieces_predicated (2, code, boolean_type_node, ops,
    8256              :                                          boolean_false_node, 0, false_e);
    8257     22031865 :   if (icode != ERROR_MARK)
    8258              :     {
    8259     21784646 :       if (true_e)
    8260     17829048 :         vn_nary_op_insert_pieces_predicated (2, icode, boolean_type_node, ops,
    8261              :                                              boolean_false_node, 0, true_e);
    8262     21784646 :       if (false_e)
    8263     16764623 :         vn_nary_op_insert_pieces_predicated (2, icode, boolean_type_node, ops,
    8264              :                                              boolean_true_node, 0, false_e);
    8265              :     }
    8266              :   /* Relax for non-integers, inverted condition handled
    8267              :      above.  */
    8268     22031865 :   if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    8269              :     {
    8270     17232771 :       if (true_e)
    8271     14139440 :         insert_related_predicates_on_edge (code, ops, true_e);
    8272     17232771 :       if (false_e)
    8273     13214240 :         insert_related_predicates_on_edge (icode, ops, false_e);
    8274              :   }
    8275     22031865 :   if (integer_zerop (rhs)
    8276     22031865 :       && (code == NE_EXPR || code == EQ_EXPR))
    8277              :     {
    8278      9107115 :       gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
    8279              :       /* (A CMP B) != 0 is the same as (A CMP B).
    8280              :          (A CMP B) == 0 is just (A CMP B) with the edges swapped.  */
    8281      9107115 :       if (is_gimple_assign (def_stmt)
    8282      9107115 :           && TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_comparison)
    8283              :           {
    8284       430268 :             tree_code nc = gimple_assign_rhs_code (def_stmt);
    8285       430268 :             tree nlhs = vn_valueize (gimple_assign_rhs1 (def_stmt));
    8286       430268 :             tree nrhs = vn_valueize (gimple_assign_rhs2 (def_stmt));
    8287       430268 :             edge nt = true_e;
    8288       430268 :             edge nf = false_e;
    8289       430268 :             if (code == EQ_EXPR)
    8290       303836 :               std::swap (nt, nf);
    8291       430268 :             if (lhs != nlhs)
    8292       430268 :               insert_predicates_for_cond (nc, nlhs, nrhs, nt, nf);
    8293              :           }
    8294              :       /* (a | b) == 0 ->
    8295              :             on true edge assert: a == 0 & b == 0. */
    8296              :       /* (a | b) != 0 ->
    8297              :             on false edge assert: a == 0 & b == 0. */
    8298      9107115 :       if (is_gimple_assign (def_stmt)
    8299      9107115 :           && gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR)
    8300              :         {
    8301       254472 :           edge e = code == EQ_EXPR ? true_e : false_e;
    8302       254472 :           tree nlhs;
    8303              : 
    8304       254472 :           nlhs = vn_valueize (gimple_assign_rhs1 (def_stmt));
    8305              :           /* A valueization of the `a` might return the old lhs
    8306              :              which is already handled above. */
    8307       254472 :           if (nlhs != lhs)
    8308       254472 :             insert_predicates_for_cond (EQ_EXPR, nlhs, rhs, e, nullptr);
    8309              : 
    8310              :           /* A valueization of the `b` might return the old lhs
    8311              :              which is already handled above. */
    8312       254472 :           nlhs = vn_valueize (gimple_assign_rhs2 (def_stmt));
    8313       254472 :           if (nlhs != lhs)
    8314       254472 :             insert_predicates_for_cond (EQ_EXPR, nlhs, rhs, e, nullptr);
    8315              :         }
    8316              :     }
    8317              : }
    8318              : 
    8319              : /* Main stmt worker for RPO VN, process BB.  */
    8320              : 
    8321              : static unsigned
    8322     61225126 : process_bb (rpo_elim &avail, basic_block bb,
    8323              :             bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
    8324              :             bool do_region, bitmap exit_bbs, bool skip_phis)
    8325              : {
    8326     61225126 :   unsigned todo = 0;
    8327     61225126 :   edge_iterator ei;
    8328     61225126 :   edge e;
    8329              : 
    8330     61225126 :   vn_context_bb = bb;
    8331              : 
    8332              :   /* If we are in loop-closed SSA preserve this state.  This is
    8333              :      relevant when called on regions from outside of FRE/PRE.  */
    8334     61225126 :   bool lc_phi_nodes = false;
    8335     61225126 :   if (!skip_phis
    8336     61225126 :       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    8337      3695255 :     FOR_EACH_EDGE (e, ei, bb->preds)
    8338      2232641 :       if (e->src->loop_father != e->dest->loop_father
    8339      2232641 :           && flow_loop_nested_p (e->dest->loop_father,
    8340              :                                  e->src->loop_father))
    8341              :         {
    8342              :           lc_phi_nodes = true;
    8343              :           break;
    8344              :         }
    8345              : 
    8346              :   /* When we visit a loop header substitute into loop info.  */
    8347     61225126 :   if (!iterate && eliminate && bb->loop_father->header == bb)
    8348              :     {
    8349              :       /* Keep fields in sync with substitute_in_loop_info.  */
    8350       946936 :       if (bb->loop_father->nb_iterations)
    8351       155813 :         bb->loop_father->nb_iterations
    8352       155813 :           = simplify_replace_tree (bb->loop_father->nb_iterations,
    8353              :                                    NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
    8354              :     }
    8355              : 
    8356              :   /* Value-number all defs in the basic-block.  */
    8357     61225126 :   if (!skip_phis)
    8358     88022908 :     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
    8359     26827026 :          gsi_next (&gsi))
    8360              :       {
    8361     26827026 :         gphi *phi = gsi.phi ();
    8362     26827026 :         tree res = PHI_RESULT (phi);
    8363     26827026 :         vn_ssa_aux_t res_info = VN_INFO (res);
    8364     26827026 :         if (!bb_visited)
    8365              :           {
    8366     18946842 :             gcc_assert (!res_info->visited);
    8367     18946842 :             res_info->valnum = VN_TOP;
    8368     18946842 :             res_info->visited = true;
    8369              :           }
    8370              : 
    8371              :         /* When not iterating force backedge values to varying.  */
    8372     26827026 :         visit_stmt (phi, !iterate_phis);
    8373     53654052 :         if (virtual_operand_p (res))
    8374     10618568 :           continue;
    8375              : 
    8376              :         /* Eliminate */
    8377              :         /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
    8378              :            how we handle backedges and availability.
    8379              :            And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
    8380     16208458 :         tree val = res_info->valnum;
    8381     16208458 :         if (res != val && !iterate && eliminate)
    8382              :           {
    8383      1398902 :             if (tree leader = avail.eliminate_avail (bb, res))
    8384              :               {
    8385      1274322 :                 if (leader != res
    8386              :                     /* Preserve loop-closed SSA form.  */
    8387      1274322 :                     && (! lc_phi_nodes
    8388         6534 :                         || is_gimple_min_invariant (leader)))
    8389              :                   {
    8390      1273751 :                     if (dump_file && (dump_flags & TDF_DETAILS))
    8391              :                       {
    8392          203 :                         fprintf (dump_file, "Replaced redundant PHI node "
    8393              :                                  "defining ");
    8394          203 :                         print_generic_expr (dump_file, res);
    8395          203 :                         fprintf (dump_file, " with ");
    8396          203 :                         print_generic_expr (dump_file, leader);
    8397          203 :                         fprintf (dump_file, "\n");
    8398              :                       }
    8399      1273751 :                     avail.eliminations++;
    8400              : 
    8401      1273751 :                     if (may_propagate_copy (res, leader))
    8402              :                       {
    8403              :                         /* Schedule for removal.  */
    8404      1273751 :                         avail.to_remove.safe_push (phi);
    8405      1273751 :                         continue;
    8406              :                       }
    8407              :                     /* ???  Else generate a copy stmt.  */
    8408              :                   }
    8409              :               }
    8410              :           }
    8411              :         /* Only make defs available that not already are.  But make
    8412              :            sure loop-closed SSA PHI node defs are picked up for
    8413              :            downstream uses.  */
    8414     14934707 :         if (lc_phi_nodes
    8415     14934707 :             || res == val
    8416     14934707 :             || ! avail.eliminate_avail (bb, res))
    8417     11436648 :           avail.eliminate_push_avail (bb, res);
    8418              :       }
    8419              : 
    8420              :   /* For empty BBs mark outgoing edges executable.  For non-empty BBs
    8421              :      we do this when processing the last stmt as we have to do this
    8422              :      before elimination which otherwise forces GIMPLE_CONDs to
    8423              :      if (1 != 0) style when seeing non-executable edges.  */
    8424    122450252 :   if (gsi_end_p (gsi_start_bb (bb)))
    8425              :     {
    8426     14084204 :       FOR_EACH_EDGE (e, ei, bb->succs)
    8427              :         {
    8428      7042102 :           if (!(e->flags & EDGE_EXECUTABLE))
    8429              :             {
    8430      4798323 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8431         6167 :                 fprintf (dump_file,
    8432              :                          "marking outgoing edge %d -> %d executable\n",
    8433         6167 :                          e->src->index, e->dest->index);
    8434      4798323 :               e->flags |= EDGE_EXECUTABLE;
    8435      4798323 :               e->dest->flags |= BB_EXECUTABLE;
    8436              :             }
    8437      2243779 :           else if (!(e->dest->flags & BB_EXECUTABLE))
    8438              :             {
    8439            0 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8440            0 :                 fprintf (dump_file,
    8441              :                          "marking destination block %d reachable\n",
    8442              :                          e->dest->index);
    8443            0 :               e->dest->flags |= BB_EXECUTABLE;
    8444              :             }
    8445              :         }
    8446              :     }
    8447    122450252 :   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
    8448    482255234 :        !gsi_end_p (gsi); gsi_next (&gsi))
    8449              :     {
    8450    421030108 :       ssa_op_iter i;
    8451    421030108 :       tree op;
    8452    421030108 :       if (!bb_visited)
    8453              :         {
    8454    481574206 :           FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
    8455              :             {
    8456    135912000 :               vn_ssa_aux_t op_info = VN_INFO (op);
    8457    135912000 :               gcc_assert (!op_info->visited);
    8458    135912000 :               op_info->valnum = VN_TOP;
    8459    135912000 :               op_info->visited = true;
    8460              :             }
    8461              : 
    8462              :           /* We somehow have to deal with uses that are not defined
    8463              :              in the processed region.  Forcing unvisited uses to
    8464              :              varying here doesn't play well with def-use following during
    8465              :              expression simplification, so we deal with this by checking
    8466              :              the visited flag in SSA_VAL.  */
    8467              :         }
    8468              : 
    8469    421030108 :       visit_stmt (gsi_stmt (gsi));
    8470              : 
    8471    421030108 :       gimple *last = gsi_stmt (gsi);
    8472    421030108 :       e = NULL;
    8473    421030108 :       switch (gimple_code (last))
    8474              :         {
    8475       114341 :         case GIMPLE_SWITCH:
    8476       114341 :           e = find_taken_edge (bb, vn_valueize (gimple_switch_index
    8477       114341 :                                                 (as_a <gswitch *> (last))));
    8478       114341 :           break;
    8479     24583374 :         case GIMPLE_COND:
    8480     24583374 :           {
    8481     24583374 :             tree lhs = vn_valueize (gimple_cond_lhs (last));
    8482     24583374 :             tree rhs = vn_valueize (gimple_cond_rhs (last));
    8483     24583374 :             tree_code cmpcode = gimple_cond_code (last);
    8484              :             /* Canonicalize the comparison if needed, putting
    8485              :                the constant in the rhs.  */
    8486     24583374 :             if (tree_swap_operands_p (lhs, rhs))
    8487              :               {
    8488       837049 :                 std::swap (lhs, rhs);
    8489       837049 :                 cmpcode = swap_tree_comparison (cmpcode);
    8490              :                }
    8491     24583374 :             tree val = gimple_simplify (cmpcode,
    8492              :                                         boolean_type_node, lhs, rhs,
    8493              :                                         NULL, vn_valueize);
    8494              :             /* If the condition didn't simplfy see if we have recorded
    8495              :                an expression from sofar taken edges.  */
    8496     24583374 :             if (! val || TREE_CODE (val) != INTEGER_CST)
    8497              :               {
    8498     22758330 :                 vn_nary_op_t vnresult;
    8499     22758330 :                 tree ops[2];
    8500     22758330 :                 ops[0] = lhs;
    8501     22758330 :                 ops[1] = rhs;
    8502     22758330 :                 val = vn_nary_op_lookup_pieces (2, cmpcode,
    8503              :                                                 boolean_type_node, ops,
    8504              :                                                 &vnresult);
    8505              :                 /* Got back a ssa name, then try looking up `val != 0`
    8506              :                    as it might have been recorded that way.  */
    8507     22758330 :                 if (val && TREE_CODE (val) == SSA_NAME)
    8508              :                   {
    8509       136410 :                     ops[0] = val;
    8510       136410 :                     ops[1] = build_zero_cst (TREE_TYPE (val));
    8511       136410 :                     val = vn_nary_op_lookup_pieces (2, NE_EXPR,
    8512              :                                                     boolean_type_node, ops,
    8513              :                                                     &vnresult);
    8514              :                   }
    8515              :                 /* Did we get a predicated value?  */
    8516     22758314 :                 if (! val && vnresult && vnresult->predicated_values)
    8517              :                   {
    8518      1340106 :                     val = vn_nary_op_get_predicated_value (vnresult, bb);
    8519      1340106 :                     if (val && dump_file && (dump_flags & TDF_DETAILS))
    8520              :                       {
    8521            2 :                         fprintf (dump_file, "Got predicated value ");
    8522            2 :                         print_generic_expr (dump_file, val, TDF_NONE);
    8523            2 :                         fprintf (dump_file, " for ");
    8524            2 :                         print_gimple_stmt (dump_file, last, TDF_SLIM);
    8525              :                       }
    8526              :                   }
    8527              :               }
    8528     22758330 :             if (val)
    8529      2174533 :               e = find_taken_edge (bb, val);
    8530     24583374 :             if (! e)
    8531              :               {
    8532              :                 /* If we didn't manage to compute the taken edge then
    8533              :                    push predicated expressions for the condition itself
    8534              :                    and related conditions to the hashtables.  This allows
    8535              :                    simplification of redundant conditions which is
    8536              :                    important as early cleanup.  */
    8537     22408841 :                 edge true_e, false_e;
    8538     22408841 :                 extract_true_false_edges_from_block (bb, &true_e, &false_e);
    8539       542934 :                 if ((do_region && bitmap_bit_p (exit_bbs, true_e->dest->index))
    8540     22636428 :                     || !can_track_predicate_on_edge (true_e))
    8541      4936765 :                   true_e = NULL;
    8542       542934 :                 if ((do_region && bitmap_bit_p (exit_bbs, false_e->dest->index))
    8543     22610453 :                     || !can_track_predicate_on_edge (false_e))
    8544      5836222 :                   false_e = NULL;
    8545     22408841 :                 insert_predicates_for_cond (cmpcode, lhs, rhs, true_e, false_e);
    8546              :               }
    8547              :             break;
    8548              :           }
    8549         1390 :         case GIMPLE_GOTO:
    8550         1390 :           e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
    8551         1390 :           break;
    8552              :         default:
    8553              :           e = NULL;
    8554              :         }
    8555    421030108 :       if (e)
    8556              :         {
    8557      2178126 :           todo = TODO_cleanup_cfg;
    8558      2178126 :           if (!(e->flags & EDGE_EXECUTABLE))
    8559              :             {
    8560      1719265 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8561           35 :                 fprintf (dump_file,
    8562              :                          "marking known outgoing %sedge %d -> %d executable\n",
    8563           35 :                          e->flags & EDGE_DFS_BACK ? "back-" : "",
    8564           35 :                          e->src->index, e->dest->index);
    8565      1719265 :               e->flags |= EDGE_EXECUTABLE;
    8566      1719265 :               e->dest->flags |= BB_EXECUTABLE;
    8567              :             }
    8568       458861 :           else if (!(e->dest->flags & BB_EXECUTABLE))
    8569              :             {
    8570        27220 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8571            1 :                 fprintf (dump_file,
    8572              :                          "marking destination block %d reachable\n",
    8573              :                          e->dest->index);
    8574        27220 :               e->dest->flags |= BB_EXECUTABLE;
    8575              :             }
    8576              :         }
    8577    837703964 :       else if (gsi_one_before_end_p (gsi))
    8578              :         {
    8579    127642596 :           FOR_EACH_EDGE (e, ei, bb->succs)
    8580              :             {
    8581     75637698 :               if (!(e->flags & EDGE_EXECUTABLE))
    8582              :                 {
    8583     55525467 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    8584        18478 :                     fprintf (dump_file,
    8585              :                              "marking outgoing edge %d -> %d executable\n",
    8586        18478 :                              e->src->index, e->dest->index);
    8587     55525467 :                   e->flags |= EDGE_EXECUTABLE;
    8588     55525467 :                   e->dest->flags |= BB_EXECUTABLE;
    8589              :                 }
    8590     20112231 :               else if (!(e->dest->flags & BB_EXECUTABLE))
    8591              :                 {
    8592      2502292 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    8593         5987 :                     fprintf (dump_file,
    8594              :                              "marking destination block %d reachable\n",
    8595              :                              e->dest->index);
    8596      2502292 :                   e->dest->flags |= BB_EXECUTABLE;
    8597              :                 }
    8598              :             }
    8599              :         }
    8600              : 
    8601              :       /* Eliminate.  That also pushes to avail.  */
    8602    421030108 :       if (eliminate && ! iterate)
    8603    105784856 :         avail.eliminate_stmt (bb, &gsi);
    8604              :       else
    8605              :         /* If not eliminating, make all not already available defs
    8606              :            available.  But avoid picking up dead defs.  */
    8607    394615830 :         FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
    8608     79370578 :           if (! has_zero_uses (op)
    8609     79370578 :               && ! avail.eliminate_avail (bb, op))
    8610     60588811 :             avail.eliminate_push_avail (bb, op);
    8611              :     }
    8612              : 
    8613              :   /* Eliminate in destination PHI arguments.  Always substitute in dest
    8614              :      PHIs, even for non-executable edges.  This handles region
    8615              :      exits PHIs.  */
    8616     61225126 :   if (!iterate && eliminate)
    8617     32935735 :     FOR_EACH_EDGE (e, ei, bb->succs)
    8618     19618477 :       for (gphi_iterator gsi = gsi_start_phis (e->dest);
    8619     38097442 :            !gsi_end_p (gsi); gsi_next (&gsi))
    8620              :         {
    8621     18478965 :           gphi *phi = gsi.phi ();
    8622     18478965 :           use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
    8623     18478965 :           tree arg = USE_FROM_PTR (use_p);
    8624     28076844 :           if (TREE_CODE (arg) != SSA_NAME
    8625     18478965 :               || virtual_operand_p (arg))
    8626      9597879 :             continue;
    8627      8881086 :           tree sprime;
    8628      8881086 :           if (SSA_NAME_IS_DEFAULT_DEF (arg))
    8629              :             {
    8630       118469 :               sprime = SSA_VAL (arg);
    8631       118469 :               gcc_assert (TREE_CODE (sprime) != SSA_NAME
    8632              :                           || SSA_NAME_IS_DEFAULT_DEF (sprime));
    8633              :             }
    8634              :           else
    8635              :             /* Look for sth available at the definition block of the argument.
    8636              :                This avoids inconsistencies between availability there which
    8637              :                decides if the stmt can be removed and availability at the
    8638              :                use site.  The SSA property ensures that things available
    8639              :                at the definition are also available at uses.  */
    8640      8762617 :             sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
    8641              :                                             arg);
    8642      8881086 :           if (sprime
    8643      8881086 :               && sprime != arg
    8644      8881086 :               && may_propagate_copy (arg, sprime, !(e->flags & EDGE_ABNORMAL)))
    8645      1509740 :             propagate_value (use_p, sprime);
    8646              :         }
    8647              : 
    8648     61225126 :   vn_context_bb = NULL;
    8649     61225126 :   return todo;
    8650              : }
    8651              : 
    8652              : /* Unwind state per basic-block.  */
    8653              : 
    8654              : struct unwind_state
    8655              : {
    8656              :   /* Times this block has been visited.  */
    8657              :   unsigned visited;
    8658              :   /* Whether to handle this as iteration point or whether to treat
    8659              :      incoming backedge PHI values as varying.  */
    8660              :   bool iterate;
    8661              :   /* Maximum RPO index this block is reachable from.  */
    8662              :   int max_rpo;
    8663              :   /* Unwind state.  */
    8664              :   void *ob_top;
    8665              :   vn_reference_t ref_top;
    8666              :   vn_phi_t phi_top;
    8667              :   vn_nary_op_t nary_top;
    8668              :   vn_avail *avail_top;
    8669              : };
    8670              : 
    8671              : /* Unwind the RPO VN state for iteration.  */
    8672              : 
    8673              : static void
    8674      1902403 : do_unwind (unwind_state *to, rpo_elim &avail)
    8675              : {
    8676      1902403 :   gcc_assert (to->iterate);
    8677     34826094 :   for (; last_inserted_nary != to->nary_top;
    8678     32923691 :        last_inserted_nary = last_inserted_nary->next)
    8679              :     {
    8680     32923691 :       vn_nary_op_t *slot;
    8681     32923691 :       slot = valid_info->nary->find_slot_with_hash
    8682     32923691 :         (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
    8683              :       /* Predication causes the need to restore previous state.  */
    8684     32923691 :       if ((*slot)->unwind_to)
    8685      6641596 :         *slot = (*slot)->unwind_to;
    8686              :       else
    8687     26282095 :         valid_info->nary->clear_slot (slot);
    8688              :     }
    8689      7481753 :   for (; last_inserted_phi != to->phi_top;
    8690      5579350 :        last_inserted_phi = last_inserted_phi->next)
    8691              :     {
    8692      5579350 :       vn_phi_t *slot;
    8693      5579350 :       slot = valid_info->phis->find_slot_with_hash
    8694      5579350 :         (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
    8695      5579350 :       valid_info->phis->clear_slot (slot);
    8696              :     }
    8697     15130818 :   for (; last_inserted_ref != to->ref_top;
    8698     13228415 :        last_inserted_ref = last_inserted_ref->next)
    8699              :     {
    8700     13228415 :       vn_reference_t *slot;
    8701     13228415 :       slot = valid_info->references->find_slot_with_hash
    8702     13228415 :         (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
    8703     13228415 :       (*slot)->operands.release ();
    8704     13228415 :       valid_info->references->clear_slot (slot);
    8705              :     }
    8706      1902403 :   obstack_free (&vn_tables_obstack, to->ob_top);
    8707              : 
    8708              :   /* Prune [rpo_idx, ] from avail.  */
    8709     20636245 :   for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
    8710              :     {
    8711     18733842 :       vn_ssa_aux_t val = last_pushed_avail;
    8712     18733842 :       vn_avail *av = val->avail;
    8713     18733842 :       val->avail = av->next;
    8714     18733842 :       last_pushed_avail = av->next_undo;
    8715     18733842 :       av->next = avail.m_avail_freelist;
    8716     18733842 :       avail.m_avail_freelist = av;
    8717              :     }
    8718      1902403 : }
    8719              : 
    8720              : /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
    8721              :    If ITERATE is true then treat backedges optimistically as not
    8722              :    executed and iterate.  If ELIMINATE is true then perform
    8723              :    elimination, otherwise leave that to the caller.  If SKIP_ENTRY_PHIS
    8724              :    is true then force PHI nodes in ENTRY->dest to VARYING.  */
    8725              : 
    8726              : static unsigned
    8727      6120381 : do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
    8728              :              bool iterate, bool eliminate, bool skip_entry_phis,
    8729              :              vn_lookup_kind kind)
    8730              : {
    8731      6120381 :   unsigned todo = 0;
    8732      6120381 :   default_vn_walk_kind = kind;
    8733              : 
    8734              :   /* We currently do not support region-based iteration when
    8735              :      elimination is requested.  */
    8736      6120381 :   gcc_assert (!entry || !iterate || !eliminate);
    8737              :   /* When iterating we need loop info up-to-date.  */
    8738      6120381 :   gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
    8739              : 
    8740      6120381 :   bool do_region = entry != NULL;
    8741      6120381 :   if (!do_region)
    8742              :     {
    8743      5433343 :       entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
    8744      5433343 :       exit_bbs = BITMAP_ALLOC (NULL);
    8745      5433343 :       bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    8746              :     }
    8747              : 
    8748              :   /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
    8749              :      re-mark those that are contained in the region.  */
    8750      6120381 :   edge_iterator ei;
    8751      6120381 :   edge e;
    8752     12300586 :   FOR_EACH_EDGE (e, ei, entry->dest->preds)
    8753      6180205 :     e->flags &= ~EDGE_DFS_BACK;
    8754              : 
    8755      6120381 :   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
    8756      6120381 :   auto_vec<std::pair<int, int> > toplevel_scc_extents;
    8757      6120381 :   int n = rev_post_order_and_mark_dfs_back_seme
    8758      8005351 :     (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
    8759              : 
    8760      6120381 :   if (!do_region)
    8761      5433343 :     BITMAP_FREE (exit_bbs);
    8762              : 
    8763              :   /* If there are any non-DFS_BACK edges into entry->dest skip
    8764              :      processing PHI nodes for that block.  This supports
    8765              :      value-numbering loop bodies w/o the actual loop.  */
    8766     12300585 :   FOR_EACH_EDGE (e, ei, entry->dest->preds)
    8767      6180205 :     if (e != entry
    8768        59824 :         && !(e->flags & EDGE_DFS_BACK))
    8769              :       break;
    8770      6120381 :   if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
    8771            0 :     fprintf (dump_file, "Region does not contain all edges into "
    8772              :              "the entry block, skipping its PHIs.\n");
    8773      6120381 :   skip_entry_phis |= e != NULL;
    8774              : 
    8775      6120381 :   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
    8776     56375103 :   for (int i = 0; i < n; ++i)
    8777     50254722 :     bb_to_rpo[rpo[i]] = i;
    8778      6120381 :   vn_bb_to_rpo = bb_to_rpo;
    8779              : 
    8780      6120381 :   unwind_state *rpo_state = XNEWVEC (unwind_state, n);
    8781              : 
    8782      6120381 :   rpo_elim avail (entry->dest);
    8783      6120381 :   rpo_avail = &avail;
    8784              : 
    8785              :   /* Verify we have no extra entries into the region.  */
    8786      6120381 :   if (flag_checking && do_region)
    8787              :     {
    8788       687032 :       auto_bb_flag bb_in_region (fn);
    8789      2078353 :       for (int i = 0; i < n; ++i)
    8790              :         {
    8791      1391321 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8792      1391321 :           bb->flags |= bb_in_region;
    8793              :         }
    8794              :       /* We can't merge the first two loops because we cannot rely
    8795              :          on EDGE_DFS_BACK for edges not within the region.  But if
    8796              :          we decide to always have the bb_in_region flag we can
    8797              :          do the checking during the RPO walk itself (but then it's
    8798              :          also easy to handle MEME conservatively).  */
    8799      2078353 :       for (int i = 0; i < n; ++i)
    8800              :         {
    8801      1391321 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8802      1391321 :           edge e;
    8803      1391321 :           edge_iterator ei;
    8804      3042105 :           FOR_EACH_EDGE (e, ei, bb->preds)
    8805      1650784 :             gcc_assert (e == entry
    8806              :                         || (skip_entry_phis && bb == entry->dest)
    8807              :                         || (e->src->flags & bb_in_region));
    8808              :         }
    8809      2078353 :       for (int i = 0; i < n; ++i)
    8810              :         {
    8811      1391321 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8812      1391321 :           bb->flags &= ~bb_in_region;
    8813              :         }
    8814       687032 :     }
    8815              : 
    8816              :   /* Create the VN state.  For the initial size of the various hashtables
    8817              :      use a heuristic based on region size and number of SSA names.  */
    8818      6120381 :   unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
    8819      6120381 :                           / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
    8820      6120381 :   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
    8821      6120381 :   next_value_id = 1;
    8822      6120381 :   next_constant_value_id = -1;
    8823              : 
    8824      6120381 :   vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
    8825      6120381 :   gcc_obstack_init (&vn_ssa_aux_obstack);
    8826              : 
    8827      6120381 :   gcc_obstack_init (&vn_tables_obstack);
    8828      6120381 :   gcc_obstack_init (&vn_tables_insert_obstack);
    8829      6120381 :   valid_info = XCNEW (struct vn_tables_s);
    8830      6120381 :   allocate_vn_table (valid_info, region_size);
    8831      6120381 :   last_inserted_ref = NULL;
    8832      6120381 :   last_inserted_phi = NULL;
    8833      6120381 :   last_inserted_nary = NULL;
    8834      6120381 :   last_pushed_avail = NULL;
    8835              : 
    8836      6120381 :   vn_valueize = rpo_vn_valueize;
    8837              : 
    8838              :   /* Initialize the unwind state and edge/BB executable state.  */
    8839      6120381 :   unsigned curr_scc = 0;
    8840     56375103 :   for (int i = 0; i < n; ++i)
    8841              :     {
    8842     50254722 :       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    8843     50254722 :       rpo_state[i].visited = 0;
    8844     50254722 :       rpo_state[i].max_rpo = i;
    8845     58736632 :       if (!iterate && curr_scc < toplevel_scc_extents.length ())
    8846              :         {
    8847      7072372 :           if (i >= toplevel_scc_extents[curr_scc].first
    8848      7072372 :               && i <= toplevel_scc_extents[curr_scc].second)
    8849      3882174 :             rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
    8850      7072372 :           if (i == toplevel_scc_extents[curr_scc].second)
    8851       732068 :             curr_scc++;
    8852              :         }
    8853     50254722 :       bb->flags &= ~BB_EXECUTABLE;
    8854     50254722 :       bool has_backedges = false;
    8855     50254722 :       edge e;
    8856     50254722 :       edge_iterator ei;
    8857    119257678 :       FOR_EACH_EDGE (e, ei, bb->preds)
    8858              :         {
    8859     69002956 :           if (e->flags & EDGE_DFS_BACK)
    8860      2846396 :             has_backedges = true;
    8861     69002956 :           e->flags &= ~EDGE_EXECUTABLE;
    8862     69002956 :           if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
    8863     69002956 :             continue;
    8864              :         }
    8865     50254722 :       rpo_state[i].iterate = iterate && has_backedges;
    8866              :     }
    8867      6120381 :   entry->flags |= EDGE_EXECUTABLE;
    8868      6120381 :   entry->dest->flags |= BB_EXECUTABLE;
    8869              : 
    8870              :   /* As heuristic to improve compile-time we handle only the N innermost
    8871              :      loops and the outermost one optimistically.  */
    8872      6120381 :   if (iterate)
    8873              :     {
    8874      4235411 :       unsigned max_depth = param_rpo_vn_max_loop_depth;
    8875     14251081 :       for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
    8876      1547239 :         if (loop_depth (loop) > max_depth)
    8877         2092 :           for (unsigned i = 2;
    8878         8966 :                i < loop_depth (loop) - max_depth; ++i)
    8879              :             {
    8880         2092 :               basic_block header = superloop_at_depth (loop, i)->header;
    8881         2092 :               bool non_latch_backedge = false;
    8882         2092 :               edge e;
    8883         2092 :               edge_iterator ei;
    8884         6307 :               FOR_EACH_EDGE (e, ei, header->preds)
    8885         4215 :                 if (e->flags & EDGE_DFS_BACK)
    8886              :                   {
    8887              :                     /* There can be a non-latch backedge into the header
    8888              :                        which is part of an outer irreducible region.  We
    8889              :                        cannot avoid iterating this block then.  */
    8890         2123 :                     if (!dominated_by_p (CDI_DOMINATORS,
    8891         2123 :                                          e->src, e->dest))
    8892              :                       {
    8893           12 :                         if (dump_file && (dump_flags & TDF_DETAILS))
    8894            0 :                           fprintf (dump_file, "non-latch backedge %d -> %d "
    8895              :                                    "forces iteration of loop %d\n",
    8896            0 :                                    e->src->index, e->dest->index, loop->num);
    8897              :                         non_latch_backedge = true;
    8898              :                       }
    8899              :                     else
    8900         2111 :                       e->flags |= EDGE_EXECUTABLE;
    8901              :                   }
    8902         2092 :               rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
    8903      4235411 :             }
    8904              :     }
    8905              : 
    8906      6120381 :   uint64_t nblk = 0;
    8907      6120381 :   int idx = 0;
    8908      4235411 :   if (iterate)
    8909              :     /* Go and process all blocks, iterating as necessary.  */
    8910     48701131 :     do
    8911              :       {
    8912     48701131 :         basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
    8913              : 
    8914              :         /* If the block has incoming backedges remember unwind state.  This
    8915              :            is required even for non-executable blocks since in irreducible
    8916              :            regions we might reach them via the backedge and re-start iterating
    8917              :            from there.
    8918              :            Note we can individually mark blocks with incoming backedges to
    8919              :            not iterate where we then handle PHIs conservatively.  We do that
    8920              :            heuristically to reduce compile-time for degenerate cases.  */
    8921     48701131 :         if (rpo_state[idx].iterate)
    8922              :           {
    8923      4385856 :             rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
    8924      4385856 :             rpo_state[idx].ref_top = last_inserted_ref;
    8925      4385856 :             rpo_state[idx].phi_top = last_inserted_phi;
    8926      4385856 :             rpo_state[idx].nary_top = last_inserted_nary;
    8927      4385856 :             rpo_state[idx].avail_top
    8928      4385856 :               = last_pushed_avail ? last_pushed_avail->avail : NULL;
    8929              :           }
    8930              : 
    8931     48701131 :         if (!(bb->flags & BB_EXECUTABLE))
    8932              :           {
    8933       899053 :             if (dump_file && (dump_flags & TDF_DETAILS))
    8934            2 :               fprintf (dump_file, "Block %d: BB%d found not executable\n",
    8935              :                        idx, bb->index);
    8936       899053 :             idx++;
    8937      2801456 :             continue;
    8938              :           }
    8939              : 
    8940     47802078 :         if (dump_file && (dump_flags & TDF_DETAILS))
    8941          334 :           fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
    8942     47802078 :         nblk++;
    8943     95604156 :         todo |= process_bb (avail, bb,
    8944     47802078 :                             rpo_state[idx].visited != 0,
    8945              :                             rpo_state[idx].iterate,
    8946              :                             iterate, eliminate, do_region, exit_bbs, false);
    8947     47802078 :         rpo_state[idx].visited++;
    8948              : 
    8949              :         /* Verify if changed values flow over executable outgoing backedges
    8950              :            and those change destination PHI values (that's the thing we
    8951              :            can easily verify).  Reduce over all such edges to the farthest
    8952              :            away PHI.  */
    8953     47802078 :         int iterate_to = -1;
    8954     47802078 :         edge_iterator ei;
    8955     47802078 :         edge e;
    8956    115104411 :         FOR_EACH_EDGE (e, ei, bb->succs)
    8957     67302333 :           if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
    8958              :               == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
    8959      4395736 :               && rpo_state[bb_to_rpo[e->dest->index]].iterate)
    8960              :             {
    8961      4393002 :               int destidx = bb_to_rpo[e->dest->index];
    8962      4393002 :               if (!rpo_state[destidx].visited)
    8963              :                 {
    8964          135 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    8965            0 :                     fprintf (dump_file, "Unvisited destination %d\n",
    8966              :                              e->dest->index);
    8967          135 :                   if (iterate_to == -1 || destidx < iterate_to)
    8968          135 :                     iterate_to = destidx;
    8969          135 :                   continue;
    8970              :                 }
    8971      4392867 :               if (dump_file && (dump_flags & TDF_DETAILS))
    8972           53 :                 fprintf (dump_file, "Looking for changed values of backedge"
    8973              :                          " %d->%d destination PHIs\n",
    8974           53 :                          e->src->index, e->dest->index);
    8975      4392867 :               vn_context_bb = e->dest;
    8976      4392867 :               gphi_iterator gsi;
    8977      4392867 :               for (gsi = gsi_start_phis (e->dest);
    8978     10062031 :                    !gsi_end_p (gsi); gsi_next (&gsi))
    8979              :                 {
    8980      7571724 :                   bool inserted = false;
    8981              :                   /* While we'd ideally just iterate on value changes
    8982              :                      we CSE PHIs and do that even across basic-block
    8983              :                      boundaries.  So even hashtable state changes can
    8984              :                      be important (which is roughly equivalent to
    8985              :                      PHI argument value changes).  To not excessively
    8986              :                      iterate because of that we track whether a PHI
    8987              :                      was CSEd to with GF_PLF_1.  */
    8988      7571724 :                   bool phival_changed;
    8989      7571724 :                   if ((phival_changed = visit_phi (gsi.phi (),
    8990              :                                                    &inserted, false))
    8991      8946828 :                       || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
    8992              :                     {
    8993      1902560 :                       if (!phival_changed
    8994      1902560 :                           && dump_file && (dump_flags & TDF_DETAILS))
    8995            0 :                         fprintf (dump_file, "PHI was CSEd and hashtable "
    8996              :                                  "state (changed)\n");
    8997      1902560 :                       if (iterate_to == -1 || destidx < iterate_to)
    8998      1902475 :                         iterate_to = destidx;
    8999      1902560 :                       break;
    9000              :                     }
    9001              :                 }
    9002      4392867 :               vn_context_bb = NULL;
    9003              :             }
    9004     47802078 :         if (iterate_to != -1)
    9005              :           {
    9006      1902403 :             do_unwind (&rpo_state[iterate_to], avail);
    9007      1902403 :             idx = iterate_to;
    9008      1902403 :             if (dump_file && (dump_flags & TDF_DETAILS))
    9009           20 :               fprintf (dump_file, "Iterating to %d BB%d\n",
    9010           20 :                        iterate_to, rpo[iterate_to]);
    9011      1902403 :             continue;
    9012              :           }
    9013              : 
    9014     45899675 :         idx++;
    9015              :       }
    9016     48701131 :     while (idx < n);
    9017              : 
    9018              :   else /* !iterate */
    9019              :     {
    9020              :       /* Process all blocks greedily with a worklist that enforces RPO
    9021              :          processing of reachable blocks.  */
    9022      1884970 :       auto_bitmap worklist;
    9023      1884970 :       bitmap_set_bit (worklist, 0);
    9024     17192988 :       while (!bitmap_empty_p (worklist))
    9025              :         {
    9026     13423048 :           int idx = bitmap_clear_first_set_bit (worklist);
    9027     13423048 :           basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
    9028     13423048 :           gcc_assert ((bb->flags & BB_EXECUTABLE)
    9029              :                       && !rpo_state[idx].visited);
    9030              : 
    9031     13423048 :           if (dump_file && (dump_flags & TDF_DETAILS))
    9032        35043 :             fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
    9033              : 
    9034              :           /* When we run into predecessor edges where we cannot trust its
    9035              :              executable state mark them executable so PHI processing will
    9036              :              be conservative.
    9037              :              ???  Do we need to force arguments flowing over that edge
    9038              :              to be varying or will they even always be?  */
    9039     13423048 :           edge_iterator ei;
    9040     13423048 :           edge e;
    9041     32536692 :           FOR_EACH_EDGE (e, ei, bb->preds)
    9042     19113644 :             if (!(e->flags & EDGE_EXECUTABLE)
    9043      1022155 :                 && (bb == entry->dest
    9044       965530 :                     || (!rpo_state[bb_to_rpo[e->src->index]].visited
    9045       929594 :                         && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
    9046              :                             >= (int)idx))))
    9047              :               {
    9048       963111 :                 if (dump_file && (dump_flags & TDF_DETAILS))
    9049        11241 :                   fprintf (dump_file, "Cannot trust state of predecessor "
    9050              :                            "edge %d -> %d, marking executable\n",
    9051        11241 :                            e->src->index, e->dest->index);
    9052       963111 :                 e->flags |= EDGE_EXECUTABLE;
    9053              :               }
    9054              : 
    9055     13423048 :           nblk++;
    9056     13423048 :           todo |= process_bb (avail, bb, false, false, false, eliminate,
    9057              :                               do_region, exit_bbs,
    9058     13423048 :                               skip_entry_phis && bb == entry->dest);
    9059     13423048 :           rpo_state[idx].visited++;
    9060              : 
    9061     33170717 :           FOR_EACH_EDGE (e, ei, bb->succs)
    9062     19747669 :             if ((e->flags & EDGE_EXECUTABLE)
    9063     19671630 :                 && e->dest->index != EXIT_BLOCK
    9064     18503339 :                 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
    9065     36911995 :                 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
    9066     16206519 :               bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
    9067              :         }
    9068      1884970 :     }
    9069              : 
    9070              :   /* If statistics or dump file active.  */
    9071      6120381 :   int nex = 0;
    9072      6120381 :   unsigned max_visited = 1;
    9073     56375103 :   for (int i = 0; i < n; ++i)
    9074              :     {
    9075     50254722 :       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
    9076     50254722 :       if (bb->flags & BB_EXECUTABLE)
    9077     49687777 :         nex++;
    9078     50254722 :       statistics_histogram_event (cfun, "RPO block visited times",
    9079     50254722 :                                   rpo_state[i].visited);
    9080     50254722 :       if (rpo_state[i].visited > max_visited)
    9081              :         max_visited = rpo_state[i].visited;
    9082              :     }
    9083      6120381 :   unsigned nvalues = 0, navail = 0;
    9084    167834783 :   for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
    9085    329549185 :        i != vn_ssa_aux_hash->end (); ++i)
    9086              :     {
    9087    161714402 :       nvalues++;
    9088    161714402 :       vn_avail *av = (*i)->avail;
    9089    239460300 :       while (av)
    9090              :         {
    9091     77745898 :           navail++;
    9092     77745898 :           av = av->next;
    9093              :         }
    9094              :     }
    9095      6120381 :   statistics_counter_event (cfun, "RPO blocks", n);
    9096      6120381 :   statistics_counter_event (cfun, "RPO blocks visited", nblk);
    9097      6120381 :   statistics_counter_event (cfun, "RPO blocks executable", nex);
    9098      6120381 :   statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
    9099      6120381 :   statistics_histogram_event (cfun, "RPO num values", nvalues);
    9100      6120381 :   statistics_histogram_event (cfun, "RPO num avail", navail);
    9101      6120381 :   statistics_histogram_event (cfun, "RPO num lattice",
    9102      6120381 :                               vn_ssa_aux_hash->elements ());
    9103      6120381 :   if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    9104              :     {
    9105        11148 :       fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
    9106              :                " blocks in total discovering %d executable blocks iterating "
    9107              :                "%d.%d times, a block was visited max. %u times\n",
    9108              :                n, nblk, nex,
    9109        11148 :                (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
    9110              :                max_visited);
    9111        11148 :       fprintf (dump_file, "RPO tracked %d values available at %d locations "
    9112              :                "and %" PRIu64 " lattice elements\n",
    9113        11148 :                nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    9114              :     }
    9115              : 
    9116      6120381 :   if (eliminate)
    9117              :     {
    9118              :       /* When !iterate we already performed elimination during the RPO
    9119              :          walk.  */
    9120      5136744 :       if (iterate)
    9121              :         {
    9122              :           /* Elimination for region-based VN needs to be done within the
    9123              :              RPO walk.  */
    9124      3271193 :           gcc_assert (! do_region);
    9125              :           /* Note we can't use avail.walk here because that gets confused
    9126              :              by the existing availability and it will be less efficient
    9127              :              as well.  */
    9128      3271193 :           todo |= eliminate_with_rpo_vn (NULL);
    9129              :         }
    9130              :       else
    9131      1865551 :         todo |= avail.eliminate_cleanup (do_region);
    9132              :     }
    9133              : 
    9134      6120381 :   vn_valueize = NULL;
    9135      6120381 :   rpo_avail = NULL;
    9136      6120381 :   vn_bb_to_rpo = NULL;
    9137              : 
    9138      6120381 :   XDELETEVEC (bb_to_rpo);
    9139      6120381 :   XDELETEVEC (rpo);
    9140      6120381 :   XDELETEVEC (rpo_state);
    9141              : 
    9142      6120381 :   return todo;
    9143      6120381 : }
    9144              : 
    9145              : /* Region-based entry for RPO VN.  Performs value-numbering and elimination
    9146              :    on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
    9147              :    the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
    9148              :    are not considered.
    9149              :    If ITERATE is true then treat backedges optimistically as not
    9150              :    executed and iterate.  If ELIMINATE is true then perform
    9151              :    elimination, otherwise leave that to the caller.
    9152              :    If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
    9153              :    KIND specifies the amount of work done for handling memory operations.  */
    9154              : 
    9155              : unsigned
    9156       706457 : do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
    9157              :            bool iterate, bool eliminate, bool skip_entry_phis,
    9158              :            vn_lookup_kind kind)
    9159              : {
    9160       706457 :   auto_timevar tv (TV_TREE_RPO_VN);
    9161       706457 :   unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
    9162              :                                skip_entry_phis, kind);
    9163       706457 :   free_rpo_vn ();
    9164      1412914 :   return todo;
    9165       706457 : }
    9166              : 
    9167              : 
    9168              : namespace {
    9169              : 
    9170              : const pass_data pass_data_fre =
    9171              : {
    9172              :   GIMPLE_PASS, /* type */
    9173              :   "fre", /* name */
    9174              :   OPTGROUP_NONE, /* optinfo_flags */
    9175              :   TV_TREE_FRE, /* tv_id */
    9176              :   ( PROP_cfg | PROP_ssa ), /* properties_required */
    9177              :   0, /* properties_provided */
    9178              :   0, /* properties_destroyed */
    9179              :   0, /* todo_flags_start */
    9180              :   0, /* todo_flags_finish */
    9181              : };
    9182              : 
    9183              : class pass_fre : public gimple_opt_pass
    9184              : {
    9185              : public:
    9186      1428610 :   pass_fre (gcc::context *ctxt)
    9187      2857220 :     : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
    9188              :   {}
    9189              : 
    9190              :   /* opt_pass methods: */
    9191      1142888 :   opt_pass * clone () final override { return new pass_fre (m_ctxt); }
    9192      1428610 :   void set_pass_param (unsigned int n, bool param) final override
    9193              :     {
    9194      1428610 :       gcc_assert (n == 0);
    9195      1428610 :       may_iterate = param;
    9196      1428610 :     }
    9197      4528398 :   bool gate (function *) final override
    9198              :     {
    9199      4528398 :       return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    9200              :     }
    9201              :   unsigned int execute (function *) final override;
    9202              : 
    9203              : private:
    9204              :   bool may_iterate;
    9205              : }; // class pass_fre
    9206              : 
    9207              : unsigned int
    9208      4449706 : pass_fre::execute (function *fun)
    9209              : {
    9210      4449706 :   unsigned todo = 0;
    9211              : 
    9212              :   /* At -O[1g] use the cheap non-iterating mode.  */
    9213      4449706 :   bool iterate_p = may_iterate && (optimize > 1);
    9214      4449706 :   calculate_dominance_info (CDI_DOMINATORS);
    9215      4449706 :   if (iterate_p)
    9216      3271193 :     loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
    9217              : 
    9218      4449706 :   todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
    9219      4449706 :   free_rpo_vn ();
    9220              : 
    9221      4449706 :   if (iterate_p)
    9222      3271193 :     loop_optimizer_finalize ();
    9223              : 
    9224      4449706 :   if (scev_initialized_p ())
    9225        31819 :     scev_reset_htab ();
    9226              : 
    9227              :   /* For late FRE after IVOPTs and unrolling, see if we can
    9228              :      remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
    9229      4449706 :   if (!may_iterate)
    9230       995811 :     todo |= TODO_update_address_taken;
    9231              : 
    9232      4449706 :   return todo;
    9233              : }
    9234              : 
    9235              : } // anon namespace
    9236              : 
    9237              : gimple_opt_pass *
    9238       285722 : make_pass_fre (gcc::context *ctxt)
    9239              : {
    9240       285722 :   return new pass_fre (ctxt);
    9241              : }
    9242              : 
    9243              : #undef BB_EXECUTABLE
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.