LCOV - code coverage report
Current view: top level - gcc - tree-ssa-alias.cc (source / functions) Coverage Total Hit
Test: gcc.info Lines: 93.5 % 1867 1746
Test Date: 2026-04-20 14:57:17 Functions: 92.2 % 77 71
Legend: Lines:     hit not hit

            Line data    Source code
       1              : /* Alias analysis for trees.
       2              :    Copyright (C) 2004-2026 Free Software Foundation, Inc.
       3              :    Contributed by Diego Novillo <dnovillo@redhat.com>
       4              : 
       5              : This file is part of GCC.
       6              : 
       7              : GCC is free software; you can redistribute it and/or modify
       8              : it under the terms of the GNU General Public License as published by
       9              : the Free Software Foundation; either version 3, or (at your option)
      10              : any later version.
      11              : 
      12              : GCC is distributed in the hope that it will be useful,
      13              : but WITHOUT ANY WARRANTY; without even the implied warranty of
      14              : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
      15              : GNU General Public License for more details.
      16              : 
      17              : You should have received a copy of the GNU General Public License
      18              : along with GCC; see the file COPYING3.  If not see
      19              : <http://www.gnu.org/licenses/>.  */
      20              : 
      21              : #include "config.h"
      22              : #include "system.h"
      23              : #include "coretypes.h"
      24              : #include "backend.h"
      25              : #include "target.h"
      26              : #include "rtl.h"
      27              : #include "tree.h"
      28              : #include "gimple.h"
      29              : #include "timevar.h"  /* for TV_ALIAS_STMT_WALK */
      30              : #include "ssa.h"
      31              : #include "cgraph.h"
      32              : #include "tree-pretty-print.h"
      33              : #include "alias.h"
      34              : #include "fold-const.h"
      35              : #include "langhooks.h"
      36              : #include "dumpfile.h"
      37              : #include "tree-eh.h"
      38              : #include "tree-dfa.h"
      39              : #include "ipa-reference.h"
      40              : #include "varasm.h"
      41              : #include "ipa-modref-tree.h"
      42              : #include "ipa-modref.h"
      43              : #include "attr-fnspec.h"
      44              : #include "errors.h"
      45              : #include "dbgcnt.h"
      46              : #include "gimple-pretty-print.h"
      47              : #include "print-tree.h"
      48              : #include "tree-ssa-alias-compare.h"
      49              : #include "builtins.h"
      50              : #include "internal-fn.h"
      51              : #include "ipa-utils.h"
      52              : 
      53              : /* Broad overview of how alias analysis on gimple works:
      54              : 
      55              :    Statements clobbering or using memory are linked through the
      56              :    virtual operand factored use-def chain.  The virtual operand
      57              :    is unique per function, its symbol is accessible via gimple_vop (cfun).
      58              :    Virtual operands are used for efficiently walking memory statements
      59              :    in the gimple IL and are useful for things like value-numbering as
      60              :    a generation count for memory references.
      61              : 
      62              :    SSA_NAME pointers may have associated points-to information
      63              :    accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
      64              :    points-to information is (re-)computed by the TODO_rebuild_alias
      65              :    pass manager todo.  Points-to information is also used for more
      66              :    precise tracking of call-clobbered and call-used variables and
      67              :    related disambiguations.
      68              : 
      69              :    This file contains functions for disambiguating memory references,
      70              :    the so called alias-oracle and tools for walking of the gimple IL.
      71              : 
      72              :    The main alias-oracle entry-points are
      73              : 
      74              :    bool stmt_may_clobber_ref_p (gimple *, tree)
      75              : 
      76              :      This function queries if a statement may invalidate (parts of)
      77              :      the memory designated by the reference tree argument.
      78              : 
      79              :    bool ref_maybe_used_by_stmt_p (gimple *, tree)
      80              : 
      81              :      This function queries if a statement may need (parts of) the
      82              :      memory designated by the reference tree argument.
      83              : 
      84              :    There are variants of these functions that only handle the call
      85              :    part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
      86              :    Note that these do not disambiguate against a possible call lhs.
      87              : 
      88              :    bool refs_may_alias_p (tree, tree)
      89              : 
      90              :      This function tries to disambiguate two reference trees.
      91              : 
      92              :    bool ptr_deref_may_alias_global_p (tree, bool)
      93              : 
      94              :      This function queries if dereferencing a pointer variable may
      95              :      alias global memory.  If bool argument is true, global memory
      96              :      is considered to also include function local memory that escaped.
      97              : 
      98              :    More low-level disambiguators are available and documented in
      99              :    this file.  Low-level disambiguators dealing with points-to
     100              :    information are in tree-ssa-structalias.cc.  */
     101              : 
     102              : static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
     103              : static bool nonoverlapping_component_refs_p (const_tree, const_tree);
     104              : 
     105              : /* Query statistics for the different low-level disambiguators.
     106              :    A high-level query may trigger multiple of them.  */
     107              : 
     108              : static struct {
     109              :   unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
     110              :   unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
     111              :   unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
     112              :   unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
     113              :   unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
     114              :   unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
     115              :   unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
     116              :   unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
     117              :   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
     118              :   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
     119              :   unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
     120              :   unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
     121              :   unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
     122              :   unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
     123              :   unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
     124              :   unsigned HOST_WIDE_INT modref_use_may_alias;
     125              :   unsigned HOST_WIDE_INT modref_use_no_alias;
     126              :   unsigned HOST_WIDE_INT modref_clobber_may_alias;
     127              :   unsigned HOST_WIDE_INT modref_clobber_no_alias;
     128              :   unsigned HOST_WIDE_INT modref_kill_no;
     129              :   unsigned HOST_WIDE_INT modref_kill_yes;
     130              :   unsigned HOST_WIDE_INT modref_tests;
     131              :   unsigned HOST_WIDE_INT modref_baseptr_tests;
     132              : } alias_stats;
     133              : 
     134              : void
     135            0 : dump_alias_stats (FILE *s)
     136              : {
     137            0 :   fprintf (s, "\nAlias oracle query stats:\n");
     138            0 :   fprintf (s, "  refs_may_alias_p: "
     139              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     140              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     141              :            alias_stats.refs_may_alias_p_no_alias,
     142            0 :            alias_stats.refs_may_alias_p_no_alias
     143            0 :            + alias_stats.refs_may_alias_p_may_alias);
     144            0 :   fprintf (s, "  ref_maybe_used_by_call_p: "
     145              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     146              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     147              :            alias_stats.ref_maybe_used_by_call_p_no_alias,
     148            0 :            alias_stats.refs_may_alias_p_no_alias
     149            0 :            + alias_stats.ref_maybe_used_by_call_p_may_alias);
     150            0 :   fprintf (s, "  call_may_clobber_ref_p: "
     151              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     152              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     153              :            alias_stats.call_may_clobber_ref_p_no_alias,
     154            0 :            alias_stats.call_may_clobber_ref_p_no_alias
     155            0 :            + alias_stats.call_may_clobber_ref_p_may_alias);
     156            0 :   fprintf (s, "  stmt_kills_ref_p: "
     157              :            HOST_WIDE_INT_PRINT_DEC" kills, "
     158              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     159              :            alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
     160            0 :            alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
     161            0 :            + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
     162            0 :   fprintf (s, "  nonoverlapping_component_refs_p: "
     163              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     164              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     165              :            alias_stats.nonoverlapping_component_refs_p_no_alias,
     166            0 :            alias_stats.nonoverlapping_component_refs_p_no_alias
     167            0 :            + alias_stats.nonoverlapping_component_refs_p_may_alias);
     168            0 :   fprintf (s, "  nonoverlapping_refs_since_match_p: "
     169              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     170              :            HOST_WIDE_INT_PRINT_DEC" must overlaps, "
     171              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     172              :            alias_stats.nonoverlapping_refs_since_match_p_no_alias,
     173              :            alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
     174            0 :            alias_stats.nonoverlapping_refs_since_match_p_no_alias
     175            0 :            + alias_stats.nonoverlapping_refs_since_match_p_may_alias
     176            0 :            + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
     177            0 :   fprintf (s, "  aliasing_component_refs_p: "
     178              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     179              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     180              :            alias_stats.aliasing_component_refs_p_no_alias,
     181            0 :            alias_stats.aliasing_component_refs_p_no_alias
     182            0 :            + alias_stats.aliasing_component_refs_p_may_alias);
     183            0 :   dump_alias_stats_in_alias_c (s);
     184            0 :   fprintf (s, "\nModref stats:\n");
     185            0 :   fprintf (s, "  modref kill: "
     186              :            HOST_WIDE_INT_PRINT_DEC" kills, "
     187              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     188              :            alias_stats.modref_kill_yes,
     189            0 :            alias_stats.modref_kill_yes
     190            0 :            + alias_stats.modref_kill_no);
     191            0 :   fprintf (s, "  modref use: "
     192              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     193              :            HOST_WIDE_INT_PRINT_DEC" queries\n",
     194              :            alias_stats.modref_use_no_alias,
     195            0 :            alias_stats.modref_use_no_alias
     196            0 :            + alias_stats.modref_use_may_alias);
     197            0 :   fprintf (s, "  modref clobber: "
     198              :            HOST_WIDE_INT_PRINT_DEC" disambiguations, "
     199              :            HOST_WIDE_INT_PRINT_DEC" queries\n"
     200              :            "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
     201              :            "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
     202              :            alias_stats.modref_clobber_no_alias,
     203              :            alias_stats.modref_clobber_no_alias
     204              :            + alias_stats.modref_clobber_may_alias,
     205              :            alias_stats.modref_tests,
     206            0 :            ((double)alias_stats.modref_tests)
     207              :            / (alias_stats.modref_clobber_no_alias
     208              :               + alias_stats.modref_clobber_may_alias),
     209              :            alias_stats.modref_baseptr_tests,
     210            0 :            ((double)alias_stats.modref_baseptr_tests)
     211            0 :            / (alias_stats.modref_clobber_no_alias
     212            0 :               + alias_stats.modref_clobber_may_alias));
     213            0 : }
     214              : 
     215              : 
     216              : /* Return true, if dereferencing PTR may alias with a global variable.
     217              :    When ESCAPED_LOCAL_P is true escaped local memory is also considered
     218              :    global.  */
     219              : 
     220              : bool
     221     57847364 : ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
     222              : {
     223     57847364 :   struct ptr_info_def *pi;
     224              : 
     225              :   /* If we end up with a pointer constant here that may point
     226              :      to global memory.  */
     227     57847364 :   if (TREE_CODE (ptr) != SSA_NAME)
     228              :     return true;
     229              : 
     230     57841283 :   pi = SSA_NAME_PTR_INFO (ptr);
     231              : 
     232              :   /* If we do not have points-to information for this variable,
     233              :      we have to punt.  */
     234     57841283 :   if (!pi)
     235              :     return true;
     236              : 
     237              :   /* ???  This does not use TBAA to prune globals ptr may not access.  */
     238     46186875 :   return pt_solution_includes_global (&pi->pt, escaped_local_p);
     239              : }
     240              : 
     241              : /* Return true if dereferencing PTR may alias DECL.
     242              :    The caller is responsible for applying TBAA to see if PTR
     243              :    may access DECL at all.  */
     244              : 
     245              : static bool
     246    212329137 : ptr_deref_may_alias_decl_p (tree ptr, tree decl)
     247              : {
     248    212329137 :   struct ptr_info_def *pi;
     249              : 
     250              :   /* Conversions are irrelevant for points-to information and
     251              :      data-dependence analysis can feed us those.  */
     252    212329137 :   STRIP_NOPS (ptr);
     253              : 
     254              :   /* Anything we do not explicilty handle aliases.  */
     255    212329137 :   if ((TREE_CODE (ptr) != SSA_NAME
     256      2292543 :        && TREE_CODE (ptr) != ADDR_EXPR
     257      1095374 :        && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
     258    211233763 :       || !POINTER_TYPE_P (TREE_TYPE (ptr))
     259    423562516 :       || (!VAR_P (decl)
     260              :           && TREE_CODE (decl) != PARM_DECL
     261              :           && TREE_CODE (decl) != RESULT_DECL))
     262              :     return true;
     263              : 
     264              :   /* Disregard pointer offsetting.  */
     265    211232322 :   if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
     266              :     {
     267            0 :       do
     268              :         {
     269            0 :           ptr = TREE_OPERAND (ptr, 0);
     270              :         }
     271            0 :       while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
     272              :       return ptr_deref_may_alias_decl_p (ptr, decl);
     273              :     }
     274              : 
     275              :   /* ADDR_EXPR pointers either just offset another pointer or directly
     276              :      specify the pointed-to set.  */
     277    211232322 :   if (TREE_CODE (ptr) == ADDR_EXPR)
     278              :     {
     279      1196146 :       tree base = get_base_address (TREE_OPERAND (ptr, 0));
     280      1196146 :       if (base
     281      1196146 :           && (TREE_CODE (base) == MEM_REF
     282      1196146 :               || TREE_CODE (base) == TARGET_MEM_REF))
     283        18177 :         ptr = TREE_OPERAND (base, 0);
     284      1177969 :       else if (base
     285      1177969 :                && DECL_P (base))
     286      1177358 :         return compare_base_decls (base, decl) != 0;
     287          611 :       else if (base
     288          611 :                && CONSTANT_CLASS_P (base))
     289              :         return false;
     290              :       else
     291              :         return true;
     292              :     }
     293              : 
     294              :   /* Non-aliased variables cannot be pointed to.  */
     295    210054353 :   if (!may_be_aliased (decl))
     296              :     return false;
     297              : 
     298              :   /* From here we require a SSA name pointer.  Anything else aliases.  */
     299     78838754 :   if (TREE_CODE (ptr) != SSA_NAME
     300     78838754 :       || !POINTER_TYPE_P (TREE_TYPE (ptr)))
     301              :     return true;
     302              : 
     303              :   /* If we do not have useful points-to information for this pointer
     304              :      we cannot disambiguate anything else.  */
     305     78838754 :   pi = SSA_NAME_PTR_INFO (ptr);
     306     78838754 :   if (!pi)
     307              :     return true;
     308              : 
     309     77816919 :   return pt_solution_includes (&pi->pt, decl);
     310              : }
     311              : 
     312              : /* Return true if dereferenced PTR1 and PTR2 may alias.
     313              :    The caller is responsible for applying TBAA to see if accesses
     314              :    through PTR1 and PTR2 may conflict at all.  */
     315              : 
     316              : bool
     317     67021851 : ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
     318              : {
     319     68352435 :   struct ptr_info_def *pi1, *pi2;
     320              : 
     321              :   /* Conversions are irrelevant for points-to information and
     322              :      data-dependence analysis can feed us those.  */
     323     68352435 :   STRIP_NOPS (ptr1);
     324     68352435 :   STRIP_NOPS (ptr2);
     325              : 
     326              :   /* Disregard pointer offsetting.  */
     327     68352435 :   if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
     328              :     {
     329       548717 :       do
     330              :         {
     331       548717 :           ptr1 = TREE_OPERAND (ptr1, 0);
     332              :         }
     333       548717 :       while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
     334              :       return ptr_derefs_may_alias_p (ptr1, ptr2);
     335              :     }
     336     67803718 :   if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
     337              :     {
     338       562440 :       do
     339              :         {
     340       562440 :           ptr2 = TREE_OPERAND (ptr2, 0);
     341              :         }
     342       562440 :       while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
     343              :       return ptr_derefs_may_alias_p (ptr1, ptr2);
     344              :     }
     345              : 
     346              :   /* ADDR_EXPR pointers either just offset another pointer or directly
     347              :      specify the pointed-to set.  */
     348     67241278 :   if (TREE_CODE (ptr1) == ADDR_EXPR)
     349              :     {
     350       828531 :       tree base = get_base_address (TREE_OPERAND (ptr1, 0));
     351       828531 :       if (base
     352       828531 :           && (TREE_CODE (base) == MEM_REF
     353       828531 :               || TREE_CODE (base) == TARGET_MEM_REF))
     354       112504 :         return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
     355       716027 :       else if (base
     356       716027 :                && DECL_P (base))
     357       713498 :         return ptr_deref_may_alias_decl_p (ptr2, base);
     358              :       /* Try ptr2 when ptr1 points to a constant.  */
     359              :       else if (base
     360         2529 :                && !CONSTANT_CLASS_P (base))
     361              :         return true;
     362              :     }
     363     66415276 :   if (TREE_CODE (ptr2) == ADDR_EXPR)
     364              :     {
     365       333022 :       tree base = get_base_address (TREE_OPERAND (ptr2, 0));
     366       333022 :       if (base
     367       333022 :           && (TREE_CODE (base) == MEM_REF
     368       333022 :               || TREE_CODE (base) == TARGET_MEM_REF))
     369       106923 :         return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
     370       226099 :       else if (base
     371       226099 :                && DECL_P (base))
     372       224832 :         return ptr_deref_may_alias_decl_p (ptr1, base);
     373              :       else
     374              :         return true;
     375              :     }
     376              : 
     377              :   /* From here we require SSA name pointers.  Anything else aliases.  */
     378     66082254 :   if (TREE_CODE (ptr1) != SSA_NAME
     379     65924560 :       || TREE_CODE (ptr2) != SSA_NAME
     380     65899922 :       || !POINTER_TYPE_P (TREE_TYPE (ptr1))
     381    131972366 :       || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
     382              :     return true;
     383              : 
     384              :   /* We may end up with two empty points-to solutions for two same pointers.
     385              :      In this case we still want to say both pointers alias, so shortcut
     386              :      that here.  */
     387     65889955 :   if (ptr1 == ptr2)
     388              :     return true;
     389              : 
     390              :   /* If we do not have useful points-to information for either pointer
     391              :      we cannot disambiguate anything else.  */
     392     62079711 :   pi1 = SSA_NAME_PTR_INFO (ptr1);
     393     62079711 :   pi2 = SSA_NAME_PTR_INFO (ptr2);
     394     62079711 :   if (!pi1 || !pi2)
     395              :     return true;
     396              : 
     397              :   /* ???  This does not use TBAA to prune decls from the intersection
     398              :      that not both pointers may access.  */
     399     59416874 :   return pt_solutions_intersect (&pi1->pt, &pi2->pt);
     400              : }
     401              : 
     402              : /* Return true if dereferencing PTR may alias *REF.
     403              :    The caller is responsible for applying TBAA to see if PTR
     404              :    may access *REF at all.  */
     405              : 
     406              : static bool
     407      1697821 : ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
     408              : {
     409      1697821 :   tree base = ao_ref_base (ref);
     410              : 
     411      1697821 :   if (TREE_CODE (base) == MEM_REF
     412      1697821 :       || TREE_CODE (base) == TARGET_MEM_REF)
     413       126445 :     return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
     414      1571376 :   else if (DECL_P (base))
     415      1569103 :     return ptr_deref_may_alias_decl_p (ptr, base);
     416              : 
     417              :   return true;
     418              : }
     419              : 
     420              : /* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */
     421              : 
     422              : bool
     423     57216308 : ptrs_compare_unequal (tree ptr1, tree ptr2)
     424              : {
     425              :   /* First resolve the pointers down to a SSA name pointer base or
     426              :      a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     427              :      not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     428              :      or STRING_CSTs which needs points-to adjustments to track them
     429              :      in the points-to sets.  */
     430     57216308 :   tree obj1 = NULL_TREE;
     431     57216308 :   tree obj2 = NULL_TREE;
     432     57216308 :   if (TREE_CODE (ptr1) == ADDR_EXPR)
     433              :     {
     434        56697 :       tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
     435        56697 :       if (! tem)
     436              :         return false;
     437        56697 :       if (VAR_P (tem)
     438              :           || TREE_CODE (tem) == PARM_DECL
     439              :           || TREE_CODE (tem) == RESULT_DECL)
     440              :         obj1 = tem;
     441              :       else if (TREE_CODE (tem) == MEM_REF)
     442        10380 :         ptr1 = TREE_OPERAND (tem, 0);
     443              :     }
     444     57216308 :   if (TREE_CODE (ptr2) == ADDR_EXPR)
     445              :     {
     446      9655161 :       tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
     447      9655161 :       if (! tem)
     448              :         return false;
     449      9655161 :       if (VAR_P (tem)
     450              :           || TREE_CODE (tem) == PARM_DECL
     451              :           || TREE_CODE (tem) == RESULT_DECL)
     452              :         obj2 = tem;
     453              :       else if (TREE_CODE (tem) == MEM_REF)
     454       163882 :         ptr2 = TREE_OPERAND (tem, 0);
     455              :     }
     456              : 
     457              :   /* Canonicalize ptr vs. object.  */
     458     57216308 :   if (TREE_CODE (ptr1) == SSA_NAME && obj2)
     459              :     {
     460              :       std::swap (ptr1, ptr2);
     461              :       std::swap (obj1, obj2);
     462              :     }
     463              : 
     464     57216308 :   if (obj1 && obj2)
     465              :     /* Other code handles this correctly, no need to duplicate it here.  */;
     466      6142912 :   else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
     467              :     {
     468      6131748 :       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
     469              :       /* We may not use restrict to optimize pointer comparisons.
     470              :          See PR71062.  So we have to assume that restrict-pointed-to
     471              :          may be in fact obj1.  */
     472      6131748 :       if (!pi
     473      5042541 :           || pi->pt.vars_contains_restrict
     474      5034083 :           || pi->pt.vars_contains_interposable)
     475              :         return false;
     476      4320547 :       if (VAR_P (obj1)
     477      4320547 :           && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
     478              :         {
     479      1126088 :           varpool_node *node = varpool_node::get (obj1);
     480              :           /* If obj1 may bind to NULL give up (see below).  */
     481      1126088 :           if (! node
     482      1126088 :               || ! node->nonzero_address ()
     483      2252176 :               || ! decl_binds_to_current_def_p (obj1))
     484       849799 :             return false;
     485              :         }
     486      3470748 :       return !pt_solution_includes (&pi->pt, obj1);
     487              :     }
     488     51072194 :   else if (TREE_CODE (ptr1) == SSA_NAME)
     489              :     {
     490     44836375 :       struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
     491     44836375 :       if (!pi1
     492     34179904 :           || pi1->pt.vars_contains_restrict
     493     33191303 :           || pi1->pt.vars_contains_interposable)
     494              :         return false;
     495     31232936 :       if (integer_zerop (ptr2) && !pi1->pt.null)
     496              :         return true;
     497     31210538 :       if (TREE_CODE (ptr2) == SSA_NAME)
     498              :         {
     499     10460836 :           struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);
     500     10460836 :           if (!pi2
     501      9872808 :               || pi2->pt.vars_contains_restrict
     502      9866734 :               || pi2->pt.vars_contains_interposable)
     503              :             return false;
     504      8172163 :           if ((!pi1->pt.null || !pi2->pt.null)
     505              :               /* ???  We do not represent FUNCTION_DECL and LABEL_DECL
     506              :                  in pt.vars but only set pt.vars_contains_nonlocal.  This
     507              :                  makes compares involving those and other nonlocals
     508              :                  imprecise.  */
     509      4087915 :               && (!pi1->pt.vars_contains_nonlocal
     510        63582 :                   || !pi2->pt.vars_contains_nonlocal)
     511     13819054 :               && (!pt_solution_includes_const_pool (&pi1->pt)
     512      3676893 :                   || !pt_solution_includes_const_pool (&pi2->pt)))
     513       455208 :             return !pt_solutions_intersect (&pi1->pt, &pi2->pt);
     514              :         }
     515              :     }
     516              : 
     517              :   return false;
     518              : }
     519              : 
     520              : /* Returns whether reference REF to BASE may refer to global memory.
     521              :    When ESCAPED_LOCAL_P is true escaped local memory is also considered
     522              :    global.  */
     523              : 
     524              : static bool
     525     54765617 : ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
     526              : {
     527     54765617 :   if (DECL_P (base))
     528     42654175 :     return (is_global_var (base)
     529     42654175 :             || (escaped_local_p
     530      1248326 :                 && pt_solution_includes (&cfun->gimple_df->escaped_return,
     531              :                                          base)));
     532     12111442 :   else if (TREE_CODE (base) == MEM_REF
     533     12111442 :            || TREE_CODE (base) == TARGET_MEM_REF)
     534     12100771 :     return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
     535     12100771 :                                          escaped_local_p);
     536              :   return true;
     537              : }
     538              : 
     539              : bool
     540     11073755 : ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
     541              : {
     542     11073755 :   tree base = ao_ref_base (ref);
     543     11073755 :   return ref_may_alias_global_p_1 (base, escaped_local_p);
     544              : }
     545              : 
     546              : bool
     547     43691862 : ref_may_alias_global_p (tree ref, bool escaped_local_p)
     548              : {
     549     43691862 :   tree base = get_base_address (ref);
     550     43691862 :   return ref_may_alias_global_p_1 (base, escaped_local_p);
     551              : }
     552              : 
     553              : /* Return true whether STMT may clobber global memory.
     554              :    When ESCAPED_LOCAL_P is true escaped local memory is also considered
     555              :    global.  */
     556              : 
     557              : bool
     558    177521427 : stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
     559              : {
     560    177521427 :   tree lhs;
     561              : 
     562    353928314 :   if (!gimple_vdef (stmt))
     563              :     return false;
     564              : 
     565              :   /* ???  We can ask the oracle whether an artificial pointer
     566              :      dereference with a pointer with points-to information covering
     567              :      all global memory (what about non-address taken memory?) maybe
     568              :      clobbered by this call.  As there is at the moment no convenient
     569              :      way of doing that without generating garbage do some manual
     570              :      checking instead.
     571              :      ???  We could make a NULL ao_ref argument to the various
     572              :      predicates special, meaning any global memory.  */
     573              : 
     574     43884610 :   switch (gimple_code (stmt))
     575              :     {
     576     43691862 :     case GIMPLE_ASSIGN:
     577     43691862 :       lhs = gimple_assign_lhs (stmt);
     578     43691862 :       return (TREE_CODE (lhs) != SSA_NAME
     579     43691862 :               && ref_may_alias_global_p (lhs, escaped_local_p));
     580              :     case GIMPLE_CALL:
     581              :       return true;
     582              :     default:
     583              :       return true;
     584              :     }
     585              : }
     586              : 
     587              : 
     588              : /* Dump alias information on FILE.  */
     589              : 
     590              : void
     591          286 : dump_alias_info (FILE *file)
     592              : {
     593          286 :   unsigned i;
     594          286 :   tree ptr;
     595          286 :   const char *funcname
     596          286 :     = lang_hooks.decl_printable_name (current_function_decl, 2);
     597          286 :   tree var;
     598              : 
     599          286 :   fprintf (file, "\n\nAlias information for %s\n\n", funcname);
     600              : 
     601          286 :   fprintf (file, "Aliased symbols\n\n");
     602              : 
     603         1149 :   FOR_EACH_LOCAL_DECL (cfun, i, var)
     604              :     {
     605          606 :       if (may_be_aliased (var))
     606          357 :         dump_variable (file, var);
     607              :     }
     608              : 
     609          286 :   fprintf (file, "\nCall clobber information\n");
     610              : 
     611          286 :   fprintf (file, "\nESCAPED");
     612          286 :   dump_points_to_solution (file, &cfun->gimple_df->escaped);
     613              : 
     614          286 :   fprintf (file, "\nESCAPED_RETURN");
     615          286 :   dump_points_to_solution (file, &cfun->gimple_df->escaped_return);
     616              : 
     617          286 :   fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
     618              : 
     619         3088 :   FOR_EACH_SSA_NAME (i, ptr, cfun)
     620              :     {
     621         2570 :       struct ptr_info_def *pi;
     622              : 
     623         4693 :       if (!POINTER_TYPE_P (TREE_TYPE (ptr))
     624         2617 :           || SSA_NAME_IN_FREE_LIST (ptr))
     625         2076 :         continue;
     626              : 
     627          494 :       pi = SSA_NAME_PTR_INFO (ptr);
     628          494 :       if (pi)
     629          487 :         dump_points_to_info_for (file, ptr);
     630              :     }
     631              : 
     632          286 :   fprintf (file, "\n");
     633          286 : }
     634              : 
     635              : 
     636              : /* Dump alias information on stderr.  */
     637              : 
     638              : DEBUG_FUNCTION void
     639            0 : debug_alias_info (void)
     640              : {
     641            0 :   dump_alias_info (stderr);
     642            0 : }
     643              : 
     644              : 
     645              : /* Dump the points-to set *PT into FILE.  */
     646              : 
     647              : void
     648         1059 : dump_points_to_solution (FILE *file, struct pt_solution *pt)
     649              : {
     650         1059 :   if (pt->anything)
     651            3 :     fprintf (file, ", points-to anything");
     652              : 
     653         1059 :   if (pt->nonlocal)
     654          635 :     fprintf (file, ", points-to non-local");
     655              : 
     656         1059 :   if (pt->escaped)
     657          423 :     fprintf (file, ", points-to escaped");
     658              : 
     659         1059 :   if (pt->ipa_escaped)
     660            0 :     fprintf (file, ", points-to unit escaped");
     661              : 
     662         1059 :   if (pt->null)
     663          614 :     fprintf (file, ", points-to NULL");
     664              : 
     665         1059 :   if (pt->const_pool)
     666            0 :     fprintf (file, ", points-to const-pool");
     667              : 
     668         1059 :   if (pt->vars)
     669              :     {
     670         1056 :       fprintf (file, ", points-to vars: ");
     671         1056 :       dump_decl_set (file, pt->vars);
     672         1056 :       if (pt->vars_contains_nonlocal
     673          921 :           || pt->vars_contains_escaped
     674          844 :           || pt->vars_contains_escaped_heap
     675          844 :           || pt->vars_contains_restrict
     676          844 :           || pt->vars_contains_interposable)
     677              :         {
     678          212 :           const char *comma = "";
     679          212 :           fprintf (file, " (");
     680          212 :           if (pt->vars_contains_nonlocal)
     681              :             {
     682          135 :               fprintf (file, "nonlocal");
     683          135 :               comma = ", ";
     684              :             }
     685          212 :           if (pt->vars_contains_escaped)
     686              :             {
     687          139 :               fprintf (file, "%sescaped", comma);
     688          139 :               comma = ", ";
     689              :             }
     690          212 :           if (pt->vars_contains_escaped_heap)
     691              :             {
     692            0 :               fprintf (file, "%sescaped heap", comma);
     693            0 :               comma = ", ";
     694              :             }
     695          212 :           if (pt->vars_contains_restrict)
     696              :             {
     697           58 :               fprintf (file, "%srestrict", comma);
     698           58 :               comma = ", ";
     699              :             }
     700          212 :           if (pt->vars_contains_interposable)
     701            0 :             fprintf (file, "%sinterposable", comma);
     702          212 :           fprintf (file, ")");
     703              :         }
     704              :     }
     705         1059 : }
     706              : 
     707              : 
     708              : /* Unified dump function for pt_solution.  */
     709              : 
     710              : DEBUG_FUNCTION void
     711            0 : debug (pt_solution &ref)
     712              : {
     713            0 :   dump_points_to_solution (stderr, &ref);
     714            0 : }
     715              : 
     716              : DEBUG_FUNCTION void
     717            0 : debug (pt_solution *ptr)
     718              : {
     719            0 :   if (ptr)
     720            0 :     debug (*ptr);
     721              :   else
     722            0 :     fprintf (stderr, "<nil>\n");
     723            0 : }
     724              : 
     725              : 
     726              : /* Dump points-to information for SSA_NAME PTR into FILE.  */
     727              : 
     728              : void
     729          487 : dump_points_to_info_for (FILE *file, tree ptr)
     730              : {
     731          487 :   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
     732              : 
     733          487 :   print_generic_expr (file, ptr, dump_flags);
     734              : 
     735          487 :   if (pi)
     736          487 :     dump_points_to_solution (file, &pi->pt);
     737              :   else
     738            0 :     fprintf (file, ", points-to anything");
     739              : 
     740          487 :   fprintf (file, "\n");
     741          487 : }
     742              : 
     743              : 
     744              : /* Dump points-to information for VAR into stderr.  */
     745              : 
     746              : DEBUG_FUNCTION void
     747            0 : debug_points_to_info_for (tree var)
     748              : {
     749            0 :   dump_points_to_info_for (stderr, var);
     750            0 : }
     751              : 
     752              : 
     753              : /* Initializes the alias-oracle reference representation *R from REF.  */
     754              : 
     755              : void
     756   2777849417 : ao_ref_init (ao_ref *r, tree ref)
     757              : {
     758   2777849417 :   r->ref = ref;
     759   2777849417 :   r->base = NULL_TREE;
     760   2777849417 :   r->offset = 0;
     761   2777849417 :   r->size = -1;
     762   2777849417 :   r->max_size = -1;
     763   2777849417 :   r->ref_alias_set = -1;
     764   2777849417 :   r->base_alias_set = -1;
     765   2777849417 :   r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
     766   2777849417 : }
     767              : 
     768              : /* Returns the base object of the memory reference *REF.  */
     769              : 
     770              : tree
     771   5422577202 : ao_ref_base (ao_ref *ref)
     772              : {
     773   5422577202 :   bool reverse;
     774              : 
     775   5422577202 :   if (ref->base)
     776              :     return ref->base;
     777   2512814922 :   ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
     778              :                                        &ref->max_size, &reverse);
     779   2512814922 :   return ref->base;
     780              : }
     781              : 
     782              : /* Returns the base object alias set of the memory reference *REF.  */
     783              : 
     784              : alias_set_type
     785   1006330662 : ao_ref_base_alias_set (ao_ref *ref)
     786              : {
     787   1006330662 :   tree base_ref;
     788   1006330662 :   if (ref->base_alias_set != -1)
     789              :     return ref->base_alias_set;
     790    778626562 :   if (!ref->ref)
     791              :     return 0;
     792    741542619 :   base_ref = ref->ref;
     793    741542619 :   if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
     794            4 :     base_ref = TREE_OPERAND (base_ref, 0);
     795   1187990510 :   while (handled_component_p (base_ref))
     796    446447891 :     base_ref = TREE_OPERAND (base_ref, 0);
     797    741542619 :   ref->base_alias_set = get_alias_set (base_ref);
     798    741542619 :   return ref->base_alias_set;
     799              : }
     800              : 
     801              : /* Returns the reference alias set of the memory reference *REF.  */
     802              : 
     803              : alias_set_type
     804   1271007066 : ao_ref_alias_set (ao_ref *ref)
     805              : {
     806   1271007066 :   if (ref->ref_alias_set != -1)
     807              :     return ref->ref_alias_set;
     808    510748774 :   if (!ref->ref)
     809              :     return 0;
     810    510748772 :   ref->ref_alias_set = get_alias_set (ref->ref);
     811    510748772 :   return ref->ref_alias_set;
     812              : }
     813              : 
     814              : /* Returns a type satisfying
     815              :    get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */
     816              : 
     817              : tree
     818       324951 : ao_ref_base_alias_ptr_type (ao_ref *ref)
     819              : {
     820       324951 :   tree base_ref;
     821              : 
     822       324951 :   if (!ref->ref)
     823              :     return NULL_TREE;
     824       324951 :   base_ref = ref->ref;
     825       324951 :   if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
     826            0 :     base_ref = TREE_OPERAND (base_ref, 0);
     827       433660 :   while (handled_component_p (base_ref))
     828       108709 :     base_ref = TREE_OPERAND (base_ref, 0);
     829       324951 :   tree ret = reference_alias_ptr_type (base_ref);
     830       324951 :   return ret;
     831              : }
     832              : 
     833              : /* Returns a type satisfying
     834              :    get_deref_alias_set (type) == ao_ref_alias_set (REF).  */
     835              : 
     836              : tree
     837       324951 : ao_ref_alias_ptr_type (ao_ref *ref)
     838              : {
     839       324951 :   if (!ref->ref)
     840              :     return NULL_TREE;
     841       324951 :   tree ret = reference_alias_ptr_type (ref->ref);
     842       324951 :   return ret;
     843              : }
     844              : 
     845              : /* Return the alignment of the access *REF and store it in the *ALIGN
     846              :    and *BITPOS pairs.  Returns false if no alignment could be determined.
     847              :    See get_object_alignment_2 for details.  */
     848              : 
     849              : bool
     850        94340 : ao_ref_alignment (ao_ref *ref, unsigned int *align,
     851              :                   unsigned HOST_WIDE_INT *bitpos)
     852              : {
     853        94340 :   if (ref->ref)
     854        92785 :     return get_object_alignment_1 (ref->ref, align, bitpos);
     855              : 
     856              :   /* When we just have ref->base we cannot use get_object_alignment since
     857              :      that will eventually use the type of the appearant access while for
     858              :      example ao_ref_init_from_ptr_and_range is not careful to adjust that.  */
     859         1555 :   *align = BITS_PER_UNIT;
     860         1555 :   HOST_WIDE_INT offset;
     861         1555 :   if (!ref->offset.is_constant (&offset)
     862         1555 :       || !get_object_alignment_2 (ref->base, align, bitpos, true))
     863              :     return false;
     864         1334 :   *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
     865         1334 :   *bitpos = *bitpos & (*align - 1);
     866         1334 :   return true;
     867              : }
     868              : 
     869              : /* Init an alias-oracle reference representation from a gimple pointer
     870              :    PTR a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
     871              :    that RANGE_KNOWN is set.
     872              : 
     873              :    The access is assumed to be only to or after of the pointer target adjusted
     874              :    by the offset, not before it (even in the case RANGE_KNOWN is false).  */
     875              : 
     876              : void
     877     40247465 : ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
     878              :                                 bool range_known,
     879              :                                 poly_int64 offset,
     880              :                                 poly_int64 size,
     881              :                                 poly_int64 max_size)
     882              : {
     883     40247465 :   poly_int64 t, extra_offset = 0;
     884              : 
     885     40247465 :   ref->ref = NULL_TREE;
     886     40247465 :   if (TREE_CODE (ptr) == SSA_NAME)
     887              :     {
     888     26342369 :       gimple *stmt = SSA_NAME_DEF_STMT (ptr);
     889     26342369 :       if (gimple_assign_single_p (stmt)
     890     26342369 :           && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
     891      4042717 :         ptr = gimple_assign_rhs1 (stmt);
     892     22299652 :       else if (is_gimple_assign (stmt)
     893     15002029 :                && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
     894     28172056 :                && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
     895              :         {
     896       305370 :           ptr = gimple_assign_rhs1 (stmt);
     897       305370 :           extra_offset *= BITS_PER_UNIT;
     898              :         }
     899              :     }
     900              : 
     901     40247465 :   if (TREE_CODE (ptr) == ADDR_EXPR)
     902              :     {
     903     17879480 :       ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
     904     17879480 :       if (ref->base
     905     32916192 :           && coeffs_in_range_p (t, -HOST_WIDE_INT_MAX / BITS_PER_UNIT,
     906              :                                 HOST_WIDE_INT_MAX / BITS_PER_UNIT))
     907     15036534 :         ref->offset = BITS_PER_UNIT * t;
     908              :       else
     909              :         {
     910      2842946 :           range_known = false;
     911      2842946 :           ref->offset = 0;
     912      2842946 :           ref->base = get_base_address (TREE_OPERAND (ptr, 0));
     913              :         }
     914              :     }
     915              :   else
     916              :     {
     917     22367985 :       gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
     918     22367985 :       ref->base = build2 (MEM_REF, char_type_node,
     919              :                           ptr, null_pointer_node);
     920     22367985 :       ref->offset = 0;
     921              :     }
     922     40247465 :   ref->offset += extra_offset + offset;
     923     40247465 :   if (range_known)
     924              :     {
     925     21419524 :       ref->max_size = max_size;
     926     21419524 :       ref->size = size;
     927              :     }
     928              :   else
     929     18827941 :     ref->max_size = ref->size = -1;
     930     40247465 :   ref->ref_alias_set = 0;
     931     40247465 :   ref->base_alias_set = 0;
     932     40247465 :   ref->volatile_p = false;
     933     40247465 : }
     934              : 
     935              : /* Init an alias-oracle reference representation from a gimple pointer
     936              :    PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
     937              :    size is assumed to be unknown.  The access is assumed to be only
     938              :    to or after of the pointer target, not before it.  */
     939              : 
     940              : void
     941     10826298 : ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
     942              : {
     943     10826298 :   poly_int64 size_hwi;
     944     10826298 :   if (size
     945      5609235 :       && poly_int_tree_p (size, &size_hwi)
     946     15750643 :       && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
     947              :     {
     948      4923653 :       size_hwi = size_hwi * BITS_PER_UNIT;
     949      4923653 :       ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
     950              :     }
     951              :   else
     952      5902645 :     ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
     953     10826298 : }
     954              : 
     955              : /* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
     956              :    Return -1 if S1 < S2
     957              :    Return 1 if S1 > S2
     958              :    Return 0 if equal or incomparable.  */
     959              : 
     960              : static int
     961      8644034 : compare_sizes (tree s1, tree s2)
     962              : {
     963      8644034 :   if (!s1 || !s2)
     964              :     return 0;
     965              : 
     966      8634298 :   poly_uint64 size1;
     967      8634298 :   poly_uint64 size2;
     968              : 
     969      8634298 :   if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
     970          530 :     return 0;
     971      8633768 :   if (known_lt (size1, size2))
     972              :     return -1;
     973      6164437 :   if (known_lt (size2, size1))
     974              :     return 1;
     975              :   return 0;
     976              : }
     977              : 
     978              : /* Compare TYPE1 and TYPE2 by its size.
     979              :    Return -1 if size of TYPE1 < size of TYPE2
     980              :    Return 1 if size of TYPE1 > size of TYPE2
     981              :    Return 0 if types are of equal sizes or we can not compare them.  */
     982              : 
     983              : static int
     984      7338895 : compare_type_sizes (tree type1, tree type2)
     985              : {
     986              :   /* Be conservative for arrays and vectors.  We want to support partial
     987              :      overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
     988      7338895 :   while (TREE_CODE (type1) == ARRAY_TYPE
     989      8081901 :          || VECTOR_TYPE_P (type1))
     990       743006 :     type1 = TREE_TYPE (type1);
     991      7534592 :   while (TREE_CODE (type2) == ARRAY_TYPE
     992      7534592 :          || VECTOR_TYPE_P (type2))
     993       195697 :     type2 = TREE_TYPE (type2);
     994      7338895 :   return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
     995              : }
     996              : 
     997              : /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
     998              :    purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
     999              :    decide.  */
    1000              : 
    1001              : static inline int
    1002    847816798 : same_type_for_tbaa (tree type1, tree type2)
    1003              : {
    1004    847816798 :   type1 = TYPE_MAIN_VARIANT (type1);
    1005    847816798 :   type2 = TYPE_MAIN_VARIANT (type2);
    1006              : 
    1007              :   /* Handle the most common case first.  */
    1008    847816798 :   if (type1 == type2)
    1009              :     return 1;
    1010              : 
    1011              :   /* If we would have to do structural comparison bail out.  */
    1012    135346244 :   if (TYPE_STRUCTURAL_EQUALITY_P (type1)
    1013    135346244 :       || TYPE_STRUCTURAL_EQUALITY_P (type2))
    1014              :     return -1;
    1015              : 
    1016              :   /* Compare the canonical types.  */
    1017     82917471 :   if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    1018              :     return 1;
    1019              : 
    1020              :   /* ??? Array types are not properly unified in all cases as we have
    1021              :      spurious changes in the index types for example.  Removing this
    1022              :      causes all sorts of problems with the Fortran frontend.  */
    1023     82074895 :   if (TREE_CODE (type1) == ARRAY_TYPE
    1024      4855713 :       && TREE_CODE (type2) == ARRAY_TYPE)
    1025              :     return -1;
    1026              : 
    1027              :   /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
    1028              :      object of one of its constrained subtypes, e.g. when a function with an
    1029              :      unconstrained parameter passed by reference is called on an object and
    1030              :      inlined.  But, even in the case of a fixed size, type and subtypes are
    1031              :      not equivalent enough as to share the same TYPE_CANONICAL, since this
    1032              :      would mean that conversions between them are useless, whereas they are
    1033              :      not (e.g. type and subtypes can have different modes).  So, in the end,
    1034              :      they are only guaranteed to have the same alias set.  */
    1035     81754489 :   alias_set_type set1 = get_alias_set (type1);
    1036     81754489 :   alias_set_type set2 = get_alias_set (type2);
    1037     81754489 :   if (set1 == set2)
    1038              :     return -1;
    1039              : 
    1040              :   /* Pointers to void are considered compatible with all other pointers,
    1041              :      so for two pointers see what the alias set resolution thinks.  */
    1042     45766516 :   if (POINTER_TYPE_P (type1)
    1043     10613041 :       && POINTER_TYPE_P (type2)
    1044     45942703 :       && alias_sets_conflict_p (set1, set2))
    1045              :     return -1;
    1046              : 
    1047              :   /* The types are known to be not equal.  */
    1048              :   return 0;
    1049              : }
    1050              : 
    1051              : /* Return true if TYPE is a composite type (i.e. we may apply one of handled
    1052              :    components on it).  */
    1053              : 
    1054              : static bool
    1055      1910616 : type_has_components_p (tree type)
    1056              : {
    1057      1910616 :   return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
    1058      1910616 :          || TREE_CODE (type) == COMPLEX_TYPE;
    1059              : }
    1060              : 
    1061              : /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
    1062              :    respectively are either pointing to same address or are completely
    1063              :    disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
    1064              :    just partly overlap.
    1065              : 
    1066              :    Try to disambiguate using the access path starting from the match
    1067              :    and return false if there is no conflict.
    1068              : 
    1069              :    Helper for aliasing_component_refs_p.  */
    1070              : 
    1071              : static bool
    1072       873439 : aliasing_matching_component_refs_p (tree match1, tree ref1,
    1073              :                                     poly_int64 offset1, poly_int64 max_size1,
    1074              :                                     tree match2, tree ref2,
    1075              :                                     poly_int64 offset2, poly_int64 max_size2,
    1076              :                                     bool partial_overlap)
    1077              : {
    1078       873439 :   poly_int64 offadj, sztmp, msztmp;
    1079       873439 :   bool reverse;
    1080              : 
    1081       873439 :   if (!partial_overlap)
    1082              :     {
    1083       873431 :       get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
    1084       873431 :       offset2 -= offadj;
    1085       873431 :       get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
    1086       873431 :       offset1 -= offadj;
    1087       873431 :       if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    1088              :         {
    1089        61816 :           ++alias_stats.aliasing_component_refs_p_no_alias;
    1090        61816 :           return false;
    1091              :         }
    1092              :     }
    1093              : 
    1094       811623 :   int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
    1095              :                                                partial_overlap);
    1096       811623 :   if (cmp == 1
    1097       811623 :       || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    1098              :     {
    1099          322 :       ++alias_stats.aliasing_component_refs_p_no_alias;
    1100          322 :       return false;
    1101              :     }
    1102       811301 :   ++alias_stats.aliasing_component_refs_p_may_alias;
    1103       811301 :   return true;
    1104              : }
    1105              : 
    1106              : /* Return true if REF is reference to zero sized trailing array. I.e.
    1107              :    struct foo {int bar; int array[0];} *fooptr;
    1108              :    fooptr->array.  */
    1109              : 
    1110              : static bool
    1111      6166792 : component_ref_to_zero_sized_trailing_array_p (tree ref)
    1112              : {
    1113      6166792 :   return (TREE_CODE (ref) == COMPONENT_REF
    1114      5572661 :           && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
    1115       128544 :           && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
    1116        42227 :               || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
    1117      6253186 :           && array_ref_flexible_size_p (ref));
    1118              : }
    1119              : 
    1120              : /* Worker for aliasing_component_refs_p. Most parameters match parameters of
    1121              :    aliasing_component_refs_p.
    1122              : 
    1123              :    Walk access path REF2 and try to find type matching TYPE1
    1124              :    (which is a start of possibly aliasing access path REF1).
    1125              :    If match is found, try to disambiguate.
    1126              : 
    1127              :    Return 0 for sucessful disambiguation.
    1128              :    Return 1 if match was found but disambiguation failed
    1129              :    Return -1 if there is no match.
    1130              :    In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
    1131              :    in access patch REF2 and -1 if we are not sure.  */
    1132              : 
    1133              : static int
    1134      2519919 : aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
    1135              :                               poly_int64 offset1, poly_int64 max_size1,
    1136              :                               tree end_struct_ref1,
    1137              :                               tree ref2, tree base2,
    1138              :                               poly_int64 offset2, poly_int64 max_size2,
    1139              :                               bool *maybe_match)
    1140              : {
    1141      2519919 :   tree ref = ref2;
    1142      2519919 :   int same_p = 0;
    1143              : 
    1144      7232657 :   while (true)
    1145              :     {
    1146              :       /* We walk from inner type to the outer types. If type we see is
    1147              :          already too large to be part of type1, terminate the search.  */
    1148      4876288 :       int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
    1149              : 
    1150      4876288 :       if (cmp < 0
    1151      4876288 :           && (!end_struct_ref1
    1152           60 :               || compare_type_sizes (TREE_TYPE (end_struct_ref1),
    1153           60 :                                      TREE_TYPE (ref)) < 0))
    1154              :         break;
    1155              :       /* If types may be of same size, see if we can decide about their
    1156              :          equality.  */
    1157      3554634 :       if (cmp == 0)
    1158              :         {
    1159      2569390 :           same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
    1160      2569390 :           if (same_p == 1)
    1161              :             break;
    1162              :           /* In case we can't decide whether types are same try to
    1163              :              continue looking for the exact match.
    1164              :              Remember however that we possibly saw a match
    1165              :              to bypass the access path continuations tests we do later.  */
    1166      1695951 :           if (same_p == -1)
    1167       624183 :             *maybe_match = true;
    1168              :         }
    1169      2681195 :       if (!handled_component_p (ref))
    1170              :         break;
    1171      2356369 :       ref = TREE_OPERAND (ref, 0);
    1172      2356369 :     }
    1173      2519919 :   if (same_p == 1)
    1174              :     {
    1175       873439 :       bool partial_overlap = false;
    1176              : 
    1177              :       /* We assume that arrays can overlap by multiple of their elements
    1178              :          size as tested in gcc.dg/torture/alias-2.c.
    1179              :          This partial overlap happen only when both arrays are bases of
    1180              :          the access and not contained within another component ref.
    1181              :          To be safe we also assume partial overlap for VLAs. */
    1182       873439 :       if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
    1183       873439 :           && (!TYPE_SIZE (TREE_TYPE (base1))
    1184         2087 :               || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
    1185         2087 :               || ref == base2))
    1186              :         {
    1187              :           /* Setting maybe_match to true triggers
    1188              :              nonoverlapping_component_refs_p test later that still may do
    1189              :              useful disambiguation.  */
    1190            8 :           *maybe_match = true;
    1191            8 :           partial_overlap = true;
    1192              :         }
    1193       873439 :       return aliasing_matching_component_refs_p (base1, ref1,
    1194              :                                                  offset1, max_size1,
    1195              :                                                  ref, ref2,
    1196              :                                                  offset2, max_size2,
    1197       873439 :                                                  partial_overlap);
    1198              :     }
    1199              :   return -1;
    1200              : }
    1201              : 
    1202              : /* Consider access path1 base1....ref1 and access path2 base2...ref2.
    1203              :    Return true if they can be composed to single access path
    1204              :    base1...ref1...base2...ref2.
    1205              : 
    1206              :    REF_TYPE1 if type of REF1.  END_STRUCT_PAST_END1 is true if there is
    1207              :    a trailing array access after REF1 in the non-TBAA part of the access.
    1208              :    REF1_ALIAS_SET is the alias set of REF1.
    1209              : 
    1210              :    BASE_TYPE2 is type of base2.  END_STRUCT_REF2 is non-NULL if there is
    1211              :    a trailing array access in the TBAA part of access path2.
    1212              :    BASE2_ALIAS_SET is the alias set of base2.  */
    1213              : 
    1214              : bool
    1215      1910616 : access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
    1216              :                             alias_set_type ref1_alias_set,
    1217              :                             tree base_type2, tree end_struct_ref2,
    1218              :                             alias_set_type base2_alias_set)
    1219              : {
    1220              :   /* Access path can not continue past types with no components.  */
    1221      1910616 :   if (!type_has_components_p (ref_type1))
    1222              :     return false;
    1223              : 
    1224              :   /* If first access path ends by too small type to hold base of
    1225              :      the second access path, typically paths can not continue.
    1226              : 
    1227              :      Punt if end_struct_past_end1 is true.  We want to support arbitrary
    1228              :      type puning past first COMPONENT_REF to union because redundant store
    1229              :      elimination depends on this, see PR92152.  For this reason we can not
    1230              :      check size of the reference because types may partially overlap.  */
    1231       158421 :   if (!end_struct_past_end1)
    1232              :     {
    1233       158372 :       if (compare_type_sizes (ref_type1, base_type2) < 0)
    1234              :         return false;
    1235              :       /* If the path2 contains trailing array access we can strenghten the check
    1236              :          to verify that also the size of element of the trailing array fits.
    1237              :          In fact we could check for offset + type_size, but we do not track
    1238              :          offsets and this is quite side case.  */
    1239       142697 :       if (end_struct_ref2
    1240       142697 :           && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
    1241              :         return false;
    1242              :     }
    1243       142746 :   return (base2_alias_set == ref1_alias_set
    1244       142746 :           || alias_set_subset_of (base2_alias_set, ref1_alias_set));
    1245              : }
    1246              : 
    1247              : /* Determine if the two component references REF1 and REF2 which are
    1248              :    based on access types TYPE1 and TYPE2 and of which at least one is based
    1249              :    on an indirect reference may alias.
    1250              :    REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
    1251              :    are the respective alias sets.  */
    1252              : 
    1253              : static bool
    1254      2304114 : aliasing_component_refs_p (tree ref1,
    1255              :                            alias_set_type ref1_alias_set,
    1256              :                            alias_set_type base1_alias_set,
    1257              :                            poly_int64 offset1, poly_int64 max_size1,
    1258              :                            tree ref2,
    1259              :                            alias_set_type ref2_alias_set,
    1260              :                            alias_set_type base2_alias_set,
    1261              :                            poly_int64 offset2, poly_int64 max_size2)
    1262              : {
    1263              :   /* If one reference is a component references through pointers try to find a
    1264              :      common base and apply offset based disambiguation.  This handles
    1265              :      for example
    1266              :        struct A { int i; int j; } *q;
    1267              :        struct B { struct A a; int k; } *p;
    1268              :      disambiguating q->i and p->a.j.  */
    1269      2304114 :   tree base1, base2;
    1270      2304114 :   tree type1, type2;
    1271      2304114 :   bool maybe_match = false;
    1272      2304114 :   tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
    1273      2304114 :   bool end_struct_past_end1 = false;
    1274      2304114 :   bool end_struct_past_end2 = false;
    1275              : 
    1276              :   /* Choose bases and base types to search for.
    1277              :      The access path is as follows:
    1278              :        base....end_of_tbaa_ref...actual_ref
    1279              :      At one place in the access path may be a reference to zero sized or
    1280              :      trailing array.
    1281              : 
    1282              :      We generally discard the segment after end_of_tbaa_ref however
    1283              :      we need to be careful in case it contains zero sized or trailing array.
    1284              :      These may happen after reference to union and in this case we need to
    1285              :      not disambiguate type puning scenarios.
    1286              : 
    1287              :      We set:
    1288              :         base1 to point to base
    1289              : 
    1290              :         ref1 to point to end_of_tbaa_ref
    1291              : 
    1292              :         end_struct_ref1 to point the trailing reference (if it exists
    1293              :         in range base....end_of_tbaa_ref
    1294              : 
    1295              :         end_struct_past_end1 is true if this trailing reference occurs in
    1296              :         end_of_tbaa_ref...actual_ref.  */
    1297      2304114 :   base1 = ref1;
    1298      4916077 :   while (handled_component_p (base1))
    1299              :     {
    1300              :       /* Generally access paths are monotous in the size of object. The
    1301              :          exception are trailing arrays of structures. I.e.
    1302              :            struct a {int array[0];};
    1303              :          or
    1304              :            struct a {int array1[0]; int array[];};
    1305              :          Such struct has size 0 but accesses to a.array may have non-zero size.
    1306              :          In this case the size of TREE_TYPE (base1) is smaller than
    1307              :          size of TREE_TYPE (TREE_OPERAND (base1, 0)).
    1308              : 
    1309              :          Because we compare sizes of arrays just by sizes of their elements,
    1310              :          we only need to care about zero sized array fields here.  */
    1311      2611963 :       if (component_ref_to_zero_sized_trailing_array_p (base1))
    1312              :         {
    1313        76739 :           gcc_checking_assert (!end_struct_ref1);
    1314              :           end_struct_ref1 = base1;
    1315              :         }
    1316      2611963 :       if (ends_tbaa_access_path_p (base1))
    1317              :         {
    1318        25622 :           ref1 = TREE_OPERAND (base1, 0);
    1319        25622 :           if (end_struct_ref1)
    1320              :             {
    1321            1 :               end_struct_past_end1 = true;
    1322            1 :               end_struct_ref1 = NULL;
    1323              :             }
    1324              :         }
    1325      2611963 :       base1 = TREE_OPERAND (base1, 0);
    1326              :     }
    1327      2304114 :   type1 = TREE_TYPE (base1);
    1328      2304114 :   base2 = ref2;
    1329      5347415 :   while (handled_component_p (base2))
    1330              :     {
    1331      3043301 :       if (component_ref_to_zero_sized_trailing_array_p (base2))
    1332              :         {
    1333         9138 :           gcc_checking_assert (!end_struct_ref2);
    1334              :           end_struct_ref2 = base2;
    1335              :         }
    1336      3043301 :       if (ends_tbaa_access_path_p (base2))
    1337              :         {
    1338        91209 :           ref2 = TREE_OPERAND (base2, 0);
    1339        91209 :           if (end_struct_ref2)
    1340              :             {
    1341           48 :               end_struct_past_end2 = true;
    1342           48 :               end_struct_ref2 = NULL;
    1343              :             }
    1344              :         }
    1345      3043301 :       base2 = TREE_OPERAND (base2, 0);
    1346              :     }
    1347      2304114 :   type2 = TREE_TYPE (base2);
    1348              : 
    1349              :   /* Now search for the type1 in the access path of ref2.  This
    1350              :      would be a common base for doing offset based disambiguation on.
    1351              :      This however only makes sense if type2 is big enough to hold type1.  */
    1352      2304114 :   int cmp_outer = compare_type_sizes (type2, type1);
    1353              : 
    1354              :   /* If type2 is big enough to contain type1 walk its access path.
    1355              :      We also need to care of arrays at the end of structs that may extend
    1356              :      beyond the end of structure.  If this occurs in the TBAA part of the
    1357              :      access path, we need to consider the increased type as well.  */
    1358      2304114 :   if (cmp_outer >= 0
    1359      2304114 :       || (end_struct_ref2
    1360            1 :           && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    1361              :     {
    1362      1205058 :       int res = aliasing_component_refs_walk (ref1, type1, base1,
    1363              :                                               offset1, max_size1,
    1364              :                                               end_struct_ref1,
    1365              :                                               ref2, base2, offset2, max_size2,
    1366              :                                               &maybe_match);
    1367      1205058 :       if (res != -1)
    1368       571991 :         return res;
    1369              :     }
    1370              : 
    1371              :   /* If we didn't find a common base, try the other way around.  */
    1372      1732123 :   if (cmp_outer <= 0
    1373      1732123 :       || (end_struct_ref1
    1374           60 :           && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
    1375              :     {
    1376      1314861 :       int res = aliasing_component_refs_walk (ref2, type2, base2,
    1377              :                                               offset2, max_size2,
    1378              :                                               end_struct_ref2,
    1379              :                                               ref1, base1, offset1, max_size1,
    1380              :                                               &maybe_match);
    1381      1314861 :       if (res != -1)
    1382       301448 :         return res;
    1383              :     }
    1384              : 
    1385              :   /* In the following code we make an assumption that the types in access
    1386              :      paths do not overlap and thus accesses alias only if one path can be
    1387              :      continuation of another.  If we was not able to decide about equivalence,
    1388              :      we need to give up.  */
    1389      1430675 :   if (maybe_match)
    1390              :     {
    1391       461091 :       if (!nonoverlapping_component_refs_p (ref1, ref2))
    1392              :         {
    1393       459283 :           ++alias_stats.aliasing_component_refs_p_may_alias;
    1394       459283 :           return true;
    1395              :         }
    1396         1808 :       ++alias_stats.aliasing_component_refs_p_no_alias;
    1397         1808 :       return false;
    1398              :     }
    1399              : 
    1400       969584 :   if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
    1401              :                                   ref1_alias_set,
    1402              :                                   type2, end_struct_ref2,
    1403              :                                   base2_alias_set)
    1404       969584 :       || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
    1405              :                                      ref2_alias_set,
    1406              :                                      type1, end_struct_ref1,
    1407              :                                      base1_alias_set))
    1408              :     {
    1409       141869 :       ++alias_stats.aliasing_component_refs_p_may_alias;
    1410       141869 :       return true;
    1411              :     }
    1412       827715 :   ++alias_stats.aliasing_component_refs_p_no_alias;
    1413       827715 :   return false;
    1414              : }
    1415              : 
    1416              : /* FIELD1 and FIELD2 are two fields of component refs.  We assume
    1417              :    that bases of both component refs are either equivalent or nonoverlapping.
    1418              :    We do not assume that the containers of FIELD1 and FIELD2 are of the
    1419              :    same type or size.
    1420              : 
    1421              :    Return 0 in case the base address of component_refs are same then
    1422              :    FIELD1 and FIELD2 have same address. Note that FIELD1 and FIELD2
    1423              :    may not be of same type or size.
    1424              : 
    1425              :    Return 1 if FIELD1 and FIELD2 are non-overlapping.
    1426              : 
    1427              :    Return -1 otherwise.
    1428              : 
    1429              :    Main difference between 0 and -1 is to let
    1430              :    nonoverlapping_component_refs_since_match_p discover the semantically
    1431              :    equivalent part of the access path.
    1432              : 
    1433              :    Note that this function is used even with -fno-strict-aliasing
    1434              :    and makes use of no TBAA assumptions.  */
    1435              : 
    1436              : static int
    1437      3286183 : nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
    1438              : {
    1439              :   /* If both fields are of the same type, we could save hard work of
    1440              :      comparing offsets.  */
    1441      3286183 :   tree type1 = DECL_CONTEXT (field1);
    1442      3286183 :   tree type2 = DECL_CONTEXT (field2);
    1443              : 
    1444      3286183 :   if (TREE_CODE (type1) == RECORD_TYPE
    1445      6438383 :       && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    1446              :     field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
    1447      3286183 :   if (TREE_CODE (type2) == RECORD_TYPE
    1448      6438383 :       && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    1449              :     field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
    1450              : 
    1451              :   /* ??? Bitfields can overlap at RTL level so punt on them.
    1452              :      FIXME: RTL expansion should be fixed by adjusting the access path
    1453              :      when producing MEM_ATTRs for MEMs which are wider than
    1454              :      the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
    1455      3286183 :   if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    1456              :     return -1;
    1457              : 
    1458              :   /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
    1459      3286183 :   if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    1460      3130157 :     return field1 != field2;
    1461              : 
    1462              :   /* In common case the offsets and bit offsets will be the same.
    1463              :      However if frontends do not agree on the alignment, they may be
    1464              :      different even if they actually represent same address.
    1465              :      Try the common case first and if that fails calcualte the
    1466              :      actual bit offset.  */
    1467       156026 :   if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
    1468       156026 :                           DECL_FIELD_OFFSET (field2))
    1469       287342 :       && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
    1470       131316 :                              DECL_FIELD_BIT_OFFSET (field2)))
    1471              :     return 0;
    1472              : 
    1473              :   /* Note that it may be possible to use component_ref_field_offset
    1474              :      which would provide offsets as trees. However constructing and folding
    1475              :      trees is expensive and does not seem to be worth the compile time
    1476              :      cost.  */
    1477              : 
    1478        25416 :   poly_uint64 offset1, offset2;
    1479        25416 :   poly_uint64 bit_offset1, bit_offset2;
    1480              : 
    1481        25416 :   if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
    1482        25416 :       && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
    1483        25416 :       && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
    1484        50832 :       && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    1485              :     {
    1486        25416 :       offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
    1487        25416 :       offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
    1488              : 
    1489        25416 :       if (known_eq (offset1, offset2))
    1490        21994 :         return 0;
    1491              : 
    1492        25416 :       poly_uint64 size1, size2;
    1493              : 
    1494        25416 :       if (poly_int_tree_p (DECL_SIZE (field1), &size1)
    1495        25416 :           && poly_int_tree_p (DECL_SIZE (field2), &size2)
    1496        50832 :           && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
    1497              :         return 1;
    1498              :     }
    1499              :   /* Resort to slower overlap checking by looking for matching types in
    1500              :      the middle of access path.  */
    1501              :   return -1;
    1502              : }
    1503              : 
    1504              : /* Return low bound of array. Do not produce new trees
    1505              :    and thus do not care about particular type of integer constant
    1506              :    and placeholder exprs.  */
    1507              : 
    1508              : static tree
    1509     17250313 : cheap_array_ref_low_bound (tree ref)
    1510              : {
    1511     17250313 :   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
    1512              : 
    1513              :   /* Avoid expensive array_ref_low_bound.
    1514              :      low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
    1515              :      type or it is zero.  */
    1516     17250313 :   if (TREE_OPERAND (ref, 2))
    1517        74451 :     return TREE_OPERAND (ref, 2);
    1518     17175862 :   else if (domain_type && TYPE_MIN_VALUE (domain_type))
    1519     17168254 :     return TYPE_MIN_VALUE (domain_type);
    1520              :   else
    1521         7608 :     return integer_zero_node;
    1522              : }
    1523              : 
    1524              : /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
    1525              :    completely disjoint.
    1526              : 
    1527              :    Return 1 if the refs are non-overlapping.
    1528              :    Return 0 if they are possibly overlapping but if so the overlap again
    1529              :    starts on the same address.
    1530              :    Return -1 otherwise.  */
    1531              : 
    1532              : int
    1533      8607916 : nonoverlapping_array_refs_p (tree ref1, tree ref2)
    1534              : {
    1535      8607916 :   tree index1 = TREE_OPERAND (ref1, 1);
    1536      8607916 :   tree index2 = TREE_OPERAND (ref2, 1);
    1537      8607916 :   tree low_bound1 = cheap_array_ref_low_bound (ref1);
    1538      8607916 :   tree low_bound2 = cheap_array_ref_low_bound (ref2);
    1539              : 
    1540              :   /* Handle zero offsets first: we do not need to match type size in this
    1541              :      case.  */
    1542      8607916 :   if (operand_equal_p (index1, low_bound1, 0)
    1543      8607916 :       && operand_equal_p (index2, low_bound2, 0))
    1544              :     return 0;
    1545              : 
    1546              :   /* If type sizes are different, give up.
    1547              : 
    1548              :      Avoid expensive array_ref_element_size.
    1549              :      If operand 3 is present it denotes size in the alignmnet units.
    1550              :      Otherwise size is TYPE_SIZE of the element type.
    1551              :      Handle only common cases where types are of the same "kind".  */
    1552      8534381 :   if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    1553              :     return -1;
    1554              : 
    1555      8534381 :   tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
    1556      8534381 :   tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
    1557              : 
    1558      8534381 :   if (TREE_OPERAND (ref1, 3))
    1559              :     {
    1560         4218 :       if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
    1561         8436 :           || !operand_equal_p (TREE_OPERAND (ref1, 3),
    1562         4218 :                                TREE_OPERAND (ref2, 3), 0))
    1563          646 :         return -1;
    1564              :     }
    1565              :   else
    1566              :     {
    1567      8530163 :       if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
    1568      8530163 :                             TYPE_SIZE_UNIT (elmt_type2), 0))
    1569              :         return -1;
    1570              :     }
    1571              : 
    1572              :   /* Since we know that type sizes are the same, there is no need to return
    1573              :      -1 after this point. Partial overlap can not be introduced.  */
    1574              : 
    1575              :   /* We may need to fold trees in this case.
    1576              :      TODO: Handle integer constant case at least.  */
    1577      8525531 :   if (!operand_equal_p (low_bound1, low_bound2, 0))
    1578              :     return 0;
    1579              : 
    1580      8525531 :   if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    1581              :     {
    1582       309527 :       if (tree_int_cst_equal (index1, index2))
    1583              :         return 0;
    1584              :       return 1;
    1585              :     }
    1586              :   /* TODO: We can use VRP to further disambiguate here. */
    1587              :   return 0;
    1588              : }
    1589              : 
    1590              : /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
    1591              :    MATCH2 either point to the same address or are disjoint.
    1592              :    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
    1593              :    respectively or NULL in the case we established equivalence of bases.
    1594              :    If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
    1595              :    overlap by exact multiply of their element size.
    1596              : 
    1597              :    This test works by matching the initial segment of the access path
    1598              :    and does not rely on TBAA thus is safe for !flag_strict_aliasing if
    1599              :    match was determined without use of TBAA oracle.
    1600              : 
    1601              :    Return 1 if we can determine that component references REF1 and REF2,
    1602              :    that are within a common DECL, cannot overlap.
    1603              : 
    1604              :    Return 0 if paths are same and thus there is nothing to disambiguate more
    1605              :    (i.e. there is must alias assuming there is must alias between MATCH1 and
    1606              :    MATCH2)
    1607              : 
    1608              :    Return -1 if we can not determine 0 or 1 - this happens when we met
    1609              :    non-matching types was met in the path.
    1610              :    In this case it may make sense to continue by other disambiguation
    1611              :    oracles.  */
    1612              : 
    1613              : static int
    1614      9039946 : nonoverlapping_refs_since_match_p (tree match1, tree ref1,
    1615              :                                    tree match2, tree ref2,
    1616              :                                    bool partial_overlap)
    1617              : {
    1618      9039946 :   int ntbaa1 = 0, ntbaa2 = 0;
    1619              :   /* Early return if there are no references to match, we do not need
    1620              :      to walk the access paths.
    1621              : 
    1622              :      Do not consider this as may-alias for stats - it is more useful
    1623              :      to have information how many disambiguations happened provided that
    1624              :      the query was meaningful.  */
    1625              : 
    1626      8270586 :   if (match1 == ref1 || !handled_component_p (ref1)
    1627     17309078 :       || match2 == ref2 || !handled_component_p (ref2))
    1628              :     return -1;
    1629              : 
    1630      8251656 :   auto_vec<tree, 16> component_refs1;
    1631      8251656 :   auto_vec<tree, 16> component_refs2;
    1632              : 
    1633              :   /* Create the stack of handled components for REF1.  */
    1634     20999624 :   while (handled_component_p (ref1) && ref1 != match1)
    1635              :     {
    1636              :       /* We use TBAA only to re-synchronize after mismatched refs.  So we
    1637              :          do not need to truncate access path after TBAA part ends.  */
    1638     12747968 :       if (ends_tbaa_access_path_p (ref1))
    1639              :         ntbaa1 = 0;
    1640              :       else
    1641     12530095 :         ntbaa1++;
    1642     12747968 :       component_refs1.safe_push (ref1);
    1643     12747968 :       ref1 = TREE_OPERAND (ref1, 0);
    1644              :     }
    1645              : 
    1646              :   /* Create the stack of handled components for REF2.  */
    1647     20972134 :   while (handled_component_p (ref2) && ref2 != match2)
    1648              :     {
    1649     12720478 :       if (ends_tbaa_access_path_p (ref2))
    1650              :         ntbaa2 = 0;
    1651              :       else
    1652     12481158 :         ntbaa2++;
    1653     12720478 :       component_refs2.safe_push (ref2);
    1654     12720478 :       ref2 = TREE_OPERAND (ref2, 0);
    1655              :     }
    1656              : 
    1657      8251656 :   if (!flag_strict_aliasing)
    1658              :     {
    1659       486753 :       ntbaa1 = 0;
    1660       486753 :       ntbaa2 = 0;
    1661              :     }
    1662              : 
    1663      8251656 :   bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
    1664      8251656 :   bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
    1665              : 
    1666              :   /* If only one of access path starts with MEM_REF check that offset is 0
    1667              :      so the addresses stays the same after stripping it.
    1668              :      TODO: In this case we may walk the other access path until we get same
    1669              :      offset.
    1670              : 
    1671              :      If both starts with MEM_REF, offset has to be same.  */
    1672        65151 :   if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
    1673      8231862 :       || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
    1674     16474551 :       || (mem_ref1 && mem_ref2
    1675      1107208 :           && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
    1676      1107208 :                                   TREE_OPERAND (ref2, 1))))
    1677              :     {
    1678        54546 :       ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
    1679        54546 :       return -1;
    1680              :     }
    1681              : 
    1682              :   /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
    1683              :      to handle them here at all.  */
    1684      8197110 :   gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
    1685              :                        && TREE_CODE (ref2) != TARGET_MEM_REF);
    1686              : 
    1687              :   /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
    1688              :      rank.  This is sufficient because we start from the same DECL and you
    1689              :      cannot reference several fields at a time with COMPONENT_REFs (unlike
    1690              :      with ARRAY_RANGE_REFs for arrays) so you always need the same number
    1691              :      of them to access a sub-component, unless you're in a union, in which
    1692              :      case the return value will precisely be false.  */
    1693     10425845 :   while (true)
    1694              :     {
    1695              :       /* Track if we seen unmatched ref with non-zero offset.  In this case
    1696              :          we must look for partial overlaps.  */
    1697     10425845 :       bool seen_unmatched_ref_p = false;
    1698              : 
    1699              :       /* First match ARRAY_REFs an try to disambiguate.  */
    1700     20155082 :       if (!component_refs1.is_empty ()
    1701     10157186 :           && !component_refs2.is_empty ())
    1702              :         {
    1703     18692072 :           unsigned int narray_refs1=0, narray_refs2=0;
    1704              : 
    1705              :           /* We generally assume that both access paths starts by same sequence
    1706              :              of refs.  However if number of array refs is not in sync, try
    1707              :              to recover and pop elts until number match.  This helps the case
    1708              :              where one access path starts by array and other by element.  */
    1709     18692072 :           for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
    1710              :                narray_refs1++)
    1711     12426123 :             if (TREE_CODE (component_refs1 [component_refs1.length()
    1712              :                                             - 1 - narray_refs1]) != ARRAY_REF)
    1713              :               break;
    1714              : 
    1715     18695435 :           for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
    1716              :                narray_refs2++)
    1717     12436076 :             if (TREE_CODE (component_refs2 [component_refs2.length()
    1718              :                                             - 1 - narray_refs2]) != ARRAY_REF)
    1719              :               break;
    1720      9701666 :           for (; narray_refs1 > narray_refs2; narray_refs1--)
    1721              :             {
    1722        15559 :               ref1 = component_refs1.pop ();
    1723        15559 :               ntbaa1--;
    1724              : 
    1725              :               /* If index is non-zero we need to check whether the reference
    1726              :                  does not break the main invariant that bases are either
    1727              :                  disjoint or equal.  Consider the example:
    1728              : 
    1729              :                  unsigned char out[][1];
    1730              :                  out[1]="a";
    1731              :                  out[i][0];
    1732              : 
    1733              :                  Here bases out and out are same, but after removing the
    1734              :                  [i] index, this invariant no longer holds, because
    1735              :                  out[i] points to the middle of array out.
    1736              : 
    1737              :                  TODO: If size of type of the skipped reference is an integer
    1738              :                  multiply of the size of type of the other reference this
    1739              :                  invariant can be verified, but even then it is not completely
    1740              :                  safe with !flag_strict_aliasing if the other reference contains
    1741              :                  unbounded array accesses.
    1742              :                  See   */
    1743              : 
    1744        15559 :               if (!operand_equal_p (TREE_OPERAND (ref1, 1),
    1745        15559 :                                     cheap_array_ref_low_bound (ref1), 0))
    1746              :                 return 0;
    1747              :             }
    1748      9686144 :           for (; narray_refs2 > narray_refs1; narray_refs2--)
    1749              :             {
    1750        18922 :               ref2 = component_refs2.pop ();
    1751        18922 :               ntbaa2--;
    1752        18922 :               if (!operand_equal_p (TREE_OPERAND (ref2, 1),
    1753        18922 :                                     cheap_array_ref_low_bound (ref2), 0))
    1754              :                 return 0;
    1755              :             }
    1756              :           /* Try to disambiguate matched arrays.  */
    1757     18039713 :           for (unsigned int i = 0; i < narray_refs1; i++)
    1758              :             {
    1759     17215832 :               int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
    1760      8607916 :                                                      component_refs2.pop ());
    1761      8607916 :               ntbaa1--;
    1762      8607916 :               ntbaa2--;
    1763      8607916 :               if (cmp == 1 && !partial_overlap)
    1764              :                 {
    1765       226575 :                   ++alias_stats
    1766       226575 :                     .nonoverlapping_refs_since_match_p_no_alias;
    1767       226575 :                   return 1;
    1768              :                 }
    1769      8381341 :               if (cmp == -1)
    1770              :                 {
    1771         8850 :                   seen_unmatched_ref_p = true;
    1772              :                   /* We can not maintain the invariant that bases are either
    1773              :                      same or completely disjoint.  However we can still recover
    1774              :                      from type based alias analysis if we reach references to
    1775              :                      same sizes.  We do not attempt to match array sizes, so
    1776              :                      just finish array walking and look for component refs.  */
    1777         8850 :                   if (ntbaa1 < 0 || ntbaa2 < 0)
    1778              :                     {
    1779         7985 :                       ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
    1780         7985 :                       return -1;
    1781              :                     }
    1782         1804 :                   for (i++; i < narray_refs1; i++)
    1783              :                     {
    1784          939 :                       component_refs1.pop ();
    1785          939 :                       component_refs2.pop ();
    1786          939 :                       ntbaa1--;
    1787          939 :                       ntbaa2--;
    1788              :                     }
    1789              :                   break;
    1790              :                 }
    1791      8372491 :               partial_overlap = false;
    1792              :             }
    1793              :         }
    1794              : 
    1795              :       /* Next look for component_refs.  */
    1796     10228441 :       do
    1797              :         {
    1798     10228441 :           if (component_refs1.is_empty ())
    1799              :             {
    1800      6812005 :               ++alias_stats
    1801      6812005 :                 .nonoverlapping_refs_since_match_p_must_overlap;
    1802      6812005 :               return 0;
    1803              :             }
    1804      3416436 :           ref1 = component_refs1.pop ();
    1805      3416436 :           ntbaa1--;
    1806      3416436 :           if (TREE_CODE (ref1) != COMPONENT_REF)
    1807              :             {
    1808       114213 :               seen_unmatched_ref_p = true;
    1809       114213 :               if (ntbaa1 < 0 || ntbaa2 < 0)
    1810              :                 {
    1811        42958 :                   ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
    1812        42958 :                   return -1;
    1813              :                 }
    1814              :             }
    1815              :         }
    1816      6675701 :       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
    1817              : 
    1818      3302223 :       do
    1819              :         {
    1820      3302223 :           if (component_refs2.is_empty ())
    1821              :             {
    1822        19426 :               ++alias_stats
    1823        19426 :                 .nonoverlapping_refs_since_match_p_must_overlap;
    1824        19426 :               return 0;
    1825              :             }
    1826      3282797 :           ref2 = component_refs2.pop ();
    1827      3282797 :           ntbaa2--;
    1828      3282797 :           if (TREE_CODE (ref2) != COMPONENT_REF)
    1829              :             {
    1830           48 :               if (ntbaa1 < 0 || ntbaa2 < 0)
    1831              :                 {
    1832           48 :                   ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
    1833           48 :                   return -1;
    1834              :                 }
    1835              :               seen_unmatched_ref_p = true;
    1836              :             }
    1837              :         }
    1838      6565498 :       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
    1839              : 
    1840              :       /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
    1841              :          earlier.  */
    1842      3282749 :       gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
    1843              :                            && TREE_CODE (ref2) == COMPONENT_REF);
    1844              : 
    1845      3282749 :       tree field1 = TREE_OPERAND (ref1, 1);
    1846      3282749 :       tree field2 = TREE_OPERAND (ref2, 1);
    1847              : 
    1848              :       /* ??? We cannot simply use the type of operand #0 of the refs here
    1849              :          as the Fortran compiler smuggles type punning into COMPONENT_REFs
    1850              :          for common blocks instead of using unions like everyone else.  */
    1851      3282749 :       tree type1 = DECL_CONTEXT (field1);
    1852      3282749 :       tree type2 = DECL_CONTEXT (field2);
    1853              : 
    1854      3282749 :       partial_overlap = false;
    1855              : 
    1856              :       /* If we skipped array refs on type of different sizes, we can
    1857              :          no longer be sure that there are not partial overlaps.  */
    1858          432 :       if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
    1859      3283181 :           && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
    1860              :         {
    1861            0 :           ++alias_stats
    1862            0 :             .nonoverlapping_refs_since_match_p_may_alias;
    1863            0 :           return -1;
    1864              :         }
    1865              : 
    1866      3282749 :       int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
    1867      3282749 :       if (cmp == -1)
    1868              :         {
    1869         3422 :           ++alias_stats
    1870         3422 :             .nonoverlapping_refs_since_match_p_may_alias;
    1871         3422 :           return -1;
    1872              :         }
    1873      3279327 :       else if (cmp == 1)
    1874              :         {
    1875      1050592 :           ++alias_stats
    1876      1050592 :             .nonoverlapping_refs_since_match_p_no_alias;
    1877      1050592 :           return 1;
    1878              :         }
    1879              :     }
    1880      8251656 : }
    1881              : 
    1882              : /* Return TYPE_UID which can be used to match record types we consider
    1883              :    same for TBAA purposes.  */
    1884              : 
    1885              : static inline int
    1886       130102 : ncr_type_uid (const_tree field)
    1887              : {
    1888              :   /* ??? We cannot simply use the type of operand #0 of the refs here
    1889              :      as the Fortran compiler smuggles type punning into COMPONENT_REFs
    1890              :      for common blocks instead of using unions like everyone else.  */
    1891       130102 :   tree type = DECL_FIELD_CONTEXT (field);
    1892              :   /* With LTO types considered same_type_for_tbaa_p
    1893              :      from different translation unit may not have same
    1894              :      main variant.  They however have same TYPE_CANONICAL.  */
    1895       130102 :   if (TYPE_CANONICAL (type))
    1896       130102 :     return TYPE_UID (TYPE_CANONICAL (type));
    1897            0 :   return TYPE_UID (type);
    1898              : }
    1899              : 
    1900              : /* qsort compare function to sort FIELD_DECLs after their
    1901              :    DECL_FIELD_CONTEXT TYPE_UID.  */
    1902              : 
    1903              : static inline int
    1904        56843 : ncr_compar (const void *field1_, const void *field2_)
    1905              : {
    1906        56843 :   const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
    1907        56843 :   const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
    1908        56843 :   unsigned int uid1 = ncr_type_uid (field1);
    1909        56843 :   unsigned int uid2 = ncr_type_uid (field2);
    1910              : 
    1911        56843 :   if (uid1 < uid2)
    1912              :     return -1;
    1913        22353 :   else if (uid1 > uid2)
    1914        22353 :     return 1;
    1915              :   return 0;
    1916              : }
    1917              : 
    1918              : /* Return true if we can determine that the fields referenced cannot
    1919              :    overlap for any pair of objects.  This relies on TBAA.  */
    1920              : 
    1921              : static bool
    1922      1247073 : nonoverlapping_component_refs_p (const_tree x, const_tree y)
    1923              : {
    1924              :   /* Early return if we have nothing to do.
    1925              : 
    1926              :      Do not consider this as may-alias for stats - it is more useful
    1927              :      to have information how many disambiguations happened provided that
    1928              :      the query was meaningful.  */
    1929      1247073 :   if (!flag_strict_aliasing
    1930      1247073 :       || !x || !y
    1931       164702 :       || !handled_component_p (x)
    1932      1247073 :       || !handled_component_p (y))
    1933              :     return false;
    1934              : 
    1935       101252 :   auto_vec<const_tree, 16> fieldsx;
    1936       318992 :   while (handled_component_p (x))
    1937              :     {
    1938       217740 :       if (TREE_CODE (x) == COMPONENT_REF)
    1939              :         {
    1940       127782 :           tree field = TREE_OPERAND (x, 1);
    1941       127782 :           tree type = DECL_FIELD_CONTEXT (field);
    1942       127782 :           if (TREE_CODE (type) == RECORD_TYPE)
    1943       127500 :             fieldsx.safe_push (field);
    1944              :         }
    1945        89958 :       else if (ends_tbaa_access_path_p (x))
    1946         2427 :         fieldsx.truncate (0);
    1947       217740 :       x = TREE_OPERAND (x, 0);
    1948              :     }
    1949       170572 :   if (fieldsx.length () == 0)
    1950              :     return false;
    1951        69320 :   auto_vec<const_tree, 16> fieldsy;
    1952       151036 :   while (handled_component_p (y))
    1953              :     {
    1954        81716 :       if (TREE_CODE (y) == COMPONENT_REF)
    1955              :         {
    1956        15725 :           tree field = TREE_OPERAND (y, 1);
    1957        15725 :           tree type = DECL_FIELD_CONTEXT (field);
    1958        15725 :           if (TREE_CODE (type) == RECORD_TYPE)
    1959        15443 :             fieldsy.safe_push (TREE_OPERAND (y, 1));
    1960              :         }
    1961        65991 :       else if (ends_tbaa_access_path_p (y))
    1962          188 :         fieldsy.truncate (0);
    1963        81716 :       y = TREE_OPERAND (y, 0);
    1964              :     }
    1965        69320 :   if (fieldsy.length () == 0)
    1966              :     {
    1967        61552 :       ++alias_stats.nonoverlapping_component_refs_p_may_alias;
    1968        61552 :       return false;
    1969              :     }
    1970              : 
    1971              :   /* Most common case first.  */
    1972         7768 :   if (fieldsx.length () == 1
    1973         7768 :       && fieldsy.length () == 1)
    1974              :    {
    1975         8884 :      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
    1976         4442 :                              DECL_FIELD_CONTEXT (fieldsy[0])) == 1
    1977         7690 :          && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
    1978              :       {
    1979         1808 :          ++alias_stats.nonoverlapping_component_refs_p_no_alias;
    1980         1808 :          return true;
    1981              :       }
    1982              :      else
    1983              :       {
    1984         2634 :          ++alias_stats.nonoverlapping_component_refs_p_may_alias;
    1985         2634 :          return false;
    1986              :       }
    1987              :    }
    1988              : 
    1989         3326 :   if (fieldsx.length () == 2)
    1990              :     {
    1991          300 :       if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
    1992          162 :         std::swap (fieldsx[0], fieldsx[1]);
    1993              :     }
    1994              :   else
    1995         3026 :     fieldsx.qsort (ncr_compar);
    1996              : 
    1997         3326 :   if (fieldsy.length () == 2)
    1998              :     {
    1999          106 :       if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
    2000           46 :         std::swap (fieldsy[0], fieldsy[1]);
    2001              :     }
    2002              :   else
    2003         3220 :     fieldsy.qsort (ncr_compar);
    2004              : 
    2005              :   unsigned i = 0, j = 0;
    2006         8208 :   do
    2007              :     {
    2008         8208 :       const_tree fieldx = fieldsx[i];
    2009         8208 :       const_tree fieldy = fieldsy[j];
    2010              : 
    2011              :       /* We're left with accessing different fields of a structure,
    2012              :          no possible overlap.  */
    2013        16416 :       if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
    2014         8208 :                               DECL_FIELD_CONTEXT (fieldy)) == 1
    2015         8208 :           && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
    2016              :         {
    2017            0 :           ++alias_stats.nonoverlapping_component_refs_p_no_alias;
    2018            0 :           return true;
    2019              :         }
    2020              : 
    2021         8208 :       if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
    2022              :         {
    2023         2855 :           i++;
    2024         5710 :           if (i == fieldsx.length ())
    2025              :             break;
    2026              :         }
    2027              :       else
    2028              :         {
    2029         5353 :           j++;
    2030        10706 :           if (j == fieldsy.length ())
    2031              :             break;
    2032              :         }
    2033              :     }
    2034              :   while (1);
    2035              : 
    2036         3326 :   ++alias_stats.nonoverlapping_component_refs_p_may_alias;
    2037         3326 :   return false;
    2038       170572 : }
    2039              : 
    2040              : 
    2041              : /* Return true if two memory references based on the variables BASE1
    2042              :    and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
    2043              :    [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
    2044              :    if non-NULL are the complete memory reference trees.  */
    2045              : 
    2046              : static bool
    2047   1502464455 : decl_refs_may_alias_p (tree ref1, tree base1,
    2048              :                        poly_int64 offset1, poly_int64 max_size1,
    2049              :                        poly_int64 size1,
    2050              :                        tree ref2, tree base2,
    2051              :                        poly_int64 offset2, poly_int64 max_size2,
    2052              :                        poly_int64 size2)
    2053              : {
    2054   1502464455 :   gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
    2055              : 
    2056              :   /* If both references are based on different variables, they cannot alias.  */
    2057   1502464455 :   if (compare_base_decls (base1, base2) == 0)
    2058              :     return false;
    2059              : 
    2060              :   /* If both references are based on the same variable, they cannot alias if
    2061              :      the accesses do not overlap.  */
    2062    206967210 :   if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    2063              :     return false;
    2064              : 
    2065              :   /* If there is must alias, there is no use disambiguating further.  */
    2066     60438865 :   if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    2067              :     return true;
    2068              : 
    2069              :   /* For components with variable position, the above test isn't sufficient,
    2070              :      so we disambiguate component references manually.  */
    2071     10381906 :   if (ref1 && ref2
    2072      7944330 :       && handled_component_p (ref1) && handled_component_p (ref2)
    2073     17540285 :       && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
    2074              :     return false;
    2075              : 
    2076              :   return true;
    2077              : }
    2078              : 
    2079              : /* Return true if access with BASE is view converted.
    2080              :    Base must not be stripped from inner MEM_REF (&decl)
    2081              :    which is done by ao_ref_base and thus one extra walk
    2082              :    of handled components is needed.  */
    2083              : 
    2084              : bool
    2085   1223001709 : view_converted_memref_p (tree base)
    2086              : {
    2087   1223001709 :   if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
    2088              :     return false;
    2089    819996375 :   return (same_type_for_tbaa (TREE_TYPE (base),
    2090    819996375 :                               TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1))))
    2091    819996375 :           != 1);
    2092              : }
    2093              : 
    2094              : /* Return true if an indirect reference based on *PTR1 constrained
    2095              :    to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
    2096              :    constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
    2097              :    the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
    2098              :    in which case they are computed on-demand.  REF1 and REF2
    2099              :    if non-NULL are the complete memory reference trees.  */
    2100              : 
    2101              : static bool
    2102    302062604 : indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
    2103              :                                poly_int64 offset1, poly_int64 max_size1,
    2104              :                                poly_int64 size1,
    2105              :                                alias_set_type ref1_alias_set,
    2106              :                                alias_set_type base1_alias_set,
    2107              :                                tree ref2 ATTRIBUTE_UNUSED, tree base2,
    2108              :                                poly_int64 offset2, poly_int64 max_size2,
    2109              :                                poly_int64 size2,
    2110              :                                alias_set_type ref2_alias_set,
    2111              :                                alias_set_type base2_alias_set, bool tbaa_p)
    2112              : {
    2113    302062604 :   tree ptr1;
    2114    302062604 :   tree ptrtype1, dbase2;
    2115              : 
    2116    302062604 :   gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
    2117              :                         || TREE_CODE (base1) == TARGET_MEM_REF)
    2118              :                        && DECL_P (base2));
    2119              : 
    2120    302062604 :   ptr1 = TREE_OPERAND (base1, 0);
    2121    302062604 :   poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
    2122              : 
    2123              :   /* If only one reference is based on a variable, they cannot alias if
    2124              :      the pointer access is beyond the extent of the variable access.
    2125              :      (the pointer base cannot validly point to an offset less than zero
    2126              :      of the variable).
    2127              :      ???  IVOPTs creates bases that do not honor this restriction,
    2128              :      so do not apply this optimization for TARGET_MEM_REFs.  */
    2129    302062604 :   if (TREE_CODE (base1) != TARGET_MEM_REF
    2130    302062604 :       && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    2131     73396102 :     return false;
    2132              : 
    2133              :   /* If the pointer based access is bigger than the variable they cannot
    2134              :      alias.  This is similar to the check below where we use TBAA to
    2135              :      increase the size of the pointer based access based on the dynamic
    2136              :      type of a containing object we can infer from it.  */
    2137    228666502 :   poly_int64 dsize2;
    2138    228666502 :   if (known_size_p (size1)
    2139    213233496 :       && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
    2140    404989997 :       && known_lt (dsize2, size1))
    2141              :     return false;
    2142              : 
    2143              :   /* They also cannot alias if the pointer may not point to the decl.  */
    2144    209821704 :   if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    2145              :     return false;
    2146              : 
    2147              :   /* Disambiguations that rely on strict aliasing rules follow.  */
    2148     31489197 :   if (!flag_strict_aliasing || !tbaa_p)
    2149              :     return true;
    2150              : 
    2151              :   /* If the alias set for a pointer access is zero all bets are off.  */
    2152      6313675 :   if (base1_alias_set == 0 || base2_alias_set == 0)
    2153              :     return true;
    2154              : 
    2155              :   /* When we are trying to disambiguate an access with a pointer dereference
    2156              :      as base versus one with a decl as base we can use both the size
    2157              :      of the decl and its dynamic type for extra disambiguation.
    2158              :      ???  We do not know anything about the dynamic type of the decl
    2159              :      other than that its alias-set contains base2_alias_set as a subset
    2160              :      which does not help us here.  */
    2161              :   /* As we know nothing useful about the dynamic type of the decl just
    2162              :      use the usual conflict check rather than a subset test.
    2163              :      ???  We could introduce -fvery-strict-aliasing when the language
    2164              :      does not allow decls to have a dynamic type that differs from their
    2165              :      static type.  Then we can check
    2166              :      !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
    2167      1554021 :   if (base1_alias_set != base2_alias_set
    2168      1554021 :       && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    2169              :     return false;
    2170              : 
    2171      1337798 :   ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
    2172              : 
    2173              :   /* If the size of the access relevant for TBAA through the pointer
    2174              :      is bigger than the size of the decl we can't possibly access the
    2175              :      decl via that pointer.  */
    2176      1337798 :   if (/* ???  This in turn may run afoul when a decl of type T which is
    2177              :          a member of union type U is accessed through a pointer to
    2178              :          type U and sizeof T is smaller than sizeof U.  */
    2179      1337798 :       TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
    2180      1305139 :       && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
    2181      2642937 :       && compare_sizes (DECL_SIZE (base2),
    2182      1305139 :                         TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    2183              :     return false;
    2184              : 
    2185      1304913 :   if (!ref2)
    2186              :     return true;
    2187              : 
    2188              :   /* If the decl is accessed via a MEM_REF, reconstruct the base
    2189              :      we can use for TBAA and an appropriately adjusted offset.  */
    2190              :   dbase2 = ref2;
    2191      2126561 :   while (handled_component_p (dbase2))
    2192       889089 :     dbase2 = TREE_OPERAND (dbase2, 0);
    2193      1237472 :   poly_int64 doffset1 = offset1;
    2194      1237472 :   poly_offset_int doffset2 = offset2;
    2195      1237472 :   if (TREE_CODE (dbase2) == MEM_REF
    2196      1237472 :       || TREE_CODE (dbase2) == TARGET_MEM_REF)
    2197              :     {
    2198       918352 :       doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
    2199       459176 :       tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
    2200              :       /* If second reference is view-converted, give up now.  */
    2201       459176 :       if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
    2202              :         return true;
    2203              :     }
    2204              : 
    2205              :   /* If first reference is view-converted, give up now.  */
    2206      1132953 :   if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    2207              :     return true;
    2208              : 
    2209              :   /* If both references are through the same type, they do not alias
    2210              :      if the accesses do not overlap.  This does extra disambiguation
    2211              :      for mixed/pointer accesses but requires strict aliasing.
    2212              :      For MEM_REFs we require that the component-ref offset we computed
    2213              :      is relative to the start of the type which we ensure by
    2214              :      comparing rvalue and access type and disregarding the constant
    2215              :      pointer offset.
    2216              : 
    2217              :      But avoid treating variable length arrays as "objects", instead assume they
    2218              :      can overlap by an exact multiple of their element size.
    2219              :      See gcc.dg/torture/alias-2.c.  */
    2220      1018588 :   if (((TREE_CODE (base1) != TARGET_MEM_REF
    2221       174749 :        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
    2222       985206 :        && (TREE_CODE (dbase2) != TARGET_MEM_REF
    2223        23884 :            || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
    2224      1979933 :       && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    2225              :     {
    2226       335077 :       bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
    2227       335077 :                               && (TYPE_SIZE (TREE_TYPE (base1))
    2228         2278 :                               && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
    2229       649724 :                                  != INTEGER_CST));
    2230       335077 :       if (!partial_overlap
    2231       335077 :           && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
    2232              :         return false;
    2233       314647 :       if (!ref1 || !ref2
    2234              :           /* If there is must alias, there is no use disambiguating further.  */
    2235       314647 :           || (!partial_overlap
    2236       302065 :               && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
    2237              :         return true;
    2238         3294 :       int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
    2239              :                                                    partial_overlap);
    2240         3294 :       if (res == -1)
    2241         3190 :         return !nonoverlapping_component_refs_p (ref1, ref2);
    2242          104 :       return !res;
    2243              :     }
    2244              : 
    2245              :   /* Do access-path based disambiguation.  */
    2246       683511 :   if (ref1 && ref2
    2247      1153103 :       && (handled_component_p (ref1) || handled_component_p (ref2)))
    2248       493463 :     return aliasing_component_refs_p (ref1,
    2249              :                                       ref1_alias_set, base1_alias_set,
    2250              :                                       offset1, max_size1,
    2251              :                                       ref2,
    2252              :                                       ref2_alias_set, base2_alias_set,
    2253       493463 :                                       offset2, max_size2);
    2254              : 
    2255              :   return true;
    2256              : }
    2257              : 
    2258              : /* Return true if two indirect references based on *PTR1
    2259              :    and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
    2260              :    [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
    2261              :    the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
    2262              :    in which case they are computed on-demand.  REF1 and REF2
    2263              :    if non-NULL are the complete memory reference trees. */
    2264              : 
    2265              : static bool
    2266     93855556 : indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
    2267              :                            poly_int64 offset1, poly_int64 max_size1,
    2268              :                            poly_int64 size1,
    2269              :                            alias_set_type ref1_alias_set,
    2270              :                            alias_set_type base1_alias_set,
    2271              :                            tree ref2 ATTRIBUTE_UNUSED, tree base2,
    2272              :                            poly_int64 offset2, poly_int64 max_size2,
    2273              :                            poly_int64 size2,
    2274              :                            alias_set_type ref2_alias_set,
    2275              :                            alias_set_type base2_alias_set, bool tbaa_p)
    2276              : {
    2277     93855556 :   tree ptr1;
    2278     93855556 :   tree ptr2;
    2279     93855556 :   tree ptrtype1, ptrtype2;
    2280              : 
    2281     93855556 :   gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
    2282              :                         || TREE_CODE (base1) == TARGET_MEM_REF)
    2283              :                        && (TREE_CODE (base2) == MEM_REF
    2284              :                            || TREE_CODE (base2) == TARGET_MEM_REF));
    2285              : 
    2286     93855556 :   ptr1 = TREE_OPERAND (base1, 0);
    2287     93855556 :   ptr2 = TREE_OPERAND (base2, 0);
    2288              : 
    2289              :   /* If both bases are based on pointers they cannot alias if they may not
    2290              :      point to the same memory object or if they point to the same object
    2291              :      and the accesses do not overlap.  */
    2292     93855556 :   if ((!cfun || gimple_in_ssa_p (cfun))
    2293     59642324 :       && operand_equal_p (ptr1, ptr2, 0)
    2294    125698573 :       && (((TREE_CODE (base1) != TARGET_MEM_REF
    2295      1160322 :             || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
    2296     31752242 :            && (TREE_CODE (base2) != TARGET_MEM_REF
    2297      1060615 :                || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
    2298       108951 :           || (TREE_CODE (base1) == TARGET_MEM_REF
    2299        94499 :               && TREE_CODE (base2) == TARGET_MEM_REF
    2300        85668 :               && (TMR_STEP (base1) == TMR_STEP (base2)
    2301        13590 :                   || (TMR_STEP (base1) && TMR_STEP (base2)
    2302         2836 :                       && operand_equal_p (TMR_STEP (base1),
    2303         2836 :                                           TMR_STEP (base2), 0)))
    2304        72078 :               && (TMR_INDEX (base1) == TMR_INDEX (base2)
    2305        12388 :                   || (TMR_INDEX (base1) && TMR_INDEX (base2)
    2306        10916 :                       && operand_equal_p (TMR_INDEX (base1),
    2307        10916 :                                           TMR_INDEX (base2), 0)))
    2308        59690 :               && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
    2309            0 :                   || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
    2310            0 :                       && operand_equal_p (TMR_INDEX2 (base1),
    2311            0 :                                           TMR_INDEX2 (base2), 0))))))
    2312              :     {
    2313     31793756 :       poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
    2314     31793756 :       poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
    2315     31793756 :       if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
    2316     31793756 :                                    offset2 + moff2, max_size2))
    2317     31501027 :         return false;
    2318              :       /* If there is must alias, there is no use disambiguating further.  */
    2319      4119975 :       if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    2320              :         return true;
    2321      1158307 :       if (ref1 && ref2)
    2322              :         {
    2323       876371 :           int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
    2324              :                                                        false);
    2325       876371 :           if (res != -1)
    2326       865578 :             return !res;
    2327              :         }
    2328              :     }
    2329     62354529 :   if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    2330              :     return false;
    2331              : 
    2332              :   /* Disambiguations that rely on strict aliasing rules follow.  */
    2333     38356553 :   if (!flag_strict_aliasing || !tbaa_p)
    2334              :     return true;
    2335              : 
    2336     15042283 :   ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
    2337     15042283 :   ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
    2338              : 
    2339              :   /* If the alias set for a pointer access is zero all bets are off.  */
    2340     15042283 :   if (base1_alias_set == 0
    2341     15042283 :       || base2_alias_set == 0)
    2342              :     return true;
    2343              : 
    2344              :   /* Do type-based disambiguation.  */
    2345     10170260 :   if (base1_alias_set != base2_alias_set
    2346     10170260 :       && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    2347              :     return false;
    2348              : 
    2349              :   /* If either reference is view-converted, give up now.  */
    2350      9434355 :   if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
    2351      9434355 :       || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    2352      2938269 :     return true;
    2353              : 
    2354              :   /* If both references are through the same type, they do not alias
    2355              :      if the accesses do not overlap.  This does extra disambiguation
    2356              :      for mixed/pointer accesses but requires strict aliasing.  */
    2357      6496086 :   if ((TREE_CODE (base1) != TARGET_MEM_REF
    2358      1231212 :        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
    2359      6156045 :       && (TREE_CODE (base2) != TARGET_MEM_REF
    2360      1120174 :           || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
    2361     12518890 :       && same_type_for_tbaa (TREE_TYPE (ptrtype1),
    2362      6022804 :                              TREE_TYPE (ptrtype2)) == 1)
    2363              :     {
    2364              :       /* But avoid treating arrays as "objects", instead assume they
    2365              :          can overlap by an exact multiple of their element size.
    2366              :          See gcc.dg/torture/alias-2.c.  */
    2367      4017728 :       bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
    2368              : 
    2369      4017728 :       if (!partial_overlap
    2370      4017728 :           && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    2371              :         return false;
    2372      3668685 :       if (!ref1 || !ref2
    2373      3668685 :           || (!partial_overlap
    2374      3095072 :               && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
    2375              :         return true;
    2376       190279 :       int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
    2377              :                                                    partial_overlap);
    2378       190279 :       if (res == -1)
    2379        69589 :         return !nonoverlapping_component_refs_p (ref1, ref2);
    2380       120690 :       return !res;
    2381              :     }
    2382              : 
    2383              :   /* Do access-path based disambiguation.  */
    2384      2478358 :   if (ref1 && ref2
    2385      3606526 :       && (handled_component_p (ref1) || handled_component_p (ref2)))
    2386      1810651 :     return aliasing_component_refs_p (ref1,
    2387              :                                       ref1_alias_set, base1_alias_set,
    2388              :                                       offset1, max_size1,
    2389              :                                       ref2,
    2390              :                                       ref2_alias_set, base2_alias_set,
    2391      1810651 :                                       offset2, max_size2);
    2392              : 
    2393              :   return true;
    2394              : }
    2395              : 
    2396              : /* Return true, if the two memory references REF1 and REF2 may alias.  */
    2397              : 
    2398              : static bool
    2399   1975178314 : refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
    2400              : {
    2401   1975178314 :   tree base1, base2;
    2402   1975178314 :   poly_int64 offset1 = 0, offset2 = 0;
    2403   1975178314 :   poly_int64 max_size1 = -1, max_size2 = -1;
    2404   1975178314 :   bool var1_p, var2_p, ind1_p, ind2_p;
    2405              : 
    2406   1975178314 :   gcc_checking_assert ((!ref1->ref
    2407              :                         || TREE_CODE (ref1->ref) == SSA_NAME
    2408              :                         || DECL_P (ref1->ref)
    2409              :                         || TREE_CODE (ref1->ref) == STRING_CST
    2410              :                         || handled_component_p (ref1->ref)
    2411              :                         || TREE_CODE (ref1->ref) == MEM_REF
    2412              :                         || TREE_CODE (ref1->ref) == TARGET_MEM_REF
    2413              :                         || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
    2414              :                        && (!ref2->ref
    2415              :                            || TREE_CODE (ref2->ref) == SSA_NAME
    2416              :                            || DECL_P (ref2->ref)
    2417              :                            || TREE_CODE (ref2->ref) == STRING_CST
    2418              :                            || handled_component_p (ref2->ref)
    2419              :                            || TREE_CODE (ref2->ref) == MEM_REF
    2420              :                            || TREE_CODE (ref2->ref) == TARGET_MEM_REF
    2421              :                            || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
    2422              : 
    2423              :   /* Decompose the references into their base objects and the access.  */
    2424   1975178314 :   base1 = ao_ref_base (ref1);
    2425   1975178314 :   offset1 = ref1->offset;
    2426   1975178314 :   max_size1 = ref1->max_size;
    2427   1975178314 :   base2 = ao_ref_base (ref2);
    2428   1975178314 :   offset2 = ref2->offset;
    2429   1975178314 :   max_size2 = ref2->max_size;
    2430              : 
    2431              :   /* We can end up with registers or constants as bases for example from
    2432              :      *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
    2433              :      which is seen as a struct copy.  */
    2434   1975178314 :   if (TREE_CODE (base1) == SSA_NAME
    2435   1975175968 :       || TREE_CODE (base1) == CONST_DECL
    2436   1973209706 :       || TREE_CODE (base1) == CONSTRUCTOR
    2437   1973209706 :       || TREE_CODE (base1) == ADDR_EXPR
    2438   1973209706 :       || CONSTANT_CLASS_P (base1)
    2439   1966607541 :       || TREE_CODE (base2) == SSA_NAME
    2440   1966607541 :       || TREE_CODE (base2) == CONST_DECL
    2441   1966510098 :       || TREE_CODE (base2) == CONSTRUCTOR
    2442   1966510098 :       || TREE_CODE (base2) == ADDR_EXPR
    2443   1966510098 :       || CONSTANT_CLASS_P (base2))
    2444              :     return false;
    2445              : 
    2446              :   /* Two volatile accesses always conflict.  */
    2447   1966466392 :   if (ref1->volatile_p
    2448      5877081 :       && ref2->volatile_p)
    2449              :     return true;
    2450              : 
    2451              :   /* refN->ref may convey size information, do not confuse our workers
    2452              :      with that but strip it - ao_ref_base took it into account already.  */
    2453   1962841172 :   tree ref1ref = ref1->ref;
    2454   1962841172 :   if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
    2455          162 :     ref1ref = TREE_OPERAND (ref1ref, 0);
    2456   1962841172 :   tree ref2ref = ref2->ref;
    2457   1962841172 :   if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
    2458            0 :     ref2ref = TREE_OPERAND (ref2ref, 0);
    2459              : 
    2460              :   /* Defer to simple offset based disambiguation if we have
    2461              :      references based on two decls.  Do this before defering to
    2462              :      TBAA to handle must-alias cases in conformance with the
    2463              :      GCC extension of allowing type-punning through unions.  */
    2464   1962841172 :   var1_p = DECL_P (base1);
    2465   1962841172 :   var2_p = DECL_P (base2);
    2466   1962841172 :   if (var1_p && var2_p)
    2467   1502464455 :     return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
    2468              :                                   ref1->size,
    2469              :                                   ref2ref, base2, offset2, max_size2,
    2470   1502464455 :                                   ref2->size);
    2471              : 
    2472              :   /* We can end up referring to code via function and label decls.
    2473              :      As we likely do not properly track code aliases conservatively
    2474              :      bail out.  */
    2475    460376717 :   if (TREE_CODE (base1) == FUNCTION_DECL
    2476    460376717 :       || TREE_CODE (base1) == LABEL_DECL
    2477    459426341 :       || TREE_CODE (base2) == FUNCTION_DECL
    2478    459413048 :       || TREE_CODE (base2) == LABEL_DECL)
    2479              :     return true;
    2480              : 
    2481              :   /* Handle restrict based accesses.
    2482              :      ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
    2483              :      here.  */
    2484    459413048 :   tree rbase1 = base1;
    2485    459413048 :   tree rbase2 = base2;
    2486    459413048 :   if (var1_p)
    2487              :     {
    2488    225169962 :       rbase1 = ref1ref;
    2489    225169962 :       if (rbase1)
    2490    292426548 :         while (handled_component_p (rbase1))
    2491    106870601 :           rbase1 = TREE_OPERAND (rbase1, 0);
    2492              :     }
    2493    459413048 :   if (var2_p)
    2494              :     {
    2495    116032874 :       rbase2 = ref2ref;
    2496    116032874 :       if (rbase2)
    2497    190757905 :         while (handled_component_p (rbase2))
    2498     82380421 :           rbase2 = TREE_OPERAND (rbase2, 0);
    2499              :     }
    2500    459413048 :   if (rbase1 && rbase2
    2501    412143643 :       && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
    2502    260009005 :       && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
    2503              :       /* If the accesses are in the same restrict clique... */
    2504    182261404 :       && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
    2505              :       /* But based on different pointers they do not alias.  */
    2506    612462234 :       && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
    2507              :     return false;
    2508              : 
    2509    443384185 :   ind1_p = (TREE_CODE (base1) == MEM_REF
    2510    443384185 :             || TREE_CODE (base1) == TARGET_MEM_REF);
    2511    443384185 :   ind2_p = (TREE_CODE (base2) == MEM_REF
    2512    443384185 :             || TREE_CODE (base2) == TARGET_MEM_REF);
    2513              : 
    2514              :   /* Canonicalize the pointer-vs-decl case.  */
    2515    443384185 :   if (ind1_p && var2_p)
    2516              :     {
    2517    114740864 :       std::swap (offset1, offset2);
    2518    114740864 :       std::swap (max_size1, max_size2);
    2519    114740864 :       std::swap (base1, base2);
    2520    114740864 :       std::swap (ref1, ref2);
    2521    114740864 :       std::swap (ref1ref, ref2ref);
    2522    114740864 :       var1_p = true;
    2523    114740864 :       ind1_p = false;
    2524    114740864 :       var2_p = false;
    2525    114740864 :       ind2_p = true;
    2526              :     }
    2527              : 
    2528              :   /* First defer to TBAA if possible.  */
    2529    443384185 :   if (tbaa_p
    2530    197350686 :       && flag_strict_aliasing
    2531    573108748 :       && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
    2532              :                                  ao_ref_alias_set (ref2)))
    2533              :     return false;
    2534              : 
    2535              :   /* If the reference is based on a pointer that points to memory
    2536              :      that may not be written to then the other reference cannot possibly
    2537              :      clobber it.  */
    2538    396489875 :   if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
    2539    395170892 :        && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
    2540    791180654 :       || (ind1_p
    2541     93947158 :           && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
    2542     93752111 :           && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    2543              :     return false;
    2544              : 
    2545              :   /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
    2546    395918160 :   if (var1_p && ind2_p)
    2547    302062604 :     return indirect_ref_may_alias_decl_p (ref2ref, base2,
    2548              :                                           offset2, max_size2, ref2->size,
    2549              :                                           ao_ref_alias_set (ref2),
    2550              :                                           ao_ref_base_alias_set (ref2),
    2551              :                                           ref1ref, base1,
    2552              :                                           offset1, max_size1, ref1->size,
    2553              :                                           ao_ref_alias_set (ref1),
    2554              :                                           ao_ref_base_alias_set (ref1),
    2555    302062604 :                                           tbaa_p);
    2556     93855556 :   else if (ind1_p && ind2_p)
    2557     93855556 :     return indirect_refs_may_alias_p (ref1ref, base1,
    2558              :                                       offset1, max_size1, ref1->size,
    2559              :                                       ao_ref_alias_set (ref1),
    2560              :                                       ao_ref_base_alias_set (ref1),
    2561              :                                       ref2ref, base2,
    2562              :                                       offset2, max_size2, ref2->size,
    2563              :                                       ao_ref_alias_set (ref2),
    2564              :                                       ao_ref_base_alias_set (ref2),
    2565     93855556 :                                       tbaa_p);
    2566              : 
    2567            0 :   gcc_unreachable ();
    2568              : }
    2569              : 
    2570              : /* Return true, if the two memory references REF1 and REF2 may alias
    2571              :    and update statistics.  */
    2572              : 
    2573              : bool
    2574   1975178314 : refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
    2575              : {
    2576   1975178314 :   bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
    2577   1975178314 :   if (res)
    2578    135177758 :     ++alias_stats.refs_may_alias_p_may_alias;
    2579              :   else
    2580   1840000556 :     ++alias_stats.refs_may_alias_p_no_alias;
    2581   1975178314 :   return res;
    2582              : }
    2583              : 
    2584              : static bool
    2585     61182611 : refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
    2586              : {
    2587     61182611 :   ao_ref r1;
    2588     61182611 :   ao_ref_init (&r1, ref1);
    2589     61182611 :   return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
    2590              : }
    2591              : 
    2592              : bool
    2593      1052224 : refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
    2594              : {
    2595      1052224 :   ao_ref r1, r2;
    2596      1052224 :   ao_ref_init (&r1, ref1);
    2597      1052224 :   ao_ref_init (&r2, ref2);
    2598      1052224 :   return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
    2599              : }
    2600              : 
    2601              : /* Returns true if there is a anti-dependence for the STORE that
    2602              :    executes after the LOAD.  */
    2603              : 
    2604              : bool
    2605       481126 : refs_anti_dependent_p (tree load, tree store)
    2606              : {
    2607       481126 :   ao_ref r1, r2;
    2608       481126 :   ao_ref_init (&r1, load);
    2609       481126 :   ao_ref_init (&r2, store);
    2610       481126 :   return refs_may_alias_p_1 (&r1, &r2, false);
    2611              : }
    2612              : 
    2613              : /* Returns true if there is a output dependence for the stores
    2614              :    STORE1 and STORE2.  */
    2615              : 
    2616              : bool
    2617      2886818 : refs_output_dependent_p (tree store1, tree store2)
    2618              : {
    2619      2886818 :   ao_ref r1, r2;
    2620      2886818 :   ao_ref_init (&r1, store1);
    2621      2886818 :   ao_ref_init (&r2, store2);
    2622      2886818 :   return refs_may_alias_p_1 (&r1, &r2, false);
    2623              : }
    2624              : 
    2625              : /* Returns true if and only if REF may alias any access stored in TT.
    2626              :    IF TBAA_P is true, use TBAA oracle.  */
    2627              : 
    2628              : static bool
    2629     44691790 : modref_may_conflict (const gcall *stmt,
    2630              :                      modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
    2631              : {
    2632     44691790 :   alias_set_type base_set, ref_set;
    2633     44691790 :   bool global_memory_ok = false;
    2634              : 
    2635     44691790 :   if (tt->every_base)
    2636              :     return true;
    2637              : 
    2638      8407315 :   if (!dbg_cnt (ipa_mod_ref))
    2639              :     return true;
    2640              : 
    2641      8407315 :   base_set = ao_ref_base_alias_set (ref);
    2642              : 
    2643      8407315 :   ref_set = ao_ref_alias_set (ref);
    2644              : 
    2645      8407315 :   int num_tests = 0, max_tests = param_modref_max_tests;
    2646     34175174 :   for (auto base_node : tt->bases)
    2647              :     {
    2648     13004881 :       if (tbaa_p && flag_strict_aliasing)
    2649              :         {
    2650      9922260 :           if (num_tests >= max_tests)
    2651              :             return true;
    2652      9922260 :           alias_stats.modref_tests++;
    2653      9922260 :           if (!alias_sets_conflict_p (base_set, base_node->base))
    2654      3409560 :             continue;
    2655      6512700 :           num_tests++;
    2656              :         }
    2657              : 
    2658      9595321 :       if (base_node->every_ref)
    2659              :         return true;
    2660              : 
    2661     36225729 :       for (auto ref_node : base_node->refs)
    2662              :         {
    2663              :           /* Do not repeat same test as before.  */
    2664     11091252 :           if ((ref_set != base_set || base_node->base != ref_node->ref)
    2665      6563324 :               && tbaa_p && flag_strict_aliasing)
    2666              :             {
    2667      4170935 :               if (num_tests >= max_tests)
    2668              :                 return true;
    2669      4139663 :               alias_stats.modref_tests++;
    2670      4139663 :               if (!alias_sets_conflict_p (ref_set, ref_node->ref))
    2671      1094956 :                 continue;
    2672      3044707 :               num_tests++;
    2673              :             }
    2674              : 
    2675      9965024 :           if (ref_node->every_access)
    2676              :             return true;
    2677              : 
    2678              :           /* TBAA checks did not disambiguate, try individual accesses.  */
    2679     31447726 :           for (auto access_node : ref_node->accesses)
    2680              :             {
    2681      9000653 :               if (num_tests >= max_tests)
    2682      1686427 :                 return true;
    2683              : 
    2684      9000653 :               if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
    2685              :                 {
    2686      1760135 :                   if (global_memory_ok)
    2687      1086827 :                     continue;
    2688      1760135 :                   if (ref_may_alias_global_p (ref, true))
    2689              :                     return true;
    2690      1062506 :                   global_memory_ok = true;
    2691      1062506 :                   num_tests++;
    2692      1062506 :                   continue;
    2693              :                 }
    2694              : 
    2695      7240518 :               tree arg = access_node.get_call_arg (stmt);
    2696      7240518 :               if (!arg)
    2697              :                 return true;
    2698              : 
    2699      7239439 :               alias_stats.modref_baseptr_tests++;
    2700              : 
    2701      7239439 :               if (integer_zerop (arg) && flag_delete_null_pointer_checks)
    2702        24321 :                 continue;
    2703              : 
    2704              :               /* PTA oracle will be unhapy of arg is not an pointer.  */
    2705      7215118 :               if (!POINTER_TYPE_P (TREE_TYPE (arg)))
    2706              :                 return true;
    2707              : 
    2708              :               /* If we don't have base pointer, give up.  */
    2709      7215118 :               if (!ref->ref && !ref->base)
    2710            0 :                 continue;
    2711              : 
    2712      7215118 :               ao_ref ref2;
    2713      7215118 :               if (access_node.get_ao_ref (stmt, &ref2))
    2714              :                 {
    2715      5517297 :                   ref2.ref_alias_set = ref_node->ref;
    2716      5517297 :                   ref2.base_alias_set = base_node->base;
    2717      5517297 :                   if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
    2718              :                     return true;
    2719              :                 }
    2720      1697821 :               else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
    2721              :                 return true;
    2722              : 
    2723      6227399 :               num_tests++;
    2724              :             }
    2725              :         }
    2726              :     }
    2727              :   return false;
    2728              : }
    2729              : 
    2730              : /* Check if REF conflicts with call using "fn spec" attribute.
    2731              :    If CLOBBER is true we are checking for writes, otherwise check loads.
    2732              : 
    2733              :    Return 0 if there are no conflicts (except for possible function call
    2734              :    argument reads), 1 if there are conflicts and -1 if we can not decide by
    2735              :    fn spec.  */
    2736              : 
    2737              : static int
    2738    152471562 : check_fnspec (gcall *call, ao_ref *ref, bool clobber)
    2739              : {
    2740    152471562 :   attr_fnspec fnspec = gimple_call_fnspec (call);
    2741    152471562 :   if (fnspec.known_p ())
    2742              :     {
    2743     77338311 :       if (clobber
    2744     77338311 :           ? !fnspec.global_memory_written_p ()
    2745      9058118 :           : !fnspec.global_memory_read_p ())
    2746              :         {
    2747     93959423 :           for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
    2748     91432021 :             if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
    2749     64784910 :                 && (!fnspec.arg_specified_p (i)
    2750     38080983 :                     || (clobber ? fnspec.arg_maybe_written_p (i)
    2751      3943653 :                         : fnspec.arg_maybe_read_p (i))))
    2752              :               {
    2753     23519027 :                 ao_ref dref;
    2754     23519027 :                 tree size = NULL_TREE;
    2755     23519027 :                 unsigned int size_arg;
    2756              : 
    2757     23519027 :                 if (!fnspec.arg_specified_p (i))
    2758              :                   ;
    2759     23515696 :                 else if (fnspec.arg_max_access_size_given_by_arg_p
    2760     23515696 :                            (i, &size_arg))
    2761     15854882 :                   size = gimple_call_arg (call, size_arg);
    2762      7660814 :                 else if (fnspec.arg_access_size_given_by_type_p (i))
    2763              :                   {
    2764        25636 :                     tree callee = gimple_call_fndecl (call);
    2765        25636 :                     tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
    2766              : 
    2767        52582 :                     for (unsigned int p = 0; p < i; p++)
    2768        26946 :                       t = TREE_CHAIN (t);
    2769        25636 :                     size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
    2770              :                   }
    2771     15880518 :                 poly_int64 size_hwi;
    2772     15880518 :                 if (size
    2773     15880518 :                     && poly_int_tree_p (size, &size_hwi)
    2774     29237709 :                     && coeffs_in_range_p (size_hwi, 0,
    2775              :                                           HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    2776              :                   {
    2777     13355655 :                     size_hwi = size_hwi * BITS_PER_UNIT;
    2778     13355655 :                     ao_ref_init_from_ptr_and_range (&dref,
    2779              :                                                     gimple_call_arg (call, i),
    2780              :                                                     true, 0, -1, size_hwi);
    2781              :                   }
    2782              :                 else
    2783     10163372 :                   ao_ref_init_from_ptr_and_range (&dref,
    2784              :                                                   gimple_call_arg (call, i),
    2785              :                                                   false, 0, -1, -1);
    2786     23519027 :                 if (refs_may_alias_p_1 (&dref, ref, false))
    2787      4738494 :                   return 1;
    2788              :               }
    2789     29227998 :           if (clobber
    2790     25370407 :               && fnspec.errno_maybe_written_p ()
    2791     36804838 :               && targetm.ref_may_alias_errno (ref))
    2792              :             return 1;
    2793     29202431 :           return 0;
    2794              :         }
    2795              :     }
    2796              : 
    2797              :  /* FIXME: we should handle barriers more consistently, but for now leave the
    2798              :     check here.  */
    2799    118505070 :   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    2800      6540626 :     switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
    2801              :       {
    2802              :       /* __sync_* builtins and some OpenMP builtins act as threading
    2803              :          barriers.  */
    2804              : #undef DEF_SYNC_BUILTIN
    2805              : #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
    2806              : #include "sync-builtins.def"
    2807              : #undef DEF_SYNC_BUILTIN
    2808              :       case BUILT_IN_GOMP_ATOMIC_START:
    2809              :       case BUILT_IN_GOMP_ATOMIC_END:
    2810              :       case BUILT_IN_GOMP_BARRIER:
    2811              :       case BUILT_IN_GOMP_BARRIER_CANCEL:
    2812              :       case BUILT_IN_GOMP_TASKWAIT:
    2813              :       case BUILT_IN_GOMP_TASKGROUP_END:
    2814              :       case BUILT_IN_GOMP_CRITICAL_START:
    2815              :       case BUILT_IN_GOMP_CRITICAL_END:
    2816              :       case BUILT_IN_GOMP_CRITICAL_NAME_START:
    2817              :       case BUILT_IN_GOMP_CRITICAL_NAME_END:
    2818              :       case BUILT_IN_GOMP_LOOP_END:
    2819              :       case BUILT_IN_GOMP_LOOP_END_CANCEL:
    2820              :       case BUILT_IN_GOMP_ORDERED_START:
    2821              :       case BUILT_IN_GOMP_ORDERED_END:
    2822              :       case BUILT_IN_GOMP_SECTIONS_END:
    2823              :       case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
    2824              :       case BUILT_IN_GOMP_SINGLE_COPY_START:
    2825              :       case BUILT_IN_GOMP_SINGLE_COPY_END:
    2826              :         return 1;
    2827              : 
    2828              :       default:
    2829              :         return -1;
    2830              :       }
    2831              :   return -1;
    2832              : }
    2833              : 
    2834              : /* If the call CALL may use the memory reference REF return true,
    2835              :    otherwise return false.  */
    2836              : 
    2837              : static bool
    2838     44658958 : ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
    2839              : {
    2840     44658958 :   tree base, callee;
    2841     44658958 :   unsigned i;
    2842     44658958 :   int flags = gimple_call_flags (call);
    2843              : 
    2844     44658958 :   if (flags & (ECF_CONST|ECF_NOVOPS))
    2845       261710 :     goto process_args;
    2846              : 
    2847              :   /* A call that is not without side-effects might involve volatile
    2848              :      accesses and thus conflicts with all other volatile accesses.  */
    2849     44397248 :   if (ref->volatile_p)
    2850              :     return true;
    2851              : 
    2852     44397073 :   if (gimple_call_internal_p (call))
    2853        63160 :     switch (gimple_call_internal_fn (call))
    2854              :       {
    2855              :       case IFN_MASK_STORE:
    2856              :       case IFN_SCATTER_STORE:
    2857              :       case IFN_MASK_SCATTER_STORE:
    2858              :       case IFN_LEN_STORE:
    2859              :       case IFN_MASK_LEN_STORE:
    2860              :         return false;
    2861            0 :       case IFN_MASK_STORE_LANES:
    2862            0 :       case IFN_MASK_LEN_STORE_LANES:
    2863            0 :         goto process_args;
    2864          839 :       case IFN_MASK_LOAD:
    2865          839 :       case IFN_LEN_LOAD:
    2866          839 :       case IFN_MASK_LEN_LOAD:
    2867          839 :       case IFN_MASK_LOAD_LANES:
    2868          839 :       case IFN_MASK_LEN_LOAD_LANES:
    2869          839 :         {
    2870          839 :           ao_ref rhs_ref;
    2871          839 :           tree lhs = gimple_call_lhs (call);
    2872          839 :           if (lhs)
    2873              :             {
    2874          839 :               ao_ref_init_from_ptr_and_size (&rhs_ref,
    2875              :                                              gimple_call_arg (call, 0),
    2876          839 :                                              TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
    2877              :               /* We cannot make this a known-size access since otherwise
    2878              :                  we disambiguate against refs to decls that are smaller.  */
    2879          839 :               rhs_ref.size = -1;
    2880         1678 :               rhs_ref.ref_alias_set = rhs_ref.base_alias_set
    2881         1678 :                 = tbaa_p ? get_deref_alias_set (TREE_TYPE
    2882              :                                         (gimple_call_arg (call, 1))) : 0;
    2883          839 :               return refs_may_alias_p_1 (ref, &rhs_ref, tbaa_p);
    2884              :             }
    2885            0 :           break;
    2886              :         }
    2887              :       default:;
    2888              :       }
    2889              : 
    2890     44390641 :   callee = gimple_call_fndecl (call);
    2891     44390641 :   if (callee != NULL_TREE)
    2892              :     {
    2893     42940751 :       struct cgraph_node *node = cgraph_node::get (callee);
    2894              :       /* We can not safely optimize based on summary of calle if it does
    2895              :          not always bind to current def: it is possible that memory load
    2896              :          was optimized out earlier and the interposed variant may not be
    2897              :          optimized this way.  */
    2898     42940751 :       if (node && node->binds_to_current_def_p ())
    2899              :         {
    2900      5456425 :           modref_summary *summary = get_modref_function_summary (node);
    2901      5456425 :           if (summary && !summary->calls_interposable)
    2902              :             {
    2903      3105040 :               if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
    2904              :                 {
    2905       634335 :                   alias_stats.modref_use_no_alias++;
    2906       634335 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    2907              :                     {
    2908           24 :                       fprintf (dump_file,
    2909              :                                "ipa-modref: call stmt ");
    2910           24 :                       print_gimple_stmt (dump_file, call, 0);
    2911           24 :                       fprintf (dump_file,
    2912              :                                "ipa-modref: call to %s does not use ",
    2913              :                                node->dump_name ());
    2914           24 :                       if (!ref->ref && ref->base)
    2915              :                         {
    2916            3 :                           fprintf (dump_file, "base: ");
    2917            3 :                           print_generic_expr (dump_file, ref->base);
    2918              :                         }
    2919           21 :                       else if (ref->ref)
    2920              :                         {
    2921           21 :                           fprintf (dump_file, "ref: ");
    2922           21 :                           print_generic_expr (dump_file, ref->ref);
    2923              :                         }
    2924           24 :                       fprintf (dump_file, " alias sets: %i->%i\n",
    2925              :                                ao_ref_base_alias_set (ref),
    2926              :                                ao_ref_alias_set (ref));
    2927              :                     }
    2928       634335 :                   goto process_args;
    2929              :                 }
    2930      2470705 :               alias_stats.modref_use_may_alias++;
    2931              :             }
    2932              :        }
    2933              :     }
    2934              : 
    2935     43756306 :   base = ao_ref_base (ref);
    2936     43756306 :   if (!base)
    2937              :     return true;
    2938              : 
    2939              :   /* If the reference is based on a decl that is not aliased the call
    2940              :      cannot possibly use it.  */
    2941     43756306 :   if (DECL_P (base)
    2942     38922352 :       && !may_be_aliased (base)
    2943              :       /* But local statics can be used through recursion.  */
    2944     65019032 :       && !is_global_var (base))
    2945     20967787 :     goto process_args;
    2946              : 
    2947     22788519 :   if (int res = check_fnspec (call, ref, false))
    2948              :     {
    2949     18930928 :       if (res == 1)
    2950              :         return true;
    2951              :     }
    2952              :   else
    2953      3857591 :     goto process_args;
    2954              : 
    2955              :   /* Check if base is a global static variable that is not read
    2956              :      by the function.  */
    2957     18406438 :   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    2958              :     {
    2959       977501 :       struct cgraph_node *node = cgraph_node::get (callee);
    2960       977501 :       bitmap read;
    2961       977501 :       int id;
    2962              : 
    2963              :       /* FIXME: Callee can be an OMP builtin that does not have a call graph
    2964              :          node yet.  We should enforce that there are nodes for all decls in the
    2965              :          IL and remove this check instead.  */
    2966       977501 :       if (node
    2967       976859 :           && (id = ipa_reference_var_uid (base)) != -1
    2968       143932 :           && (read = ipa_reference_get_read_global (node))
    2969       994493 :           && !bitmap_bit_p (read, id))
    2970        11731 :         goto process_args;
    2971              :     }
    2972              : 
    2973              :   /* Check if the base variable is call-used.  */
    2974     18394707 :   if (DECL_P (base))
    2975              :     {
    2976     15021091 :       if (pt_solution_includes (gimple_call_use_set (call), base))
    2977              :         return true;
    2978              :     }
    2979      3373616 :   else if ((TREE_CODE (base) == MEM_REF
    2980      3373616 :             || TREE_CODE (base) == TARGET_MEM_REF)
    2981      3373616 :            && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    2982              :     {
    2983      3373384 :       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
    2984      3373384 :       if (!pi)
    2985              :         return true;
    2986              : 
    2987      3364463 :       if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
    2988              :         return true;
    2989              :     }
    2990              :   else
    2991              :     return true;
    2992              : 
    2993              :   /* Inspect call arguments for passed-by-value aliases.  */
    2994              : process_args:
    2995     84098579 :   for (i = 0; i < gimple_call_num_args (call); ++i)
    2996              :     {
    2997     59142550 :       tree op = gimple_call_arg (call, i);
    2998     59142550 :       int flags = gimple_call_arg_flags (call, i);
    2999              : 
    3000     59142550 :       if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
    3001       713856 :         continue;
    3002              : 
    3003     58428694 :       if (TREE_CODE (op) == WITH_SIZE_EXPR)
    3004          504 :         op = TREE_OPERAND (op, 0);
    3005              : 
    3006     58428694 :       if (TREE_CODE (op) != SSA_NAME
    3007     58428694 :           && !is_gimple_min_invariant (op))
    3008              :         {
    3009     10272317 :           ao_ref r;
    3010     10272317 :           ao_ref_init (&r, op);
    3011     10272317 :           if (refs_may_alias_p_1 (&r, ref, tbaa_p))
    3012      4138982 :             return true;
    3013              :         }
    3014              :     }
    3015              : 
    3016              :   return false;
    3017              : }
    3018              : 
    3019              : static bool
    3020     44649959 : ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
    3021              : {
    3022     44649959 :   bool res;
    3023     44649959 :   res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
    3024     44649959 :   if (res)
    3025     19691202 :     ++alias_stats.ref_maybe_used_by_call_p_may_alias;
    3026              :   else
    3027     24958757 :     ++alias_stats.ref_maybe_used_by_call_p_no_alias;
    3028     44649959 :   return res;
    3029              : }
    3030              : 
    3031              : 
    3032              : /* If the statement STMT may use the memory reference REF return
    3033              :    true, otherwise return false.  */
    3034              : 
    3035              : bool
    3036    356420026 : ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
    3037              : {
    3038    356420026 :   if (is_gimple_assign (stmt))
    3039              :     {
    3040    305751451 :       tree rhs;
    3041              : 
    3042              :       /* All memory assign statements are single.  */
    3043    305751451 :       if (!gimple_assign_single_p (stmt))
    3044              :         return false;
    3045              : 
    3046    305751451 :       rhs = gimple_assign_rhs1 (stmt);
    3047    305751451 :       if (is_gimple_reg (rhs)
    3048    249189380 :           || is_gimple_min_invariant (rhs)
    3049    401186953 :           || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
    3050    246620286 :         return false;
    3051              : 
    3052     59131165 :       return refs_may_alias_p (rhs, ref, tbaa_p);
    3053              :     }
    3054     50668575 :   else if (is_gimple_call (stmt))
    3055     44649959 :     return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
    3056      6018616 :   else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    3057              :     {
    3058      5261303 :       tree retval = gimple_return_retval (return_stmt);
    3059      5261303 :       if (retval
    3060      2812168 :           && TREE_CODE (retval) != SSA_NAME
    3061      2213546 :           && !is_gimple_min_invariant (retval)
    3062      7312749 :           && refs_may_alias_p (retval, ref, tbaa_p))
    3063              :         return true;
    3064              :       /* If ref escapes the function then the return acts as a use.  */
    3065      3723813 :       tree base = ao_ref_base (ref);
    3066      3723813 :       if (!base)
    3067              :         ;
    3068      3723813 :       else if (DECL_P (base))
    3069      1347344 :         return is_global_var (base);
    3070      2376469 :       else if (TREE_CODE (base) == MEM_REF
    3071      2376469 :                || TREE_CODE (base) == TARGET_MEM_REF)
    3072      2376372 :         return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), false);
    3073              :       return false;
    3074              :     }
    3075              : 
    3076              :   return true;
    3077              : }
    3078              : 
    3079              : bool
    3080       739070 : ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
    3081              : {
    3082       739070 :   ao_ref r;
    3083       739070 :   ao_ref_init (&r, ref);
    3084       739070 :   return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
    3085              : }
    3086              : 
    3087              : /* If the call in statement CALL may clobber the memory reference REF
    3088              :    return true, otherwise return false.  */
    3089              : 
    3090              : bool
    3091    331422703 : call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
    3092              : {
    3093    331422703 :   tree base;
    3094    331422703 :   tree callee;
    3095              : 
    3096              :   /* If the call is pure or const it cannot clobber anything.  */
    3097    331422703 :   if (gimple_call_flags (call)
    3098    331422703 :       & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    3099              :     return false;
    3100    326058538 :   if (gimple_call_internal_p (call))
    3101       598926 :     switch (auto fn = gimple_call_internal_fn (call))
    3102              :       {
    3103              :         /* Treat these internal calls like ECF_PURE for aliasing,
    3104              :            they don't write to any memory the program should care about.
    3105              :            They have important other side-effects, and read memory,
    3106              :            so can't be ECF_NOVOPS.  */
    3107              :       case IFN_UBSAN_NULL:
    3108              :       case IFN_UBSAN_BOUNDS:
    3109              :       case IFN_UBSAN_VPTR:
    3110              :       case IFN_UBSAN_OBJECT_SIZE:
    3111              :       case IFN_UBSAN_PTR:
    3112              :       case IFN_ASAN_CHECK:
    3113              :         return false;
    3114         7540 :       case IFN_MASK_STORE:
    3115         7540 :       case IFN_LEN_STORE:
    3116         7540 :       case IFN_MASK_LEN_STORE:
    3117         7540 :       case IFN_MASK_STORE_LANES:
    3118         7540 :       case IFN_MASK_LEN_STORE_LANES:
    3119         7540 :         {
    3120         7540 :           tree rhs = gimple_call_arg (call,
    3121         7540 :                                       internal_fn_stored_value_index (fn));
    3122         7540 :           ao_ref lhs_ref;
    3123         7540 :           ao_ref_init_from_ptr_and_size (&lhs_ref, gimple_call_arg (call, 0),
    3124         7540 :                                          TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
    3125              :           /* We cannot make this a known-size access since otherwise
    3126              :              we disambiguate against refs to decls that are smaller.  */
    3127         7540 :           lhs_ref.size = -1;
    3128        15080 :           lhs_ref.ref_alias_set = lhs_ref.base_alias_set
    3129         7540 :             = tbaa_p ? get_deref_alias_set
    3130         7092 :                                    (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
    3131         7540 :           return refs_may_alias_p_1 (ref, &lhs_ref, tbaa_p);
    3132              :         }
    3133              :       default:
    3134              :         break;
    3135              :       }
    3136              : 
    3137    325789975 :   callee = gimple_call_fndecl (call);
    3138              : 
    3139    325789975 :   if (callee != NULL_TREE && !ref->volatile_p)
    3140              :     {
    3141    305988850 :       struct cgraph_node *node = cgraph_node::get (callee);
    3142    305988850 :       if (node)
    3143              :         {
    3144    305702353 :           modref_summary *summary = get_modref_function_summary (node);
    3145    305702353 :           if (summary)
    3146              :             {
    3147     41586750 :               if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
    3148     41586750 :                   && (!summary->writes_errno
    3149       284959 :                       || !targetm.ref_may_alias_errno (ref)))
    3150              :                 {
    3151      4129316 :                   alias_stats.modref_clobber_no_alias++;
    3152      4129316 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    3153              :                     {
    3154           52 :                       fprintf (dump_file,
    3155              :                                "ipa-modref: call stmt ");
    3156           52 :                       print_gimple_stmt (dump_file, call, 0);
    3157           52 :                       fprintf (dump_file,
    3158              :                                "ipa-modref: call to %s does not clobber ",
    3159              :                                node->dump_name ());
    3160           52 :                       if (!ref->ref && ref->base)
    3161              :                         {
    3162           32 :                           fprintf (dump_file, "base: ");
    3163           32 :                           print_generic_expr (dump_file, ref->base);
    3164              :                         }
    3165           20 :                       else if (ref->ref)
    3166              :                         {
    3167           20 :                           fprintf (dump_file, "ref: ");
    3168           20 :                           print_generic_expr (dump_file, ref->ref);
    3169              :                         }
    3170           52 :                       fprintf (dump_file, " alias sets: %i->%i\n",
    3171              :                                ao_ref_base_alias_set (ref),
    3172              :                                ao_ref_alias_set (ref));
    3173              :                     }
    3174      4129316 :                   return false;
    3175              :                 }
    3176     37457434 :               alias_stats.modref_clobber_may_alias++;
    3177              :           }
    3178              :         }
    3179              :     }
    3180              : 
    3181    321660659 :   base = ao_ref_base (ref);
    3182    321660659 :   if (!base)
    3183              :     return true;
    3184              : 
    3185    321660659 :   if (TREE_CODE (base) == SSA_NAME
    3186    321660064 :       || CONSTANT_CLASS_P (base))
    3187              :     return false;
    3188              : 
    3189              :   /* A call that is not without side-effects might involve volatile
    3190              :      accesses and thus conflicts with all other volatile accesses.  */
    3191    313539045 :   if (ref->volatile_p)
    3192              :     return true;
    3193              : 
    3194              :   /* If the reference is based on a decl that is not aliased the call
    3195              :      cannot possibly clobber it.  */
    3196    312078547 :   if (DECL_P (base)
    3197    283429929 :       && !may_be_aliased (base)
    3198              :       /* But local non-readonly statics can be modified through recursion
    3199              :          or the call may implement a threading barrier which we must
    3200              :          treat as may-def.  */
    3201    503887649 :       && (TREE_READONLY (base)
    3202    184544210 :           || !is_global_var (base)))
    3203              :     return false;
    3204              : 
    3205              :   /* If the reference is based on a pointer that points to memory
    3206              :      that may not be written to then the call cannot possibly clobber it.  */
    3207    129984126 :   if ((TREE_CODE (base) == MEM_REF
    3208    129984126 :        || TREE_CODE (base) == TARGET_MEM_REF)
    3209     28648618 :       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
    3210    158264021 :       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
    3211              :     return false;
    3212              : 
    3213    129683043 :   if (int res = check_fnspec (call, ref, true))
    3214              :     {
    3215    104338203 :       if (res == 1)
    3216              :         return true;
    3217              :     }
    3218              :   else
    3219              :     return false;
    3220              : 
    3221              :   /* Check if base is a global static variable that is not written
    3222              :      by the function.  */
    3223     98716464 :   if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    3224              :     {
    3225     10525508 :       struct cgraph_node *node = cgraph_node::get (callee);
    3226     10525508 :       bitmap written;
    3227     10525508 :       int id;
    3228              : 
    3229     10525508 :       if (node
    3230     10525007 :           && (id = ipa_reference_var_uid (base)) != -1
    3231      4496782 :           && (written = ipa_reference_get_written_global (node))
    3232     10621258 :           && !bitmap_bit_p (written, id))
    3233              :         return false;
    3234              :     }
    3235              : 
    3236              :   /* Check if the base variable is call-clobbered.  */
    3237     98637559 :   if (DECL_P (base))
    3238     77479369 :     return pt_solution_includes (gimple_call_clobber_set (call), base);
    3239     21158190 :   else if ((TREE_CODE (base) == MEM_REF
    3240     21158190 :             || TREE_CODE (base) == TARGET_MEM_REF)
    3241     21158190 :            && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    3242              :     {
    3243     20855832 :       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
    3244     20855832 :       if (!pi)
    3245              :         return true;
    3246              : 
    3247     20055364 :       return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    3248              :     }
    3249              : 
    3250              :   return true;
    3251              : }
    3252              : 
    3253              : /* If the call in statement CALL may clobber the memory reference REF
    3254              :    return true, otherwise return false.  */
    3255              : 
    3256              : bool
    3257       323158 : call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
    3258              : {
    3259       323158 :   bool res;
    3260       323158 :   ao_ref r;
    3261       323158 :   ao_ref_init (&r, ref);
    3262       323158 :   res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
    3263       323158 :   if (res)
    3264        43603 :     ++alias_stats.call_may_clobber_ref_p_may_alias;
    3265              :   else
    3266       279555 :     ++alias_stats.call_may_clobber_ref_p_no_alias;
    3267       323158 :   return res;
    3268              : }
    3269              : 
    3270              : 
    3271              : /* If the statement STMT may clobber the memory reference REF return true,
    3272              :    otherwise return false.  */
    3273              : 
    3274              : bool
    3275   1951203338 : stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
    3276              : {
    3277   1951203338 :   if (is_gimple_call (stmt))
    3278              :     {
    3279    336717621 :       tree lhs = gimple_call_lhs (stmt);
    3280    336717621 :       if (lhs
    3281    155459700 :           && TREE_CODE (lhs) != SSA_NAME)
    3282              :         {
    3283     59241344 :           ao_ref r;
    3284     59241344 :           ao_ref_init (&r, lhs);
    3285     59241344 :           if (refs_may_alias_p_1 (ref, &r, tbaa_p))
    3286      5714350 :             return true;
    3287              :         }
    3288              : 
    3289    331003271 :       return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
    3290              :     }
    3291   1614485717 :   else if (gimple_assign_single_p (stmt))
    3292              :     {
    3293   1606230186 :       tree lhs = gimple_assign_lhs (stmt);
    3294   1606230186 :       if (TREE_CODE (lhs) != SSA_NAME)
    3295              :         {
    3296   1605361704 :           ao_ref r;
    3297   1605361704 :           ao_ref_init (&r, lhs);
    3298   1605361704 :           return refs_may_alias_p_1 (ref, &r, tbaa_p);
    3299              :         }
    3300              :     }
    3301      8255531 :   else if (gimple_code (stmt) == GIMPLE_ASM)
    3302              :     return true;
    3303              : 
    3304              :   return false;
    3305              : }
    3306              : 
    3307              : bool
    3308      4226607 : stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
    3309              : {
    3310      4226607 :   ao_ref r;
    3311      4226607 :   ao_ref_init (&r, ref);
    3312      4226607 :   return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
    3313              : }
    3314              : 
    3315              : /* Return true if store1 and store2 described by corresponding tuples
    3316              :    <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
    3317              :    address.  */
    3318              : 
    3319              : static bool
    3320    133128804 : same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
    3321              :                          poly_int64 max_size1,
    3322              :                          tree base2, poly_int64 offset2, poly_int64 size2,
    3323              :                          poly_int64 max_size2)
    3324              : {
    3325              :   /* Offsets need to be 0.  */
    3326    133128804 :   if (maybe_ne (offset1, 0)
    3327    133128804 :       || maybe_ne (offset2, 0))
    3328              :     return false;
    3329              : 
    3330     37261419 :   bool base1_obj_p = SSA_VAR_P (base1);
    3331     37261419 :   bool base2_obj_p = SSA_VAR_P (base2);
    3332              : 
    3333              :   /* We need one object.  */
    3334     37261419 :   if (base1_obj_p == base2_obj_p)
    3335              :     return false;
    3336      4564517 :   tree obj = base1_obj_p ? base1 : base2;
    3337              : 
    3338              :   /* And we need one MEM_REF.  */
    3339      4564517 :   bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
    3340      4564517 :   bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
    3341      4564517 :   if (base1_memref_p == base2_memref_p)
    3342              :     return false;
    3343      4469828 :   tree memref = base1_memref_p ? base1 : base2;
    3344              : 
    3345              :   /* Sizes need to be valid.  */
    3346      4469828 :   if (!known_size_p (max_size1)
    3347      4447212 :       || !known_size_p (max_size2)
    3348      4446880 :       || !known_size_p (size1)
    3349      8916708 :       || !known_size_p (size2))
    3350              :     return false;
    3351              : 
    3352              :   /* Max_size needs to match size.  */
    3353      4446880 :   if (maybe_ne (max_size1, size1)
    3354      4446880 :       || maybe_ne (max_size2, size2))
    3355              :     return false;
    3356              : 
    3357              :   /* Sizes need to match.  */
    3358      4401069 :   if (maybe_ne (size1, size2))
    3359              :     return false;
    3360              : 
    3361              : 
    3362              :   /* Check that memref is a store to pointer with singleton points-to info.  */
    3363      1201319 :   if (!integer_zerop (TREE_OPERAND (memref, 1)))
    3364              :     return false;
    3365       908088 :   tree ptr = TREE_OPERAND (memref, 0);
    3366       908088 :   if (TREE_CODE (ptr) != SSA_NAME)
    3367              :     return false;
    3368       907812 :   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
    3369       907812 :   unsigned int pt_uid;
    3370       907812 :   if (pi == NULL
    3371       907812 :       || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    3372       708056 :     return false;
    3373              : 
    3374              :   /* Be conservative with non-call exceptions when the address might
    3375              :      be NULL.  */
    3376       199756 :   if (cfun->can_throw_non_call_exceptions && pi->pt.null)
    3377              :     return false;
    3378              : 
    3379              :   /* Check that ptr points relative to obj.  */
    3380       199750 :   unsigned int obj_uid = DECL_PT_UID (obj);
    3381       199750 :   if (obj_uid != pt_uid)
    3382              :     return false;
    3383              : 
    3384              :   /* Check that the object size is the same as the store size.  That ensures us
    3385              :      that ptr points to the start of obj.  */
    3386        34992 :   return (DECL_SIZE (obj)
    3387        34992 :           && poly_int_tree_p (DECL_SIZE (obj))
    3388       104832 :           && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
    3389              : }
    3390              : 
    3391              : /* Return true if REF is killed by an store described by
    3392              :    BASE, OFFSET, SIZE and MAX_SIZE.  */
    3393              : 
    3394              : static bool
    3395    230066701 : store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
    3396              :                    poly_int64 max_size, ao_ref *ref)
    3397              : {
    3398    230066701 :   poly_int64 ref_offset = ref->offset;
    3399              :   /* We can get MEM[symbol: sZ, index: D.8862_1] here,
    3400              :      so base == ref->base does not always hold.  */
    3401    230066701 :   if (base != ref->base)
    3402              :     {
    3403              :       /* Try using points-to info.  */
    3404    133128804 :       if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
    3405              :                                    ref->offset, ref->size, ref->max_size))
    3406              :         return true;
    3407              : 
    3408              :       /* If both base and ref->base are MEM_REFs, only compare the
    3409              :          first operand, and if the second operand isn't equal constant,
    3410              :          try to add the offsets into offset and ref_offset.  */
    3411     34712989 :       if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
    3412    159083822 :           && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
    3413              :         {
    3414     19365377 :           if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
    3415     19365377 :                                    TREE_OPERAND (ref->base, 1)))
    3416              :             {
    3417      7237382 :               poly_offset_int off1 = mem_ref_offset (base);
    3418      7237382 :               off1 <<= LOG2_BITS_PER_UNIT;
    3419      7237382 :               off1 += offset;
    3420      7237382 :               poly_offset_int off2 = mem_ref_offset (ref->base);
    3421      7237382 :               off2 <<= LOG2_BITS_PER_UNIT;
    3422      7237382 :               off2 += ref_offset;
    3423      7237382 :               if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
    3424            0 :                 size = -1;
    3425              :             }
    3426              :         }
    3427              :       else
    3428    113763283 :         size = -1;
    3429              :     }
    3430              :   /* For a must-alias check we need to be able to constrain
    3431              :      the access properly.  */
    3432    230066557 :   return (known_eq (size, max_size)
    3433    230066557 :           && known_subrange_p (ref_offset, ref->max_size, offset, size));
    3434              : }
    3435              : 
    3436              : /* If STMT kills the memory reference REF return true, otherwise
    3437              :    return false.  */
    3438              : 
    3439              : bool
    3440    278906340 : stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
    3441              : {
    3442    278906340 :   if (!ao_ref_base (ref))
    3443              :     return false;
    3444              : 
    3445    278906340 :   if (gimple_has_lhs (stmt)
    3446    243323773 :       && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
    3447              :       /* The assignment is not necessarily carried out if it can throw
    3448              :          and we can catch it in the current function where we could inspect
    3449              :          the previous value.  Similarly if the function can throw externally
    3450              :          and the ref does not die on the function return.
    3451              :          ???  We only need to care about the RHS throwing.  For aggregate
    3452              :          assignments or similar calls and non-call exceptions the LHS
    3453              :          might throw as well.
    3454              :          ???  We also should care about possible longjmp, but since we
    3455              :          do not understand that longjmp is not using global memory we will
    3456              :          not consider a kill here since the function call will be considered
    3457              :          as possibly using REF.  */
    3458    237527574 :       && !stmt_can_throw_internal (cfun, stmt)
    3459    256407389 :       && (!stmt_can_throw_external (cfun, stmt)
    3460      4177520 :           || !ref_may_alias_global_p (ref, false)))
    3461              :     {
    3462    232926373 :       tree lhs = gimple_get_lhs (stmt);
    3463              :       /* If LHS is literally a base of the access we are done.  */
    3464    232926373 :       if (ref->ref)
    3465              :         {
    3466    231384361 :           tree base = ref->ref;
    3467    231384361 :           tree innermost_dropped_array_ref = NULL_TREE;
    3468    231384361 :           if (handled_component_p (base))
    3469              :             {
    3470    144404283 :               tree saved_lhs0 = NULL_TREE;
    3471    257276655 :               if (handled_component_p (lhs))
    3472              :                 {
    3473    112872372 :                   saved_lhs0 = TREE_OPERAND (lhs, 0);
    3474    112872372 :                   TREE_OPERAND (lhs, 0) = integer_zero_node;
    3475              :                 }
    3476    246829027 :               do
    3477              :                 {
    3478              :                   /* Just compare the outermost handled component, if
    3479              :                      they are equal we have found a possible common
    3480              :                      base.  */
    3481    246829027 :                   tree saved_base0 = TREE_OPERAND (base, 0);
    3482    246829027 :                   TREE_OPERAND (base, 0) = integer_zero_node;
    3483    246829027 :                   bool res = operand_equal_p (lhs, base, 0);
    3484    246829027 :                   TREE_OPERAND (base, 0) = saved_base0;
    3485    246829027 :                   if (res)
    3486              :                     break;
    3487              :                   /* Remember if we drop an array-ref that we need to
    3488              :                      double-check not being at struct end.  */
    3489    232009631 :                   if (TREE_CODE (base) == ARRAY_REF
    3490    232009631 :                       || TREE_CODE (base) == ARRAY_RANGE_REF)
    3491     65528726 :                     innermost_dropped_array_ref = base;
    3492              :                   /* Otherwise drop handled components of the access.  */
    3493    232009631 :                   base = saved_base0;
    3494              :                 }
    3495    361594518 :               while (handled_component_p (base));
    3496    144404283 :               if (saved_lhs0)
    3497    112872372 :                 TREE_OPERAND (lhs, 0) = saved_lhs0;
    3498              :             }
    3499              :           /* Finally check if the lhs has the same address and size as the
    3500              :              base candidate of the access.  Watch out if we have dropped
    3501              :              an array-ref that might have flexible size, this means ref->ref
    3502              :              may be outside of the TYPE_SIZE of its base.  */
    3503    144404283 :           if ((! innermost_dropped_array_ref
    3504     64723353 :                || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
    3505    365330649 :               && (lhs == base
    3506    218804059 :                   || (((TYPE_SIZE (TREE_TYPE (lhs))
    3507    218804059 :                         == TYPE_SIZE (TREE_TYPE (base)))
    3508    147105815 :                        || (TYPE_SIZE (TREE_TYPE (lhs))
    3509    147105752 :                            && TYPE_SIZE (TREE_TYPE (base))
    3510    147105593 :                            && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
    3511    147105593 :                                                TYPE_SIZE (TREE_TYPE (base)),
    3512              :                                                0)))
    3513     71698244 :                       && operand_equal_p (lhs, base,
    3514              :                                           OEP_ADDRESS_OF
    3515              :                                           | OEP_MATCH_SIDE_EFFECTS))))
    3516              :             {
    3517      2803180 :               ++alias_stats.stmt_kills_ref_p_yes;
    3518      8360468 :               return true;
    3519              :             }
    3520              :         }
    3521              : 
    3522              :       /* Now look for non-literal equal bases with the restriction of
    3523              :          handling constant offset and size.  */
    3524              :       /* For a must-alias check we need to be able to constrain
    3525              :          the access properly.  */
    3526    230123193 :       if (!ref->max_size_known_p ())
    3527              :         {
    3528      1289067 :           ++alias_stats.stmt_kills_ref_p_no;
    3529      1289067 :           return false;
    3530              :         }
    3531    228834126 :       poly_int64 size, offset, max_size;
    3532    228834126 :       bool reverse;
    3533    228834126 :       tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
    3534              :                                            &reverse);
    3535    228834126 :       if (store_kills_ref_p (base, offset, size, max_size, ref))
    3536              :         {
    3537      1465041 :           ++alias_stats.stmt_kills_ref_p_yes;
    3538      1465041 :           return true;
    3539              :         }
    3540              :     }
    3541              : 
    3542    273349052 :   if (is_gimple_call (stmt))
    3543              :     {
    3544     22568063 :       tree callee = gimple_call_fndecl (stmt);
    3545     22568063 :       struct cgraph_node *node;
    3546     22568063 :       modref_summary *summary;
    3547              : 
    3548              :       /* Try to disambiguate using modref summary.  Modref records a vector
    3549              :          of stores with known offsets relative to function parameters that must
    3550              :          happen every execution of function.  Find if we have a matching
    3551              :          store and verify that function can not use the value.  */
    3552     22568063 :       if (callee != NULL_TREE
    3553     21538335 :           && (node = cgraph_node::get (callee)) != NULL
    3554     21498943 :           && node->binds_to_current_def_p ()
    3555      2080065 :           && (summary = get_modref_function_summary (node)) != NULL
    3556      1087276 :           && summary->kills.length ()
    3557              :           /* Check that we can not trap while evaulating function
    3558              :              parameters.  This check is overly conservative.  */
    3559     22694301 :           && (!cfun->can_throw_non_call_exceptions
    3560            0 :               || (!stmt_can_throw_internal (cfun, stmt)
    3561            0 :                   && (!stmt_can_throw_external (cfun, stmt)
    3562            0 :                       || !ref_may_alias_global_p (ref, false)))))
    3563              :         {
    3564       522671 :           for (auto kill : summary->kills)
    3565              :             {
    3566       152956 :               ao_ref dref;
    3567              : 
    3568              :               /* We only can do useful compares if we know the access range
    3569              :                  precisely.  */
    3570       152956 :               if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
    3571           24 :                 continue;
    3572       152932 :               if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
    3573              :                                      dref.size, dref.max_size, ref))
    3574              :                 {
    3575              :                   /* For store to be killed it needs to not be used
    3576              :                      earlier.  */
    3577         8999 :                   if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
    3578              :                                                   true)
    3579         8999 :                       || !dbg_cnt (ipa_mod_ref))
    3580              :                     break;
    3581         3482 :                   if (dump_file && (dump_flags & TDF_DETAILS))
    3582              :                     {
    3583            2 :                       fprintf (dump_file,
    3584              :                                "ipa-modref: call stmt ");
    3585            2 :                       print_gimple_stmt (dump_file, stmt, 0);
    3586            2 :                       fprintf (dump_file,
    3587              :                                "ipa-modref: call to %s kills ",
    3588              :                                node->dump_name ());
    3589            2 :                       print_generic_expr (dump_file, ref->base);
    3590            2 :                       fprintf (dump_file, "\n");
    3591              :                     }
    3592         3482 :                     ++alias_stats.modref_kill_yes;
    3593         3482 :                     return true;
    3594              :                 }
    3595              :             }
    3596       122756 :           ++alias_stats.modref_kill_no;
    3597              :         }
    3598     22564581 :       if (callee != NULL_TREE
    3599     22564581 :           && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    3600      4589019 :         switch (DECL_FUNCTION_CODE (callee))
    3601              :           {
    3602       582309 :           case BUILT_IN_FREE:
    3603       582309 :             {
    3604       582309 :               tree ptr = gimple_call_arg (stmt, 0);
    3605       582309 :               tree base = ao_ref_base (ref);
    3606       582309 :               if (base && TREE_CODE (base) == MEM_REF
    3607       687371 :                   && TREE_OPERAND (base, 0) == ptr)
    3608              :                 {
    3609        14894 :                   ++alias_stats.stmt_kills_ref_p_yes;
    3610        14894 :                   return true;
    3611              :                 }
    3612              :               break;
    3613              :             }
    3614              : 
    3615      1458573 :           case BUILT_IN_MEMCPY:
    3616      1458573 :           case BUILT_IN_MEMPCPY:
    3617      1458573 :           case BUILT_IN_MEMMOVE:
    3618      1458573 :           case BUILT_IN_MEMSET:
    3619      1458573 :           case BUILT_IN_MEMCPY_CHK:
    3620      1458573 :           case BUILT_IN_MEMPCPY_CHK:
    3621      1458573 :           case BUILT_IN_MEMMOVE_CHK:
    3622      1458573 :           case BUILT_IN_MEMSET_CHK:
    3623      1458573 :           case BUILT_IN_STRNCPY:
    3624      1458573 :           case BUILT_IN_STPNCPY:
    3625      1458573 :           case BUILT_IN_CALLOC:
    3626      1458573 :             {
    3627              :               /* For a must-alias check we need to be able to constrain
    3628              :                  the access properly.  */
    3629      1458573 :               if (!ref->max_size_known_p ())
    3630              :                 {
    3631        63110 :                   ++alias_stats.stmt_kills_ref_p_no;
    3632       447372 :                   return false;
    3633              :                 }
    3634      1395463 :               tree dest;
    3635      1395463 :               tree len;
    3636              : 
    3637              :               /* In execution order a calloc call will never kill
    3638              :                  anything.  However, DSE will (ab)use this interface
    3639              :                  to ask if a calloc call writes the same memory locations
    3640              :                  as a later assignment, memset, etc.  So handle calloc
    3641              :                  in the expected way.  */
    3642      1395463 :               if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
    3643              :                 {
    3644         1456 :                   tree arg0 = gimple_call_arg (stmt, 0);
    3645         1456 :                   tree arg1 = gimple_call_arg (stmt, 1);
    3646         1456 :                   if (TREE_CODE (arg0) != INTEGER_CST
    3647         1311 :                       || TREE_CODE (arg1) != INTEGER_CST)
    3648              :                     {
    3649          177 :                       ++alias_stats.stmt_kills_ref_p_no;
    3650          177 :                       return false;
    3651              :                     }
    3652              : 
    3653         1279 :                   dest = gimple_call_lhs (stmt);
    3654         1279 :                   if (!dest)
    3655              :                     {
    3656            1 :                       ++alias_stats.stmt_kills_ref_p_no;
    3657            1 :                       return false;
    3658              :                     }
    3659         1278 :                   len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
    3660              :                 }
    3661              :               else
    3662              :                 {
    3663      1394007 :                   dest = gimple_call_arg (stmt, 0);
    3664      1394007 :                   len = gimple_call_arg (stmt, 2);
    3665              :                 }
    3666      1395285 :               if (!poly_int_tree_p (len))
    3667              :                 return false;
    3668      1079643 :               ao_ref dref;
    3669      1079643 :               ao_ref_init_from_ptr_and_size (&dref, dest, len);
    3670      1079643 :               if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
    3671              :                                      dref.size, dref.max_size, ref))
    3672              :                 {
    3673         5332 :                   ++alias_stats.stmt_kills_ref_p_yes;
    3674         5332 :                   return true;
    3675              :                 }
    3676      1074311 :               break;
    3677              :             }
    3678              : 
    3679        12126 :           case BUILT_IN_VA_END:
    3680        12126 :             {
    3681        12126 :               tree ptr = gimple_call_arg (stmt, 0);
    3682        12126 :               if (TREE_CODE (ptr) == ADDR_EXPR)
    3683              :                 {
    3684        12075 :                   tree base = ao_ref_base (ref);
    3685        12075 :                   if (TREE_OPERAND (ptr, 0) == base)
    3686              :                     {
    3687         7304 :                       ++alias_stats.stmt_kills_ref_p_yes;
    3688         7304 :                       return true;
    3689              :                     }
    3690              :                 }
    3691              :               break;
    3692              :             }
    3693              : 
    3694              :           default:;
    3695              :           }
    3696              :     }
    3697    272939110 :   ++alias_stats.stmt_kills_ref_p_no;
    3698    272939110 :   return false;
    3699              : }
    3700              : 
    3701              : bool
    3702            0 : stmt_kills_ref_p (gimple *stmt, tree ref)
    3703              : {
    3704            0 :   ao_ref r;
    3705            0 :   ao_ref_init (&r, ref);
    3706            0 :   return stmt_kills_ref_p (stmt, &r);
    3707              : }
    3708              : 
    3709              : /* Return whether REF can be subject to store data races.  */
    3710              : 
    3711              : bool
    3712        25406 : ref_can_have_store_data_races (tree ref)
    3713              : {
    3714              :   /* With -fallow-store-data-races do not care about them.  */
    3715        25406 :   if (flag_store_data_races)
    3716              :     return false;
    3717              : 
    3718        25301 :   tree base = get_base_address (ref);
    3719        25301 :   if (auto_var_p (base)
    3720        25301 :       && ! may_be_aliased (base))
    3721              :     /* Automatic variables not aliased are not subject to
    3722              :        data races.  */
    3723              :     return false;
    3724              : 
    3725              :   return true;
    3726              : }
    3727              : 
    3728              : 
    3729              : /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
    3730              :    TARGET or a statement clobbering the memory reference REF in which
    3731              :    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
    3732              : 
    3733              : static bool
    3734    127638238 : maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
    3735              :                   ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
    3736              :                   bitmap *visited, bool abort_on_visited,
    3737              :                   void *(*translate)(ao_ref *, tree, void *, translate_flags *),
    3738              :                   bool (*is_backedge)(edge, void *),
    3739              :                   translate_flags disambiguate_only,
    3740              :                   void *data)
    3741              : {
    3742    127638238 :   basic_block bb = gimple_bb (phi);
    3743              : 
    3744    127638238 :   if (!*visited)
    3745              :     {
    3746     23813524 :       *visited = BITMAP_ALLOC (NULL);
    3747     23813524 :       bitmap_tree_view (*visited);
    3748              :     }
    3749              : 
    3750    127638238 :   bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
    3751              : 
    3752              :   /* Walk until we hit the target.  */
    3753    127638238 :   while (vuse != target)
    3754              :     {
    3755    394779037 :       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
    3756              :       /* If we are searching for the target VUSE by walking up to
    3757              :          TARGET_BB dominating the original PHI we are finished once
    3758              :          we reach a default def or a definition in a block dominating
    3759              :          that block.  Update TARGET and return.  */
    3760    394779037 :       if (!target
    3761    394779037 :           && (gimple_nop_p (def_stmt)
    3762     68047670 :               || dominated_by_p (CDI_DOMINATORS,
    3763     68047670 :                                  target_bb, gimple_bb (def_stmt))))
    3764              :         {
    3765     18658320 :           target = vuse;
    3766     18658320 :           return true;
    3767              :         }
    3768              : 
    3769              :       /* Recurse for PHI nodes.  */
    3770    376120717 :       if (gphi *phi = dyn_cast <gphi *> (def_stmt))
    3771              :         {
    3772              :           /* An already visited PHI node ends the walk successfully.  */
    3773     72776679 :           if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (phi))))
    3774     34913933 :             return !abort_on_visited;
    3775     37862746 :           vuse = get_continuation_for_phi (phi, ref, tbaa_p, limit,
    3776              :                                            visited, abort_on_visited,
    3777              :                                            translate, data, is_backedge,
    3778              :                                            disambiguate_only);
    3779     37862746 :           if (!vuse)
    3780              :             return false;
    3781     33575978 :           continue;
    3782              :         }
    3783    303344038 :       else if (gimple_nop_p (def_stmt))
    3784              :         return false;
    3785              :       else
    3786              :         {
    3787              :           /* A clobbering statement or the end of the IL ends it failing.  */
    3788    303344038 :           if ((int)limit <= 0)
    3789              :             return false;
    3790    303310104 :           --limit;
    3791    303310104 :           if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
    3792              :             {
    3793     18585845 :               translate_flags tf = disambiguate_only;
    3794     18585845 :               if (translate
    3795     18585845 :                   && (*translate) (ref, vuse, data, &tf) == NULL)
    3796              :                 ;
    3797              :               else
    3798     15301922 :                 return false;
    3799              :             }
    3800              :         }
    3801              :       /* If we reach a new basic-block see if we already skipped it
    3802              :          in a previous walk that ended successfully.  */
    3803    288008182 :       if (gimple_bb (def_stmt) != bb)
    3804              :         {
    3805    126666166 :           if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
    3806     10069294 :             return !abort_on_visited;
    3807    116596872 :           bb = gimple_bb (def_stmt);
    3808              :         }
    3809    717091992 :       vuse = gimple_vuse (def_stmt);
    3810              :     }
    3811              :   return true;
    3812              : }
    3813              : 
    3814              : 
    3815              : /* Starting from a PHI node for the virtual operand of the memory reference
    3816              :    REF find a continuation virtual operand that allows to continue walking
    3817              :    statements dominating PHI skipping only statements that cannot possibly
    3818              :    clobber REF.  Decrements LIMIT for each alias disambiguation done
    3819              :    and aborts the walk, returning NULL_TREE if it reaches zero.
    3820              :    Returns NULL_TREE if no suitable virtual operand can be found.  */
    3821              : 
    3822              : tree
    3823     99703087 : get_continuation_for_phi (gphi *phi, ao_ref *ref, bool tbaa_p,
    3824              :                           unsigned int &limit, bitmap *visited,
    3825              :                           bool abort_on_visited,
    3826              :                           void *(*translate)(ao_ref *, tree, void *,
    3827              :                                              translate_flags *),
    3828              :                           void *data,
    3829              :                           bool (*is_backedge)(edge, void *),
    3830              :                           translate_flags disambiguate_only)
    3831              : {
    3832     99703087 :   unsigned nargs = gimple_phi_num_args (phi);
    3833              : 
    3834              :   /* Through a single-argument PHI we can simply look through.  */
    3835     99703087 :   if (nargs == 1)
    3836      3345022 :     return PHI_ARG_DEF (phi, 0);
    3837              : 
    3838              :   /* For two or more arguments try to pairwise skip non-aliasing code
    3839              :      until we hit the phi argument definition that dominates the other one.  */
    3840     96358065 :   basic_block phi_bb = gimple_bb (phi);
    3841     96358065 :   tree arg0, arg1;
    3842     96358065 :   unsigned i;
    3843              : 
    3844              :   /* Find a candidate for the virtual operand which definition
    3845              :      dominates those of all others.  */
    3846              :   /* First look if any of the args themselves satisfy this.  */
    3847    177193645 :   for (i = 0; i < nargs; ++i)
    3848              :     {
    3849    154326644 :       arg0 = PHI_ARG_DEF (phi, i);
    3850    154326644 :       if (SSA_NAME_IS_DEFAULT_DEF (arg0))
    3851              :         break;
    3852    151187048 :       basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
    3853    151187048 :       if (def_bb != phi_bb
    3854    151187048 :           && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
    3855              :         break;
    3856     80835580 :       arg0 = NULL_TREE;
    3857              :     }
    3858              :   /* If not, look if we can reach such candidate by walking defs
    3859              :      until we hit the immediate dominator.  maybe_skip_until will
    3860              :      do that for us.  */
    3861     96358065 :   basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
    3862              : 
    3863              :   /* Then check against the (to be) found candidate.  */
    3864    373933720 :   for (i = 0; i < nargs; ++i)
    3865              :     {
    3866    200960615 :       arg1 = PHI_ARG_DEF (phi, i);
    3867    200960615 :       if (arg1 == arg0)
    3868              :         ;
    3869    255276476 :       else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
    3870              :                                    limit, visited,
    3871              :                                    abort_on_visited,
    3872              :                                    translate, is_backedge,
    3873              :                                    /* Do not valueize when walking over
    3874              :                                       backedges.  */
    3875              :                                    (is_backedge
    3876    118832864 :                                     && !is_backedge
    3877    118832864 :                                           (gimple_phi_arg_edge (phi, i), data))
    3878              :                                    ? disambiguate_only : TR_DISAMBIGUATE,
    3879              :                                    data))
    3880              :         return NULL_TREE;
    3881              :     }
    3882              : 
    3883     76615040 :   return arg0;
    3884              : }
    3885              : 
    3886              : /* Based on the memory reference REF and its virtual use VUSE call
    3887              :    WALKER for each virtual use that is equivalent to VUSE, including VUSE
    3888              :    itself.  That is, for each virtual use for which its defining statement
    3889              :    does not clobber REF.
    3890              : 
    3891              :    WALKER is called with REF, the current virtual use and DATA.  If
    3892              :    WALKER returns non-NULL the walk stops and its result is returned.
    3893              :    At the end of a non-successful walk NULL is returned.
    3894              : 
    3895              :    TRANSLATE if non-NULL is called with a pointer to REF, the virtual
    3896              :    use which definition is a statement that may clobber REF and DATA.
    3897              :    If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
    3898              :    If TRANSLATE returns non-NULL the walk stops and its result is returned.
    3899              :    If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
    3900              :    to adjust REF and *DATA to make that valid.
    3901              : 
    3902              :    VALUEIZE if non-NULL is called with the next VUSE that is considered
    3903              :    and return value is substituted for that.  This can be used to
    3904              :    implement optimistic value-numbering for example.  Note that the
    3905              :    VUSE argument is assumed to be valueized already.
    3906              : 
    3907              :    LIMIT specifies the number of alias queries we are allowed to do,
    3908              :    the walk stops when it reaches zero and NULL is returned.  LIMIT
    3909              :    is decremented by the number of alias queries (plus adjustments
    3910              :    done by the callbacks) upon return.
    3911              : 
    3912              :    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
    3913              : 
    3914              : void *
    3915     67426547 : walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
    3916              :                         void *(*walker)(ao_ref *, tree, void *),
    3917              :                         void *(*translate)(ao_ref *, tree, void *,
    3918              :                                            translate_flags *),
    3919              :                         bool (*is_backedge)(edge, void *),
    3920              :                         tree (*valueize)(tree),
    3921              :                         unsigned &limit, void *data)
    3922              : {
    3923     67426547 :   bitmap visited = NULL;
    3924     67426547 :   void *res;
    3925     67426547 :   bool translated = false;
    3926              : 
    3927     67426547 :   timevar_push (TV_ALIAS_STMT_WALK);
    3928              : 
    3929   1090488570 :   do
    3930              :     {
    3931   1090488570 :       gimple *def_stmt;
    3932              : 
    3933              :       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
    3934   1090488570 :       res = (*walker) (ref, vuse, data);
    3935              :       /* Abort walk.  */
    3936   1090488570 :       if (res == (void *)-1)
    3937              :         {
    3938              :           res = NULL;
    3939              :           break;
    3940              :         }
    3941              :       /* Lookup succeeded.  */
    3942   1090488383 :       else if (res != NULL)
    3943              :         break;
    3944              : 
    3945   1082317871 :       if (valueize)
    3946              :         {
    3947   1064013053 :           vuse = valueize (vuse);
    3948   1064013053 :           if (!vuse)
    3949              :             {
    3950              :               res = NULL;
    3951              :               break;
    3952              :             }
    3953              :         }
    3954   1066570697 :       def_stmt = SSA_NAME_DEF_STMT (vuse);
    3955   1066570697 :       if (gimple_nop_p (def_stmt))
    3956              :         break;
    3957   1063953744 :       else if (gphi *phi = dyn_cast <gphi *> (def_stmt))
    3958     58819572 :         vuse = get_continuation_for_phi (phi, ref, tbaa_p, limit,
    3959              :                                          &visited, translated, translate, data,
    3960              :                                          is_backedge);
    3961              :       else
    3962              :         {
    3963   1005134172 :           if ((int)limit <= 0)
    3964              :             {
    3965              :               res = NULL;
    3966              :               break;
    3967              :             }
    3968   1004890521 :           --limit;
    3969   1004890521 :           if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
    3970              :             {
    3971     32007826 :               if (!translate)
    3972              :                 break;
    3973     26809147 :               translate_flags disambiguate_only = TR_TRANSLATE;
    3974     26809147 :               res = (*translate) (ref, vuse, data, &disambiguate_only);
    3975              :               /* Failed lookup and translation.  */
    3976     26809147 :               if (res == (void *)-1)
    3977              :                 {
    3978              :                   res = NULL;
    3979              :                   break;
    3980              :                 }
    3981              :               /* Lookup succeeded.  */
    3982      6178278 :               else if (res != NULL)
    3983              :                 break;
    3984              :               /* Translation succeeded, continue walking.  */
    3985      8085330 :               translated = translated || disambiguate_only == TR_TRANSLATE;
    3986              :             }
    3987    977894570 :           vuse = gimple_vuse (def_stmt);
    3988              :         }
    3989              :     }
    3990   1036714142 :   while (vuse);
    3991              : 
    3992     67426547 :   if (visited)
    3993     21371638 :     BITMAP_FREE (visited);
    3994              : 
    3995     67426547 :   timevar_pop (TV_ALIAS_STMT_WALK);
    3996              : 
    3997     67426547 :   return res;
    3998              : }
    3999              : 
    4000              : 
    4001              : /* Based on the memory reference REF call WALKER for each vdef whose
    4002              :    defining statement may clobber REF, starting with VDEF.  If REF
    4003              :    is NULL_TREE, each defining statement is visited.
    4004              : 
    4005              :    WALKER is called with REF, the current vdef and DATA.  If WALKER
    4006              :    returns true the walk is stopped, otherwise it continues.
    4007              : 
    4008              :    If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
    4009              :    The pointer may be NULL and then we do not track this information.
    4010              : 
    4011              :    At PHI nodes walk_aliased_vdefs forks into one walk for each
    4012              :    PHI argument (but only one walk continues at merge points), the
    4013              :    return value is true if any of the walks was successful.
    4014              : 
    4015              :    The function returns the number of statements walked or -1 if
    4016              :    LIMIT stmts were walked and the walk was aborted at this point.
    4017              :    If LIMIT is zero the walk is not aborted.  */
    4018              : 
    4019              : static int
    4020    290095872 : walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
    4021              :                       bool (*walker)(ao_ref *, tree, void *), void *data,
    4022              :                       bitmap *visited, unsigned int cnt,
    4023              :                       bool *function_entry_reached, unsigned limit)
    4024              : {
    4025    914229581 :   do
    4026              :     {
    4027   1828459162 :       gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
    4028              : 
    4029    914229581 :       if (*visited
    4030    914229581 :           && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
    4031    161933573 :         return cnt;
    4032              : 
    4033    752296008 :       if (gimple_nop_p (def_stmt))
    4034              :         {
    4035     24244443 :           if (function_entry_reached)
    4036      3842248 :             *function_entry_reached = true;
    4037     24244443 :           return cnt;
    4038              :         }
    4039    728051565 :       else if (gimple_code (def_stmt) == GIMPLE_PHI)
    4040              :         {
    4041     88700052 :           unsigned i;
    4042     88700052 :           if (!*visited)
    4043              :             {
    4044      8018441 :               *visited = BITMAP_ALLOC (NULL);
    4045      8018441 :               bitmap_tree_view (*visited);
    4046              :             }
    4047    277421187 :           for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
    4048              :             {
    4049    193782483 :               int res = walk_aliased_vdefs_1 (ref,
    4050              :                                               gimple_phi_arg_def (def_stmt, i),
    4051              :                                               walker, data, visited, cnt,
    4052              :                                               function_entry_reached, limit);
    4053    193782483 :               if (res == -1)
    4054              :                 return -1;
    4055    188721135 :               cnt = res;
    4056              :             }
    4057     83638704 :           return cnt;
    4058              :         }
    4059              : 
    4060              :       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
    4061    639351513 :       cnt++;
    4062    639351513 :       if (cnt == limit)
    4063              :         return -1;
    4064    639204575 :       if ((!ref
    4065    564817789 :            || stmt_may_clobber_ref_p_1 (def_stmt, ref))
    4066    712826427 :           && (*walker) (ref, vdef, data))
    4067     15070866 :         return cnt;
    4068              : 
    4069   1538363290 :       vdef = gimple_vuse (def_stmt);
    4070              :     }
    4071              :   while (1);
    4072              : }
    4073              : 
    4074              : int
    4075     96313389 : walk_aliased_vdefs (ao_ref *ref, tree vdef,
    4076              :                     bool (*walker)(ao_ref *, tree, void *), void *data,
    4077              :                     bitmap *visited,
    4078              :                     bool *function_entry_reached, unsigned int limit)
    4079              : {
    4080     96313389 :   bitmap local_visited = NULL;
    4081     96313389 :   int ret;
    4082              : 
    4083     96313389 :   timevar_push (TV_ALIAS_STMT_WALK);
    4084              : 
    4085     96313389 :   if (function_entry_reached)
    4086      4839474 :     *function_entry_reached = false;
    4087              : 
    4088    166347611 :   ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
    4089              :                               visited ? visited : &local_visited, 0,
    4090              :                               function_entry_reached, limit);
    4091     96313389 :   if (local_visited)
    4092      8018441 :     BITMAP_FREE (local_visited);
    4093              : 
    4094     96313389 :   timevar_pop (TV_ALIAS_STMT_WALK);
    4095              : 
    4096     96313389 :   return ret;
    4097              : }
    4098              : 
    4099              : /* Verify validity of the fnspec string.
    4100              :    See attr-fnspec.h for details.  */
    4101              : 
    4102              : void
    4103    450391218 : attr_fnspec::verify ()
    4104              : {
    4105    450391218 :   bool err = false;
    4106    450391218 :   if (!len)
    4107              :     return;
    4108              : 
    4109              :   /* Check return value specifier.  */
    4110    146270049 :   if (len < return_desc_size)
    4111              :     err = true;
    4112    146270049 :   else if ((len - return_desc_size) % arg_desc_size)
    4113              :     err = true;
    4114    146270049 :   else if ((str[0] < '1' || str[0] > '4')
    4115    146270049 :            && str[0] != '.' && str[0] != 'm')
    4116            0 :     err = true;
    4117              : 
    4118    146270049 :   switch (str[1])
    4119              :     {
    4120              :       case ' ':
    4121              :       case 'p':
    4122              :       case 'P':
    4123              :       case 'c':
    4124              :       case 'C':
    4125              :         break;
    4126              :       default:
    4127              :         err = true;
    4128              :     }
    4129    146270049 :   if (err)
    4130            0 :     internal_error ("invalid fn spec attribute \"%s\"", str);
    4131              : 
    4132              :   /* Now check all parameters.  */
    4133    433907275 :   for (unsigned int i = 0; arg_specified_p (i); i++)
    4134              :     {
    4135    287637226 :       unsigned int idx = arg_idx (i);
    4136    287637226 :       switch (str[idx])
    4137              :         {
    4138    267009708 :           case 'x':
    4139    267009708 :           case 'X':
    4140    267009708 :           case 'r':
    4141    267009708 :           case 'R':
    4142    267009708 :           case 'o':
    4143    267009708 :           case 'O':
    4144    267009708 :           case 'w':
    4145    267009708 :           case 'W':
    4146    267009708 :           case '.':
    4147    267009708 :             if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
    4148    267009708 :                 || str[idx + 1] == 't')
    4149              :               {
    4150     43412277 :                 if (str[idx] != 'r' && str[idx] != 'R'
    4151              :                     && str[idx] != 'w' && str[idx] != 'W'
    4152              :                     && str[idx] != 'o' && str[idx] != 'O')
    4153     43412277 :                   err = true;
    4154     43412277 :                 if (str[idx + 1] != 't'
    4155              :                     /* Size specified is scalar, so it should be described
    4156              :                        by ". " if specified at all.  */
    4157     43412277 :                     && (arg_specified_p (str[idx + 1] - '1')
    4158            0 :                         && str[arg_idx (str[idx + 1] - '1')] != '.'))
    4159              :                   err = true;
    4160              :               }
    4161    223597431 :             else if (str[idx + 1] != ' ')
    4162              :               err = true;
    4163              :             break;
    4164     20627518 :           default:
    4165     20627518 :             if (str[idx] < '1' || str[idx] > '9')
    4166              :               err = true;
    4167              :         }
    4168    287637226 :       if (err)
    4169            0 :         internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
    4170              :     }
    4171              : }
    4172              : 
    4173              : /* Return true if TYPE1 and TYPE2 will always give the same answer
    4174              :    when compared with other types using same_type_for_tbaa.  */
    4175              : 
    4176              : static bool
    4177     21540389 : types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
    4178              :                                       bool lto_streaming_safe)
    4179              : {
    4180              :   /* We use same_type_for_tbaa_p to match types in the access path.
    4181              :      This check is overly conservative.  */
    4182     21540389 :   type1 = TYPE_MAIN_VARIANT (type1);
    4183     21540389 :   type2 = TYPE_MAIN_VARIANT (type2);
    4184              : 
    4185     21540389 :   if (TYPE_STRUCTURAL_EQUALITY_P (type1)
    4186     21540389 :       != TYPE_STRUCTURAL_EQUALITY_P (type2))
    4187              :     return false;
    4188     21261849 :   if (TYPE_STRUCTURAL_EQUALITY_P (type1))
    4189              :     return true;
    4190              : 
    4191     17523107 :   if (lto_streaming_safe)
    4192        86593 :     return type1 == type2;
    4193              :   else
    4194     17436514 :     return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
    4195              : }
    4196              : 
    4197              : /* Return true if TYPE1 and TYPE2 will always give the same answer
    4198              :    when compared with other types using same_type_for_tbaa.  */
    4199              : 
    4200              : bool
    4201     21246526 : types_equal_for_same_type_for_tbaa_p (tree type1, tree type2)
    4202              : {
    4203     21246526 :   return types_equal_for_same_type_for_tbaa_p (type1, type2,
    4204     21246526 :                                                lto_streaming_expected_p ());
    4205              : }
    4206              : 
    4207              : /* Compare REF1 and REF2 and return flags specifying their differences.
    4208              :    If LTO_STREAMING_SAFE is true do not use alias sets and canonical
    4209              :    types that are going to be recomputed.
    4210              :    If TBAA is true also compare TBAA metadata.  */
    4211              : 
    4212              : int
    4213       172599 : ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
    4214              :                              bool lto_streaming_safe,
    4215              :                              bool tbaa)
    4216              : {
    4217       172599 :   if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
    4218              :     return SEMANTICS;
    4219       172587 :   tree base1 = ao_ref_base (ref1);
    4220       172587 :   tree base2 = ao_ref_base (ref2);
    4221              : 
    4222       172587 :   if (!known_eq (ref1->offset, ref2->offset)
    4223       172587 :       || !known_eq (ref1->size, ref2->size)
    4224       345174 :       || !known_eq (ref1->max_size, ref2->max_size))
    4225              :     return SEMANTICS;
    4226              : 
    4227              :   /* For variable accesses we need to compare actual paths
    4228              :      to check that both refs are accessing same address and the access size.  */
    4229       172584 :   if (!known_eq (ref1->size, ref1->max_size))
    4230              :     {
    4231         3440 :       if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
    4232         3440 :                             TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
    4233              :         return SEMANTICS;
    4234         3440 :       tree r1 = ref1->ref;
    4235         3440 :       tree r2 = ref2->ref;
    4236              : 
    4237              :       /* Handle toplevel COMPONENT_REFs of bitfields.
    4238              :          Those are special since they are not allowed in
    4239              :          ADDR_EXPR.  */
    4240         3440 :       if (TREE_CODE (r1) == COMPONENT_REF
    4241         3440 :           && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
    4242              :         {
    4243            0 :           if (TREE_CODE (r2) != COMPONENT_REF
    4244            0 :               || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
    4245              :             return SEMANTICS;
    4246            0 :           tree field1 = TREE_OPERAND (r1, 1);
    4247            0 :           tree field2 = TREE_OPERAND (r2, 1);
    4248            0 :           if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
    4249            0 :                                 DECL_FIELD_OFFSET (field2), 0)
    4250            0 :               || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
    4251            0 :                                    DECL_FIELD_BIT_OFFSET (field2), 0)
    4252            0 :               || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
    4253            0 :               || !types_compatible_p (TREE_TYPE (r1),
    4254            0 :                                       TREE_TYPE (r2)))
    4255            0 :             return SEMANTICS;
    4256            0 :           r1 = TREE_OPERAND (r1, 0);
    4257            0 :           r2 = TREE_OPERAND (r2, 0);
    4258              :         }
    4259         3440 :       else if (TREE_CODE (r2) == COMPONENT_REF
    4260         3440 :                && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
    4261              :         return SEMANTICS;
    4262              : 
    4263              :       /* Similarly for bit field refs.  */
    4264         3440 :       if (TREE_CODE (r1) == BIT_FIELD_REF)
    4265              :         {
    4266            0 :           if (TREE_CODE (r2) != BIT_FIELD_REF
    4267            0 :               || !operand_equal_p (TREE_OPERAND (r1, 1),
    4268            0 :                                    TREE_OPERAND (r2, 1), 0)
    4269            0 :               || !operand_equal_p (TREE_OPERAND (r1, 2),
    4270            0 :                                    TREE_OPERAND (r2, 2), 0)
    4271            0 :               || !types_compatible_p (TREE_TYPE (r1),
    4272            0 :                                       TREE_TYPE (r2)))
    4273            0 :             return SEMANTICS;
    4274            0 :           r1 = TREE_OPERAND (r1, 0);
    4275            0 :           r2 = TREE_OPERAND (r2, 0);
    4276              :         }
    4277         3440 :       else if (TREE_CODE (r2) == BIT_FIELD_REF)
    4278              :         return SEMANTICS;
    4279              : 
    4280              :       /* Now we can compare the address of actual memory access.  */
    4281         3440 :       if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
    4282              :         return SEMANTICS;
    4283              :     }
    4284              :   /* For constant accesses we get more matches by comparing offset only.  */
    4285       169144 :   else if (!operand_equal_p (base1, base2,
    4286              :                              OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
    4287              :     return SEMANTICS;
    4288              : 
    4289              :   /* We can't simply use get_object_alignment_1 on the full
    4290              :      reference as for accesses with variable indexes this reports
    4291              :      too conservative alignment.  */
    4292       172391 :   unsigned int align1, align2;
    4293       172391 :   unsigned HOST_WIDE_INT bitpos1, bitpos2;
    4294       172391 :   bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
    4295       172391 :   bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
    4296              :   /* ??? For MEMREF get_object_alignment_1 determines aligned from
    4297              :      TYPE_ALIGN but still returns false.  This seem to contradict
    4298              :      its description.  So compare even if alignment is unknown.   */
    4299       172391 :   if (known1 != known2
    4300       172391 :       || (bitpos1 != bitpos2 || align1 != align2))
    4301              :     return SEMANTICS;
    4302              : 
    4303              :   /* Now we know that accesses are semantically same.  */
    4304       172176 :   int flags = 0;
    4305              : 
    4306              :   /* ao_ref_base strips inner MEM_REF [&decl], recover from that here.  */
    4307       172176 :   tree rbase1 = ref1->ref;
    4308       172176 :   if (rbase1)
    4309       301256 :     while (handled_component_p (rbase1))
    4310       129080 :       rbase1 = TREE_OPERAND (rbase1, 0);
    4311       172176 :   tree rbase2 = ref2->ref;
    4312       301232 :   while (handled_component_p (rbase2))
    4313       129056 :     rbase2 = TREE_OPERAND (rbase2, 0);
    4314              : 
    4315              :   /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
    4316              :      implement restrict pointers.  MR_DEPENDENCE_CLIQUE 0 means no information.
    4317              :      Otherwise we need to match bases and cliques.  */
    4318       172176 :   if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
    4319       101392 :         && MR_DEPENDENCE_CLIQUE (rbase1))
    4320       143620 :        || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
    4321        72836 :            && MR_DEPENDENCE_CLIQUE (rbase2)))
    4322       200732 :       && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
    4323        28556 :           || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
    4324        28476 :           || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
    4325              :     flags |= DEPENDENCE_CLIQUE;
    4326              : 
    4327       172176 :   if (!tbaa)
    4328              :     return flags;
    4329              : 
    4330              :   /* Alias sets are not stable across LTO sreaming; be conservative here
    4331              :      and compare types the alias sets are ultimately based on.  */
    4332       172147 :   if (lto_streaming_safe)
    4333              :     {
    4334         2853 :       tree t1 = ao_ref_alias_ptr_type (ref1);
    4335         2853 :       tree t2 = ao_ref_alias_ptr_type (ref2);
    4336         2853 :       if (!alias_ptr_types_compatible_p (t1, t2))
    4337           13 :         flags |= REF_ALIAS_SET;
    4338              : 
    4339         2853 :       t1 = ao_ref_base_alias_ptr_type (ref1);
    4340         2853 :       t2 = ao_ref_base_alias_ptr_type (ref2);
    4341         2853 :       if (!alias_ptr_types_compatible_p (t1, t2))
    4342           22 :         flags |= BASE_ALIAS_SET;
    4343              :     }
    4344              :   else
    4345              :     {
    4346       169294 :       if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
    4347            0 :         flags |= REF_ALIAS_SET;
    4348       169294 :       if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
    4349            0 :         flags |= BASE_ALIAS_SET;
    4350              :     }
    4351              : 
    4352              :   /* Access path is used only on non-view-converted references.  */
    4353       172147 :   bool view_converted = view_converted_memref_p (rbase1);
    4354       172147 :   if (view_converted_memref_p (rbase2) != view_converted)
    4355            0 :     return flags | ACCESS_PATH;
    4356       172147 :   else if (view_converted)
    4357              :     return flags;
    4358              : 
    4359              : 
    4360              :   /* Find start of access paths and look for trailing arrays.  */
    4361       167095 :   tree c1 = ref1->ref, c2 = ref2->ref;
    4362       167095 :   tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
    4363       167095 :   int nskipped1 = 0, nskipped2 = 0;
    4364       167095 :   int i = 0;
    4365              : 
    4366       296061 :   for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
    4367              :     {
    4368       128966 :       if (component_ref_to_zero_sized_trailing_array_p (p1))
    4369          133 :         end_struct_ref1 = p1;
    4370       128966 :       if (ends_tbaa_access_path_p (p1))
    4371         6133 :         c1 = p1, nskipped1 = i;
    4372       128966 :       i++;
    4373              :     }
    4374       167095 :   i = 0;
    4375       296037 :   for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
    4376              :     {
    4377       128942 :       if (component_ref_to_zero_sized_trailing_array_p (p2))
    4378          140 :         end_struct_ref2 = p2;
    4379       128942 :       if (ends_tbaa_access_path_p (p2))
    4380         6133 :         c2 = p2, nskipped2 = i;
    4381       128942 :       i++;
    4382              :     }
    4383              : 
    4384              :   /* For variable accesses we can not rely on offset match below.
    4385              :      We know that paths are struturally same, so only check that
    4386              :      starts of TBAA paths did not diverge.  */
    4387       167095 :   if (!known_eq (ref1->size, ref1->max_size)
    4388       167095 :       && nskipped1 != nskipped2)
    4389            0 :     return flags | ACCESS_PATH;
    4390              : 
    4391              :   /* Information about trailing refs is used by
    4392              :      aliasing_component_refs_p that is applied only if paths
    4393              :      has handled components..  */
    4394       167095 :   if (!handled_component_p (c1) && !handled_component_p (c2))
    4395              :     ;
    4396        73048 :   else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
    4397           27 :     return flags | ACCESS_PATH;
    4398       167068 :   if (end_struct_ref1
    4399       167191 :       && same_type_for_tbaa (TREE_TYPE (end_struct_ref1),
    4400          123 :                              TREE_TYPE (end_struct_ref2)) != 1)
    4401            3 :     return flags | ACCESS_PATH;
    4402              : 
    4403              :   /* Now compare all handled components of the access path.
    4404              :      We have three oracles that cares about access paths:
    4405              :        - aliasing_component_refs_p
    4406              :        - nonoverlapping_refs_since_match_p
    4407              :        - nonoverlapping_component_refs_p
    4408              :      We need to match things these oracles compare.
    4409              : 
    4410              :      It is only necessary to check types for compatibility
    4411              :      and offsets.  Rest of what oracles compares are actual
    4412              :      addresses.  Those are already known to be same:
    4413              :        - for constant accesses we check offsets
    4414              :        - for variable accesses we already matched
    4415              :          the path lexically with operand_equal_p.  */
    4416       420685 :   while (true)
    4417              :     {
    4418       293875 :       bool comp1 = handled_component_p (c1);
    4419       293875 :       bool comp2 = handled_component_p (c2);
    4420              : 
    4421       293875 :       if (comp1 != comp2)
    4422           12 :         return flags | ACCESS_PATH;
    4423       293863 :       if (!comp1)
    4424              :         break;
    4425              : 
    4426       126919 :       if (TREE_CODE (c1) != TREE_CODE (c2))
    4427            0 :         return flags | ACCESS_PATH;
    4428              : 
    4429              :       /* aliasing_component_refs_p attempts to find type match within
    4430              :          the paths.  For that reason both types needs to be equal
    4431              :          with respect to same_type_for_tbaa_p.  */
    4432       126919 :       if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
    4433       126919 :                                                  TREE_TYPE (c2),
    4434              :                                                  lto_streaming_safe))
    4435          109 :         return flags | ACCESS_PATH;
    4436       253620 :       if (component_ref_to_zero_sized_trailing_array_p (c1)
    4437       126810 :           != component_ref_to_zero_sized_trailing_array_p (c2))
    4438            0 :         return flags | ACCESS_PATH;
    4439              : 
    4440              :       /* aliasing_matching_component_refs_p compares
    4441              :          offsets within the path.  Other properties are ignored.
    4442              :          Do not bother to verify offsets in variable accesses.  Here we
    4443              :          already compared them by operand_equal_p so they are
    4444              :          structurally same.  */
    4445       126810 :       if (!known_eq (ref1->size, ref1->max_size))
    4446              :         {
    4447         4198 :           poly_int64 offadj1, sztmc1, msztmc1;
    4448         4198 :           bool reverse1;
    4449         4198 :           get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
    4450         4198 :           poly_int64 offadj2, sztmc2, msztmc2;
    4451         4198 :           bool reverse2;
    4452         4198 :           get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
    4453         4198 :           if (!known_eq (offadj1, offadj2))
    4454            0 :             return flags | ACCESS_PATH;
    4455              :         }
    4456       126810 :       c1 = TREE_OPERAND (c1, 0);
    4457       126810 :       c2 = TREE_OPERAND (c2, 0);
    4458       126810 :     }
    4459              :   /* Finally test the access type.  */
    4460       166944 :   if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
    4461       166944 :                                              TREE_TYPE (c2),
    4462              :                                              lto_streaming_safe))
    4463         1564 :     return flags | ACCESS_PATH;
    4464              :   return flags;
    4465              : }
    4466              : 
    4467              : /* Hash REF to HSTATE.  If LTO_STREAMING_SAFE do not use alias sets
    4468              :    and canonical types.  */
    4469              : void
    4470      7944570 : ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
    4471              :                          inchash::hash &hstate)
    4472              : {
    4473      7944570 :   tree base = ao_ref_base (ref);
    4474      7944570 :   tree tbase = base;
    4475              : 
    4476      7944570 :   if (!known_eq (ref->size, ref->max_size))
    4477              :     {
    4478       417287 :       tree r = ref->ref;
    4479       417287 :       if (TREE_CODE (r) == COMPONENT_REF
    4480       417287 :           && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
    4481              :         {
    4482         1713 :           tree field = TREE_OPERAND (r, 1);
    4483         1713 :           hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
    4484         1713 :           hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
    4485         1713 :           hash_operand (DECL_SIZE (field), hstate, 0);
    4486         1713 :           r = TREE_OPERAND (r, 0);
    4487              :         }
    4488       417287 :       if (TREE_CODE (r) == BIT_FIELD_REF)
    4489              :         {
    4490         1704 :           hash_operand (TREE_OPERAND (r, 1), hstate, 0);
    4491         1704 :           hash_operand (TREE_OPERAND (r, 2), hstate, 0);
    4492         1704 :           r = TREE_OPERAND (r, 0);
    4493              :         }
    4494       417287 :       hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
    4495       417287 :       hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
    4496              :     }
    4497              :   else
    4498              :     {
    4499      7527283 :       hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
    4500      7527283 :       hstate.add_poly_int (ref->offset);
    4501      7527283 :       hstate.add_poly_int (ref->size);
    4502      7527283 :       hstate.add_poly_int (ref->max_size);
    4503              :     }
    4504      7944570 :   if (!lto_streaming_safe && tbaa)
    4505              :     {
    4506      7624807 :       hstate.add_int (ao_ref_alias_set (ref));
    4507      7624807 :       hstate.add_int (ao_ref_base_alias_set (ref));
    4508              :     }
    4509      7944570 : }
        

Generated by: LCOV version 2.4-beta

LCOV profile is generated on x86_64 machine using following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto,rust,m2 --enable-host-shared. GCC test suite is run with the built compiler.