Branch data Line data Source code
1 : : /* Alias analysis for trees.
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : : Contributed by Diego Novillo <dnovillo@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "target.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "gimple.h"
29 : : #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 : : #include "ssa.h"
31 : : #include "cgraph.h"
32 : : #include "tree-pretty-print.h"
33 : : #include "alias.h"
34 : : #include "fold-const.h"
35 : : #include "langhooks.h"
36 : : #include "dumpfile.h"
37 : : #include "tree-eh.h"
38 : : #include "tree-dfa.h"
39 : : #include "ipa-reference.h"
40 : : #include "varasm.h"
41 : : #include "ipa-modref-tree.h"
42 : : #include "ipa-modref.h"
43 : : #include "attr-fnspec.h"
44 : : #include "errors.h"
45 : : #include "dbgcnt.h"
46 : : #include "gimple-pretty-print.h"
47 : : #include "print-tree.h"
48 : : #include "tree-ssa-alias-compare.h"
49 : : #include "builtins.h"
50 : : #include "internal-fn.h"
51 : : #include "ipa-utils.h"
52 : :
53 : : /* Broad overview of how alias analysis on gimple works:
54 : :
55 : : Statements clobbering or using memory are linked through the
56 : : virtual operand factored use-def chain. The virtual operand
57 : : is unique per function, its symbol is accessible via gimple_vop (cfun).
58 : : Virtual operands are used for efficiently walking memory statements
59 : : in the gimple IL and are useful for things like value-numbering as
60 : : a generation count for memory references.
61 : :
62 : : SSA_NAME pointers may have associated points-to information
63 : : accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
64 : : points-to information is (re-)computed by the TODO_rebuild_alias
65 : : pass manager todo. Points-to information is also used for more
66 : : precise tracking of call-clobbered and call-used variables and
67 : : related disambiguations.
68 : :
69 : : This file contains functions for disambiguating memory references,
70 : : the so called alias-oracle and tools for walking of the gimple IL.
71 : :
72 : : The main alias-oracle entry-points are
73 : :
74 : : bool stmt_may_clobber_ref_p (gimple *, tree)
75 : :
76 : : This function queries if a statement may invalidate (parts of)
77 : : the memory designated by the reference tree argument.
78 : :
79 : : bool ref_maybe_used_by_stmt_p (gimple *, tree)
80 : :
81 : : This function queries if a statement may need (parts of) the
82 : : memory designated by the reference tree argument.
83 : :
84 : : There are variants of these functions that only handle the call
85 : : part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
86 : : Note that these do not disambiguate against a possible call lhs.
87 : :
88 : : bool refs_may_alias_p (tree, tree)
89 : :
90 : : This function tries to disambiguate two reference trees.
91 : :
92 : : bool ptr_deref_may_alias_global_p (tree, bool)
93 : :
94 : : This function queries if dereferencing a pointer variable may
95 : : alias global memory. If bool argument is true, global memory
96 : : is considered to also include function local memory that escaped.
97 : :
98 : : More low-level disambiguators are available and documented in
99 : : this file. Low-level disambiguators dealing with points-to
100 : : information are in tree-ssa-structalias.cc. */
101 : :
102 : : static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
103 : : static bool nonoverlapping_component_refs_p (const_tree, const_tree);
104 : :
105 : : /* Query statistics for the different low-level disambiguators.
106 : : A high-level query may trigger multiple of them. */
107 : :
108 : : static struct {
109 : : unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
110 : : unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
111 : : unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
112 : : unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
113 : : unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
114 : : unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
115 : : unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
116 : : unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
117 : : unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
118 : : unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
119 : : unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
120 : : unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
121 : : unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
122 : : unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
123 : : unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
124 : : unsigned HOST_WIDE_INT modref_use_may_alias;
125 : : unsigned HOST_WIDE_INT modref_use_no_alias;
126 : : unsigned HOST_WIDE_INT modref_clobber_may_alias;
127 : : unsigned HOST_WIDE_INT modref_clobber_no_alias;
128 : : unsigned HOST_WIDE_INT modref_kill_no;
129 : : unsigned HOST_WIDE_INT modref_kill_yes;
130 : : unsigned HOST_WIDE_INT modref_tests;
131 : : unsigned HOST_WIDE_INT modref_baseptr_tests;
132 : : } alias_stats;
133 : :
134 : : void
135 : 0 : dump_alias_stats (FILE *s)
136 : : {
137 : 0 : fprintf (s, "\nAlias oracle query stats:\n");
138 : 0 : fprintf (s, " refs_may_alias_p: "
139 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
140 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
141 : : alias_stats.refs_may_alias_p_no_alias,
142 : 0 : alias_stats.refs_may_alias_p_no_alias
143 : 0 : + alias_stats.refs_may_alias_p_may_alias);
144 : 0 : fprintf (s, " ref_maybe_used_by_call_p: "
145 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
146 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
147 : : alias_stats.ref_maybe_used_by_call_p_no_alias,
148 : 0 : alias_stats.refs_may_alias_p_no_alias
149 : 0 : + alias_stats.ref_maybe_used_by_call_p_may_alias);
150 : 0 : fprintf (s, " call_may_clobber_ref_p: "
151 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
152 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
153 : : alias_stats.call_may_clobber_ref_p_no_alias,
154 : 0 : alias_stats.call_may_clobber_ref_p_no_alias
155 : 0 : + alias_stats.call_may_clobber_ref_p_may_alias);
156 : 0 : fprintf (s, " stmt_kills_ref_p: "
157 : : HOST_WIDE_INT_PRINT_DEC" kills, "
158 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
159 : : alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
160 : 0 : alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
161 : 0 : + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
162 : 0 : fprintf (s, " nonoverlapping_component_refs_p: "
163 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
164 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
165 : : alias_stats.nonoverlapping_component_refs_p_no_alias,
166 : 0 : alias_stats.nonoverlapping_component_refs_p_no_alias
167 : 0 : + alias_stats.nonoverlapping_component_refs_p_may_alias);
168 : 0 : fprintf (s, " nonoverlapping_refs_since_match_p: "
169 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
170 : : HOST_WIDE_INT_PRINT_DEC" must overlaps, "
171 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
172 : : alias_stats.nonoverlapping_refs_since_match_p_no_alias,
173 : : alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
174 : 0 : alias_stats.nonoverlapping_refs_since_match_p_no_alias
175 : 0 : + alias_stats.nonoverlapping_refs_since_match_p_may_alias
176 : 0 : + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
177 : 0 : fprintf (s, " aliasing_component_refs_p: "
178 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
179 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
180 : : alias_stats.aliasing_component_refs_p_no_alias,
181 : 0 : alias_stats.aliasing_component_refs_p_no_alias
182 : 0 : + alias_stats.aliasing_component_refs_p_may_alias);
183 : 0 : dump_alias_stats_in_alias_c (s);
184 : 0 : fprintf (s, "\nModref stats:\n");
185 : 0 : fprintf (s, " modref kill: "
186 : : HOST_WIDE_INT_PRINT_DEC" kills, "
187 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
188 : : alias_stats.modref_kill_yes,
189 : 0 : alias_stats.modref_kill_yes
190 : 0 : + alias_stats.modref_kill_no);
191 : 0 : fprintf (s, " modref use: "
192 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
193 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
194 : : alias_stats.modref_use_no_alias,
195 : 0 : alias_stats.modref_use_no_alias
196 : 0 : + alias_stats.modref_use_may_alias);
197 : 0 : fprintf (s, " modref clobber: "
198 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
199 : : HOST_WIDE_INT_PRINT_DEC" queries\n"
200 : : " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
201 : : " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
202 : : alias_stats.modref_clobber_no_alias,
203 : : alias_stats.modref_clobber_no_alias
204 : : + alias_stats.modref_clobber_may_alias,
205 : : alias_stats.modref_tests,
206 : 0 : ((double)alias_stats.modref_tests)
207 : : / (alias_stats.modref_clobber_no_alias
208 : : + alias_stats.modref_clobber_may_alias),
209 : : alias_stats.modref_baseptr_tests,
210 : 0 : ((double)alias_stats.modref_baseptr_tests)
211 : 0 : / (alias_stats.modref_clobber_no_alias
212 : 0 : + alias_stats.modref_clobber_may_alias));
213 : 0 : }
214 : :
215 : :
216 : : /* Return true, if dereferencing PTR may alias with a global variable.
217 : : When ESCAPED_LOCAL_P is true escaped local memory is also considered
218 : : global. */
219 : :
220 : : bool
221 : 58104958 : ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
222 : : {
223 : 58104958 : struct ptr_info_def *pi;
224 : :
225 : : /* If we end up with a pointer constant here that may point
226 : : to global memory. */
227 : 58104958 : if (TREE_CODE (ptr) != SSA_NAME)
228 : : return true;
229 : :
230 : 58098890 : pi = SSA_NAME_PTR_INFO (ptr);
231 : :
232 : : /* If we do not have points-to information for this variable,
233 : : we have to punt. */
234 : 58098890 : if (!pi)
235 : : return true;
236 : :
237 : : /* ??? This does not use TBAA to prune globals ptr may not access. */
238 : 46460017 : return pt_solution_includes_global (&pi->pt, escaped_local_p);
239 : : }
240 : :
241 : : /* Return true if dereferencing PTR may alias DECL.
242 : : The caller is responsible for applying TBAA to see if PTR
243 : : may access DECL at all. */
244 : :
245 : : static bool
246 : 215152227 : ptr_deref_may_alias_decl_p (tree ptr, tree decl)
247 : : {
248 : 215152227 : struct ptr_info_def *pi;
249 : :
250 : : /* Conversions are irrelevant for points-to information and
251 : : data-dependence analysis can feed us those. */
252 : 215152227 : STRIP_NOPS (ptr);
253 : :
254 : : /* Anything we do not explicilty handle aliases. */
255 : 215152227 : if ((TREE_CODE (ptr) != SSA_NAME
256 : 2143600 : && TREE_CODE (ptr) != ADDR_EXPR
257 : 969133 : && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
258 : 214183094 : || !POINTER_TYPE_P (TREE_TYPE (ptr))
259 : 429334823 : || (!VAR_P (decl)
260 : : && TREE_CODE (decl) != PARM_DECL
261 : : && TREE_CODE (decl) != RESULT_DECL))
262 : : return true;
263 : :
264 : : /* Disregard pointer offsetting. */
265 : 214181548 : if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
266 : : {
267 : 0 : do
268 : : {
269 : 0 : ptr = TREE_OPERAND (ptr, 0);
270 : : }
271 : 0 : while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
272 : : return ptr_deref_may_alias_decl_p (ptr, decl);
273 : : }
274 : :
275 : : /* ADDR_EXPR pointers either just offset another pointer or directly
276 : : specify the pointed-to set. */
277 : 214181548 : if (TREE_CODE (ptr) == ADDR_EXPR)
278 : : {
279 : 1173448 : tree base = get_base_address (TREE_OPERAND (ptr, 0));
280 : 1173448 : if (base
281 : 1173448 : && (TREE_CODE (base) == MEM_REF
282 : 1173448 : || TREE_CODE (base) == TARGET_MEM_REF))
283 : 3167 : ptr = TREE_OPERAND (base, 0);
284 : 1170281 : else if (base
285 : 1170281 : && DECL_P (base))
286 : 1153570 : return compare_base_decls (base, decl) != 0;
287 : 16711 : else if (base
288 : 16711 : && CONSTANT_CLASS_P (base))
289 : : return false;
290 : : else
291 : : return true;
292 : : }
293 : :
294 : : /* Non-aliased variables cannot be pointed to. */
295 : 213011267 : if (!may_be_aliased (decl))
296 : : return false;
297 : :
298 : : /* From here we require a SSA name pointer. Anything else aliases. */
299 : 76410028 : if (TREE_CODE (ptr) != SSA_NAME
300 : 76410028 : || !POINTER_TYPE_P (TREE_TYPE (ptr)))
301 : : return true;
302 : :
303 : : /* If we do not have useful points-to information for this pointer
304 : : we cannot disambiguate anything else. */
305 : 76410028 : pi = SSA_NAME_PTR_INFO (ptr);
306 : 76410028 : if (!pi)
307 : : return true;
308 : :
309 : 75514506 : return pt_solution_includes (&pi->pt, decl);
310 : : }
311 : :
312 : : /* Return true if dereferenced PTR1 and PTR2 may alias.
313 : : The caller is responsible for applying TBAA to see if accesses
314 : : through PTR1 and PTR2 may conflict at all. */
315 : :
316 : : bool
317 : 63864552 : ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
318 : : {
319 : 64993320 : struct ptr_info_def *pi1, *pi2;
320 : :
321 : : /* Conversions are irrelevant for points-to information and
322 : : data-dependence analysis can feed us those. */
323 : 64993320 : STRIP_NOPS (ptr1);
324 : 64993320 : STRIP_NOPS (ptr2);
325 : :
326 : : /* Disregard pointer offsetting. */
327 : 64993320 : if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
328 : : {
329 : 510138 : do
330 : : {
331 : 510138 : ptr1 = TREE_OPERAND (ptr1, 0);
332 : : }
333 : 510138 : while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
334 : : return ptr_derefs_may_alias_p (ptr1, ptr2);
335 : : }
336 : 64483182 : if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
337 : : {
338 : 523690 : do
339 : : {
340 : 523690 : ptr2 = TREE_OPERAND (ptr2, 0);
341 : : }
342 : 523690 : while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
343 : : return ptr_derefs_may_alias_p (ptr1, ptr2);
344 : : }
345 : :
346 : : /* ADDR_EXPR pointers either just offset another pointer or directly
347 : : specify the pointed-to set. */
348 : 63959492 : if (TREE_CODE (ptr1) == ADDR_EXPR)
349 : : {
350 : 858173 : tree base = get_base_address (TREE_OPERAND (ptr1, 0));
351 : 858173 : if (base
352 : 858173 : && (TREE_CODE (base) == MEM_REF
353 : 858173 : || TREE_CODE (base) == TARGET_MEM_REF))
354 : 45826 : return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
355 : 812347 : else if (base
356 : 812347 : && DECL_P (base))
357 : 809818 : return ptr_deref_may_alias_decl_p (ptr2, base);
358 : : /* Try ptr2 when ptr1 points to a constant. */
359 : : else if (base
360 : 2529 : && !CONSTANT_CLASS_P (base))
361 : : return true;
362 : : }
363 : 63103848 : if (TREE_CODE (ptr2) == ADDR_EXPR)
364 : : {
365 : 314683 : tree base = get_base_address (TREE_OPERAND (ptr2, 0));
366 : 314683 : if (base
367 : 314683 : && (TREE_CODE (base) == MEM_REF
368 : 314683 : || TREE_CODE (base) == TARGET_MEM_REF))
369 : 49114 : return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
370 : 265569 : else if (base
371 : 265569 : && DECL_P (base))
372 : 263520 : return ptr_deref_may_alias_decl_p (ptr1, base);
373 : : else
374 : : return true;
375 : : }
376 : :
377 : : /* From here we require SSA name pointers. Anything else aliases. */
378 : 62789165 : if (TREE_CODE (ptr1) != SSA_NAME
379 : 62656086 : || TREE_CODE (ptr2) != SSA_NAME
380 : 62638196 : || !POINTER_TYPE_P (TREE_TYPE (ptr1))
381 : 125417579 : || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
382 : : return true;
383 : :
384 : : /* We may end up with two empty points-to solutions for two same pointers.
385 : : In this case we still want to say both pointers alias, so shortcut
386 : : that here. */
387 : 62628299 : if (ptr1 == ptr2)
388 : : return true;
389 : :
390 : : /* If we do not have useful points-to information for either pointer
391 : : we cannot disambiguate anything else. */
392 : 58958693 : pi1 = SSA_NAME_PTR_INFO (ptr1);
393 : 58958693 : pi2 = SSA_NAME_PTR_INFO (ptr2);
394 : 58958693 : if (!pi1 || !pi2)
395 : : return true;
396 : :
397 : : /* ??? This does not use TBAA to prune decls from the intersection
398 : : that not both pointers may access. */
399 : 56463045 : return pt_solutions_intersect (&pi1->pt, &pi2->pt);
400 : : }
401 : :
402 : : /* Return true if dereferencing PTR may alias *REF.
403 : : The caller is responsible for applying TBAA to see if PTR
404 : : may access *REF at all. */
405 : :
406 : : static bool
407 : 1771418 : ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
408 : : {
409 : 1771418 : tree base = ao_ref_base (ref);
410 : :
411 : 1771418 : if (TREE_CODE (base) == MEM_REF
412 : 1771418 : || TREE_CODE (base) == TARGET_MEM_REF)
413 : 217947 : return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
414 : 1553471 : else if (DECL_P (base))
415 : 1549831 : return ptr_deref_may_alias_decl_p (ptr, base);
416 : :
417 : : return true;
418 : : }
419 : :
420 : : /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
421 : :
422 : : bool
423 : 54131674 : ptrs_compare_unequal (tree ptr1, tree ptr2)
424 : : {
425 : : /* First resolve the pointers down to a SSA name pointer base or
426 : : a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitely does
427 : : not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
428 : : or STRING_CSTs which needs points-to adjustments to track them
429 : : in the points-to sets. */
430 : 54131674 : tree obj1 = NULL_TREE;
431 : 54131674 : tree obj2 = NULL_TREE;
432 : 54131674 : if (TREE_CODE (ptr1) == ADDR_EXPR)
433 : : {
434 : 51690 : tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
435 : 51690 : if (! tem)
436 : : return false;
437 : 51690 : if (VAR_P (tem)
438 : : || TREE_CODE (tem) == PARM_DECL
439 : : || TREE_CODE (tem) == RESULT_DECL)
440 : : obj1 = tem;
441 : : else if (TREE_CODE (tem) == MEM_REF)
442 : 6680 : ptr1 = TREE_OPERAND (tem, 0);
443 : : }
444 : 54131674 : if (TREE_CODE (ptr2) == ADDR_EXPR)
445 : : {
446 : 8778329 : tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
447 : 8778329 : if (! tem)
448 : : return false;
449 : 8778329 : if (VAR_P (tem)
450 : : || TREE_CODE (tem) == PARM_DECL
451 : : || TREE_CODE (tem) == RESULT_DECL)
452 : : obj2 = tem;
453 : : else if (TREE_CODE (tem) == MEM_REF)
454 : 123955 : ptr2 = TREE_OPERAND (tem, 0);
455 : : }
456 : :
457 : : /* Canonicalize ptr vs. object. */
458 : 54131674 : if (TREE_CODE (ptr1) == SSA_NAME && obj2)
459 : : {
460 : : std::swap (ptr1, ptr2);
461 : : std::swap (obj1, obj2);
462 : : }
463 : :
464 : 54131674 : if (obj1 && obj2)
465 : : /* Other code handles this correctly, no need to duplicate it here. */;
466 : 6164366 : else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
467 : : {
468 : 6153667 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
469 : : /* We may not use restrict to optimize pointer comparisons.
470 : : See PR71062. So we have to assume that restrict-pointed-to
471 : : may be in fact obj1. */
472 : 6153667 : if (!pi
473 : 4984786 : || pi->pt.vars_contains_restrict
474 : 4974853 : || pi->pt.vars_contains_interposable)
475 : : return false;
476 : 4217344 : if (VAR_P (obj1)
477 : 4217344 : && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
478 : : {
479 : 1128943 : varpool_node *node = varpool_node::get (obj1);
480 : : /* If obj1 may bind to NULL give up (see below). */
481 : 1128943 : if (! node
482 : 1128943 : || ! node->nonzero_address ()
483 : 2257886 : || ! decl_binds_to_current_def_p (obj1))
484 : 855987 : return false;
485 : : }
486 : 3361357 : return !pt_solution_includes (&pi->pt, obj1);
487 : : }
488 : 47965749 : else if (TREE_CODE (ptr1) == SSA_NAME)
489 : : {
490 : 42386942 : struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
491 : 42386942 : if (!pi1
492 : 32357161 : || pi1->pt.vars_contains_restrict
493 : 31320448 : || pi1->pt.vars_contains_interposable)
494 : : return false;
495 : 29401504 : if (integer_zerop (ptr2) && !pi1->pt.null)
496 : : return true;
497 : 29380588 : if (TREE_CODE (ptr2) == SSA_NAME)
498 : : {
499 : 10069617 : struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);
500 : 10069617 : if (!pi2
501 : 9428392 : || pi2->pt.vars_contains_restrict
502 : 9423121 : || pi2->pt.vars_contains_interposable)
503 : : return false;
504 : 8043329 : if ((!pi1->pt.null || !pi2->pt.null)
505 : : /* ??? We do not represent FUNCTION_DECL and LABEL_DECL
506 : : in pt.vars but only set pt.vars_contains_nonlocal. This
507 : : makes compares involving those and other nonlocals
508 : : imprecise. */
509 : 3591592 : && (!pi1->pt.vars_contains_nonlocal
510 : 63810 : || !pi2->pt.vars_contains_nonlocal)
511 : 12926857 : && (!pt_solution_includes_const_pool (&pi1->pt)
512 : 3225805 : || !pt_solution_includes_const_pool (&pi2->pt)))
513 : 421400 : return !pt_solutions_intersect (&pi1->pt, &pi2->pt);
514 : : }
515 : : }
516 : :
517 : : return false;
518 : : }
519 : :
520 : : /* Returns whether reference REF to BASE may refer to global memory.
521 : : When ESCAPED_LOCAL_P is true escaped local memory is also considered
522 : : global. */
523 : :
524 : : static bool
525 : 56386495 : ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
526 : : {
527 : 56386495 : if (DECL_P (base))
528 : 44729241 : return (is_global_var (base)
529 : 44729241 : || (escaped_local_p
530 : 1089340 : && pt_solution_includes (&cfun->gimple_df->escaped_return,
531 : : base)));
532 : 11657254 : else if (TREE_CODE (base) == MEM_REF
533 : 11657254 : || TREE_CODE (base) == TARGET_MEM_REF)
534 : 11647549 : return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
535 : 11647549 : escaped_local_p);
536 : : return true;
537 : : }
538 : :
539 : : bool
540 : 12499339 : ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
541 : : {
542 : 12499339 : tree base = ao_ref_base (ref);
543 : 12499339 : return ref_may_alias_global_p_1 (base, escaped_local_p);
544 : : }
545 : :
546 : : bool
547 : 43887156 : ref_may_alias_global_p (tree ref, bool escaped_local_p)
548 : : {
549 : 43887156 : tree base = get_base_address (ref);
550 : 43887156 : return ref_may_alias_global_p_1 (base, escaped_local_p);
551 : : }
552 : :
553 : : /* Return true whether STMT may clobber global memory.
554 : : When ESCAPED_LOCAL_P is true escaped local memory is also considered
555 : : global. */
556 : :
557 : : bool
558 : 178337505 : stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
559 : : {
560 : 178337505 : tree lhs;
561 : :
562 : 356572584 : if (!gimple_vdef (stmt))
563 : : return false;
564 : :
565 : : /* ??? We can ask the oracle whether an artificial pointer
566 : : dereference with a pointer with points-to information covering
567 : : all global memory (what about non-address taken memory?) maybe
568 : : clobbered by this call. As there is at the moment no convenient
569 : : way of doing that without generating garbage do some manual
570 : : checking instead.
571 : : ??? We could make a NULL ao_ref argument to the various
572 : : predicates special, meaning any global memory. */
573 : :
574 : 43976840 : switch (gimple_code (stmt))
575 : : {
576 : 43887156 : case GIMPLE_ASSIGN:
577 : 43887156 : lhs = gimple_assign_lhs (stmt);
578 : 43887156 : return (TREE_CODE (lhs) != SSA_NAME
579 : 43887156 : && ref_may_alias_global_p (lhs, escaped_local_p));
580 : : case GIMPLE_CALL:
581 : : return true;
582 : : default:
583 : : return true;
584 : : }
585 : : }
586 : :
587 : :
588 : : /* Dump alias information on FILE. */
589 : :
590 : : void
591 : 282 : dump_alias_info (FILE *file)
592 : : {
593 : 282 : unsigned i;
594 : 282 : tree ptr;
595 : 282 : const char *funcname
596 : 282 : = lang_hooks.decl_printable_name (current_function_decl, 2);
597 : 282 : tree var;
598 : :
599 : 282 : fprintf (file, "\n\nAlias information for %s\n\n", funcname);
600 : :
601 : 282 : fprintf (file, "Aliased symbols\n\n");
602 : :
603 : 1114 : FOR_EACH_LOCAL_DECL (cfun, i, var)
604 : : {
605 : 579 : if (may_be_aliased (var))
606 : 347 : dump_variable (file, var);
607 : : }
608 : :
609 : 282 : fprintf (file, "\nCall clobber information\n");
610 : :
611 : 282 : fprintf (file, "\nESCAPED");
612 : 282 : dump_points_to_solution (file, &cfun->gimple_df->escaped);
613 : :
614 : 282 : fprintf (file, "\nESCAPED_RETURN");
615 : 282 : dump_points_to_solution (file, &cfun->gimple_df->escaped_return);
616 : :
617 : 282 : fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
618 : :
619 : 3016 : FOR_EACH_SSA_NAME (i, ptr, cfun)
620 : : {
621 : 2514 : struct ptr_info_def *pi;
622 : :
623 : 4589 : if (!POINTER_TYPE_P (TREE_TYPE (ptr))
624 : 2561 : || SSA_NAME_IN_FREE_LIST (ptr))
625 : 2028 : continue;
626 : :
627 : 486 : pi = SSA_NAME_PTR_INFO (ptr);
628 : 486 : if (pi)
629 : 480 : dump_points_to_info_for (file, ptr);
630 : : }
631 : :
632 : 282 : fprintf (file, "\n");
633 : 282 : }
634 : :
635 : :
636 : : /* Dump alias information on stderr. */
637 : :
638 : : DEBUG_FUNCTION void
639 : 0 : debug_alias_info (void)
640 : : {
641 : 0 : dump_alias_info (stderr);
642 : 0 : }
643 : :
644 : :
645 : : /* Dump the points-to set *PT into FILE. */
646 : :
647 : : void
648 : 1044 : dump_points_to_solution (FILE *file, struct pt_solution *pt)
649 : : {
650 : 1044 : if (pt->anything)
651 : 3 : fprintf (file, ", points-to anything");
652 : :
653 : 1044 : if (pt->nonlocal)
654 : 632 : fprintf (file, ", points-to non-local");
655 : :
656 : 1044 : if (pt->escaped)
657 : 419 : fprintf (file, ", points-to escaped");
658 : :
659 : 1044 : if (pt->ipa_escaped)
660 : 0 : fprintf (file, ", points-to unit escaped");
661 : :
662 : 1044 : if (pt->null)
663 : 606 : fprintf (file, ", points-to NULL");
664 : :
665 : 1044 : if (pt->const_pool)
666 : 0 : fprintf (file, ", points-to const-pool");
667 : :
668 : 1044 : if (pt->vars)
669 : : {
670 : 1041 : fprintf (file, ", points-to vars: ");
671 : 1041 : dump_decl_set (file, pt->vars);
672 : 1041 : if (pt->vars_contains_nonlocal
673 : 907 : || pt->vars_contains_escaped
674 : 830 : || pt->vars_contains_escaped_heap
675 : 830 : || pt->vars_contains_restrict
676 : 830 : || pt->vars_contains_interposable)
677 : : {
678 : 211 : const char *comma = "";
679 : 211 : fprintf (file, " (");
680 : 211 : if (pt->vars_contains_nonlocal)
681 : : {
682 : 134 : fprintf (file, "nonlocal");
683 : 134 : comma = ", ";
684 : : }
685 : 211 : if (pt->vars_contains_escaped)
686 : : {
687 : 139 : fprintf (file, "%sescaped", comma);
688 : 139 : comma = ", ";
689 : : }
690 : 211 : if (pt->vars_contains_escaped_heap)
691 : : {
692 : 0 : fprintf (file, "%sescaped heap", comma);
693 : 0 : comma = ", ";
694 : : }
695 : 211 : if (pt->vars_contains_restrict)
696 : : {
697 : 58 : fprintf (file, "%srestrict", comma);
698 : 58 : comma = ", ";
699 : : }
700 : 211 : if (pt->vars_contains_interposable)
701 : 0 : fprintf (file, "%sinterposable", comma);
702 : 211 : fprintf (file, ")");
703 : : }
704 : : }
705 : 1044 : }
706 : :
707 : :
708 : : /* Unified dump function for pt_solution. */
709 : :
710 : : DEBUG_FUNCTION void
711 : 0 : debug (pt_solution &ref)
712 : : {
713 : 0 : dump_points_to_solution (stderr, &ref);
714 : 0 : }
715 : :
716 : : DEBUG_FUNCTION void
717 : 0 : debug (pt_solution *ptr)
718 : : {
719 : 0 : if (ptr)
720 : 0 : debug (*ptr);
721 : : else
722 : 0 : fprintf (stderr, "<nil>\n");
723 : 0 : }
724 : :
725 : :
726 : : /* Dump points-to information for SSA_NAME PTR into FILE. */
727 : :
728 : : void
729 : 480 : dump_points_to_info_for (FILE *file, tree ptr)
730 : : {
731 : 480 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
732 : :
733 : 480 : print_generic_expr (file, ptr, dump_flags);
734 : :
735 : 480 : if (pi)
736 : 480 : dump_points_to_solution (file, &pi->pt);
737 : : else
738 : 0 : fprintf (file, ", points-to anything");
739 : :
740 : 480 : fprintf (file, "\n");
741 : 480 : }
742 : :
743 : :
744 : : /* Dump points-to information for VAR into stderr. */
745 : :
746 : : DEBUG_FUNCTION void
747 : 0 : debug_points_to_info_for (tree var)
748 : : {
749 : 0 : dump_points_to_info_for (stderr, var);
750 : 0 : }
751 : :
752 : :
753 : : /* Initializes the alias-oracle reference representation *R from REF. */
754 : :
755 : : void
756 : 2752512787 : ao_ref_init (ao_ref *r, tree ref)
757 : : {
758 : 2752512787 : r->ref = ref;
759 : 2752512787 : r->base = NULL_TREE;
760 : 2752512787 : r->offset = 0;
761 : 2752512787 : r->size = -1;
762 : 2752512787 : r->max_size = -1;
763 : 2752512787 : r->ref_alias_set = -1;
764 : 2752512787 : r->base_alias_set = -1;
765 : 2752512787 : r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
766 : 2752512787 : }
767 : :
768 : : /* Returns the base object of the memory reference *REF. */
769 : :
770 : : tree
771 : 5363877863 : ao_ref_base (ao_ref *ref)
772 : : {
773 : 5363877863 : bool reverse;
774 : :
775 : 5363877863 : if (ref->base)
776 : : return ref->base;
777 : 2482500848 : ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
778 : : &ref->max_size, &reverse);
779 : 2482500848 : return ref->base;
780 : : }
781 : :
782 : : /* Returns the base object alias set of the memory reference *REF. */
783 : :
784 : : alias_set_type
785 : 1003308521 : ao_ref_base_alias_set (ao_ref *ref)
786 : : {
787 : 1003308521 : tree base_ref;
788 : 1003308521 : if (ref->base_alias_set != -1)
789 : : return ref->base_alias_set;
790 : 781097911 : if (!ref->ref)
791 : : return 0;
792 : 745675647 : base_ref = ref->ref;
793 : 745675647 : if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
794 : 4 : base_ref = TREE_OPERAND (base_ref, 0);
795 : 1184411324 : while (handled_component_p (base_ref))
796 : 438735677 : base_ref = TREE_OPERAND (base_ref, 0);
797 : 745675647 : ref->base_alias_set = get_alias_set (base_ref);
798 : 745675647 : return ref->base_alias_set;
799 : : }
800 : :
801 : : /* Returns the reference alias set of the memory reference *REF. */
802 : :
803 : : alias_set_type
804 : 1274313241 : ao_ref_alias_set (ao_ref *ref)
805 : : {
806 : 1274313241 : if (ref->ref_alias_set != -1)
807 : : return ref->ref_alias_set;
808 : 511142835 : if (!ref->ref)
809 : : return 0;
810 : 511142833 : ref->ref_alias_set = get_alias_set (ref->ref);
811 : 511142833 : return ref->ref_alias_set;
812 : : }
813 : :
814 : : /* Returns a type satisfying
815 : : get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
816 : :
817 : : tree
818 : 316673 : ao_ref_base_alias_ptr_type (ao_ref *ref)
819 : : {
820 : 316673 : tree base_ref;
821 : :
822 : 316673 : if (!ref->ref)
823 : : return NULL_TREE;
824 : 316673 : base_ref = ref->ref;
825 : 316673 : if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
826 : 0 : base_ref = TREE_OPERAND (base_ref, 0);
827 : 421421 : while (handled_component_p (base_ref))
828 : 104748 : base_ref = TREE_OPERAND (base_ref, 0);
829 : 316673 : tree ret = reference_alias_ptr_type (base_ref);
830 : 316673 : return ret;
831 : : }
832 : :
833 : : /* Returns a type satisfying
834 : : get_deref_alias_set (type) == ao_ref_alias_set (REF). */
835 : :
836 : : tree
837 : 316673 : ao_ref_alias_ptr_type (ao_ref *ref)
838 : : {
839 : 316673 : if (!ref->ref)
840 : : return NULL_TREE;
841 : 316673 : tree ret = reference_alias_ptr_type (ref->ref);
842 : 316673 : return ret;
843 : : }
844 : :
845 : : /* Return the alignment of the access *REF and store it in the *ALIGN
846 : : and *BITPOS pairs. Returns false if no alignment could be determined.
847 : : See get_object_alignment_2 for details. */
848 : :
849 : : bool
850 : 93208 : ao_ref_alignment (ao_ref *ref, unsigned int *align,
851 : : unsigned HOST_WIDE_INT *bitpos)
852 : : {
853 : 93208 : if (ref->ref)
854 : 91673 : return get_object_alignment_1 (ref->ref, align, bitpos);
855 : :
856 : : /* When we just have ref->base we cannot use get_object_alignment since
857 : : that will eventually use the type of the appearant access while for
858 : : example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
859 : 1535 : *align = BITS_PER_UNIT;
860 : 1535 : HOST_WIDE_INT offset;
861 : 1535 : if (!ref->offset.is_constant (&offset)
862 : 1535 : || !get_object_alignment_2 (ref->base, align, bitpos, true))
863 : : return false;
864 : 1325 : *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
865 : 1325 : *bitpos = *bitpos & (*align - 1);
866 : 1325 : return true;
867 : : }
868 : :
869 : : /* Init an alias-oracle reference representation from a gimple pointer
870 : : PTR a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
871 : : that RANGE_KNOWN is set.
872 : :
873 : : The access is assumed to be only to or after of the pointer target adjusted
874 : : by the offset, not before it (even in the case RANGE_KNOWN is false). */
875 : :
876 : : void
877 : 38141785 : ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
878 : : bool range_known,
879 : : poly_int64 offset,
880 : : poly_int64 size,
881 : : poly_int64 max_size)
882 : : {
883 : 38141785 : poly_int64 t, extra_offset = 0;
884 : :
885 : 38141785 : ref->ref = NULL_TREE;
886 : 38141785 : if (TREE_CODE (ptr) == SSA_NAME)
887 : : {
888 : 24158923 : gimple *stmt = SSA_NAME_DEF_STMT (ptr);
889 : 24158923 : if (gimple_assign_single_p (stmt)
890 : 24158923 : && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
891 : 3832698 : ptr = gimple_assign_rhs1 (stmt);
892 : 20326225 : else if (is_gimple_assign (stmt)
893 : 13472672 : && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
894 : 25822991 : && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
895 : : {
896 : 255625 : ptr = gimple_assign_rhs1 (stmt);
897 : 255625 : extra_offset *= BITS_PER_UNIT;
898 : : }
899 : : }
900 : :
901 : 38141785 : if (TREE_CODE (ptr) == ADDR_EXPR)
902 : : {
903 : 17760553 : ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
904 : 17760553 : if (ref->base)
905 : 15041464 : ref->offset = BITS_PER_UNIT * t;
906 : : else
907 : : {
908 : 2719089 : range_known = false;
909 : 2719089 : ref->offset = 0;
910 : 2719089 : ref->base = get_base_address (TREE_OPERAND (ptr, 0));
911 : : }
912 : : }
913 : : else
914 : : {
915 : 20381232 : gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
916 : 20381232 : ref->base = build2 (MEM_REF, char_type_node,
917 : : ptr, null_pointer_node);
918 : 20381232 : ref->offset = 0;
919 : : }
920 : 38141785 : ref->offset += extra_offset + offset;
921 : 38141785 : if (range_known)
922 : : {
923 : 21011613 : ref->max_size = max_size;
924 : 21011613 : ref->size = size;
925 : : }
926 : : else
927 : 17130172 : ref->max_size = ref->size = -1;
928 : 38141785 : ref->ref_alias_set = 0;
929 : 38141785 : ref->base_alias_set = 0;
930 : 38141785 : ref->volatile_p = false;
931 : 38141785 : }
932 : :
933 : : /* Init an alias-oracle reference representation from a gimple pointer
934 : : PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
935 : : size is assumed to be unknown. The access is assumed to be only
936 : : to or after of the pointer target, not before it. */
937 : :
938 : : void
939 : 10756265 : ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
940 : : {
941 : 10756265 : poly_int64 size_hwi;
942 : 10756265 : if (size
943 : 5607581 : && poly_int_tree_p (size, &size_hwi)
944 : 15754474 : && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
945 : : {
946 : 4997535 : size_hwi = size_hwi * BITS_PER_UNIT;
947 : 4997535 : ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
948 : : }
949 : : else
950 : 5758730 : ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
951 : 10756265 : }
952 : :
953 : : /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
954 : : Return -1 if S1 < S2
955 : : Return 1 if S1 > S2
956 : : Return 0 if equal or incomparable. */
957 : :
958 : : static int
959 : 8899599 : compare_sizes (tree s1, tree s2)
960 : : {
961 : 8899599 : if (!s1 || !s2)
962 : : return 0;
963 : :
964 : 8892453 : poly_uint64 size1;
965 : 8892453 : poly_uint64 size2;
966 : :
967 : 8892453 : if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
968 : 482 : return 0;
969 : 8891971 : if (known_lt (size1, size2))
970 : : return -1;
971 : 6395231 : if (known_lt (size2, size1))
972 : : return 1;
973 : : return 0;
974 : : }
975 : :
976 : : /* Compare TYPE1 and TYPE2 by its size.
977 : : Return -1 if size of TYPE1 < size of TYPE2
978 : : Return 1 if size of TYPE1 > size of TYPE2
979 : : Return 0 if types are of equal sizes or we can not compare them. */
980 : :
981 : : static int
982 : 7659178 : compare_type_sizes (tree type1, tree type2)
983 : : {
984 : : /* Be conservative for arrays and vectors. We want to support partial
985 : : overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
986 : 7659178 : while (TREE_CODE (type1) == ARRAY_TYPE
987 : 8579049 : || VECTOR_TYPE_P (type1))
988 : 919871 : type1 = TREE_TYPE (type1);
989 : 7932689 : while (TREE_CODE (type2) == ARRAY_TYPE
990 : 7932689 : || VECTOR_TYPE_P (type2))
991 : 273511 : type2 = TREE_TYPE (type2);
992 : 7659178 : return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
993 : : }
994 : :
995 : : /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
996 : : purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
997 : : decide. */
998 : :
999 : : static inline int
1000 : 839252825 : same_type_for_tbaa (tree type1, tree type2)
1001 : : {
1002 : 839252825 : type1 = TYPE_MAIN_VARIANT (type1);
1003 : 839252825 : type2 = TYPE_MAIN_VARIANT (type2);
1004 : :
1005 : : /* Handle the most common case first. */
1006 : 839252825 : if (type1 == type2)
1007 : : return 1;
1008 : :
1009 : : /* If we would have to do structural comparison bail out. */
1010 : 127210404 : if (TYPE_STRUCTURAL_EQUALITY_P (type1)
1011 : 127210404 : || TYPE_STRUCTURAL_EQUALITY_P (type2))
1012 : : return -1;
1013 : :
1014 : : /* Compare the canonical types. */
1015 : 81065509 : if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
1016 : : return 1;
1017 : :
1018 : : /* ??? Array types are not properly unified in all cases as we have
1019 : : spurious changes in the index types for example. Removing this
1020 : : causes all sorts of problems with the Fortran frontend. */
1021 : 80402235 : if (TREE_CODE (type1) == ARRAY_TYPE
1022 : 4716720 : && TREE_CODE (type2) == ARRAY_TYPE)
1023 : : return -1;
1024 : :
1025 : : /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
1026 : : object of one of its constrained subtypes, e.g. when a function with an
1027 : : unconstrained parameter passed by reference is called on an object and
1028 : : inlined. But, even in the case of a fixed size, type and subtypes are
1029 : : not equivalent enough as to share the same TYPE_CANONICAL, since this
1030 : : would mean that conversions between them are useless, whereas they are
1031 : : not (e.g. type and subtypes can have different modes). So, in the end,
1032 : : they are only guaranteed to have the same alias set. */
1033 : 80023694 : alias_set_type set1 = get_alias_set (type1);
1034 : 80023694 : alias_set_type set2 = get_alias_set (type2);
1035 : 80023694 : if (set1 == set2)
1036 : : return -1;
1037 : :
1038 : : /* Pointers to void are considered compatible with all other pointers,
1039 : : so for two pointers see what the alias set resolution thinks. */
1040 : 46573072 : if (POINTER_TYPE_P (type1)
1041 : 13277049 : && POINTER_TYPE_P (type2)
1042 : 46743253 : && alias_sets_conflict_p (set1, set2))
1043 : : return -1;
1044 : :
1045 : : /* The types are known to be not equal. */
1046 : : return 0;
1047 : : }
1048 : :
1049 : : /* Return true if TYPE is a composite type (i.e. we may apply one of handled
1050 : : components on it). */
1051 : :
1052 : : static bool
1053 : 1989530 : type_has_components_p (tree type)
1054 : : {
1055 : 1989530 : return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1056 : 1989530 : || TREE_CODE (type) == COMPLEX_TYPE;
1057 : : }
1058 : :
1059 : : /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
1060 : : respectively are either pointing to same address or are completely
1061 : : disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
1062 : : just partly overlap.
1063 : :
1064 : : Try to disambiguate using the access path starting from the match
1065 : : and return false if there is no conflict.
1066 : :
1067 : : Helper for aliasing_component_refs_p. */
1068 : :
1069 : : static bool
1070 : 904543 : aliasing_matching_component_refs_p (tree match1, tree ref1,
1071 : : poly_int64 offset1, poly_int64 max_size1,
1072 : : tree match2, tree ref2,
1073 : : poly_int64 offset2, poly_int64 max_size2,
1074 : : bool partial_overlap)
1075 : : {
1076 : 904543 : poly_int64 offadj, sztmp, msztmp;
1077 : 904543 : bool reverse;
1078 : :
1079 : 904543 : if (!partial_overlap)
1080 : : {
1081 : 904539 : get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1082 : 904539 : offset2 -= offadj;
1083 : 904539 : get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1084 : 904539 : offset1 -= offadj;
1085 : 904539 : if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1086 : : {
1087 : 78947 : ++alias_stats.aliasing_component_refs_p_no_alias;
1088 : 78947 : return false;
1089 : : }
1090 : : }
1091 : :
1092 : 825596 : int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1093 : : partial_overlap);
1094 : 825596 : if (cmp == 1
1095 : 825596 : || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1096 : : {
1097 : 821 : ++alias_stats.aliasing_component_refs_p_no_alias;
1098 : 821 : return false;
1099 : : }
1100 : 824775 : ++alias_stats.aliasing_component_refs_p_may_alias;
1101 : 824775 : return true;
1102 : : }
1103 : :
1104 : : /* Return true if REF is reference to zero sized trailing array. I.e.
1105 : : struct foo {int bar; int array[0];} *fooptr;
1106 : : fooptr->array. */
1107 : :
1108 : : static bool
1109 : 6328553 : component_ref_to_zero_sized_trailing_array_p (tree ref)
1110 : : {
1111 : 6328553 : return (TREE_CODE (ref) == COMPONENT_REF
1112 : 5674514 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1113 : 117648 : && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1114 : 38907 : || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1115 : 6407371 : && array_ref_flexible_size_p (ref));
1116 : : }
1117 : :
1118 : : /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1119 : : aliasing_component_refs_p.
1120 : :
1121 : : Walk access path REF2 and try to find type matching TYPE1
1122 : : (which is a start of possibly aliasing access path REF1).
1123 : : If match is found, try to disambiguate.
1124 : :
1125 : : Return 0 for sucessful disambiguation.
1126 : : Return 1 if match was found but disambiguation failed
1127 : : Return -1 if there is no match.
1128 : : In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
1129 : : in access patch REF2 and -1 if we are not sure. */
1130 : :
1131 : : static int
1132 : 2618256 : aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1133 : : poly_int64 offset1, poly_int64 max_size1,
1134 : : tree end_struct_ref1,
1135 : : tree ref2, tree base2,
1136 : : poly_int64 offset2, poly_int64 max_size2,
1137 : : bool *maybe_match)
1138 : : {
1139 : 2618256 : tree ref = ref2;
1140 : 2618256 : int same_p = 0;
1141 : :
1142 : 7579600 : while (true)
1143 : : {
1144 : : /* We walk from inner type to the outer types. If type we see is
1145 : : already too large to be part of type1, terminate the search. */
1146 : 5098928 : int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1147 : :
1148 : 5098928 : if (cmp < 0
1149 : 5098928 : && (!end_struct_ref1
1150 : 60 : || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1151 : 60 : TREE_TYPE (ref)) < 0))
1152 : : break;
1153 : : /* If types may be of same size, see if we can decide about their
1154 : : equality. */
1155 : 3779113 : if (cmp == 0)
1156 : : {
1157 : 2697429 : same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
1158 : 2697429 : if (same_p == 1)
1159 : : break;
1160 : : /* In case we can't decide whether types are same try to
1161 : : continue looking for the exact match.
1162 : : Remember however that we possibly saw a match
1163 : : to bypass the access path continuations tests we do later. */
1164 : 1792886 : if (same_p == -1)
1165 : 674577 : *maybe_match = true;
1166 : : }
1167 : 2874570 : if (!handled_component_p (ref))
1168 : : break;
1169 : 2480672 : ref = TREE_OPERAND (ref, 0);
1170 : 2480672 : }
1171 : 2618256 : if (same_p == 1)
1172 : : {
1173 : 904543 : bool partial_overlap = false;
1174 : :
1175 : : /* We assume that arrays can overlap by multiple of their elements
1176 : : size as tested in gcc.dg/torture/alias-2.c.
1177 : : This partial overlap happen only when both arrays are bases of
1178 : : the access and not contained within another component ref.
1179 : : To be safe we also assume partial overlap for VLAs. */
1180 : 904543 : if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1181 : 904543 : && (!TYPE_SIZE (TREE_TYPE (base1))
1182 : 4271 : || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1183 : 4271 : || ref == base2))
1184 : : {
1185 : : /* Setting maybe_match to true triggers
1186 : : nonoverlapping_component_refs_p test later that still may do
1187 : : useful disambiguation. */
1188 : 4 : *maybe_match = true;
1189 : 4 : partial_overlap = true;
1190 : : }
1191 : 904543 : return aliasing_matching_component_refs_p (base1, ref1,
1192 : : offset1, max_size1,
1193 : : ref, ref2,
1194 : : offset2, max_size2,
1195 : 904543 : partial_overlap);
1196 : : }
1197 : : return -1;
1198 : : }
1199 : :
1200 : : /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1201 : : Return true if they can be composed to single access path
1202 : : base1...ref1...base2...ref2.
1203 : :
1204 : : REF_TYPE1 if type of REF1. END_STRUCT_PAST_END1 is true if there is
1205 : : a trailing array access after REF1 in the non-TBAA part of the access.
1206 : : REF1_ALIAS_SET is the alias set of REF1.
1207 : :
1208 : : BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1209 : : a trailing array access in the TBAA part of access path2.
1210 : : BASE2_ALIAS_SET is the alias set of base2. */
1211 : :
1212 : : bool
1213 : 1989530 : access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1214 : : alias_set_type ref1_alias_set,
1215 : : tree base_type2, tree end_struct_ref2,
1216 : : alias_set_type base2_alias_set)
1217 : : {
1218 : : /* Access path can not continue past types with no components. */
1219 : 1989530 : if (!type_has_components_p (ref_type1))
1220 : : return false;
1221 : :
1222 : : /* If first access path ends by too small type to hold base of
1223 : : the second access path, typically paths can not continue.
1224 : :
1225 : : Punt if end_struct_past_end1 is true. We want to support arbitrary
1226 : : type puning past first COMPONENT_REF to union because redundant store
1227 : : elimination depends on this, see PR92152. For this reason we can not
1228 : : check size of the reference because types may partially overlap. */
1229 : 198763 : if (!end_struct_past_end1)
1230 : : {
1231 : 198714 : if (compare_type_sizes (ref_type1, base_type2) < 0)
1232 : : return false;
1233 : : /* If the path2 contains trailing array access we can strenghten the check
1234 : : to verify that also the size of element of the trailing array fits.
1235 : : In fact we could check for offset + type_size, but we do not track
1236 : : offsets and this is quite side case. */
1237 : 171600 : if (end_struct_ref2
1238 : 171600 : && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1239 : : return false;
1240 : : }
1241 : 171649 : return (base2_alias_set == ref1_alias_set
1242 : 171649 : || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1243 : : }
1244 : :
1245 : : /* Determine if the two component references REF1 and REF2 which are
1246 : : based on access types TYPE1 and TYPE2 and of which at least one is based
1247 : : on an indirect reference may alias.
1248 : : REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1249 : : are the respective alias sets. */
1250 : :
1251 : : static bool
1252 : 2361415 : aliasing_component_refs_p (tree ref1,
1253 : : alias_set_type ref1_alias_set,
1254 : : alias_set_type base1_alias_set,
1255 : : poly_int64 offset1, poly_int64 max_size1,
1256 : : tree ref2,
1257 : : alias_set_type ref2_alias_set,
1258 : : alias_set_type base2_alias_set,
1259 : : poly_int64 offset2, poly_int64 max_size2)
1260 : : {
1261 : : /* If one reference is a component references through pointers try to find a
1262 : : common base and apply offset based disambiguation. This handles
1263 : : for example
1264 : : struct A { int i; int j; } *q;
1265 : : struct B { struct A a; int k; } *p;
1266 : : disambiguating q->i and p->a.j. */
1267 : 2361415 : tree base1, base2;
1268 : 2361415 : tree type1, type2;
1269 : 2361415 : bool maybe_match = false;
1270 : 2361415 : tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1271 : 2361415 : bool end_struct_past_end1 = false;
1272 : 2361415 : bool end_struct_past_end2 = false;
1273 : :
1274 : : /* Choose bases and base types to search for.
1275 : : The access path is as follows:
1276 : : base....end_of_tbaa_ref...actual_ref
1277 : : At one place in the access path may be a reference to zero sized or
1278 : : trailing array.
1279 : :
1280 : : We generally discard the segment after end_of_tbaa_ref however
1281 : : we need to be careful in case it contains zero sized or trailing array.
1282 : : These may happen after reference to union and in this case we need to
1283 : : not disambiguate type puning scenarios.
1284 : :
1285 : : We set:
1286 : : base1 to point to base
1287 : :
1288 : : ref1 to point to end_of_tbaa_ref
1289 : :
1290 : : end_struct_ref1 to point the trailing reference (if it exists
1291 : : in range base....end_of_tbaa_ref
1292 : :
1293 : : end_struct_past_end1 is true if this trailing reference occurs in
1294 : : end_of_tbaa_ref...actual_ref. */
1295 : 2361415 : base1 = ref1;
1296 : 5154350 : while (handled_component_p (base1))
1297 : : {
1298 : : /* Generally access paths are monotous in the size of object. The
1299 : : exception are trailing arrays of structures. I.e.
1300 : : struct a {int array[0];};
1301 : : or
1302 : : struct a {int array1[0]; int array[];};
1303 : : Such struct has size 0 but accesses to a.array may have non-zero size.
1304 : : In this case the size of TREE_TYPE (base1) is smaller than
1305 : : size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1306 : :
1307 : : Because we compare sizes of arrays just by sizes of their elements,
1308 : : we only need to care about zero sized array fields here. */
1309 : 2792935 : if (component_ref_to_zero_sized_trailing_array_p (base1))
1310 : : {
1311 : 75301 : gcc_checking_assert (!end_struct_ref1);
1312 : : end_struct_ref1 = base1;
1313 : : }
1314 : 2792935 : if (ends_tbaa_access_path_p (base1))
1315 : : {
1316 : 29468 : ref1 = TREE_OPERAND (base1, 0);
1317 : 29468 : if (end_struct_ref1)
1318 : : {
1319 : 1 : end_struct_past_end1 = true;
1320 : 1 : end_struct_ref1 = NULL;
1321 : : }
1322 : : }
1323 : 2792935 : base1 = TREE_OPERAND (base1, 0);
1324 : : }
1325 : 2361415 : type1 = TREE_TYPE (base1);
1326 : 2361415 : base2 = ref2;
1327 : 5452469 : while (handled_component_p (base2))
1328 : : {
1329 : 3091054 : if (component_ref_to_zero_sized_trailing_array_p (base2))
1330 : : {
1331 : 3000 : gcc_checking_assert (!end_struct_ref2);
1332 : : end_struct_ref2 = base2;
1333 : : }
1334 : 3091054 : if (ends_tbaa_access_path_p (base2))
1335 : : {
1336 : 68574 : ref2 = TREE_OPERAND (base2, 0);
1337 : 68574 : if (end_struct_ref2)
1338 : : {
1339 : 48 : end_struct_past_end2 = true;
1340 : 48 : end_struct_ref2 = NULL;
1341 : : }
1342 : : }
1343 : 3091054 : base2 = TREE_OPERAND (base2, 0);
1344 : : }
1345 : 2361415 : type2 = TREE_TYPE (base2);
1346 : :
1347 : : /* Now search for the type1 in the access path of ref2. This
1348 : : would be a common base for doing offset based disambiguation on.
1349 : : This however only makes sense if type2 is big enough to hold type1. */
1350 : 2361415 : int cmp_outer = compare_type_sizes (type2, type1);
1351 : :
1352 : : /* If type2 is big enough to contain type1 walk its access path.
1353 : : We also need to care of arrays at the end of structs that may extend
1354 : : beyond the end of structure. If this occurs in the TBAA part of the
1355 : : access path, we need to consider the increased type as well. */
1356 : 2361415 : if (cmp_outer >= 0
1357 : 2361415 : || (end_struct_ref2
1358 : 1 : && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1359 : : {
1360 : 1239315 : int res = aliasing_component_refs_walk (ref1, type1, base1,
1361 : : offset1, max_size1,
1362 : : end_struct_ref1,
1363 : : ref2, base2, offset2, max_size2,
1364 : : &maybe_match);
1365 : 1239315 : if (res != -1)
1366 : 569736 : return res;
1367 : : }
1368 : :
1369 : : /* If we didn't find a common base, try the other way around. */
1370 : 1791679 : if (cmp_outer <= 0
1371 : 1791679 : || (end_struct_ref1
1372 : 60 : && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
1373 : : {
1374 : 1378941 : int res = aliasing_component_refs_walk (ref2, type2, base2,
1375 : : offset2, max_size2,
1376 : : end_struct_ref2,
1377 : : ref1, base1, offset1, max_size1,
1378 : : &maybe_match);
1379 : 1378941 : if (res != -1)
1380 : 334807 : return res;
1381 : : }
1382 : :
1383 : : /* In the following code we make an assumption that the types in access
1384 : : paths do not overlap and thus accesses alias only if one path can be
1385 : : continuation of another. If we was not able to decide about equivalence,
1386 : : we need to give up. */
1387 : 1456872 : if (maybe_match)
1388 : : {
1389 : 446004 : if (!nonoverlapping_component_refs_p (ref1, ref2))
1390 : : {
1391 : 428855 : ++alias_stats.aliasing_component_refs_p_may_alias;
1392 : 428855 : return true;
1393 : : }
1394 : 17149 : ++alias_stats.aliasing_component_refs_p_no_alias;
1395 : 17149 : return false;
1396 : : }
1397 : :
1398 : 1010868 : if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1399 : : ref1_alias_set,
1400 : : type2, end_struct_ref2,
1401 : : base2_alias_set)
1402 : 1010868 : || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1403 : : ref2_alias_set,
1404 : : type1, end_struct_ref1,
1405 : : base1_alias_set))
1406 : : {
1407 : 169738 : ++alias_stats.aliasing_component_refs_p_may_alias;
1408 : 169738 : return true;
1409 : : }
1410 : 841130 : ++alias_stats.aliasing_component_refs_p_no_alias;
1411 : 841130 : return false;
1412 : : }
1413 : :
1414 : : /* FIELD1 and FIELD2 are two fields of component refs. We assume
1415 : : that bases of both component refs are either equivalent or nonoverlapping.
1416 : : We do not assume that the containers of FIELD1 and FIELD2 are of the
1417 : : same type or size.
1418 : :
1419 : : Return 0 in case the base address of component_refs are same then
1420 : : FIELD1 and FIELD2 have same address. Note that FIELD1 and FIELD2
1421 : : may not be of same type or size.
1422 : :
1423 : : Return 1 if FIELD1 and FIELD2 are non-overlapping.
1424 : :
1425 : : Return -1 otherwise.
1426 : :
1427 : : Main difference between 0 and -1 is to let
1428 : : nonoverlapping_component_refs_since_match_p discover the semantically
1429 : : equivalent part of the access path.
1430 : :
1431 : : Note that this function is used even with -fno-strict-aliasing
1432 : : and makes use of no TBAA assumptions. */
1433 : :
1434 : : static int
1435 : 3313448 : nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1436 : : {
1437 : : /* If both fields are of the same type, we could save hard work of
1438 : : comparing offsets. */
1439 : 3313448 : tree type1 = DECL_CONTEXT (field1);
1440 : 3313448 : tree type2 = DECL_CONTEXT (field2);
1441 : :
1442 : 3313448 : if (TREE_CODE (type1) == RECORD_TYPE
1443 : 6506321 : && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1444 : : field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1445 : 3313448 : if (TREE_CODE (type2) == RECORD_TYPE
1446 : 6506321 : && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1447 : : field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1448 : :
1449 : : /* ??? Bitfields can overlap at RTL level so punt on them.
1450 : : FIXME: RTL expansion should be fixed by adjusting the access path
1451 : : when producing MEM_ATTRs for MEMs which are wider than
1452 : : the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1453 : 3313448 : if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1454 : : return -1;
1455 : :
1456 : : /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1457 : 3313448 : if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1458 : 3133204 : return field1 != field2;
1459 : :
1460 : : /* In common case the offsets and bit offsets will be the same.
1461 : : However if frontends do not agree on the alignment, they may be
1462 : : different even if they actually represent same address.
1463 : : Try the common case first and if that fails calcualte the
1464 : : actual bit offset. */
1465 : 180244 : if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1466 : 180244 : DECL_FIELD_OFFSET (field2))
1467 : 299806 : && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1468 : 119562 : DECL_FIELD_BIT_OFFSET (field2)))
1469 : : return 0;
1470 : :
1471 : : /* Note that it may be possible to use component_ref_field_offset
1472 : : which would provide offsets as trees. However constructing and folding
1473 : : trees is expensive and does not seem to be worth the compile time
1474 : : cost. */
1475 : :
1476 : 60717 : poly_uint64 offset1, offset2;
1477 : 60717 : poly_uint64 bit_offset1, bit_offset2;
1478 : :
1479 : 60717 : if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1480 : 60717 : && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1481 : 60717 : && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1482 : 121434 : && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1483 : : {
1484 : 60717 : offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1485 : 60717 : offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1486 : :
1487 : 60717 : if (known_eq (offset1, offset2))
1488 : 57679 : return 0;
1489 : :
1490 : 60717 : poly_uint64 size1, size2;
1491 : :
1492 : 60717 : if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1493 : 60717 : && poly_int_tree_p (DECL_SIZE (field2), &size2)
1494 : 121434 : && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1495 : : return 1;
1496 : : }
1497 : : /* Resort to slower overlap checking by looking for matching types in
1498 : : the middle of access path. */
1499 : : return -1;
1500 : : }
1501 : :
1502 : : /* Return low bound of array. Do not produce new trees
1503 : : and thus do not care about particular type of integer constant
1504 : : and placeholder exprs. */
1505 : :
1506 : : static tree
1507 : 17360249 : cheap_array_ref_low_bound (tree ref)
1508 : : {
1509 : 17360249 : tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1510 : :
1511 : : /* Avoid expensive array_ref_low_bound.
1512 : : low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
1513 : : type or it is zero. */
1514 : 17360249 : if (TREE_OPERAND (ref, 2))
1515 : 72448 : return TREE_OPERAND (ref, 2);
1516 : 17287801 : else if (domain_type && TYPE_MIN_VALUE (domain_type))
1517 : 17281079 : return TYPE_MIN_VALUE (domain_type);
1518 : : else
1519 : 6722 : return integer_zero_node;
1520 : : }
1521 : :
1522 : : /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1523 : : completely disjoint.
1524 : :
1525 : : Return 1 if the refs are non-overlapping.
1526 : : Return 0 if they are possibly overlapping but if so the overlap again
1527 : : starts on the same address.
1528 : : Return -1 otherwise. */
1529 : :
1530 : : int
1531 : 8663504 : nonoverlapping_array_refs_p (tree ref1, tree ref2)
1532 : : {
1533 : 8663504 : tree index1 = TREE_OPERAND (ref1, 1);
1534 : 8663504 : tree index2 = TREE_OPERAND (ref2, 1);
1535 : 8663504 : tree low_bound1 = cheap_array_ref_low_bound (ref1);
1536 : 8663504 : tree low_bound2 = cheap_array_ref_low_bound (ref2);
1537 : :
1538 : : /* Handle zero offsets first: we do not need to match type size in this
1539 : : case. */
1540 : 8663504 : if (operand_equal_p (index1, low_bound1, 0)
1541 : 8663504 : && operand_equal_p (index2, low_bound2, 0))
1542 : : return 0;
1543 : :
1544 : : /* If type sizes are different, give up.
1545 : :
1546 : : Avoid expensive array_ref_element_size.
1547 : : If operand 3 is present it denotes size in the alignmnet units.
1548 : : Otherwise size is TYPE_SIZE of the element type.
1549 : : Handle only common cases where types are of the same "kind". */
1550 : 8594666 : if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1551 : : return -1;
1552 : :
1553 : 8594666 : tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1554 : 8594666 : tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1555 : :
1556 : 8594666 : if (TREE_OPERAND (ref1, 3))
1557 : : {
1558 : 4032 : if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1559 : 8064 : || !operand_equal_p (TREE_OPERAND (ref1, 3),
1560 : 4032 : TREE_OPERAND (ref2, 3), 0))
1561 : 646 : return -1;
1562 : : }
1563 : : else
1564 : : {
1565 : 8590634 : if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1566 : 8590634 : TYPE_SIZE_UNIT (elmt_type2), 0))
1567 : : return -1;
1568 : : }
1569 : :
1570 : : /* Since we know that type sizes are the same, there is no need to return
1571 : : -1 after this point. Partial overlap can not be introduced. */
1572 : :
1573 : : /* We may need to fold trees in this case.
1574 : : TODO: Handle integer constant case at least. */
1575 : 8585864 : if (!operand_equal_p (low_bound1, low_bound2, 0))
1576 : : return 0;
1577 : :
1578 : 8585864 : if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1579 : : {
1580 : 301857 : if (tree_int_cst_equal (index1, index2))
1581 : : return 0;
1582 : : return 1;
1583 : : }
1584 : : /* TODO: We can use VRP to further disambiguate here. */
1585 : : return 0;
1586 : : }
1587 : :
1588 : : /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1589 : : MATCH2 either point to the same address or are disjoint.
1590 : : MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1591 : : respectively or NULL in the case we established equivalence of bases.
1592 : : If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1593 : : overlap by exact multiply of their element size.
1594 : :
1595 : : This test works by matching the initial segment of the access path
1596 : : and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1597 : : match was determined without use of TBAA oracle.
1598 : :
1599 : : Return 1 if we can determine that component references REF1 and REF2,
1600 : : that are within a common DECL, cannot overlap.
1601 : :
1602 : : Return 0 if paths are same and thus there is nothing to disambiguate more
1603 : : (i.e. there is must alias assuming there is must alias between MATCH1 and
1604 : : MATCH2)
1605 : :
1606 : : Return -1 if we can not determine 0 or 1 - this happens when we met
1607 : : non-matching types was met in the path.
1608 : : In this case it may make sense to continue by other disambiguation
1609 : : oracles. */
1610 : :
1611 : : static int
1612 : 9136067 : nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1613 : : tree match2, tree ref2,
1614 : : bool partial_overlap)
1615 : : {
1616 : 9136067 : int ntbaa1 = 0, ntbaa2 = 0;
1617 : : /* Early return if there are no references to match, we do not need
1618 : : to walk the access paths.
1619 : :
1620 : : Do not consider this as may-alias for stats - it is more useful
1621 : : to have information how many disambiguations happened provided that
1622 : : the query was meaningful. */
1623 : :
1624 : 8354755 : if (match1 == ref1 || !handled_component_p (ref1)
1625 : 17489400 : || match2 == ref2 || !handled_component_p (ref2))
1626 : : return -1;
1627 : :
1628 : 8334858 : auto_vec<tree, 16> component_refs1;
1629 : 8334858 : auto_vec<tree, 16> component_refs2;
1630 : :
1631 : : /* Create the stack of handled components for REF1. */
1632 : 21138086 : while (handled_component_p (ref1) && ref1 != match1)
1633 : : {
1634 : : /* We use TBAA only to re-synchronize after mismatched refs. So we
1635 : : do not need to truncate access path after TBAA part ends. */
1636 : 12803228 : if (ends_tbaa_access_path_p (ref1))
1637 : : ntbaa1 = 0;
1638 : : else
1639 : 12603618 : ntbaa1++;
1640 : 12803228 : component_refs1.safe_push (ref1);
1641 : 12803228 : ref1 = TREE_OPERAND (ref1, 0);
1642 : : }
1643 : :
1644 : : /* Create the stack of handled components for REF2. */
1645 : 21073648 : while (handled_component_p (ref2) && ref2 != match2)
1646 : : {
1647 : 12738790 : if (ends_tbaa_access_path_p (ref2))
1648 : : ntbaa2 = 0;
1649 : : else
1650 : 12515461 : ntbaa2++;
1651 : 12738790 : component_refs2.safe_push (ref2);
1652 : 12738790 : ref2 = TREE_OPERAND (ref2, 0);
1653 : : }
1654 : :
1655 : 8334858 : if (!flag_strict_aliasing)
1656 : : {
1657 : 489870 : ntbaa1 = 0;
1658 : 489870 : ntbaa2 = 0;
1659 : : }
1660 : :
1661 : 8334858 : bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1662 : 8334858 : bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1663 : :
1664 : : /* If only one of access path starts with MEM_REF check that offset is 0
1665 : : so the addresses stays the same after stripping it.
1666 : : TODO: In this case we may walk the other access path until we get same
1667 : : offset.
1668 : :
1669 : : If both starts with MEM_REF, offset has to be same. */
1670 : 82248 : if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1671 : 8333681 : || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1672 : 16661010 : || (mem_ref1 && mem_ref2
1673 : 1124675 : && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1674 : 1124675 : TREE_OPERAND (ref2, 1))))
1675 : : {
1676 : 39353 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1677 : 39353 : return -1;
1678 : : }
1679 : :
1680 : : /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1681 : : to handle them here at all. */
1682 : 8295505 : gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1683 : : && TREE_CODE (ref2) != TARGET_MEM_REF);
1684 : :
1685 : : /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1686 : : rank. This is sufficient because we start from the same DECL and you
1687 : : cannot reference several fields at a time with COMPONENT_REFs (unlike
1688 : : with ARRAY_RANGE_REFs for arrays) so you always need the same number
1689 : : of them to access a sub-component, unless you're in a union, in which
1690 : : case the return value will precisely be false. */
1691 : 10500101 : while (true)
1692 : : {
1693 : : /* Track if we seen unmatched ref with non-zero offset. In this case
1694 : : we must look for partial overlaps. */
1695 : 10500101 : bool seen_unmatched_ref_p = false;
1696 : :
1697 : : /* First match ARRAY_REFs an try to disambiguate. */
1698 : 20300198 : if (!component_refs1.is_empty ()
1699 : 10235948 : && !component_refs2.is_empty ())
1700 : : {
1701 : 18820877 : unsigned int narray_refs1=0, narray_refs2=0;
1702 : :
1703 : : /* We generally assume that both access paths starts by same sequence
1704 : : of refs. However if number of array refs is not in sync, try
1705 : : to recover and pop elts until number match. This helps the case
1706 : : where one access path starts by array and other by element. */
1707 : 18820877 : for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1708 : : narray_refs1++)
1709 : 12484888 : if (TREE_CODE (component_refs1 [component_refs1.length()
1710 : : - 1 - narray_refs1]) != ARRAY_REF)
1711 : : break;
1712 : :
1713 : 18822254 : for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1714 : : narray_refs2++)
1715 : 12495621 : if (TREE_CODE (component_refs2 [component_refs2.length()
1716 : : - 1 - narray_refs2]) != ARRAY_REF)
1717 : : break;
1718 : 9774805 : for (; narray_refs1 > narray_refs2; narray_refs1--)
1719 : : {
1720 : 15932 : ref1 = component_refs1.pop ();
1721 : 15932 : ntbaa1--;
1722 : :
1723 : : /* If index is non-zero we need to check whether the reference
1724 : : does not break the main invariant that bases are either
1725 : : disjoint or equal. Consider the example:
1726 : :
1727 : : unsigned char out[][1];
1728 : : out[1]="a";
1729 : : out[i][0];
1730 : :
1731 : : Here bases out and out are same, but after removing the
1732 : : [i] index, this invariant no longer holds, because
1733 : : out[i] points to the middle of array out.
1734 : :
1735 : : TODO: If size of type of the skipped reference is an integer
1736 : : multiply of the size of type of the other reference this
1737 : : invariant can be verified, but even then it is not completely
1738 : : safe with !flag_strict_aliasing if the other reference contains
1739 : : unbounded array accesses.
1740 : : See */
1741 : :
1742 : 15932 : if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1743 : 15932 : cheap_array_ref_low_bound (ref1), 0))
1744 : : return 0;
1745 : : }
1746 : 9758896 : for (; narray_refs2 > narray_refs1; narray_refs2--)
1747 : : {
1748 : 17309 : ref2 = component_refs2.pop ();
1749 : 17309 : ntbaa2--;
1750 : 17309 : if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1751 : 17309 : cheap_array_ref_low_bound (ref2), 0))
1752 : : return 0;
1753 : : }
1754 : : /* Try to disambiguate matched arrays. */
1755 : 18172962 : for (unsigned int i = 0; i < narray_refs1; i++)
1756 : : {
1757 : 17327008 : int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1758 : 8663504 : component_refs2.pop ());
1759 : 8663504 : ntbaa1--;
1760 : 8663504 : ntbaa2--;
1761 : 8663504 : if (cmp == 1 && !partial_overlap)
1762 : : {
1763 : 223327 : ++alias_stats
1764 : 223327 : .nonoverlapping_refs_since_match_p_no_alias;
1765 : 223327 : return 1;
1766 : : }
1767 : 8440177 : if (cmp == -1)
1768 : : {
1769 : 8802 : seen_unmatched_ref_p = true;
1770 : : /* We can not maintain the invariant that bases are either
1771 : : same or completely disjoint. However we can still recover
1772 : : from type based alias analysis if we reach references to
1773 : : same sizes. We do not attempt to match array sizes, so
1774 : : just finish array walking and look for component refs. */
1775 : 8802 : if (ntbaa1 < 0 || ntbaa2 < 0)
1776 : : {
1777 : 7937 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1778 : 7937 : return -1;
1779 : : }
1780 : 1804 : for (i++; i < narray_refs1; i++)
1781 : : {
1782 : 939 : component_refs1.pop ();
1783 : 939 : component_refs2.pop ();
1784 : 939 : ntbaa1--;
1785 : 939 : ntbaa2--;
1786 : : }
1787 : : break;
1788 : : }
1789 : 8431375 : partial_overlap = false;
1790 : : }
1791 : : }
1792 : :
1793 : : /* Next look for component_refs. */
1794 : 10306111 : do
1795 : : {
1796 : 10306111 : if (component_refs1.is_empty ())
1797 : : {
1798 : 6886053 : ++alias_stats
1799 : 6886053 : .nonoverlapping_refs_since_match_p_must_overlap;
1800 : 6886053 : return 0;
1801 : : }
1802 : 3420058 : ref1 = component_refs1.pop ();
1803 : 3420058 : ntbaa1--;
1804 : 3420058 : if (TREE_CODE (ref1) != COMPONENT_REF)
1805 : : {
1806 : 109080 : seen_unmatched_ref_p = true;
1807 : 109080 : if (ntbaa1 < 0 || ntbaa2 < 0)
1808 : : {
1809 : 38917 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1810 : 38917 : return -1;
1811 : : }
1812 : : }
1813 : : }
1814 : 6692119 : while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1815 : :
1816 : 3310978 : do
1817 : : {
1818 : 3310978 : if (component_refs2.is_empty ())
1819 : : {
1820 : 18239 : ++alias_stats
1821 : 18239 : .nonoverlapping_refs_since_match_p_must_overlap;
1822 : 18239 : return 0;
1823 : : }
1824 : 3292739 : ref2 = component_refs2.pop ();
1825 : 3292739 : ntbaa2--;
1826 : 3292739 : if (TREE_CODE (ref2) != COMPONENT_REF)
1827 : : {
1828 : 8 : if (ntbaa1 < 0 || ntbaa2 < 0)
1829 : : {
1830 : 8 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1831 : 8 : return -1;
1832 : : }
1833 : : seen_unmatched_ref_p = true;
1834 : : }
1835 : : }
1836 : 6585462 : while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1837 : :
1838 : : /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1839 : : earlier. */
1840 : 3292731 : gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1841 : : && TREE_CODE (ref2) == COMPONENT_REF);
1842 : :
1843 : 3292731 : tree field1 = TREE_OPERAND (ref1, 1);
1844 : 3292731 : tree field2 = TREE_OPERAND (ref2, 1);
1845 : :
1846 : : /* ??? We cannot simply use the type of operand #0 of the refs here
1847 : : as the Fortran compiler smuggles type punning into COMPONENT_REFs
1848 : : for common blocks instead of using unions like everyone else. */
1849 : 3292731 : tree type1 = DECL_CONTEXT (field1);
1850 : 3292731 : tree type2 = DECL_CONTEXT (field2);
1851 : :
1852 : 3292731 : partial_overlap = false;
1853 : :
1854 : : /* If we skipped array refs on type of different sizes, we can
1855 : : no longer be sure that there are not partial overlaps. */
1856 : 432 : if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1857 : 3293163 : && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1858 : : {
1859 : 0 : ++alias_stats
1860 : 0 : .nonoverlapping_refs_since_match_p_may_alias;
1861 : 0 : return -1;
1862 : : }
1863 : :
1864 : 3292731 : int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1865 : 3292731 : if (cmp == -1)
1866 : : {
1867 : 3038 : ++alias_stats
1868 : 3038 : .nonoverlapping_refs_since_match_p_may_alias;
1869 : 3038 : return -1;
1870 : : }
1871 : 3289693 : else if (cmp == 1)
1872 : : {
1873 : 1085097 : ++alias_stats
1874 : 1085097 : .nonoverlapping_refs_since_match_p_no_alias;
1875 : 1085097 : return 1;
1876 : : }
1877 : : }
1878 : 8334858 : }
1879 : :
1880 : : /* Return TYPE_UID which can be used to match record types we consider
1881 : : same for TBAA purposes. */
1882 : :
1883 : : static inline int
1884 : 148542 : ncr_type_uid (const_tree field)
1885 : : {
1886 : : /* ??? We cannot simply use the type of operand #0 of the refs here
1887 : : as the Fortran compiler smuggles type punning into COMPONENT_REFs
1888 : : for common blocks instead of using unions like everyone else. */
1889 : 148542 : tree type = DECL_FIELD_CONTEXT (field);
1890 : : /* With LTO types considered same_type_for_tbaa_p
1891 : : from different translation unit may not have same
1892 : : main variant. They however have same TYPE_CANONICAL. */
1893 : 148542 : if (TYPE_CANONICAL (type))
1894 : 148542 : return TYPE_UID (TYPE_CANONICAL (type));
1895 : 0 : return TYPE_UID (type);
1896 : : }
1897 : :
1898 : : /* qsort compare function to sort FIELD_DECLs after their
1899 : : DECL_FIELD_CONTEXT TYPE_UID. */
1900 : :
1901 : : static inline int
1902 : 62330 : ncr_compar (const void *field1_, const void *field2_)
1903 : : {
1904 : 62330 : const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1905 : 62330 : const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1906 : 62330 : unsigned int uid1 = ncr_type_uid (field1);
1907 : 62330 : unsigned int uid2 = ncr_type_uid (field2);
1908 : :
1909 : 62330 : if (uid1 < uid2)
1910 : : return -1;
1911 : 23934 : else if (uid1 > uid2)
1912 : 23934 : return 1;
1913 : : return 0;
1914 : : }
1915 : :
1916 : : /* Return true if we can determine that the fields referenced cannot
1917 : : overlap for any pair of objects. This relies on TBAA. */
1918 : :
1919 : : static bool
1920 : 1245238 : nonoverlapping_component_refs_p (const_tree x, const_tree y)
1921 : : {
1922 : : /* Early return if we have nothing to do.
1923 : :
1924 : : Do not consider this as may-alias for stats - it is more useful
1925 : : to have information how many disambiguations happened provided that
1926 : : the query was meaningful. */
1927 : 1245238 : if (!flag_strict_aliasing
1928 : 1245238 : || !x || !y
1929 : 176966 : || !handled_component_p (x)
1930 : 1245238 : || !handled_component_p (y))
1931 : : return false;
1932 : :
1933 : 120466 : auto_vec<const_tree, 16> fieldsx;
1934 : 379459 : while (handled_component_p (x))
1935 : : {
1936 : 258993 : if (TREE_CODE (x) == COMPONENT_REF)
1937 : : {
1938 : 147913 : tree field = TREE_OPERAND (x, 1);
1939 : 147913 : tree type = DECL_FIELD_CONTEXT (field);
1940 : 147913 : if (TREE_CODE (type) == RECORD_TYPE)
1941 : 147627 : fieldsx.safe_push (field);
1942 : : }
1943 : 111080 : else if (ends_tbaa_access_path_p (x))
1944 : 2267 : fieldsx.truncate (0);
1945 : 258993 : x = TREE_OPERAND (x, 0);
1946 : : }
1947 : 208289 : if (fieldsx.length () == 0)
1948 : : return false;
1949 : 87823 : auto_vec<const_tree, 16> fieldsy;
1950 : 207981 : while (handled_component_p (y))
1951 : : {
1952 : 120158 : if (TREE_CODE (y) == COMPONENT_REF)
1953 : : {
1954 : 35210 : tree field = TREE_OPERAND (y, 1);
1955 : 35210 : tree type = DECL_FIELD_CONTEXT (field);
1956 : 35210 : if (TREE_CODE (type) == RECORD_TYPE)
1957 : 34924 : fieldsy.safe_push (TREE_OPERAND (y, 1));
1958 : : }
1959 : 84948 : else if (ends_tbaa_access_path_p (y))
1960 : 115 : fieldsy.truncate (0);
1961 : 120158 : y = TREE_OPERAND (y, 0);
1962 : : }
1963 : 87823 : if (fieldsy.length () == 0)
1964 : : {
1965 : 61133 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1966 : 61133 : return false;
1967 : : }
1968 : :
1969 : : /* Most common case first. */
1970 : 26690 : if (fieldsx.length () == 1
1971 : 26690 : && fieldsy.length () == 1)
1972 : : {
1973 : 43052 : if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1974 : 21526 : DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1975 : 42049 : && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1976 : : {
1977 : 17149 : ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1978 : 17149 : return true;
1979 : : }
1980 : : else
1981 : : {
1982 : 4377 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1983 : 4377 : return false;
1984 : : }
1985 : : }
1986 : :
1987 : 5164 : if (fieldsx.length () == 2)
1988 : : {
1989 : 1884 : if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1990 : 134 : std::swap (fieldsx[0], fieldsx[1]);
1991 : : }
1992 : : else
1993 : 3280 : fieldsx.qsort (ncr_compar);
1994 : :
1995 : 5164 : if (fieldsy.length () == 2)
1996 : : {
1997 : 176 : if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1998 : 112 : std::swap (fieldsy[0], fieldsy[1]);
1999 : : }
2000 : : else
2001 : 4988 : fieldsy.qsort (ncr_compar);
2002 : :
2003 : : unsigned i = 0, j = 0;
2004 : 11941 : do
2005 : : {
2006 : 11941 : const_tree fieldx = fieldsx[i];
2007 : 11941 : const_tree fieldy = fieldsy[j];
2008 : :
2009 : : /* We're left with accessing different fields of a structure,
2010 : : no possible overlap. */
2011 : 23882 : if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
2012 : 11941 : DECL_FIELD_CONTEXT (fieldy)) == 1
2013 : 11941 : && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
2014 : : {
2015 : 0 : ++alias_stats.nonoverlapping_component_refs_p_no_alias;
2016 : 0 : return true;
2017 : : }
2018 : :
2019 : 11941 : if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
2020 : : {
2021 : 5371 : i++;
2022 : 10742 : if (i == fieldsx.length ())
2023 : : break;
2024 : : }
2025 : : else
2026 : : {
2027 : 6570 : j++;
2028 : 13140 : if (j == fieldsy.length ())
2029 : : break;
2030 : : }
2031 : : }
2032 : : while (1);
2033 : :
2034 : 5164 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
2035 : 5164 : return false;
2036 : 208289 : }
2037 : :
2038 : :
2039 : : /* Return true if two memory references based on the variables BASE1
2040 : : and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2041 : : [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2042 : : if non-NULL are the complete memory reference trees. */
2043 : :
2044 : : static bool
2045 : 1478505393 : decl_refs_may_alias_p (tree ref1, tree base1,
2046 : : poly_int64 offset1, poly_int64 max_size1,
2047 : : poly_int64 size1,
2048 : : tree ref2, tree base2,
2049 : : poly_int64 offset2, poly_int64 max_size2,
2050 : : poly_int64 size2)
2051 : : {
2052 : 1478505393 : gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2053 : :
2054 : : /* If both references are based on different variables, they cannot alias. */
2055 : 1478505393 : if (compare_base_decls (base1, base2) == 0)
2056 : : return false;
2057 : :
2058 : : /* If both references are based on the same variable, they cannot alias if
2059 : : the accesses do not overlap. */
2060 : 193803393 : if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2061 : : return false;
2062 : :
2063 : : /* If there is must alias, there is no use disambiguating further. */
2064 : 63844779 : if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2065 : : return true;
2066 : :
2067 : : /* For components with variable position, the above test isn't sufficient,
2068 : : so we disambiguate component references manually. */
2069 : 10572592 : if (ref1 && ref2
2070 : 8277765 : && handled_component_p (ref1) && handled_component_p (ref2)
2071 : 17790342 : && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
2072 : : return false;
2073 : :
2074 : : return true;
2075 : : }
2076 : :
2077 : : /* Return true if access with BASE is view converted.
2078 : : Base must not be stripped from inner MEM_REF (&decl)
2079 : : which is done by ao_ref_base and thus one extra walk
2080 : : of handled components is needed. */
2081 : :
2082 : : bool
2083 : 1234617520 : view_converted_memref_p (tree base)
2084 : : {
2085 : 1234617520 : if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2086 : : return false;
2087 : 812388062 : return (same_type_for_tbaa (TREE_TYPE (base),
2088 : 812388062 : TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1))))
2089 : 812388062 : != 1);
2090 : : }
2091 : :
2092 : : /* Return true if an indirect reference based on *PTR1 constrained
2093 : : to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2094 : : constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2095 : : the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2096 : : in which case they are computed on-demand. REF1 and REF2
2097 : : if non-NULL are the complete memory reference trees. */
2098 : :
2099 : : static bool
2100 : 305560232 : indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2101 : : poly_int64 offset1, poly_int64 max_size1,
2102 : : poly_int64 size1,
2103 : : alias_set_type ref1_alias_set,
2104 : : alias_set_type base1_alias_set,
2105 : : tree ref2 ATTRIBUTE_UNUSED, tree base2,
2106 : : poly_int64 offset2, poly_int64 max_size2,
2107 : : poly_int64 size2,
2108 : : alias_set_type ref2_alias_set,
2109 : : alias_set_type base2_alias_set, bool tbaa_p)
2110 : : {
2111 : 305560232 : tree ptr1;
2112 : 305560232 : tree ptrtype1, dbase2;
2113 : :
2114 : 305560232 : gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2115 : : || TREE_CODE (base1) == TARGET_MEM_REF)
2116 : : && DECL_P (base2));
2117 : :
2118 : 305560232 : ptr1 = TREE_OPERAND (base1, 0);
2119 : 305560232 : poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2120 : :
2121 : : /* If only one reference is based on a variable, they cannot alias if
2122 : : the pointer access is beyond the extent of the variable access.
2123 : : (the pointer base cannot validly point to an offset less than zero
2124 : : of the variable).
2125 : : ??? IVOPTs creates bases that do not honor this restriction,
2126 : : so do not apply this optimization for TARGET_MEM_REFs. */
2127 : 305560232 : if (TREE_CODE (base1) != TARGET_MEM_REF
2128 : 305560232 : && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2129 : 73955464 : return false;
2130 : :
2131 : : /* If the pointer based access is bigger than the variable they cannot
2132 : : alias. This is similar to the check below where we use TBAA to
2133 : : increase the size of the pointer based access based on the dynamic
2134 : : type of a containing object we can infer from it. */
2135 : 231604768 : poly_int64 dsize2;
2136 : 231604768 : if (known_size_p (size1)
2137 : 217097693 : && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2138 : 413434274 : && known_lt (dsize2, size1))
2139 : : return false;
2140 : :
2141 : : /* They also cannot alias if the pointer may not point to the decl. */
2142 : 212529058 : if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2143 : : return false;
2144 : :
2145 : : /* Disambiguations that rely on strict aliasing rules follow. */
2146 : 30832468 : if (!flag_strict_aliasing || !tbaa_p)
2147 : : return true;
2148 : :
2149 : : /* If the alias set for a pointer access is zero all bets are off. */
2150 : 6194391 : if (base1_alias_set == 0 || base2_alias_set == 0)
2151 : : return true;
2152 : :
2153 : : /* When we are trying to disambiguate an access with a pointer dereference
2154 : : as base versus one with a decl as base we can use both the size
2155 : : of the decl and its dynamic type for extra disambiguation.
2156 : : ??? We do not know anything about the dynamic type of the decl
2157 : : other than that its alias-set contains base2_alias_set as a subset
2158 : : which does not help us here. */
2159 : : /* As we know nothing useful about the dynamic type of the decl just
2160 : : use the usual conflict check rather than a subset test.
2161 : : ??? We could introduce -fvery-strict-aliasing when the language
2162 : : does not allow decls to have a dynamic type that differs from their
2163 : : static type. Then we can check
2164 : : !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2165 : 1456788 : if (base1_alias_set != base2_alias_set
2166 : 1456788 : && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2167 : : return false;
2168 : :
2169 : 1255924 : ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2170 : :
2171 : : /* If the size of the access relevant for TBAA through the pointer
2172 : : is bigger than the size of the decl we can't possibly access the
2173 : : decl via that pointer. */
2174 : 1255924 : if (/* ??? This in turn may run afoul when a decl of type T which is
2175 : : a member of union type U is accessed through a pointer to
2176 : : type U and sizeof T is smaller than sizeof U. */
2177 : 1255924 : TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2178 : 1240421 : && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2179 : 2496345 : && compare_sizes (DECL_SIZE (base2),
2180 : 1240421 : TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2181 : : return false;
2182 : :
2183 : 1228274 : if (!ref2)
2184 : : return true;
2185 : :
2186 : : /* If the decl is accessed via a MEM_REF, reconstruct the base
2187 : : we can use for TBAA and an appropriately adjusted offset. */
2188 : : dbase2 = ref2;
2189 : 1958473 : while (handled_component_p (dbase2))
2190 : 800497 : dbase2 = TREE_OPERAND (dbase2, 0);
2191 : 1157976 : poly_int64 doffset1 = offset1;
2192 : 1157976 : poly_offset_int doffset2 = offset2;
2193 : 1157976 : if (TREE_CODE (dbase2) == MEM_REF
2194 : 1157976 : || TREE_CODE (dbase2) == TARGET_MEM_REF)
2195 : : {
2196 : 822492 : doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2197 : 411246 : tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2198 : : /* If second reference is view-converted, give up now. */
2199 : 411246 : if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2200 : : return true;
2201 : : }
2202 : :
2203 : : /* If first reference is view-converted, give up now. */
2204 : 1051341 : if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2205 : : return true;
2206 : :
2207 : : /* If both references are through the same type, they do not alias
2208 : : if the accesses do not overlap. This does extra disambiguation
2209 : : for mixed/pointer accesses but requires strict aliasing.
2210 : : For MEM_REFs we require that the component-ref offset we computed
2211 : : is relative to the start of the type which we ensure by
2212 : : comparing rvalue and access type and disregarding the constant
2213 : : pointer offset.
2214 : :
2215 : : But avoid treating variable length arrays as "objects", instead assume they
2216 : : can overlap by an exact multiple of their element size.
2217 : : See gcc.dg/torture/alias-2.c. */
2218 : 957969 : if (((TREE_CODE (base1) != TARGET_MEM_REF
2219 : 160846 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2220 : 930257 : && (TREE_CODE (dbase2) != TARGET_MEM_REF
2221 : 22305 : || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2222 : 1866050 : && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2223 : : {
2224 : 320034 : bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2225 : 320034 : && (TYPE_SIZE (TREE_TYPE (base1))
2226 : 1191 : && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2227 : 623169 : != INTEGER_CST));
2228 : 320034 : if (!partial_overlap
2229 : 320034 : && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2230 : : return false;
2231 : 303135 : if (!ref1 || !ref2
2232 : : /* If there is must alias, there is no use disambiguating further. */
2233 : 303135 : || (!partial_overlap
2234 : 291540 : && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2235 : : return true;
2236 : 2951 : int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2237 : : partial_overlap);
2238 : 2951 : if (res == -1)
2239 : 2851 : return !nonoverlapping_component_refs_p (ref1, ref2);
2240 : 100 : return !res;
2241 : : }
2242 : :
2243 : : /* Do access-path based disambiguation. */
2244 : 637935 : if (ref1 && ref2
2245 : 1059957 : && (handled_component_p (ref1) || handled_component_p (ref2)))
2246 : 462109 : return aliasing_component_refs_p (ref1,
2247 : : ref1_alias_set, base1_alias_set,
2248 : : offset1, max_size1,
2249 : : ref2,
2250 : : ref2_alias_set, base2_alias_set,
2251 : 462109 : offset2, max_size2);
2252 : :
2253 : : return true;
2254 : : }
2255 : :
2256 : : /* Return true if two indirect references based on *PTR1
2257 : : and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2258 : : [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2259 : : the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2260 : : in which case they are computed on-demand. REF1 and REF2
2261 : : if non-NULL are the complete memory reference trees. */
2262 : :
2263 : : static bool
2264 : 88077835 : indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2265 : : poly_int64 offset1, poly_int64 max_size1,
2266 : : poly_int64 size1,
2267 : : alias_set_type ref1_alias_set,
2268 : : alias_set_type base1_alias_set,
2269 : : tree ref2 ATTRIBUTE_UNUSED, tree base2,
2270 : : poly_int64 offset2, poly_int64 max_size2,
2271 : : poly_int64 size2,
2272 : : alias_set_type ref2_alias_set,
2273 : : alias_set_type base2_alias_set, bool tbaa_p)
2274 : : {
2275 : 88077835 : tree ptr1;
2276 : 88077835 : tree ptr2;
2277 : 88077835 : tree ptrtype1, ptrtype2;
2278 : :
2279 : 88077835 : gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2280 : : || TREE_CODE (base1) == TARGET_MEM_REF)
2281 : : && (TREE_CODE (base2) == MEM_REF
2282 : : || TREE_CODE (base2) == TARGET_MEM_REF));
2283 : :
2284 : 88077835 : ptr1 = TREE_OPERAND (base1, 0);
2285 : 88077835 : ptr2 = TREE_OPERAND (base2, 0);
2286 : :
2287 : : /* If both bases are based on pointers they cannot alias if they may not
2288 : : point to the same memory object or if they point to the same object
2289 : : and the accesses do not overlap. */
2290 : 88077835 : if ((!cfun || gimple_in_ssa_p (cfun))
2291 : 54623495 : && operand_equal_p (ptr1, ptr2, 0)
2292 : 117272282 : && (((TREE_CODE (base1) != TARGET_MEM_REF
2293 : 1050038 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2294 : 29117474 : && (TREE_CODE (base2) != TARGET_MEM_REF
2295 : 962606 : || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2296 : 91178 : || (TREE_CODE (base1) == TARGET_MEM_REF
2297 : 78002 : && TREE_CODE (base2) == TARGET_MEM_REF
2298 : 71530 : && (TMR_STEP (base1) == TMR_STEP (base2)
2299 : 8790 : || (TMR_STEP (base1) && TMR_STEP (base2)
2300 : 1928 : && operand_equal_p (TMR_STEP (base1),
2301 : 1928 : TMR_STEP (base2), 0)))
2302 : 62740 : && (TMR_INDEX (base1) == TMR_INDEX (base2)
2303 : 9312 : || (TMR_INDEX (base1) && TMR_INDEX (base2)
2304 : 8051 : && operand_equal_p (TMR_INDEX (base1),
2305 : 8051 : TMR_INDEX (base2), 0)))
2306 : 53428 : && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2307 : 0 : || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2308 : 0 : && operand_equal_p (TMR_INDEX2 (base1),
2309 : 0 : TMR_INDEX2 (base2), 0))))))
2310 : : {
2311 : 29156697 : poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2312 : 29156697 : poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2313 : 29156697 : if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2314 : 29156697 : offset2 + moff2, max_size2))
2315 : 28862852 : return false;
2316 : : /* If there is must alias, there is no use disambiguating further. */
2317 : 4496448 : if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2318 : : return true;
2319 : 1180489 : if (ref1 && ref2)
2320 : : {
2321 : 895458 : int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2322 : : false);
2323 : 895458 : if (res != -1)
2324 : 886644 : return !res;
2325 : : }
2326 : : }
2327 : 59214983 : if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2328 : : return false;
2329 : :
2330 : : /* Disambiguations that rely on strict aliasing rules follow. */
2331 : 36222772 : if (!flag_strict_aliasing || !tbaa_p)
2332 : : return true;
2333 : :
2334 : 13829558 : ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2335 : 13829558 : ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2336 : :
2337 : : /* If the alias set for a pointer access is zero all bets are off. */
2338 : 13829558 : if (base1_alias_set == 0
2339 : 13829558 : || base2_alias_set == 0)
2340 : : return true;
2341 : :
2342 : : /* Do type-based disambiguation. */
2343 : 9519151 : if (base1_alias_set != base2_alias_set
2344 : 9519151 : && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2345 : : return false;
2346 : :
2347 : : /* If either reference is view-converted, give up now. */
2348 : 8873322 : if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2349 : 8873322 : || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2350 : 2605465 : return true;
2351 : :
2352 : : /* If both references are through the same type, they do not alias
2353 : : if the accesses do not overlap. This does extra disambiguation
2354 : : for mixed/pointer accesses but requires strict aliasing. */
2355 : 6267857 : if ((TREE_CODE (base1) != TARGET_MEM_REF
2356 : 1150635 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2357 : 5976832 : && (TREE_CODE (base2) != TARGET_MEM_REF
2358 : 1011936 : || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2359 : 12137966 : && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2360 : 5870109 : TREE_TYPE (ptrtype2)) == 1)
2361 : : {
2362 : : /* But avoid treating arrays as "objects", instead assume they
2363 : : can overlap by an exact multiple of their element size.
2364 : : See gcc.dg/torture/alias-2.c. */
2365 : 3740128 : bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2366 : :
2367 : 3740128 : if (!partial_overlap
2368 : 3740128 : && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2369 : : return false;
2370 : 3405085 : if (!ref1 || !ref2
2371 : 3405085 : || (!partial_overlap
2372 : 2813154 : && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2373 : : return true;
2374 : 194312 : int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2375 : : partial_overlap);
2376 : 194312 : if (res == -1)
2377 : 72471 : return !nonoverlapping_component_refs_p (ref1, ref2);
2378 : 121841 : return !res;
2379 : : }
2380 : :
2381 : : /* Do access-path based disambiguation. */
2382 : 2527729 : if (ref1 && ref2
2383 : 3590354 : && (handled_component_p (ref1) || handled_component_p (ref2)))
2384 : 1899306 : return aliasing_component_refs_p (ref1,
2385 : : ref1_alias_set, base1_alias_set,
2386 : : offset1, max_size1,
2387 : : ref2,
2388 : : ref2_alias_set, base2_alias_set,
2389 : 1899306 : offset2, max_size2);
2390 : :
2391 : : return true;
2392 : : }
2393 : :
2394 : : /* Return true, if the two memory references REF1 and REF2 may alias. */
2395 : :
2396 : : static bool
2397 : 1951906084 : refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2398 : : {
2399 : 1951906084 : tree base1, base2;
2400 : 1951906084 : poly_int64 offset1 = 0, offset2 = 0;
2401 : 1951906084 : poly_int64 max_size1 = -1, max_size2 = -1;
2402 : 1951906084 : bool var1_p, var2_p, ind1_p, ind2_p;
2403 : :
2404 : 1951906084 : gcc_checking_assert ((!ref1->ref
2405 : : || TREE_CODE (ref1->ref) == SSA_NAME
2406 : : || DECL_P (ref1->ref)
2407 : : || TREE_CODE (ref1->ref) == STRING_CST
2408 : : || handled_component_p (ref1->ref)
2409 : : || TREE_CODE (ref1->ref) == MEM_REF
2410 : : || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2411 : : || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2412 : : && (!ref2->ref
2413 : : || TREE_CODE (ref2->ref) == SSA_NAME
2414 : : || DECL_P (ref2->ref)
2415 : : || TREE_CODE (ref2->ref) == STRING_CST
2416 : : || handled_component_p (ref2->ref)
2417 : : || TREE_CODE (ref2->ref) == MEM_REF
2418 : : || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2419 : : || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2420 : :
2421 : : /* Decompose the references into their base objects and the access. */
2422 : 1951906084 : base1 = ao_ref_base (ref1);
2423 : 1951906084 : offset1 = ref1->offset;
2424 : 1951906084 : max_size1 = ref1->max_size;
2425 : 1951906084 : base2 = ao_ref_base (ref2);
2426 : 1951906084 : offset2 = ref2->offset;
2427 : 1951906084 : max_size2 = ref2->max_size;
2428 : :
2429 : : /* We can end up with registers or constants as bases for example from
2430 : : *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2431 : : which is seen as a struct copy. */
2432 : 1951906084 : if (TREE_CODE (base1) == SSA_NAME
2433 : 1951904897 : || TREE_CODE (base1) == CONST_DECL
2434 : 1949987891 : || TREE_CODE (base1) == CONSTRUCTOR
2435 : 1949987891 : || TREE_CODE (base1) == ADDR_EXPR
2436 : 1949987891 : || CONSTANT_CLASS_P (base1)
2437 : 1942439624 : || TREE_CODE (base2) == SSA_NAME
2438 : 1942439624 : || TREE_CODE (base2) == CONST_DECL
2439 : 1942346583 : || TREE_CODE (base2) == CONSTRUCTOR
2440 : 1942346583 : || TREE_CODE (base2) == ADDR_EXPR
2441 : 1942346583 : || CONSTANT_CLASS_P (base2))
2442 : : return false;
2443 : :
2444 : : /* Two volatile accesses always conflict. */
2445 : 1942299681 : if (ref1->volatile_p
2446 : 5853859 : && ref2->volatile_p)
2447 : : return true;
2448 : :
2449 : : /* refN->ref may convey size information, do not confuse our workers
2450 : : with that but strip it - ao_ref_base took it into account already. */
2451 : 1938678544 : tree ref1ref = ref1->ref;
2452 : 1938678544 : if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2453 : 162 : ref1ref = TREE_OPERAND (ref1ref, 0);
2454 : 1938678544 : tree ref2ref = ref2->ref;
2455 : 1938678544 : if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2456 : 0 : ref2ref = TREE_OPERAND (ref2ref, 0);
2457 : :
2458 : : /* Defer to simple offset based disambiguation if we have
2459 : : references based on two decls. Do this before defering to
2460 : : TBAA to handle must-alias cases in conformance with the
2461 : : GCC extension of allowing type-punning through unions. */
2462 : 1938678544 : var1_p = DECL_P (base1);
2463 : 1938678544 : var2_p = DECL_P (base2);
2464 : 1938678544 : if (var1_p && var2_p)
2465 : 1478505393 : return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2466 : : ref1->size,
2467 : : ref2ref, base2, offset2, max_size2,
2468 : 1478505393 : ref2->size);
2469 : :
2470 : : /* We can end up referring to code via function and label decls.
2471 : : As we likely do not properly track code aliases conservatively
2472 : : bail out. */
2473 : 460173151 : if (TREE_CODE (base1) == FUNCTION_DECL
2474 : 460173151 : || TREE_CODE (base1) == LABEL_DECL
2475 : 459242109 : || TREE_CODE (base2) == FUNCTION_DECL
2476 : 459229092 : || TREE_CODE (base2) == LABEL_DECL)
2477 : : return true;
2478 : :
2479 : : /* Handle restrict based accesses.
2480 : : ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2481 : : here. */
2482 : 459229092 : tree rbase1 = base1;
2483 : 459229092 : tree rbase2 = base2;
2484 : 459229092 : if (var1_p)
2485 : : {
2486 : 221001193 : rbase1 = ref1ref;
2487 : 221001193 : if (rbase1)
2488 : 290042653 : while (handled_component_p (rbase1))
2489 : 105243842 : rbase1 = TREE_OPERAND (rbase1, 0);
2490 : : }
2491 : 459229092 : if (var2_p)
2492 : : {
2493 : 127332687 : rbase2 = ref2ref;
2494 : 127332687 : if (rbase2)
2495 : 201943996 : while (handled_component_p (rbase2))
2496 : 81746635 : rbase2 = TREE_OPERAND (rbase2, 0);
2497 : : }
2498 : 459229092 : if (rbase1 && rbase2
2499 : 415891384 : && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2500 : 261059935 : && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2501 : : /* If the accesses are in the same restrict clique... */
2502 : 175090528 : && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2503 : : /* But based on different pointers they do not alias. */
2504 : 603983742 : && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2505 : : return false;
2506 : :
2507 : 443375161 : ind1_p = (TREE_CODE (base1) == MEM_REF
2508 : 443375161 : || TREE_CODE (base1) == TARGET_MEM_REF);
2509 : 443375161 : ind2_p = (TREE_CODE (base2) == MEM_REF
2510 : 443375161 : || TREE_CODE (base2) == TARGET_MEM_REF);
2511 : :
2512 : : /* Canonicalize the pointer-vs-decl case. */
2513 : 443375161 : if (ind1_p && var2_p)
2514 : : {
2515 : 125698057 : std::swap (offset1, offset2);
2516 : 125698057 : std::swap (max_size1, max_size2);
2517 : 125698057 : std::swap (base1, base2);
2518 : 125698057 : std::swap (ref1, ref2);
2519 : 125698057 : std::swap (ref1ref, ref2ref);
2520 : 125698057 : var1_p = true;
2521 : 125698057 : ind1_p = false;
2522 : 125698057 : var2_p = false;
2523 : 125698057 : ind2_p = true;
2524 : : }
2525 : :
2526 : : /* First defer to TBAA if possible. */
2527 : 443375161 : if (tbaa_p
2528 : 204596493 : && flag_strict_aliasing
2529 : 576265334 : && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2530 : : ao_ref_alias_set (ref2)))
2531 : : return false;
2532 : :
2533 : : /* If the reference is based on a pointer that points to memory
2534 : : that may not be written to then the other reference cannot possibly
2535 : : clobber it. */
2536 : 394155106 : if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2537 : 392975540 : && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2538 : 786746650 : || (ind1_p
2539 : 88210878 : && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2540 : 88042222 : && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2541 : : return false;
2542 : :
2543 : : /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2544 : 393638067 : if (var1_p && ind2_p)
2545 : 305560232 : return indirect_ref_may_alias_decl_p (ref2ref, base2,
2546 : : offset2, max_size2, ref2->size,
2547 : : ao_ref_alias_set (ref2),
2548 : : ao_ref_base_alias_set (ref2),
2549 : : ref1ref, base1,
2550 : : offset1, max_size1, ref1->size,
2551 : : ao_ref_alias_set (ref1),
2552 : : ao_ref_base_alias_set (ref1),
2553 : 305560232 : tbaa_p);
2554 : 88077835 : else if (ind1_p && ind2_p)
2555 : 88077835 : return indirect_refs_may_alias_p (ref1ref, base1,
2556 : : offset1, max_size1, ref1->size,
2557 : : ao_ref_alias_set (ref1),
2558 : : ao_ref_base_alias_set (ref1),
2559 : : ref2ref, base2,
2560 : : offset2, max_size2, ref2->size,
2561 : : ao_ref_alias_set (ref2),
2562 : : ao_ref_base_alias_set (ref2),
2563 : 88077835 : tbaa_p);
2564 : :
2565 : 0 : gcc_unreachable ();
2566 : : }
2567 : :
2568 : : /* Return true, if the two memory references REF1 and REF2 may alias
2569 : : and update statistics. */
2570 : :
2571 : : bool
2572 : 1951906084 : refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2573 : : {
2574 : 1951906084 : bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2575 : 1951906084 : if (res)
2576 : 136195883 : ++alias_stats.refs_may_alias_p_may_alias;
2577 : : else
2578 : 1815710201 : ++alias_stats.refs_may_alias_p_no_alias;
2579 : 1951906084 : return res;
2580 : : }
2581 : :
2582 : : static bool
2583 : 60613160 : refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2584 : : {
2585 : 60613160 : ao_ref r1;
2586 : 60613160 : ao_ref_init (&r1, ref1);
2587 : 60613160 : return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2588 : : }
2589 : :
2590 : : bool
2591 : 1183614 : refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2592 : : {
2593 : 1183614 : ao_ref r1, r2;
2594 : 1183614 : ao_ref_init (&r1, ref1);
2595 : 1183614 : ao_ref_init (&r2, ref2);
2596 : 1183614 : return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2597 : : }
2598 : :
2599 : : /* Returns true if there is a anti-dependence for the STORE that
2600 : : executes after the LOAD. */
2601 : :
2602 : : bool
2603 : 504469 : refs_anti_dependent_p (tree load, tree store)
2604 : : {
2605 : 504469 : ao_ref r1, r2;
2606 : 504469 : ao_ref_init (&r1, load);
2607 : 504469 : ao_ref_init (&r2, store);
2608 : 504469 : return refs_may_alias_p_1 (&r1, &r2, false);
2609 : : }
2610 : :
2611 : : /* Returns true if there is a output dependence for the stores
2612 : : STORE1 and STORE2. */
2613 : :
2614 : : bool
2615 : 3165086 : refs_output_dependent_p (tree store1, tree store2)
2616 : : {
2617 : 3165086 : ao_ref r1, r2;
2618 : 3165086 : ao_ref_init (&r1, store1);
2619 : 3165086 : ao_ref_init (&r2, store2);
2620 : 3165086 : return refs_may_alias_p_1 (&r1, &r2, false);
2621 : : }
2622 : :
2623 : : /* Returns true if and only if REF may alias any access stored in TT.
2624 : : IF TBAA_P is true, use TBAA oracle. */
2625 : :
2626 : : static bool
2627 : 41902600 : modref_may_conflict (const gcall *stmt,
2628 : : modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2629 : : {
2630 : 41902600 : alias_set_type base_set, ref_set;
2631 : 41902600 : bool global_memory_ok = false;
2632 : :
2633 : 41902600 : if (tt->every_base)
2634 : : return true;
2635 : :
2636 : 8042799 : if (!dbg_cnt (ipa_mod_ref))
2637 : : return true;
2638 : :
2639 : 8042799 : base_set = ao_ref_base_alias_set (ref);
2640 : :
2641 : 8042799 : ref_set = ao_ref_alias_set (ref);
2642 : :
2643 : 8042799 : int num_tests = 0, max_tests = param_modref_max_tests;
2644 : 33518507 : for (auto base_node : tt->bases)
2645 : : {
2646 : 13089556 : if (tbaa_p && flag_strict_aliasing)
2647 : : {
2648 : 10280724 : if (num_tests >= max_tests)
2649 : : return true;
2650 : 10280724 : alias_stats.modref_tests++;
2651 : 10280724 : if (!alias_sets_conflict_p (base_set, base_node->base))
2652 : 3648918 : continue;
2653 : 6631806 : num_tests++;
2654 : : }
2655 : :
2656 : 9440638 : if (base_node->every_ref)
2657 : : return true;
2658 : :
2659 : 35949854 : for (auto ref_node : base_node->refs)
2660 : : {
2661 : : /* Do not repeat same test as before. */
2662 : 10972766 : if ((ref_set != base_set || base_node->base != ref_node->ref)
2663 : 6438375 : && tbaa_p && flag_strict_aliasing)
2664 : : {
2665 : 4275115 : if (num_tests >= max_tests)
2666 : : return true;
2667 : 4243843 : alias_stats.modref_tests++;
2668 : 4243843 : if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2669 : 1054146 : continue;
2670 : 3189697 : num_tests++;
2671 : : }
2672 : :
2673 : 9887348 : if (ref_node->every_access)
2674 : : return true;
2675 : :
2676 : : /* TBAA checks did not disambiguate, try individual accesses. */
2677 : 32126029 : for (auto access_node : ref_node->accesses)
2678 : : {
2679 : 9009195 : if (num_tests >= max_tests)
2680 : 1687394 : return true;
2681 : :
2682 : 9009195 : if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2683 : : {
2684 : 1601225 : if (global_memory_ok)
2685 : 873994 : continue;
2686 : 1601225 : if (ref_may_alias_global_p (ref, true))
2687 : : return true;
2688 : 850490 : global_memory_ok = true;
2689 : 850490 : num_tests++;
2690 : 850490 : continue;
2691 : : }
2692 : :
2693 : 7407970 : tree arg = access_node.get_call_arg (stmt);
2694 : 7407970 : if (!arg)
2695 : : return true;
2696 : :
2697 : 7406952 : alias_stats.modref_baseptr_tests++;
2698 : :
2699 : 7406952 : if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2700 : 23504 : continue;
2701 : :
2702 : : /* PTA oracle will be unhapy of arg is not an pointer. */
2703 : 7383448 : if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2704 : : return true;
2705 : :
2706 : : /* If we don't have base pointer, give up. */
2707 : 7383448 : if (!ref->ref && !ref->base)
2708 : 0 : continue;
2709 : :
2710 : 7383448 : ao_ref ref2;
2711 : 7383448 : if (access_node.get_ao_ref (stmt, &ref2))
2712 : : {
2713 : 5612030 : ref2.ref_alias_set = ref_node->ref;
2714 : 5612030 : ref2.base_alias_set = base_node->base;
2715 : 5612030 : if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2716 : : return true;
2717 : : }
2718 : 1771418 : else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2719 : : return true;
2720 : :
2721 : 6447807 : num_tests++;
2722 : : }
2723 : : }
2724 : : }
2725 : : return false;
2726 : : }
2727 : :
2728 : : /* Check if REF conflicts with call using "fn spec" attribute.
2729 : : If CLOBBER is true we are checking for writes, otherwise check loads.
2730 : :
2731 : : Return 0 if there are no conflicts (except for possible function call
2732 : : argument reads), 1 if there are conflicts and -1 if we can not decide by
2733 : : fn spec. */
2734 : :
2735 : : static int
2736 : 143376127 : check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2737 : : {
2738 : 143376127 : attr_fnspec fnspec = gimple_call_fnspec (call);
2739 : 143376127 : if (fnspec.known_p ())
2740 : : {
2741 : 71185304 : if (clobber
2742 : 71185304 : ? !fnspec.global_memory_written_p ()
2743 : 8057837 : : !fnspec.global_memory_read_p ())
2744 : : {
2745 : 84808496 : for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2746 : 82663442 : if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2747 : 58811091 : && (!fnspec.arg_specified_p (i)
2748 : 34901721 : || (clobber ? fnspec.arg_maybe_written_p (i)
2749 : 3493168 : : fnspec.arg_maybe_read_p (i))))
2750 : : {
2751 : 21479519 : ao_ref dref;
2752 : 21479519 : tree size = NULL_TREE;
2753 : 21479519 : unsigned int size_arg;
2754 : :
2755 : 21479519 : if (!fnspec.arg_specified_p (i))
2756 : : ;
2757 : 21476139 : else if (fnspec.arg_max_access_size_given_by_arg_p
2758 : 21476139 : (i, &size_arg))
2759 : 14918494 : size = gimple_call_arg (call, size_arg);
2760 : 6557645 : else if (fnspec.arg_access_size_given_by_type_p (i))
2761 : : {
2762 : 39487 : tree callee = gimple_call_fndecl (call);
2763 : 39487 : tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2764 : :
2765 : 79968 : for (unsigned int p = 0; p < i; p++)
2766 : 40481 : t = TREE_CHAIN (t);
2767 : 39487 : size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2768 : : }
2769 : 14957981 : poly_int64 size_hwi;
2770 : 14957981 : if (size
2771 : 14957981 : && poly_int_tree_p (size, &size_hwi)
2772 : 27711254 : && coeffs_in_range_p (size_hwi, 0,
2773 : : HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2774 : : {
2775 : 12751851 : size_hwi = size_hwi * BITS_PER_UNIT;
2776 : 12751851 : ao_ref_init_from_ptr_and_range (&dref,
2777 : : gimple_call_arg (call, i),
2778 : : true, 0, -1, size_hwi);
2779 : : }
2780 : : else
2781 : 8727668 : ao_ref_init_from_ptr_and_range (&dref,
2782 : : gimple_call_arg (call, i),
2783 : : false, 0, -1, -1);
2784 : 21479519 : if (refs_may_alias_p_1 (&dref, ref, false))
2785 : 4166707 : return 1;
2786 : : }
2787 : 26051044 : if (clobber
2788 : 22762048 : && fnspec.errno_maybe_written_p ()
2789 : 6354721 : && flag_errno_math
2790 : 27068988 : && targetm.ref_may_alias_errno (ref))
2791 : : return 1;
2792 : 26034646 : return 0;
2793 : : }
2794 : : }
2795 : :
2796 : : /* FIXME: we should handle barriers more consistently, but for now leave the
2797 : : check here. */
2798 : 113158376 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2799 : 5974455 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2800 : : {
2801 : : /* __sync_* builtins and some OpenMP builtins act as threading
2802 : : barriers. */
2803 : : #undef DEF_SYNC_BUILTIN
2804 : : #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2805 : : #include "sync-builtins.def"
2806 : : #undef DEF_SYNC_BUILTIN
2807 : : case BUILT_IN_GOMP_ATOMIC_START:
2808 : : case BUILT_IN_GOMP_ATOMIC_END:
2809 : : case BUILT_IN_GOMP_BARRIER:
2810 : : case BUILT_IN_GOMP_BARRIER_CANCEL:
2811 : : case BUILT_IN_GOMP_TASKWAIT:
2812 : : case BUILT_IN_GOMP_TASKGROUP_END:
2813 : : case BUILT_IN_GOMP_CRITICAL_START:
2814 : : case BUILT_IN_GOMP_CRITICAL_END:
2815 : : case BUILT_IN_GOMP_CRITICAL_NAME_START:
2816 : : case BUILT_IN_GOMP_CRITICAL_NAME_END:
2817 : : case BUILT_IN_GOMP_LOOP_END:
2818 : : case BUILT_IN_GOMP_LOOP_END_CANCEL:
2819 : : case BUILT_IN_GOMP_ORDERED_START:
2820 : : case BUILT_IN_GOMP_ORDERED_END:
2821 : : case BUILT_IN_GOMP_SECTIONS_END:
2822 : : case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2823 : : case BUILT_IN_GOMP_SINGLE_COPY_START:
2824 : : case BUILT_IN_GOMP_SINGLE_COPY_END:
2825 : : return 1;
2826 : :
2827 : : default:
2828 : : return -1;
2829 : : }
2830 : : return -1;
2831 : : }
2832 : :
2833 : : /* If the call CALL may use the memory reference REF return true,
2834 : : otherwise return false. */
2835 : :
2836 : : static bool
2837 : 43227900 : ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2838 : : {
2839 : 43227900 : tree base, callee;
2840 : 43227900 : unsigned i;
2841 : 43227900 : int flags = gimple_call_flags (call);
2842 : :
2843 : 43227900 : if (flags & (ECF_CONST|ECF_NOVOPS))
2844 : 60473 : goto process_args;
2845 : :
2846 : : /* A call that is not without side-effects might involve volatile
2847 : : accesses and thus conflicts with all other volatile accesses. */
2848 : 43167427 : if (ref->volatile_p)
2849 : : return true;
2850 : :
2851 : 43167225 : if (gimple_call_internal_p (call))
2852 : 44361 : switch (gimple_call_internal_fn (call))
2853 : : {
2854 : : case IFN_MASK_STORE:
2855 : : case IFN_SCATTER_STORE:
2856 : : case IFN_MASK_SCATTER_STORE:
2857 : : case IFN_LEN_STORE:
2858 : : case IFN_MASK_LEN_STORE:
2859 : : return false;
2860 : 0 : case IFN_MASK_STORE_LANES:
2861 : 0 : case IFN_MASK_LEN_STORE_LANES:
2862 : 0 : goto process_args;
2863 : 732 : case IFN_MASK_LOAD:
2864 : 732 : case IFN_LEN_LOAD:
2865 : 732 : case IFN_MASK_LEN_LOAD:
2866 : 732 : case IFN_MASK_LOAD_LANES:
2867 : 732 : case IFN_MASK_LEN_LOAD_LANES:
2868 : 732 : {
2869 : 732 : ao_ref rhs_ref;
2870 : 732 : tree lhs = gimple_call_lhs (call);
2871 : 732 : if (lhs)
2872 : : {
2873 : 732 : ao_ref_init_from_ptr_and_size (&rhs_ref,
2874 : : gimple_call_arg (call, 0),
2875 : 732 : TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
2876 : : /* We cannot make this a known-size access since otherwise
2877 : : we disambiguate against refs to decls that are smaller. */
2878 : 732 : rhs_ref.size = -1;
2879 : 1464 : rhs_ref.ref_alias_set = rhs_ref.base_alias_set
2880 : 1464 : = tbaa_p ? get_deref_alias_set (TREE_TYPE
2881 : : (gimple_call_arg (call, 1))) : 0;
2882 : 732 : return refs_may_alias_p_1 (ref, &rhs_ref, tbaa_p);
2883 : : }
2884 : 0 : break;
2885 : : }
2886 : : default:;
2887 : : }
2888 : :
2889 : 43165042 : callee = gimple_call_fndecl (call);
2890 : 43165042 : if (callee != NULL_TREE)
2891 : : {
2892 : 41675404 : struct cgraph_node *node = cgraph_node::get (callee);
2893 : : /* We can not safely optimize based on summary of calle if it does
2894 : : not always bind to current def: it is possible that memory load
2895 : : was optimized out earlier and the interposed variant may not be
2896 : : optimized this way. */
2897 : 41675404 : if (node && node->binds_to_current_def_p ())
2898 : : {
2899 : 4821155 : modref_summary *summary = get_modref_function_summary (node);
2900 : 4821155 : if (summary && !summary->calls_interposable)
2901 : : {
2902 : 2507486 : if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2903 : : {
2904 : 531013 : alias_stats.modref_use_no_alias++;
2905 : 531013 : if (dump_file && (dump_flags & TDF_DETAILS))
2906 : : {
2907 : 24 : fprintf (dump_file,
2908 : : "ipa-modref: call stmt ");
2909 : 24 : print_gimple_stmt (dump_file, call, 0);
2910 : 24 : fprintf (dump_file,
2911 : : "ipa-modref: call to %s does not use ",
2912 : : node->dump_name ());
2913 : 24 : if (!ref->ref && ref->base)
2914 : : {
2915 : 3 : fprintf (dump_file, "base: ");
2916 : 3 : print_generic_expr (dump_file, ref->base);
2917 : : }
2918 : 21 : else if (ref->ref)
2919 : : {
2920 : 21 : fprintf (dump_file, "ref: ");
2921 : 21 : print_generic_expr (dump_file, ref->ref);
2922 : : }
2923 : 24 : fprintf (dump_file, " alias sets: %i->%i\n",
2924 : : ao_ref_base_alias_set (ref),
2925 : : ao_ref_alias_set (ref));
2926 : : }
2927 : 531013 : goto process_args;
2928 : : }
2929 : 1976473 : alias_stats.modref_use_may_alias++;
2930 : : }
2931 : : }
2932 : : }
2933 : :
2934 : 42634029 : base = ao_ref_base (ref);
2935 : 42634029 : if (!base)
2936 : : return true;
2937 : :
2938 : : /* If the reference is based on a decl that is not aliased the call
2939 : : cannot possibly use it. */
2940 : 42634029 : if (DECL_P (base)
2941 : 38318296 : && !may_be_aliased (base)
2942 : : /* But local statics can be used through recursion. */
2943 : 64576767 : && !is_global_var (base))
2944 : 21710129 : goto process_args;
2945 : :
2946 : 20923900 : if (int res = check_fnspec (call, ref, false))
2947 : : {
2948 : 17634904 : if (res == 1)
2949 : : return true;
2950 : : }
2951 : : else
2952 : 3288996 : goto process_args;
2953 : :
2954 : : /* Check if base is a global static variable that is not read
2955 : : by the function. */
2956 : 17196537 : if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2957 : : {
2958 : 862300 : struct cgraph_node *node = cgraph_node::get (callee);
2959 : 862300 : bitmap read;
2960 : 862300 : int id;
2961 : :
2962 : : /* FIXME: Callee can be an OMP builtin that does not have a call graph
2963 : : node yet. We should enforce that there are nodes for all decls in the
2964 : : IL and remove this check instead. */
2965 : 862300 : if (node
2966 : 861726 : && (id = ipa_reference_var_uid (base)) != -1
2967 : 112826 : && (read = ipa_reference_get_read_global (node))
2968 : 875503 : && !bitmap_bit_p (read, id))
2969 : 9753 : goto process_args;
2970 : : }
2971 : :
2972 : : /* Check if the base variable is call-used. */
2973 : 17186784 : if (DECL_P (base))
2974 : : {
2975 : 13950981 : if (pt_solution_includes (gimple_call_use_set (call), base))
2976 : : return true;
2977 : : }
2978 : 3235803 : else if ((TREE_CODE (base) == MEM_REF
2979 : 3235803 : || TREE_CODE (base) == TARGET_MEM_REF)
2980 : 3235803 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2981 : : {
2982 : 3235579 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2983 : 3235579 : if (!pi)
2984 : : return true;
2985 : :
2986 : 3227906 : if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2987 : : return true;
2988 : : }
2989 : : else
2990 : : return true;
2991 : :
2992 : : /* Inspect call arguments for passed-by-value aliases. */
2993 : : process_args:
2994 : 82863336 : for (i = 0; i < gimple_call_num_args (call); ++i)
2995 : : {
2996 : 58077645 : tree op = gimple_call_arg (call, i);
2997 : 58077645 : int flags = gimple_call_arg_flags (call, i);
2998 : :
2999 : 58077645 : if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
3000 : 615333 : continue;
3001 : :
3002 : 57462312 : if (TREE_CODE (op) == WITH_SIZE_EXPR)
3003 : 458 : op = TREE_OPERAND (op, 0);
3004 : :
3005 : 57462312 : if (TREE_CODE (op) != SSA_NAME
3006 : 57462312 : && !is_gimple_min_invariant (op))
3007 : : {
3008 : 10452033 : ao_ref r;
3009 : 10452033 : ao_ref_init (&r, op);
3010 : 10452033 : if (refs_may_alias_p_1 (&r, ref, tbaa_p))
3011 : 3927092 : return true;
3012 : : }
3013 : : }
3014 : :
3015 : : return false;
3016 : : }
3017 : :
3018 : : static bool
3019 : 43221726 : ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
3020 : : {
3021 : 43221726 : bool res;
3022 : 43221726 : res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
3023 : 43221726 : if (res)
3024 : 18437689 : ++alias_stats.ref_maybe_used_by_call_p_may_alias;
3025 : : else
3026 : 24784037 : ++alias_stats.ref_maybe_used_by_call_p_no_alias;
3027 : 43221726 : return res;
3028 : : }
3029 : :
3030 : :
3031 : : /* If the statement STMT may use the memory reference REF return
3032 : : true, otherwise return false. */
3033 : :
3034 : : bool
3035 : 336096624 : ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
3036 : : {
3037 : 336096624 : if (is_gimple_assign (stmt))
3038 : : {
3039 : 286821328 : tree rhs;
3040 : :
3041 : : /* All memory assign statements are single. */
3042 : 286821328 : if (!gimple_assign_single_p (stmt))
3043 : : return false;
3044 : :
3045 : 286821328 : rhs = gimple_assign_rhs1 (stmt);
3046 : 286821328 : if (is_gimple_reg (rhs)
3047 : 233948614 : || is_gimple_min_invariant (rhs)
3048 : 375626390 : || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
3049 : 228361532 : return false;
3050 : :
3051 : 58459796 : return refs_may_alias_p (rhs, ref, tbaa_p);
3052 : : }
3053 : 49275296 : else if (is_gimple_call (stmt))
3054 : 43221726 : return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
3055 : 6053570 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
3056 : : {
3057 : 5392742 : tree retval = gimple_return_retval (return_stmt);
3058 : 5392742 : if (retval
3059 : 2956901 : && TREE_CODE (retval) != SSA_NAME
3060 : 2307714 : && !is_gimple_min_invariant (retval)
3061 : 7546106 : && refs_may_alias_p (retval, ref, tbaa_p))
3062 : : return true;
3063 : : /* If ref escapes the function then the return acts as a use. */
3064 : 3797455 : tree base = ao_ref_base (ref);
3065 : 3797455 : if (!base)
3066 : : ;
3067 : 3797455 : else if (DECL_P (base))
3068 : 1450578 : return is_global_var (base);
3069 : 2346877 : else if (TREE_CODE (base) == MEM_REF
3070 : 2346877 : || TREE_CODE (base) == TARGET_MEM_REF)
3071 : 2346780 : return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), false);
3072 : : return false;
3073 : : }
3074 : :
3075 : : return true;
3076 : : }
3077 : :
3078 : : bool
3079 : 778410 : ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3080 : : {
3081 : 778410 : ao_ref r;
3082 : 778410 : ao_ref_init (&r, ref);
3083 : 778410 : return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
3084 : : }
3085 : :
3086 : : /* If the call in statement CALL may clobber the memory reference REF
3087 : : return true, otherwise return false. */
3088 : :
3089 : : bool
3090 : 329572632 : call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3091 : : {
3092 : 329572632 : tree base;
3093 : 329572632 : tree callee;
3094 : :
3095 : : /* If the call is pure or const it cannot clobber anything. */
3096 : 329572632 : if (gimple_call_flags (call)
3097 : 329572632 : & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3098 : : return false;
3099 : 326159607 : if (gimple_call_internal_p (call))
3100 : 559356 : switch (auto fn = gimple_call_internal_fn (call))
3101 : : {
3102 : : /* Treat these internal calls like ECF_PURE for aliasing,
3103 : : they don't write to any memory the program should care about.
3104 : : They have important other side-effects, and read memory,
3105 : : so can't be ECF_NOVOPS. */
3106 : : case IFN_UBSAN_NULL:
3107 : : case IFN_UBSAN_BOUNDS:
3108 : : case IFN_UBSAN_VPTR:
3109 : : case IFN_UBSAN_OBJECT_SIZE:
3110 : : case IFN_UBSAN_PTR:
3111 : : case IFN_ASAN_CHECK:
3112 : : return false;
3113 : 7031 : case IFN_MASK_STORE:
3114 : 7031 : case IFN_LEN_STORE:
3115 : 7031 : case IFN_MASK_LEN_STORE:
3116 : 7031 : case IFN_MASK_STORE_LANES:
3117 : 7031 : case IFN_MASK_LEN_STORE_LANES:
3118 : 7031 : {
3119 : 7031 : tree rhs = gimple_call_arg (call,
3120 : 7031 : internal_fn_stored_value_index (fn));
3121 : 7031 : ao_ref lhs_ref;
3122 : 7031 : ao_ref_init_from_ptr_and_size (&lhs_ref, gimple_call_arg (call, 0),
3123 : 7031 : TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
3124 : : /* We cannot make this a known-size access since otherwise
3125 : : we disambiguate against refs to decls that are smaller. */
3126 : 7031 : lhs_ref.size = -1;
3127 : 14062 : lhs_ref.ref_alias_set = lhs_ref.base_alias_set
3128 : 7031 : = tbaa_p ? get_deref_alias_set
3129 : 6691 : (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
3130 : 7031 : return refs_may_alias_p_1 (ref, &lhs_ref, tbaa_p);
3131 : : }
3132 : : default:
3133 : : break;
3134 : : }
3135 : :
3136 : 325919032 : callee = gimple_call_fndecl (call);
3137 : :
3138 : 325919032 : if (callee != NULL_TREE && !ref->volatile_p)
3139 : : {
3140 : 305481931 : struct cgraph_node *node = cgraph_node::get (callee);
3141 : 305481931 : if (node)
3142 : : {
3143 : 305204628 : modref_summary *summary = get_modref_function_summary (node);
3144 : 305204628 : if (summary)
3145 : : {
3146 : 39395114 : if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
3147 : 39395114 : && (!summary->writes_errno
3148 : 80444 : || !targetm.ref_may_alias_errno (ref)))
3149 : : {
3150 : 4171221 : alias_stats.modref_clobber_no_alias++;
3151 : 4171221 : if (dump_file && (dump_flags & TDF_DETAILS))
3152 : : {
3153 : 52 : fprintf (dump_file,
3154 : : "ipa-modref: call stmt ");
3155 : 52 : print_gimple_stmt (dump_file, call, 0);
3156 : 52 : fprintf (dump_file,
3157 : : "ipa-modref: call to %s does not clobber ",
3158 : : node->dump_name ());
3159 : 52 : if (!ref->ref && ref->base)
3160 : : {
3161 : 32 : fprintf (dump_file, "base: ");
3162 : 32 : print_generic_expr (dump_file, ref->base);
3163 : : }
3164 : 20 : else if (ref->ref)
3165 : : {
3166 : 20 : fprintf (dump_file, "ref: ");
3167 : 20 : print_generic_expr (dump_file, ref->ref);
3168 : : }
3169 : 52 : fprintf (dump_file, " alias sets: %i->%i\n",
3170 : : ao_ref_base_alias_set (ref),
3171 : : ao_ref_alias_set (ref));
3172 : : }
3173 : 4171221 : return false;
3174 : : }
3175 : 35223893 : alias_stats.modref_clobber_may_alias++;
3176 : : }
3177 : : }
3178 : : }
3179 : :
3180 : 321747811 : base = ao_ref_base (ref);
3181 : 321747811 : if (!base)
3182 : : return true;
3183 : :
3184 : 321747811 : if (TREE_CODE (base) == SSA_NAME
3185 : 321747545 : || CONSTANT_CLASS_P (base))
3186 : : return false;
3187 : :
3188 : : /* A call that is not without side-effects might involve volatile
3189 : : accesses and thus conflicts with all other volatile accesses. */
3190 : 313608233 : if (ref->volatile_p)
3191 : : return true;
3192 : :
3193 : : /* If the reference is based on a decl that is not aliased the call
3194 : : cannot possibly clobber it. */
3195 : 312143515 : if (DECL_P (base)
3196 : 282876770 : && !may_be_aliased (base)
3197 : : /* But local non-readonly statics can be modified through recursion
3198 : : or the call may implement a threading barrier which we must
3199 : : treat as may-def. */
3200 : 509643584 : && (TREE_READONLY (base)
3201 : 189645064 : || !is_global_var (base)))
3202 : : return false;
3203 : :
3204 : : /* If the reference is based on a pointer that points to memory
3205 : : that may not be written to then the call cannot possibly clobber it. */
3206 : 122794498 : if ((TREE_CODE (base) == MEM_REF
3207 : 122794498 : || TREE_CODE (base) == TARGET_MEM_REF)
3208 : 29266745 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3209 : 151783493 : && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3210 : : return false;
3211 : :
3212 : 122452227 : if (int res = check_fnspec (call, ref, true))
3213 : : {
3214 : 99706577 : if (res == 1)
3215 : : return true;
3216 : : }
3217 : : else
3218 : : return false;
3219 : :
3220 : : /* Check if base is a global static variable that is not written
3221 : : by the function. */
3222 : 94630746 : if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3223 : : {
3224 : 9172320 : struct cgraph_node *node = cgraph_node::get (callee);
3225 : 9172320 : bitmap written;
3226 : 9172320 : int id;
3227 : :
3228 : 9172320 : if (node
3229 : 9171864 : && (id = ipa_reference_var_uid (base)) != -1
3230 : 3633037 : && (written = ipa_reference_get_written_global (node))
3231 : 9239223 : && !bitmap_bit_p (written, id))
3232 : : return false;
3233 : : }
3234 : :
3235 : : /* Check if the base variable is call-clobbered. */
3236 : 94580301 : if (DECL_P (base))
3237 : 71857582 : return pt_solution_includes (gimple_call_clobber_set (call), base);
3238 : 22722719 : else if ((TREE_CODE (base) == MEM_REF
3239 : 22722719 : || TREE_CODE (base) == TARGET_MEM_REF)
3240 : 22722719 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3241 : : {
3242 : 22494539 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3243 : 22494539 : if (!pi)
3244 : : return true;
3245 : :
3246 : 21772304 : return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3247 : : }
3248 : :
3249 : : return true;
3250 : : }
3251 : :
3252 : : /* If the call in statement CALL may clobber the memory reference REF
3253 : : return true, otherwise return false. */
3254 : :
3255 : : bool
3256 : 334752 : call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3257 : : {
3258 : 334752 : bool res;
3259 : 334752 : ao_ref r;
3260 : 334752 : ao_ref_init (&r, ref);
3261 : 334752 : res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3262 : 334752 : if (res)
3263 : 60786 : ++alias_stats.call_may_clobber_ref_p_may_alias;
3264 : : else
3265 : 273966 : ++alias_stats.call_may_clobber_ref_p_no_alias;
3266 : 334752 : return res;
3267 : : }
3268 : :
3269 : :
3270 : : /* If the statement STMT may clobber the memory reference REF return true,
3271 : : otherwise return false. */
3272 : :
3273 : : bool
3274 : 1923240974 : stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3275 : : {
3276 : 1923240974 : if (is_gimple_call (stmt))
3277 : : {
3278 : 336429448 : tree lhs = gimple_call_lhs (stmt);
3279 : 336429448 : if (lhs
3280 : 162866267 : && TREE_CODE (lhs) != SSA_NAME)
3281 : : {
3282 : 66002183 : ao_ref r;
3283 : 66002183 : ao_ref_init (&r, lhs);
3284 : 66002183 : if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3285 : 7287157 : return true;
3286 : : }
3287 : :
3288 : 329142291 : return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3289 : : }
3290 : 1586811526 : else if (gimple_assign_single_p (stmt))
3291 : : {
3292 : 1578854605 : tree lhs = gimple_assign_lhs (stmt);
3293 : 1578854605 : if (TREE_CODE (lhs) != SSA_NAME)
3294 : : {
3295 : 1577945500 : ao_ref r;
3296 : 1577945500 : ao_ref_init (&r, lhs);
3297 : 1577945500 : return refs_may_alias_p_1 (ref, &r, tbaa_p);
3298 : : }
3299 : : }
3300 : 7956921 : else if (gimple_code (stmt) == GIMPLE_ASM)
3301 : : return true;
3302 : :
3303 : : return false;
3304 : : }
3305 : :
3306 : : bool
3307 : 4477693 : stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3308 : : {
3309 : 4477693 : ao_ref r;
3310 : 4477693 : ao_ref_init (&r, ref);
3311 : 4477693 : return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3312 : : }
3313 : :
3314 : : /* Return true if store1 and store2 described by corresponding tuples
3315 : : <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3316 : : address. */
3317 : :
3318 : : static bool
3319 : 138123266 : same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3320 : : poly_int64 max_size1,
3321 : : tree base2, poly_int64 offset2, poly_int64 size2,
3322 : : poly_int64 max_size2)
3323 : : {
3324 : : /* Offsets need to be 0. */
3325 : 138123266 : if (maybe_ne (offset1, 0)
3326 : 138123266 : || maybe_ne (offset2, 0))
3327 : : return false;
3328 : :
3329 : 42618422 : bool base1_obj_p = SSA_VAR_P (base1);
3330 : 42618422 : bool base2_obj_p = SSA_VAR_P (base2);
3331 : :
3332 : : /* We need one object. */
3333 : 42618422 : if (base1_obj_p == base2_obj_p)
3334 : : return false;
3335 : 5158368 : tree obj = base1_obj_p ? base1 : base2;
3336 : :
3337 : : /* And we need one MEM_REF. */
3338 : 5158368 : bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3339 : 5158368 : bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3340 : 5158368 : if (base1_memref_p == base2_memref_p)
3341 : : return false;
3342 : 5061400 : tree memref = base1_memref_p ? base1 : base2;
3343 : :
3344 : : /* Sizes need to be valid. */
3345 : 5061400 : if (!known_size_p (max_size1)
3346 : 5039878 : || !known_size_p (max_size2)
3347 : 5039569 : || !known_size_p (size1)
3348 : 10100969 : || !known_size_p (size2))
3349 : : return false;
3350 : :
3351 : : /* Max_size needs to match size. */
3352 : 5039569 : if (maybe_ne (max_size1, size1)
3353 : 5039569 : || maybe_ne (max_size2, size2))
3354 : : return false;
3355 : :
3356 : : /* Sizes need to match. */
3357 : 4997028 : if (maybe_ne (size1, size2))
3358 : : return false;
3359 : :
3360 : :
3361 : : /* Check that memref is a store to pointer with singleton points-to info. */
3362 : 1228099 : if (!integer_zerop (TREE_OPERAND (memref, 1)))
3363 : : return false;
3364 : 932775 : tree ptr = TREE_OPERAND (memref, 0);
3365 : 932775 : if (TREE_CODE (ptr) != SSA_NAME)
3366 : : return false;
3367 : 932459 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3368 : 932459 : unsigned int pt_uid;
3369 : 932459 : if (pi == NULL
3370 : 932459 : || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3371 : 738808 : return false;
3372 : :
3373 : : /* Be conservative with non-call exceptions when the address might
3374 : : be NULL. */
3375 : 193651 : if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3376 : : return false;
3377 : :
3378 : : /* Check that ptr points relative to obj. */
3379 : 193640 : unsigned int obj_uid = DECL_PT_UID (obj);
3380 : 193640 : if (obj_uid != pt_uid)
3381 : : return false;
3382 : :
3383 : : /* Check that the object size is the same as the store size. That ensures us
3384 : : that ptr points to the start of obj. */
3385 : 42476 : return (DECL_SIZE (obj)
3386 : 42476 : && poly_int_tree_p (DECL_SIZE (obj))
3387 : 127340 : && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3388 : : }
3389 : :
3390 : : /* Return true if REF is killed by an store described by
3391 : : BASE, OFFSET, SIZE and MAX_SIZE. */
3392 : :
3393 : : static bool
3394 : 233787189 : store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3395 : : poly_int64 max_size, ao_ref *ref)
3396 : : {
3397 : 233787189 : poly_int64 ref_offset = ref->offset;
3398 : : /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3399 : : so base == ref->base does not always hold. */
3400 : 233787189 : if (base != ref->base)
3401 : : {
3402 : : /* Try using points-to info. */
3403 : 138123266 : if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3404 : : ref->offset, ref->size, ref->max_size))
3405 : : return true;
3406 : :
3407 : : /* If both base and ref->base are MEM_REFs, only compare the
3408 : : first operand, and if the second operand isn't equal constant,
3409 : : try to add the offsets into offset and ref_offset. */
3410 : 34935683 : if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3411 : 164280927 : && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3412 : : {
3413 : 20084751 : if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3414 : 20084751 : TREE_OPERAND (ref->base, 1)))
3415 : : {
3416 : 7277672 : poly_offset_int off1 = mem_ref_offset (base);
3417 : 7277672 : off1 <<= LOG2_BITS_PER_UNIT;
3418 : 7277672 : off1 += offset;
3419 : 7277672 : poly_offset_int off2 = mem_ref_offset (ref->base);
3420 : 7277672 : off2 <<= LOG2_BITS_PER_UNIT;
3421 : 7277672 : off2 += ref_offset;
3422 : 7277672 : if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3423 : 0 : size = -1;
3424 : : }
3425 : : }
3426 : : else
3427 : 118038427 : size = -1;
3428 : : }
3429 : : /* For a must-alias check we need to be able to constrain
3430 : : the access properly. */
3431 : 233787101 : return (known_eq (size, max_size)
3432 : 233787101 : && known_subrange_p (ref_offset, ref->max_size, offset, size));
3433 : : }
3434 : :
3435 : : /* If STMT kills the memory reference REF return true, otherwise
3436 : : return false. */
3437 : :
3438 : : bool
3439 : 286786861 : stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3440 : : {
3441 : 286786861 : if (!ao_ref_base (ref))
3442 : : return false;
3443 : :
3444 : 286786861 : if (gimple_has_lhs (stmt)
3445 : 248087611 : && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3446 : : /* The assignment is not necessarily carried out if it can throw
3447 : : and we can catch it in the current function where we could inspect
3448 : : the previous value. Similarly if the function can throw externally
3449 : : and the ref does not die on the function return.
3450 : : ??? We only need to care about the RHS throwing. For aggregate
3451 : : assignments or similar calls and non-call exceptions the LHS
3452 : : might throw as well.
3453 : : ??? We also should care about possible longjmp, but since we
3454 : : do not understand that longjmp is not using global memory we will
3455 : : not consider a kill here since the function call will be considered
3456 : : as possibly using REF. */
3457 : 241519304 : && !stmt_can_throw_internal (cfun, stmt)
3458 : 260833633 : && (!stmt_can_throw_external (cfun, stmt)
3459 : 4939754 : || !ref_may_alias_global_p (ref, false)))
3460 : : {
3461 : 236835242 : tree lhs = gimple_get_lhs (stmt);
3462 : : /* If LHS is literally a base of the access we are done. */
3463 : 236835242 : if (ref->ref)
3464 : : {
3465 : 235385211 : tree base = ref->ref;
3466 : 235385211 : tree innermost_dropped_array_ref = NULL_TREE;
3467 : 235385211 : if (handled_component_p (base))
3468 : : {
3469 : 143973059 : tree saved_lhs0 = NULL_TREE;
3470 : 254749545 : if (handled_component_p (lhs))
3471 : : {
3472 : 110776486 : saved_lhs0 = TREE_OPERAND (lhs, 0);
3473 : 110776486 : TREE_OPERAND (lhs, 0) = integer_zero_node;
3474 : : }
3475 : 244806501 : do
3476 : : {
3477 : : /* Just compare the outermost handled component, if
3478 : : they are equal we have found a possible common
3479 : : base. */
3480 : 244806501 : tree saved_base0 = TREE_OPERAND (base, 0);
3481 : 244806501 : TREE_OPERAND (base, 0) = integer_zero_node;
3482 : 244806501 : bool res = operand_equal_p (lhs, base, 0);
3483 : 244806501 : TREE_OPERAND (base, 0) = saved_base0;
3484 : 244806501 : if (res)
3485 : : break;
3486 : : /* Remember if we drop an array-ref that we need to
3487 : : double-check not being at struct end. */
3488 : 230088998 : if (TREE_CODE (base) == ARRAY_REF
3489 : 230088998 : || TREE_CODE (base) == ARRAY_RANGE_REF)
3490 : 63741095 : innermost_dropped_array_ref = base;
3491 : : /* Otherwise drop handled components of the access. */
3492 : 230088998 : base = saved_base0;
3493 : : }
3494 : 359344554 : while (handled_component_p (base));
3495 : 143973059 : if (saved_lhs0)
3496 : 110776486 : TREE_OPERAND (lhs, 0) = saved_lhs0;
3497 : : }
3498 : : /* Finally check if the lhs has the same address and size as the
3499 : : base candidate of the access. Watch out if we have dropped
3500 : : an array-ref that might have flexible size, this means ref->ref
3501 : : may be outside of the TYPE_SIZE of its base. */
3502 : 143973059 : if ((! innermost_dropped_array_ref
3503 : 62974622 : || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
3504 : 369067834 : && (lhs == base
3505 : 222788855 : || (((TYPE_SIZE (TREE_TYPE (lhs))
3506 : 222788855 : == TYPE_SIZE (TREE_TYPE (base)))
3507 : 154819440 : || (TYPE_SIZE (TREE_TYPE (lhs))
3508 : 154819377 : && TYPE_SIZE (TREE_TYPE (base))
3509 : 154819278 : && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3510 : 154819278 : TYPE_SIZE (TREE_TYPE (base)),
3511 : : 0)))
3512 : 67969415 : && operand_equal_p (lhs, base,
3513 : : OEP_ADDRESS_OF
3514 : : | OEP_MATCH_SIDE_EFFECTS))))
3515 : : {
3516 : 3170118 : ++alias_stats.stmt_kills_ref_p_yes;
3517 : 9285176 : return true;
3518 : : }
3519 : : }
3520 : :
3521 : : /* Now look for non-literal equal bases with the restriction of
3522 : : handling constant offset and size. */
3523 : : /* For a must-alias check we need to be able to constrain
3524 : : the access properly. */
3525 : 233665124 : if (!ref->max_size_known_p ())
3526 : : {
3527 : 1083557 : ++alias_stats.stmt_kills_ref_p_no;
3528 : 1083557 : return false;
3529 : : }
3530 : 232581567 : poly_int64 size, offset, max_size;
3531 : 232581567 : bool reverse;
3532 : 232581567 : tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3533 : : &reverse);
3534 : 232581567 : if (store_kills_ref_p (base, offset, size, max_size, ref))
3535 : : {
3536 : 1861383 : ++alias_stats.stmt_kills_ref_p_yes;
3537 : 1861383 : return true;
3538 : : }
3539 : : }
3540 : :
3541 : 280671803 : if (is_gimple_call (stmt))
3542 : : {
3543 : 23159012 : tree callee = gimple_call_fndecl (stmt);
3544 : 23159012 : struct cgraph_node *node;
3545 : 23159012 : modref_summary *summary;
3546 : :
3547 : : /* Try to disambiguate using modref summary. Modref records a vector
3548 : : of stores with known offsets relative to function parameters that must
3549 : : happen every execution of function. Find if we have a matching
3550 : : store and verify that function can not use the value. */
3551 : 23159012 : if (callee != NULL_TREE
3552 : 22282041 : && (node = cgraph_node::get (callee)) != NULL
3553 : 22237876 : && node->binds_to_current_def_p ()
3554 : 2154135 : && (summary = get_modref_function_summary (node)) != NULL
3555 : 1039895 : && summary->kills.length ()
3556 : : /* Check that we can not trap while evaulating function
3557 : : parameters. This check is overly conservative. */
3558 : 23240215 : && (!cfun->can_throw_non_call_exceptions
3559 : 0 : || (!stmt_can_throw_internal (cfun, stmt)
3560 : 0 : && (!stmt_can_throw_external (cfun, stmt)
3561 : 0 : || !ref_may_alias_global_p (ref, false)))))
3562 : : {
3563 : 333808 : for (auto kill : summary->kills)
3564 : : {
3565 : 96373 : ao_ref dref;
3566 : :
3567 : : /* We only can do useful compares if we know the access range
3568 : : precisely. */
3569 : 96373 : if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3570 : 24 : continue;
3571 : 96349 : if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3572 : : dref.size, dref.max_size, ref))
3573 : : {
3574 : : /* For store to be killed it needs to not be used
3575 : : earlier. */
3576 : 6174 : if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3577 : : true)
3578 : 6174 : || !dbg_cnt (ipa_mod_ref))
3579 : : break;
3580 : 3746 : if (dump_file && (dump_flags & TDF_DETAILS))
3581 : : {
3582 : 2 : fprintf (dump_file,
3583 : : "ipa-modref: call stmt ");
3584 : 2 : print_gimple_stmt (dump_file, stmt, 0);
3585 : 2 : fprintf (dump_file,
3586 : : "ipa-modref: call to %s kills ",
3587 : : node->dump_name ());
3588 : 2 : print_generic_expr (dump_file, ref->base);
3589 : 2 : fprintf (dump_file, "\n");
3590 : : }
3591 : 3746 : ++alias_stats.modref_kill_yes;
3592 : 3746 : return true;
3593 : : }
3594 : : }
3595 : 77457 : ++alias_stats.modref_kill_no;
3596 : : }
3597 : 23155266 : if (callee != NULL_TREE
3598 : 23155266 : && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3599 : 4398628 : switch (DECL_FUNCTION_CODE (callee))
3600 : : {
3601 : 533385 : case BUILT_IN_FREE:
3602 : 533385 : {
3603 : 533385 : tree ptr = gimple_call_arg (stmt, 0);
3604 : 533385 : tree base = ao_ref_base (ref);
3605 : 533385 : if (base && TREE_CODE (base) == MEM_REF
3606 : 621758 : && TREE_OPERAND (base, 0) == ptr)
3607 : : {
3608 : 12955 : ++alias_stats.stmt_kills_ref_p_yes;
3609 : 12955 : return true;
3610 : : }
3611 : : break;
3612 : : }
3613 : :
3614 : 1483617 : case BUILT_IN_MEMCPY:
3615 : 1483617 : case BUILT_IN_MEMPCPY:
3616 : 1483617 : case BUILT_IN_MEMMOVE:
3617 : 1483617 : case BUILT_IN_MEMSET:
3618 : 1483617 : case BUILT_IN_MEMCPY_CHK:
3619 : 1483617 : case BUILT_IN_MEMPCPY_CHK:
3620 : 1483617 : case BUILT_IN_MEMMOVE_CHK:
3621 : 1483617 : case BUILT_IN_MEMSET_CHK:
3622 : 1483617 : case BUILT_IN_STRNCPY:
3623 : 1483617 : case BUILT_IN_STPNCPY:
3624 : 1483617 : case BUILT_IN_CALLOC:
3625 : 1483617 : {
3626 : : /* For a must-alias check we need to be able to constrain
3627 : : the access properly. */
3628 : 1483617 : if (!ref->max_size_known_p ())
3629 : : {
3630 : 57596 : ++alias_stats.stmt_kills_ref_p_no;
3631 : 436747 : return false;
3632 : : }
3633 : 1426021 : tree dest;
3634 : 1426021 : tree len;
3635 : :
3636 : : /* In execution order a calloc call will never kill
3637 : : anything. However, DSE will (ab)use this interface
3638 : : to ask if a calloc call writes the same memory locations
3639 : : as a later assignment, memset, etc. So handle calloc
3640 : : in the expected way. */
3641 : 1426021 : if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3642 : : {
3643 : 1655 : tree arg0 = gimple_call_arg (stmt, 0);
3644 : 1655 : tree arg1 = gimple_call_arg (stmt, 1);
3645 : 1655 : if (TREE_CODE (arg0) != INTEGER_CST
3646 : 1526 : || TREE_CODE (arg1) != INTEGER_CST)
3647 : : {
3648 : 161 : ++alias_stats.stmt_kills_ref_p_no;
3649 : 161 : return false;
3650 : : }
3651 : :
3652 : 1494 : dest = gimple_call_lhs (stmt);
3653 : 1494 : if (!dest)
3654 : : {
3655 : 1 : ++alias_stats.stmt_kills_ref_p_no;
3656 : 1 : return false;
3657 : : }
3658 : 1493 : len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3659 : : }
3660 : : else
3661 : : {
3662 : 1424366 : dest = gimple_call_arg (stmt, 0);
3663 : 1424366 : len = gimple_call_arg (stmt, 2);
3664 : : }
3665 : 1425859 : if (!poly_int_tree_p (len))
3666 : : return false;
3667 : 1109273 : ao_ref dref;
3668 : 1109273 : ao_ref_init_from_ptr_and_size (&dref, dest, len);
3669 : 1109273 : if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3670 : : dref.size, dref.max_size, ref))
3671 : : {
3672 : 4807 : ++alias_stats.stmt_kills_ref_p_yes;
3673 : 4807 : return true;
3674 : : }
3675 : 1104466 : break;
3676 : : }
3677 : :
3678 : 11895 : case BUILT_IN_VA_END:
3679 : 11895 : {
3680 : 11895 : tree ptr = gimple_call_arg (stmt, 0);
3681 : 11895 : if (TREE_CODE (ptr) == ADDR_EXPR)
3682 : : {
3683 : 11844 : tree base = ao_ref_base (ref);
3684 : 11844 : if (TREE_OPERAND (ptr, 0) == base)
3685 : : {
3686 : 7042 : ++alias_stats.stmt_kills_ref_p_yes;
3687 : 7042 : return true;
3688 : : }
3689 : : }
3690 : : break;
3691 : : }
3692 : :
3693 : : default:;
3694 : : }
3695 : : }
3696 : 280268909 : ++alias_stats.stmt_kills_ref_p_no;
3697 : 280268909 : return false;
3698 : : }
3699 : :
3700 : : bool
3701 : 8676993 : stmt_kills_ref_p (gimple *stmt, tree ref)
3702 : : {
3703 : 8676993 : ao_ref r;
3704 : 8676993 : ao_ref_init (&r, ref);
3705 : 8676993 : return stmt_kills_ref_p (stmt, &r);
3706 : : }
3707 : :
3708 : : /* Return whether REF can be subject to store data races. */
3709 : :
3710 : : bool
3711 : 23844 : ref_can_have_store_data_races (tree ref)
3712 : : {
3713 : : /* With -fallow-store-data-races do not care about them. */
3714 : 23844 : if (flag_store_data_races)
3715 : : return false;
3716 : :
3717 : 23740 : tree base = get_base_address (ref);
3718 : 23740 : if (auto_var_p (base)
3719 : 23740 : && ! may_be_aliased (base))
3720 : : /* Automatic variables not aliased are not subject to
3721 : : data races. */
3722 : : return false;
3723 : :
3724 : : return true;
3725 : : }
3726 : :
3727 : :
3728 : : /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3729 : : TARGET or a statement clobbering the memory reference REF in which
3730 : : case false is returned. The walk starts with VUSE, one argument of PHI. */
3731 : :
3732 : : static bool
3733 : 127733825 : maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3734 : : ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3735 : : bitmap *visited, bool abort_on_visited,
3736 : : void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3737 : : translate_flags disambiguate_only,
3738 : : void *data)
3739 : : {
3740 : 127733825 : basic_block bb = gimple_bb (phi);
3741 : :
3742 : 127733825 : if (!*visited)
3743 : : {
3744 : 23215207 : *visited = BITMAP_ALLOC (NULL);
3745 : 23215207 : bitmap_tree_view (*visited);
3746 : : }
3747 : :
3748 : 127733825 : bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3749 : :
3750 : : /* Walk until we hit the target. */
3751 : 127733825 : while (vuse != target)
3752 : : {
3753 : 400653018 : gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3754 : : /* If we are searching for the target VUSE by walking up to
3755 : : TARGET_BB dominating the original PHI we are finished once
3756 : : we reach a default def or a definition in a block dominating
3757 : : that block. Update TARGET and return. */
3758 : 400653018 : if (!target
3759 : 400653018 : && (gimple_nop_p (def_stmt)
3760 : 80860759 : || dominated_by_p (CDI_DOMINATORS,
3761 : 80860759 : target_bb, gimple_bb (def_stmt))))
3762 : : {
3763 : 20343527 : target = vuse;
3764 : 20343527 : return true;
3765 : : }
3766 : :
3767 : : /* Recurse for PHI nodes. */
3768 : 380309491 : if (gimple_code (def_stmt) == GIMPLE_PHI)
3769 : : {
3770 : : /* An already visited PHI node ends the walk successfully. */
3771 : 73345316 : if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3772 : 32949442 : return !abort_on_visited;
3773 : 40395874 : vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3774 : : visited, abort_on_visited,
3775 : : translate, data, disambiguate_only);
3776 : 40395874 : if (!vuse)
3777 : : return false;
3778 : 35259946 : continue;
3779 : : }
3780 : 306964175 : else if (gimple_nop_p (def_stmt))
3781 : : return false;
3782 : : else
3783 : : {
3784 : : /* A clobbering statement or the end of the IL ends it failing. */
3785 : 306964175 : if ((int)limit <= 0)
3786 : : return false;
3787 : 306931715 : --limit;
3788 : 306931715 : if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3789 : : {
3790 : 18052499 : translate_flags tf = disambiguate_only;
3791 : 18052499 : if (translate
3792 : 18052499 : && (*translate) (ref, vuse, data, &tf) == NULL)
3793 : : ;
3794 : : else
3795 : 14823896 : return false;
3796 : : }
3797 : : }
3798 : : /* If we reach a new basic-block see if we already skipped it
3799 : : in a previous walk that ended successfully. */
3800 : 292107819 : if (gimple_bb (def_stmt) != bb)
3801 : : {
3802 : 128314502 : if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3803 : 9526515 : return !abort_on_visited;
3804 : 118787987 : bb = gimple_bb (def_stmt);
3805 : : }
3806 : 728156379 : vuse = gimple_vuse (def_stmt);
3807 : : }
3808 : : return true;
3809 : : }
3810 : :
3811 : :
3812 : : /* Starting from a PHI node for the virtual operand of the memory reference
3813 : : REF find a continuation virtual operand that allows to continue walking
3814 : : statements dominating PHI skipping only statements that cannot possibly
3815 : : clobber REF. Decrements LIMIT for each alias disambiguation done
3816 : : and aborts the walk, returning NULL_TREE if it reaches zero.
3817 : : Returns NULL_TREE if no suitable virtual operand can be found. */
3818 : :
3819 : : tree
3820 : 99862997 : get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3821 : : unsigned int &limit, bitmap *visited,
3822 : : bool abort_on_visited,
3823 : : void *(*translate)(ao_ref *, tree, void *,
3824 : : translate_flags *),
3825 : : void *data,
3826 : : translate_flags disambiguate_only)
3827 : : {
3828 : 99862997 : unsigned nargs = gimple_phi_num_args (phi);
3829 : :
3830 : : /* Through a single-argument PHI we can simply look through. */
3831 : 99862997 : if (nargs == 1)
3832 : 3372197 : return PHI_ARG_DEF (phi, 0);
3833 : :
3834 : : /* For two or more arguments try to pairwise skip non-aliasing code
3835 : : until we hit the phi argument definition that dominates the other one. */
3836 : 96490800 : basic_block phi_bb = gimple_bb (phi);
3837 : 96490800 : tree arg0, arg1;
3838 : 96490800 : unsigned i;
3839 : :
3840 : : /* Find a candidate for the virtual operand which definition
3841 : : dominates those of all others. */
3842 : : /* First look if any of the args themselves satisfy this. */
3843 : 178620011 : for (i = 0; i < nargs; ++i)
3844 : : {
3845 : 153480073 : arg0 = PHI_ARG_DEF (phi, i);
3846 : 153480073 : if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3847 : : break;
3848 : 150082805 : basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3849 : 150082805 : if (def_bb != phi_bb
3850 : 150082805 : && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3851 : : break;
3852 : 82129211 : arg0 = NULL_TREE;
3853 : : }
3854 : : /* If not, look if we can reach such candidate by walking defs
3855 : : until we hit the immediate dominator. maybe_skip_until will
3856 : : do that for us. */
3857 : 96490800 : basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3858 : :
3859 : : /* Then check against the (to be) found candidate. */
3860 : 372045261 : for (i = 0; i < nargs; ++i)
3861 : : {
3862 : 199182082 : arg1 = PHI_ARG_DEF (phi, i);
3863 : 199182082 : if (arg1 == arg0)
3864 : : ;
3865 : 344147949 : else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3866 : : limit, visited,
3867 : : abort_on_visited,
3868 : : translate,
3869 : : /* Do not valueize when walking over
3870 : : backedges. */
3871 : : dominated_by_p
3872 : 127733825 : (CDI_DOMINATORS,
3873 : 127733825 : gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3874 : : phi_bb)
3875 : : ? TR_DISAMBIGUATE
3876 : : : disambiguate_only, data))
3877 : : return NULL_TREE;
3878 : : }
3879 : :
3880 : 76372379 : return arg0;
3881 : : }
3882 : :
3883 : : /* Based on the memory reference REF and its virtual use VUSE call
3884 : : WALKER for each virtual use that is equivalent to VUSE, including VUSE
3885 : : itself. That is, for each virtual use for which its defining statement
3886 : : does not clobber REF.
3887 : :
3888 : : WALKER is called with REF, the current virtual use and DATA. If
3889 : : WALKER returns non-NULL the walk stops and its result is returned.
3890 : : At the end of a non-successful walk NULL is returned.
3891 : :
3892 : : TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3893 : : use which definition is a statement that may clobber REF and DATA.
3894 : : If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3895 : : If TRANSLATE returns non-NULL the walk stops and its result is returned.
3896 : : If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3897 : : to adjust REF and *DATA to make that valid.
3898 : :
3899 : : VALUEIZE if non-NULL is called with the next VUSE that is considered
3900 : : and return value is substituted for that. This can be used to
3901 : : implement optimistic value-numbering for example. Note that the
3902 : : VUSE argument is assumed to be valueized already.
3903 : :
3904 : : LIMIT specifies the number of alias queries we are allowed to do,
3905 : : the walk stops when it reaches zero and NULL is returned. LIMIT
3906 : : is decremented by the number of alias queries (plus adjustments
3907 : : done by the callbacks) upon return.
3908 : :
3909 : : TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3910 : :
3911 : : void *
3912 : 67585256 : walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3913 : : void *(*walker)(ao_ref *, tree, void *),
3914 : : void *(*translate)(ao_ref *, tree, void *,
3915 : : translate_flags *),
3916 : : tree (*valueize)(tree),
3917 : : unsigned &limit, void *data)
3918 : : {
3919 : 67585256 : bitmap visited = NULL;
3920 : 67585256 : void *res;
3921 : 67585256 : bool translated = false;
3922 : :
3923 : 67585256 : timevar_push (TV_ALIAS_STMT_WALK);
3924 : :
3925 : 1050100273 : do
3926 : : {
3927 : 1050100273 : gimple *def_stmt;
3928 : :
3929 : : /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3930 : 1050100273 : res = (*walker) (ref, vuse, data);
3931 : : /* Abort walk. */
3932 : 1050100273 : if (res == (void *)-1)
3933 : : {
3934 : : res = NULL;
3935 : : break;
3936 : : }
3937 : : /* Lookup succeeded. */
3938 : 1050100190 : else if (res != NULL)
3939 : : break;
3940 : :
3941 : 1041988266 : if (valueize)
3942 : : {
3943 : 1023780147 : vuse = valueize (vuse);
3944 : 1023780147 : if (!vuse)
3945 : : {
3946 : : res = NULL;
3947 : : break;
3948 : : }
3949 : : }
3950 : 1026970937 : def_stmt = SSA_NAME_DEF_STMT (vuse);
3951 : 1026970937 : if (gimple_nop_p (def_stmt))
3952 : : break;
3953 : 1024451923 : else if (gimple_code (def_stmt) == GIMPLE_PHI)
3954 : 56519259 : vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3955 : : &visited, translated, translate, data);
3956 : : else
3957 : : {
3958 : 967932664 : if ((int)limit <= 0)
3959 : : {
3960 : : res = NULL;
3961 : : break;
3962 : : }
3963 : 967696941 : --limit;
3964 : 967696941 : if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3965 : : {
3966 : 33201124 : if (!translate)
3967 : : break;
3968 : 27967197 : translate_flags disambiguate_only = TR_TRANSLATE;
3969 : 27967197 : res = (*translate) (ref, vuse, data, &disambiguate_only);
3970 : : /* Failed lookup and translation. */
3971 : 27967197 : if (res == (void *)-1)
3972 : : {
3973 : : res = NULL;
3974 : : break;
3975 : : }
3976 : : /* Lookup succeeded. */
3977 : 5935103 : else if (res != NULL)
3978 : : break;
3979 : : /* Translation succeeded, continue walking. */
3980 : 7825815 : translated = translated || disambiguate_only == TR_TRANSLATE;
3981 : : }
3982 : 939254578 : vuse = gimple_vuse (def_stmt);
3983 : : }
3984 : : }
3985 : 995773837 : while (vuse);
3986 : :
3987 : 67585256 : if (visited)
3988 : 20804411 : BITMAP_FREE (visited);
3989 : :
3990 : 67585256 : timevar_pop (TV_ALIAS_STMT_WALK);
3991 : :
3992 : 67585256 : return res;
3993 : : }
3994 : :
3995 : :
3996 : : /* Based on the memory reference REF call WALKER for each vdef whose
3997 : : defining statement may clobber REF, starting with VDEF. If REF
3998 : : is NULL_TREE, each defining statement is visited.
3999 : :
4000 : : WALKER is called with REF, the current vdef and DATA. If WALKER
4001 : : returns true the walk is stopped, otherwise it continues.
4002 : :
4003 : : If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
4004 : : The pointer may be NULL and then we do not track this information.
4005 : :
4006 : : At PHI nodes walk_aliased_vdefs forks into one walk for each
4007 : : PHI argument (but only one walk continues at merge points), the
4008 : : return value is true if any of the walks was successful.
4009 : :
4010 : : The function returns the number of statements walked or -1 if
4011 : : LIMIT stmts were walked and the walk was aborted at this point.
4012 : : If LIMIT is zero the walk is not aborted. */
4013 : :
4014 : : static int
4015 : 303519360 : walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
4016 : : bool (*walker)(ao_ref *, tree, void *), void *data,
4017 : : bitmap *visited, unsigned int cnt,
4018 : : bool *function_entry_reached, unsigned limit)
4019 : : {
4020 : 950638710 : do
4021 : : {
4022 : 1901277420 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
4023 : :
4024 : 950638710 : if (*visited
4025 : 950638710 : && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
4026 : 167953678 : return cnt;
4027 : :
4028 : 782685032 : if (gimple_nop_p (def_stmt))
4029 : : {
4030 : 24479647 : if (function_entry_reached)
4031 : 3846967 : *function_entry_reached = true;
4032 : 24479647 : return cnt;
4033 : : }
4034 : 758205385 : else if (gimple_code (def_stmt) == GIMPLE_PHI)
4035 : : {
4036 : 94870265 : unsigned i;
4037 : 94870265 : if (!*visited)
4038 : : {
4039 : 8365344 : *visited = BITMAP_ALLOC (NULL);
4040 : 8365344 : bitmap_tree_view (*visited);
4041 : : }
4042 : 295210392 : for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
4043 : : {
4044 : 205655398 : int res = walk_aliased_vdefs_1 (ref,
4045 : : gimple_phi_arg_def (def_stmt, i),
4046 : : walker, data, visited, cnt,
4047 : : function_entry_reached, limit);
4048 : 205655398 : if (res == -1)
4049 : : return -1;
4050 : 200340127 : cnt = res;
4051 : : }
4052 : 89554994 : return cnt;
4053 : : }
4054 : :
4055 : : /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
4056 : 663335120 : cnt++;
4057 : 663335120 : if (cnt == limit)
4058 : : return -1;
4059 : 663207004 : if ((!ref
4060 : 586613822 : || stmt_may_clobber_ref_p_1 (def_stmt, ref))
4061 : 735757047 : && (*walker) (ref, vdef, data))
4062 : 16087654 : return cnt;
4063 : :
4064 : 1597758060 : vdef = gimple_vuse (def_stmt);
4065 : : }
4066 : : while (1);
4067 : : }
4068 : :
4069 : : int
4070 : 97863962 : walk_aliased_vdefs (ao_ref *ref, tree vdef,
4071 : : bool (*walker)(ao_ref *, tree, void *), void *data,
4072 : : bitmap *visited,
4073 : : bool *function_entry_reached, unsigned int limit)
4074 : : {
4075 : 97863962 : bitmap local_visited = NULL;
4076 : 97863962 : int ret;
4077 : :
4078 : 97863962 : timevar_push (TV_ALIAS_STMT_WALK);
4079 : :
4080 : 97863962 : if (function_entry_reached)
4081 : 4786455 : *function_entry_reached = false;
4082 : :
4083 : 168715776 : ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
4084 : : visited ? visited : &local_visited, 0,
4085 : : function_entry_reached, limit);
4086 : 97863962 : if (local_visited)
4087 : 8365344 : BITMAP_FREE (local_visited);
4088 : :
4089 : 97863962 : timevar_pop (TV_ALIAS_STMT_WALK);
4090 : :
4091 : 97863962 : return ret;
4092 : : }
4093 : :
4094 : : /* Verify validity of the fnspec string.
4095 : : See attr-fnspec.h for details. */
4096 : :
4097 : : void
4098 : 438206658 : attr_fnspec::verify ()
4099 : : {
4100 : 438206658 : bool err = false;
4101 : 438206658 : if (!len)
4102 : : return;
4103 : :
4104 : : /* Check return value specifier. */
4105 : 136782571 : if (len < return_desc_size)
4106 : : err = true;
4107 : 136782571 : else if ((len - return_desc_size) % arg_desc_size)
4108 : : err = true;
4109 : 136782571 : else if ((str[0] < '1' || str[0] > '4')
4110 : 136782571 : && str[0] != '.' && str[0] != 'm')
4111 : 0 : err = true;
4112 : :
4113 : 136782571 : switch (str[1])
4114 : : {
4115 : : case ' ':
4116 : : case 'p':
4117 : : case 'P':
4118 : : case 'c':
4119 : : case 'C':
4120 : : break;
4121 : : default:
4122 : : err = true;
4123 : : }
4124 : 136782571 : if (err)
4125 : 0 : internal_error ("invalid fn spec attribute \"%s\"", str);
4126 : :
4127 : : /* Now check all parameters. */
4128 : 409545683 : for (unsigned int i = 0; arg_specified_p (i); i++)
4129 : : {
4130 : 272763112 : unsigned int idx = arg_idx (i);
4131 : 272763112 : switch (str[idx])
4132 : : {
4133 : 253570098 : case 'x':
4134 : 253570098 : case 'X':
4135 : 253570098 : case 'r':
4136 : 253570098 : case 'R':
4137 : 253570098 : case 'o':
4138 : 253570098 : case 'O':
4139 : 253570098 : case 'w':
4140 : 253570098 : case 'W':
4141 : 253570098 : case '.':
4142 : 253570098 : if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
4143 : 253570098 : || str[idx + 1] == 't')
4144 : : {
4145 : 42970761 : if (str[idx] != 'r' && str[idx] != 'R'
4146 : : && str[idx] != 'w' && str[idx] != 'W'
4147 : : && str[idx] != 'o' && str[idx] != 'O')
4148 : 42970761 : err = true;
4149 : 42970761 : if (str[idx + 1] != 't'
4150 : : /* Size specified is scalar, so it should be described
4151 : : by ". " if specified at all. */
4152 : 42970761 : && (arg_specified_p (str[idx + 1] - '1')
4153 : 0 : && str[arg_idx (str[idx + 1] - '1')] != '.'))
4154 : : err = true;
4155 : : }
4156 : 210599337 : else if (str[idx + 1] != ' ')
4157 : : err = true;
4158 : : break;
4159 : 19193014 : default:
4160 : 19193014 : if (str[idx] < '1' || str[idx] > '9')
4161 : : err = true;
4162 : : }
4163 : 272763112 : if (err)
4164 : 0 : internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
4165 : : }
4166 : : }
4167 : :
4168 : : /* Return ture if TYPE1 and TYPE2 will always give the same answer
4169 : : when compared with other types using same_type_for_tbaa. */
4170 : :
4171 : : static bool
4172 : 22359543 : types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4173 : : bool lto_streaming_safe)
4174 : : {
4175 : : /* We use same_type_for_tbaa_p to match types in the access path.
4176 : : This check is overly conservative. */
4177 : 22359543 : type1 = TYPE_MAIN_VARIANT (type1);
4178 : 22359543 : type2 = TYPE_MAIN_VARIANT (type2);
4179 : :
4180 : 22359543 : if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4181 : 22359543 : != TYPE_STRUCTURAL_EQUALITY_P (type2))
4182 : : return false;
4183 : 22080386 : if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4184 : : return true;
4185 : :
4186 : 18254543 : if (lto_streaming_safe)
4187 : 80154 : return type1 == type2;
4188 : : else
4189 : 18174389 : return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
4190 : : }
4191 : :
4192 : : /* Return ture if TYPE1 and TYPE2 will always give the same answer
4193 : : when compared with other types using same_type_for_tbaa. */
4194 : :
4195 : : bool
4196 : 22055360 : types_equal_for_same_type_for_tbaa_p (tree type1, tree type2)
4197 : : {
4198 : 22055360 : return types_equal_for_same_type_for_tbaa_p (type1, type2,
4199 : 22055360 : lto_streaming_expected_p ());
4200 : : }
4201 : :
4202 : : /* Compare REF1 and REF2 and return flags specifying their differences.
4203 : : If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4204 : : types that are going to be recomputed.
4205 : : If TBAA is true also compare TBAA metadata. */
4206 : :
4207 : : int
4208 : 203381 : ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4209 : : bool lto_streaming_safe,
4210 : : bool tbaa)
4211 : : {
4212 : 203381 : if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4213 : : return SEMANTICS;
4214 : 203381 : tree base1 = ao_ref_base (ref1);
4215 : 203381 : tree base2 = ao_ref_base (ref2);
4216 : :
4217 : 203381 : if (!known_eq (ref1->offset, ref2->offset)
4218 : 203381 : || !known_eq (ref1->size, ref2->size)
4219 : 406762 : || !known_eq (ref1->max_size, ref2->max_size))
4220 : : return SEMANTICS;
4221 : :
4222 : : /* For variable accesses we need to compare actual paths
4223 : : to check that both refs are accessing same address and the access size. */
4224 : 203378 : if (!known_eq (ref1->size, ref1->max_size))
4225 : : {
4226 : 4250 : if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4227 : 4250 : TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4228 : : return SEMANTICS;
4229 : 4250 : tree r1 = ref1->ref;
4230 : 4250 : tree r2 = ref2->ref;
4231 : :
4232 : : /* Handle toplevel COMPONENT_REFs of bitfields.
4233 : : Those are special since they are not allowed in
4234 : : ADDR_EXPR. */
4235 : 4250 : if (TREE_CODE (r1) == COMPONENT_REF
4236 : 4250 : && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4237 : : {
4238 : 0 : if (TREE_CODE (r2) != COMPONENT_REF
4239 : 0 : || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4240 : : return SEMANTICS;
4241 : 0 : tree field1 = TREE_OPERAND (r1, 1);
4242 : 0 : tree field2 = TREE_OPERAND (r2, 1);
4243 : 0 : if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4244 : 0 : DECL_FIELD_OFFSET (field2), 0)
4245 : 0 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4246 : 0 : DECL_FIELD_BIT_OFFSET (field2), 0)
4247 : 0 : || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4248 : 0 : || !types_compatible_p (TREE_TYPE (r1),
4249 : 0 : TREE_TYPE (r2)))
4250 : 0 : return SEMANTICS;
4251 : 0 : r1 = TREE_OPERAND (r1, 0);
4252 : 0 : r2 = TREE_OPERAND (r2, 0);
4253 : : }
4254 : 4250 : else if (TREE_CODE (r2) == COMPONENT_REF
4255 : 4250 : && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4256 : : return SEMANTICS;
4257 : :
4258 : : /* Similarly for bit field refs. */
4259 : 4250 : if (TREE_CODE (r1) == BIT_FIELD_REF)
4260 : : {
4261 : 0 : if (TREE_CODE (r2) != BIT_FIELD_REF
4262 : 0 : || !operand_equal_p (TREE_OPERAND (r1, 1),
4263 : 0 : TREE_OPERAND (r2, 1), 0)
4264 : 0 : || !operand_equal_p (TREE_OPERAND (r1, 2),
4265 : 0 : TREE_OPERAND (r2, 2), 0)
4266 : 0 : || !types_compatible_p (TREE_TYPE (r1),
4267 : 0 : TREE_TYPE (r2)))
4268 : 0 : return SEMANTICS;
4269 : 0 : r1 = TREE_OPERAND (r1, 0);
4270 : 0 : r2 = TREE_OPERAND (r2, 0);
4271 : : }
4272 : 4250 : else if (TREE_CODE (r2) == BIT_FIELD_REF)
4273 : : return SEMANTICS;
4274 : :
4275 : : /* Now we can compare the address of actual memory access. */
4276 : 4250 : if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4277 : : return SEMANTICS;
4278 : : }
4279 : : /* For constant accesses we get more matches by comparing offset only. */
4280 : 199128 : else if (!operand_equal_p (base1, base2,
4281 : : OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4282 : : return SEMANTICS;
4283 : :
4284 : : /* We can't simply use get_object_alignment_1 on the full
4285 : : reference as for accesses with variable indexes this reports
4286 : : too conservative alignment. */
4287 : 203307 : unsigned int align1, align2;
4288 : 203307 : unsigned HOST_WIDE_INT bitpos1, bitpos2;
4289 : 203307 : bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4290 : 203307 : bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4291 : : /* ??? For MEMREF get_object_alignment_1 determines aligned from
4292 : : TYPE_ALIGN but still returns false. This seem to contradict
4293 : : its description. So compare even if alignment is unknown. */
4294 : 203307 : if (known1 != known2
4295 : 203307 : || (bitpos1 != bitpos2 || align1 != align2))
4296 : : return SEMANTICS;
4297 : :
4298 : : /* Now we know that accesses are semantically same. */
4299 : 203092 : int flags = 0;
4300 : :
4301 : : /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4302 : 203092 : tree rbase1 = ref1->ref;
4303 : 203092 : if (rbase1)
4304 : 315403 : while (handled_component_p (rbase1))
4305 : 112311 : rbase1 = TREE_OPERAND (rbase1, 0);
4306 : 203092 : tree rbase2 = ref2->ref;
4307 : 315343 : while (handled_component_p (rbase2))
4308 : 112251 : rbase2 = TREE_OPERAND (rbase2, 0);
4309 : :
4310 : : /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4311 : : implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4312 : : Otherwise we need to match bases and cliques. */
4313 : 203092 : if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4314 : 103192 : && MR_DEPENDENCE_CLIQUE (rbase1))
4315 : 171803 : || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4316 : 71903 : && MR_DEPENDENCE_CLIQUE (rbase2)))
4317 : 234381 : && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4318 : 31289 : || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4319 : 31210 : || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4320 : : flags |= DEPENDENCE_CLIQUE;
4321 : :
4322 : 203092 : if (!tbaa)
4323 : : return flags;
4324 : :
4325 : : /* Alias sets are not stable across LTO sreaming; be conservative here
4326 : : and compare types the alias sets are ultimately based on. */
4327 : 203063 : if (lto_streaming_safe)
4328 : : {
4329 : 2751 : tree t1 = ao_ref_alias_ptr_type (ref1);
4330 : 2751 : tree t2 = ao_ref_alias_ptr_type (ref2);
4331 : 2751 : if (!alias_ptr_types_compatible_p (t1, t2))
4332 : 15 : flags |= REF_ALIAS_SET;
4333 : :
4334 : 2751 : t1 = ao_ref_base_alias_ptr_type (ref1);
4335 : 2751 : t2 = ao_ref_base_alias_ptr_type (ref2);
4336 : 2751 : if (!alias_ptr_types_compatible_p (t1, t2))
4337 : 23 : flags |= BASE_ALIAS_SET;
4338 : : }
4339 : : else
4340 : : {
4341 : 200312 : if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4342 : 0 : flags |= REF_ALIAS_SET;
4343 : 200312 : if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4344 : 0 : flags |= BASE_ALIAS_SET;
4345 : : }
4346 : :
4347 : : /* Access path is used only on non-view-converted references. */
4348 : 203063 : bool view_converted = view_converted_memref_p (rbase1);
4349 : 203063 : if (view_converted_memref_p (rbase2) != view_converted)
4350 : 0 : return flags | ACCESS_PATH;
4351 : 203063 : else if (view_converted)
4352 : : return flags;
4353 : :
4354 : :
4355 : : /* Find start of access paths and look for trailing arrays. */
4356 : 194206 : tree c1 = ref1->ref, c2 = ref2->ref;
4357 : 194206 : tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4358 : 194206 : int nskipped1 = 0, nskipped2 = 0;
4359 : 194206 : int i = 0;
4360 : :
4361 : 306463 : for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4362 : : {
4363 : 112257 : if (component_ref_to_zero_sized_trailing_array_p (p1))
4364 : 133 : end_struct_ref1 = p1;
4365 : 112257 : if (ends_tbaa_access_path_p (p1))
4366 : 5624 : c1 = p1, nskipped1 = i;
4367 : 112257 : i++;
4368 : : }
4369 : 194206 : i = 0;
4370 : 306403 : for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4371 : : {
4372 : 112197 : if (component_ref_to_zero_sized_trailing_array_p (p2))
4373 : 140 : end_struct_ref2 = p2;
4374 : 112197 : if (ends_tbaa_access_path_p (p2))
4375 : 5624 : c2 = p2, nskipped2 = i;
4376 : 112197 : i++;
4377 : : }
4378 : :
4379 : : /* For variable accesses we can not rely on offset match bellow.
4380 : : We know that paths are struturally same, so only check that
4381 : : starts of TBAA paths did not diverge. */
4382 : 194206 : if (!known_eq (ref1->size, ref1->max_size)
4383 : 194206 : && nskipped1 != nskipped2)
4384 : 0 : return flags | ACCESS_PATH;
4385 : :
4386 : : /* Information about trailing refs is used by
4387 : : aliasing_component_refs_p that is applied only if paths
4388 : : has handled components.. */
4389 : 194206 : if (!handled_component_p (c1) && !handled_component_p (c2))
4390 : : ;
4391 : 71556 : else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4392 : 27 : return flags | ACCESS_PATH;
4393 : 194179 : if (end_struct_ref1
4394 : 194302 : && same_type_for_tbaa (TREE_TYPE (end_struct_ref1),
4395 : 123 : TREE_TYPE (end_struct_ref2)) != 1)
4396 : 3 : return flags | ACCESS_PATH;
4397 : :
4398 : : /* Now compare all handled components of the access path.
4399 : : We have three oracles that cares about access paths:
4400 : : - aliasing_component_refs_p
4401 : : - nonoverlapping_refs_since_match_p
4402 : : - nonoverlapping_component_refs_p
4403 : : We need to match things these oracles compare.
4404 : :
4405 : : It is only necessary to check types for compatibility
4406 : : and offsets. Rest of what oracles compares are actual
4407 : : addresses. Those are already known to be same:
4408 : : - for constant accesses we check offsets
4409 : : - for variable accesses we already matched
4410 : : the path lexically with operand_equal_p. */
4411 : 414286 : while (true)
4412 : : {
4413 : 304231 : bool comp1 = handled_component_p (c1);
4414 : 304231 : bool comp2 = handled_component_p (c2);
4415 : :
4416 : 304231 : if (comp1 != comp2)
4417 : 48 : return flags | ACCESS_PATH;
4418 : 304183 : if (!comp1)
4419 : : break;
4420 : :
4421 : 110267 : if (TREE_CODE (c1) != TREE_CODE (c2))
4422 : 0 : return flags | ACCESS_PATH;
4423 : :
4424 : : /* aliasing_component_refs_p attempts to find type match within
4425 : : the paths. For that reason both types needs to be equal
4426 : : with respect to same_type_for_tbaa_p. */
4427 : 110267 : if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4428 : 110267 : TREE_TYPE (c2),
4429 : : lto_streaming_safe))
4430 : 212 : return flags | ACCESS_PATH;
4431 : 220110 : if (component_ref_to_zero_sized_trailing_array_p (c1)
4432 : 110055 : != component_ref_to_zero_sized_trailing_array_p (c2))
4433 : 0 : return flags | ACCESS_PATH;
4434 : :
4435 : : /* aliasing_matching_component_refs_p compares
4436 : : offsets within the path. Other properties are ignored.
4437 : : Do not bother to verify offsets in variable accesses. Here we
4438 : : already compared them by operand_equal_p so they are
4439 : : structurally same. */
4440 : 110055 : if (!known_eq (ref1->size, ref1->max_size))
4441 : : {
4442 : 5058 : poly_int64 offadj1, sztmc1, msztmc1;
4443 : 5058 : bool reverse1;
4444 : 5058 : get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4445 : 5058 : poly_int64 offadj2, sztmc2, msztmc2;
4446 : 5058 : bool reverse2;
4447 : 5058 : get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4448 : 5058 : if (!known_eq (offadj1, offadj2))
4449 : 0 : return flags | ACCESS_PATH;
4450 : : }
4451 : 110055 : c1 = TREE_OPERAND (c1, 0);
4452 : 110055 : c2 = TREE_OPERAND (c2, 0);
4453 : 110055 : }
4454 : : /* Finally test the access type. */
4455 : 193916 : if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4456 : 193916 : TREE_TYPE (c2),
4457 : : lto_streaming_safe))
4458 : 1505 : return flags | ACCESS_PATH;
4459 : : return flags;
4460 : : }
4461 : :
4462 : : /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4463 : : and canonical types. */
4464 : : void
4465 : 8308520 : ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4466 : : inchash::hash &hstate)
4467 : : {
4468 : 8308520 : tree base = ao_ref_base (ref);
4469 : 8308520 : tree tbase = base;
4470 : :
4471 : 8308520 : if (!known_eq (ref->size, ref->max_size))
4472 : : {
4473 : 407706 : tree r = ref->ref;
4474 : 407706 : if (TREE_CODE (r) == COMPONENT_REF
4475 : 407706 : && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4476 : : {
4477 : 1729 : tree field = TREE_OPERAND (r, 1);
4478 : 1729 : hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4479 : 1729 : hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4480 : 1729 : hash_operand (DECL_SIZE (field), hstate, 0);
4481 : 1729 : r = TREE_OPERAND (r, 0);
4482 : : }
4483 : 407706 : if (TREE_CODE (r) == BIT_FIELD_REF)
4484 : : {
4485 : 1704 : hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4486 : 1704 : hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4487 : 1704 : r = TREE_OPERAND (r, 0);
4488 : : }
4489 : 407706 : hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4490 : 407706 : hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4491 : : }
4492 : : else
4493 : : {
4494 : 7900814 : hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4495 : 7900814 : hstate.add_poly_int (ref->offset);
4496 : 7900814 : hstate.add_poly_int (ref->size);
4497 : 7900814 : hstate.add_poly_int (ref->max_size);
4498 : : }
4499 : 8308520 : if (!lto_streaming_safe && tbaa)
4500 : : {
4501 : 7996953 : hstate.add_int (ao_ref_alias_set (ref));
4502 : 7996953 : hstate.add_int (ao_ref_base_alias_set (ref));
4503 : : }
4504 : 8308520 : }
|