Branch data Line data Source code
1 : : /* Alias analysis for trees.
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : : Contributed by Diego Novillo <dnovillo@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "target.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "gimple.h"
29 : : #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 : : #include "ssa.h"
31 : : #include "cgraph.h"
32 : : #include "tree-pretty-print.h"
33 : : #include "alias.h"
34 : : #include "fold-const.h"
35 : : #include "langhooks.h"
36 : : #include "dumpfile.h"
37 : : #include "tree-eh.h"
38 : : #include "tree-dfa.h"
39 : : #include "ipa-reference.h"
40 : : #include "varasm.h"
41 : : #include "ipa-modref-tree.h"
42 : : #include "ipa-modref.h"
43 : : #include "attr-fnspec.h"
44 : : #include "errors.h"
45 : : #include "dbgcnt.h"
46 : : #include "gimple-pretty-print.h"
47 : : #include "print-tree.h"
48 : : #include "tree-ssa-alias-compare.h"
49 : : #include "builtins.h"
50 : : #include "internal-fn.h"
51 : :
52 : : /* Broad overview of how alias analysis on gimple works:
53 : :
54 : : Statements clobbering or using memory are linked through the
55 : : virtual operand factored use-def chain. The virtual operand
56 : : is unique per function; its symbol is accessible via gimple_vop (cfun).
57 : : Virtual operands are used for efficiently walking memory statements
58 : : in the gimple IL and are useful for things like value-numbering as
59 : : a generation count for memory references.
60 : :
61 : : SSA_NAME pointers may have associated points-to information
62 : : accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
63 : : points-to information is (re-)computed by the TODO_rebuild_alias
64 : : pass manager todo. Points-to information is also used for more
65 : : precise tracking of call-clobbered and call-used variables and
66 : : related disambiguations.
67 : :
68 : : This file contains functions for disambiguating memory references,
69 : : the so-called alias-oracle, and tools for walking the gimple IL.
70 : :
71 : : The main alias-oracle entry-points are
72 : :
73 : : bool stmt_may_clobber_ref_p (gimple *, tree)
74 : :
75 : : This function queries if a statement may invalidate (parts of)
76 : : the memory designated by the reference tree argument.
77 : :
78 : : bool ref_maybe_used_by_stmt_p (gimple *, tree)
79 : :
80 : : This function queries if a statement may need (parts of) the
81 : : memory designated by the reference tree argument.
82 : :
83 : : There are variants of these functions that only handle the call
84 : : part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
85 : : Note that these do not disambiguate against a possible call lhs.
86 : :
87 : : bool refs_may_alias_p (tree, tree)
88 : :
89 : : This function tries to disambiguate two reference trees.
90 : :
91 : : bool ptr_deref_may_alias_global_p (tree, bool)
92 : :
93 : : This function queries if dereferencing a pointer variable may
94 : : alias global memory. If the bool argument is true, global memory
95 : : is considered to also include function local memory that escaped.
96 : :
97 : : More low-level disambiguators are available and documented in
98 : : this file. Low-level disambiguators dealing with points-to
99 : : information are in tree-ssa-structalias.cc. */
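     : :
     : : /* A minimal usage sketch of the main entry points above; the helper
     : :    name and the surrounding pass are hypothetical and only illustrate
     : :    the calling convention:
     : :
     : :      // Can a load of LOAD_REF be moved across memory statement STMT?
     : :      static bool
     : :      load_may_cross_stmt_p (gimple *stmt, tree load_ref)
     : :      {
     : :        // STMT may overwrite (parts of) the loaded memory.
     : :        if (stmt_may_clobber_ref_p (stmt, load_ref))
     : :          return false;
     : :        return true;
     : :      }
     : :
     : :    For moving a store one would additionally query
     : :    ref_maybe_used_by_stmt_p to verify that STMT does not read the
     : :    stored memory.  */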
100 : :
101 : : static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
102 : : static bool nonoverlapping_component_refs_p (const_tree, const_tree);
103 : :
104 : : /* Query statistics for the different low-level disambiguators.
105 : : A high-level query may trigger multiple of them. */
106 : :
107 : : static struct {
108 : : unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
109 : : unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
110 : : unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
111 : : unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
112 : : unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
113 : : unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
114 : : unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
115 : : unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
116 : : unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
117 : : unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
118 : : unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
119 : : unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
120 : : unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
121 : : unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
122 : : unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
123 : : unsigned HOST_WIDE_INT modref_use_may_alias;
124 : : unsigned HOST_WIDE_INT modref_use_no_alias;
125 : : unsigned HOST_WIDE_INT modref_clobber_may_alias;
126 : : unsigned HOST_WIDE_INT modref_clobber_no_alias;
127 : : unsigned HOST_WIDE_INT modref_kill_no;
128 : : unsigned HOST_WIDE_INT modref_kill_yes;
129 : : unsigned HOST_WIDE_INT modref_tests;
130 : : unsigned HOST_WIDE_INT modref_baseptr_tests;
131 : : } alias_stats;
132 : :
133 : : void
134 : 0 : dump_alias_stats (FILE *s)
135 : : {
136 : 0 : fprintf (s, "\nAlias oracle query stats:\n");
137 : 0 : fprintf (s, " refs_may_alias_p: "
138 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
139 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
140 : : alias_stats.refs_may_alias_p_no_alias,
141 : 0 : alias_stats.refs_may_alias_p_no_alias
142 : 0 : + alias_stats.refs_may_alias_p_may_alias);
143 : 0 : fprintf (s, " ref_maybe_used_by_call_p: "
144 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
145 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
146 : : alias_stats.ref_maybe_used_by_call_p_no_alias,
147 : 0 : alias_stats.ref_maybe_used_by_call_p_no_alias
148 : 0 : + alias_stats.ref_maybe_used_by_call_p_may_alias);
149 : 0 : fprintf (s, " call_may_clobber_ref_p: "
150 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
151 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
152 : : alias_stats.call_may_clobber_ref_p_no_alias,
153 : 0 : alias_stats.call_may_clobber_ref_p_no_alias
154 : 0 : + alias_stats.call_may_clobber_ref_p_may_alias);
155 : 0 : fprintf (s, " stmt_kills_ref_p: "
156 : : HOST_WIDE_INT_PRINT_DEC" kills, "
157 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
158 : : alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
159 : 0 : alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
160 : 0 : + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
161 : 0 : fprintf (s, " nonoverlapping_component_refs_p: "
162 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
163 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
164 : : alias_stats.nonoverlapping_component_refs_p_no_alias,
165 : 0 : alias_stats.nonoverlapping_component_refs_p_no_alias
166 : 0 : + alias_stats.nonoverlapping_component_refs_p_may_alias);
167 : 0 : fprintf (s, " nonoverlapping_refs_since_match_p: "
168 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
169 : : HOST_WIDE_INT_PRINT_DEC" must overlaps, "
170 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
171 : : alias_stats.nonoverlapping_refs_since_match_p_no_alias,
172 : : alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
173 : 0 : alias_stats.nonoverlapping_refs_since_match_p_no_alias
174 : 0 : + alias_stats.nonoverlapping_refs_since_match_p_may_alias
175 : 0 : + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
176 : 0 : fprintf (s, " aliasing_component_refs_p: "
177 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
178 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
179 : : alias_stats.aliasing_component_refs_p_no_alias,
180 : 0 : alias_stats.aliasing_component_refs_p_no_alias
181 : 0 : + alias_stats.aliasing_component_refs_p_may_alias);
182 : 0 : dump_alias_stats_in_alias_c (s);
183 : 0 : fprintf (s, "\nModref stats:\n");
184 : 0 : fprintf (s, " modref kill: "
185 : : HOST_WIDE_INT_PRINT_DEC" kills, "
186 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
187 : : alias_stats.modref_kill_yes,
188 : 0 : alias_stats.modref_kill_yes
189 : 0 : + alias_stats.modref_kill_no);
190 : 0 : fprintf (s, " modref use: "
191 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
192 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
193 : : alias_stats.modref_use_no_alias,
194 : 0 : alias_stats.modref_use_no_alias
195 : 0 : + alias_stats.modref_use_may_alias);
196 : 0 : fprintf (s, " modref clobber: "
197 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
198 : : HOST_WIDE_INT_PRINT_DEC" queries\n"
199 : : " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
200 : : " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
201 : : alias_stats.modref_clobber_no_alias,
202 : : alias_stats.modref_clobber_no_alias
203 : : + alias_stats.modref_clobber_may_alias,
204 : : alias_stats.modref_tests,
205 : 0 : ((double)alias_stats.modref_tests)
206 : : / (alias_stats.modref_clobber_no_alias
207 : : + alias_stats.modref_clobber_may_alias),
208 : : alias_stats.modref_baseptr_tests,
209 : 0 : ((double)alias_stats.modref_baseptr_tests)
210 : 0 : / (alias_stats.modref_clobber_no_alias
211 : 0 : + alias_stats.modref_clobber_may_alias));
212 : 0 : }
213 : :
214 : :
215 : : /* Return true if dereferencing PTR may alias a global variable.
216 : : When ESCAPED_LOCAL_P is true, escaped local memory is also considered
217 : : global. */
218 : :
219 : : bool
220 : 53166539 : ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
221 : : {
222 : 53166539 : struct ptr_info_def *pi;
223 : :
224 : : /* If we end up with a pointer constant here, it may point
225 : : to global memory. */
226 : 53166539 : if (TREE_CODE (ptr) != SSA_NAME)
227 : : return true;
228 : :
229 : 53160950 : pi = SSA_NAME_PTR_INFO (ptr);
230 : :
231 : : /* If we do not have points-to information for this variable,
232 : : we have to punt. */
233 : 53160950 : if (!pi)
234 : : return true;
235 : :
236 : : /* ??? This does not use TBAA to prune globals ptr may not access. */
237 : 42398512 : return pt_solution_includes_global (&pi->pt, escaped_local_p);
238 : : }
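     : :
     : : /* For example, assuming a pointer P set up as in the purely
     : :    illustrative fragment
     : :
     : :      int g;                               // file-scope, hence global
     : :      void f (int c) { int l; int *p = c ? &g : &l; ... use *p ...; }
     : :
     : :    the points-to set of P contains the global G, so
     : :    ptr_deref_may_alias_global_p (p, false) returns true; if P could
     : :    only point to the non-escaping local L, the query could instead
     : :    return false.  */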
239 : :
240 : : /* Return true if dereferencing PTR may alias DECL.
241 : : The caller is responsible for applying TBAA to see if PTR
242 : : may access DECL at all. */
243 : :
244 : : static bool
245 : 190722749 : ptr_deref_may_alias_decl_p (tree ptr, tree decl)
246 : : {
247 : 190722749 : struct ptr_info_def *pi;
248 : :
249 : : /* Conversions are irrelevant for points-to information and
250 : : data-dependence analysis can feed us those. */
251 : 190722749 : STRIP_NOPS (ptr);
252 : :
253 : : /* Anything we do not explicitly handle aliases. */
254 : 190722749 : if ((TREE_CODE (ptr) != SSA_NAME
255 : 1973431 : && TREE_CODE (ptr) != ADDR_EXPR
256 : 859019 : && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
257 : 189863730 : || !POINTER_TYPE_P (TREE_TYPE (ptr))
258 : 380585981 : || (!VAR_P (decl)
259 : : && TREE_CODE (decl) != PARM_DECL
260 : : && TREE_CODE (decl) != RESULT_DECL))
261 : : return true;
262 : :
263 : : /* Disregard pointer offsetting. */
264 : 189862184 : if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
265 : : {
266 : 0 : do
267 : : {
268 : 0 : ptr = TREE_OPERAND (ptr, 0);
269 : : }
270 : 0 : while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
271 : : return ptr_deref_may_alias_decl_p (ptr, decl);
272 : : }
273 : :
274 : : /* ADDR_EXPR pointers either just offset another pointer or directly
275 : : specify the pointed-to set. */
276 : 189862184 : if (TREE_CODE (ptr) == ADDR_EXPR)
277 : : {
278 : 1113393 : tree base = get_base_address (TREE_OPERAND (ptr, 0));
279 : 1113393 : if (base
280 : 1113393 : && (TREE_CODE (base) == MEM_REF
281 : 1113393 : || TREE_CODE (base) == TARGET_MEM_REF))
282 : 2867 : ptr = TREE_OPERAND (base, 0);
283 : 1110526 : else if (base
284 : 1110526 : && DECL_P (base))
285 : 1094857 : return compare_base_decls (base, decl) != 0;
286 : 15669 : else if (base
287 : 15669 : && CONSTANT_CLASS_P (base))
288 : : return false;
289 : : else
290 : : return true;
291 : : }
292 : :
293 : : /* Non-aliased variables cannot be pointed to. */
294 : 188751658 : if (!may_be_aliased (decl))
295 : : return false;
296 : :
297 : : /* From here we require a SSA name pointer. Anything else aliases. */
298 : 70507848 : if (TREE_CODE (ptr) != SSA_NAME
299 : 70507848 : || !POINTER_TYPE_P (TREE_TYPE (ptr)))
300 : : return true;
301 : :
302 : : /* If we do not have useful points-to information for this pointer
303 : : we cannot disambiguate anything else. */
304 : 70507848 : pi = SSA_NAME_PTR_INFO (ptr);
305 : 70507848 : if (!pi)
306 : : return true;
307 : :
308 : 69685654 : return pt_solution_includes (&pi->pt, decl);
309 : : }
310 : :
311 : : /* Return true if dereferenced PTR1 and PTR2 may alias.
312 : : The caller is responsible for applying TBAA to see if accesses
313 : : through PTR1 and PTR2 may conflict at all. */
314 : :
315 : : bool
316 : 59383631 : ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
317 : : {
318 : 60482822 : struct ptr_info_def *pi1, *pi2;
319 : :
320 : : /* Conversions are irrelevant for points-to information and
321 : : data-dependence analysis can feed us those. */
322 : 60482822 : STRIP_NOPS (ptr1);
323 : 60482822 : STRIP_NOPS (ptr2);
324 : :
325 : : /* Disregard pointer offsetting. */
326 : 60482822 : if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
327 : : {
328 : 499015 : do
329 : : {
330 : 499015 : ptr1 = TREE_OPERAND (ptr1, 0);
331 : : }
332 : 499015 : while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
333 : : return ptr_derefs_may_alias_p (ptr1, ptr2);
334 : : }
335 : 59983807 : if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
336 : : {
337 : 512684 : do
338 : : {
339 : 512684 : ptr2 = TREE_OPERAND (ptr2, 0);
340 : : }
341 : 512684 : while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
342 : : return ptr_derefs_may_alias_p (ptr1, ptr2);
343 : : }
344 : :
345 : : /* ADDR_EXPR pointers either just offset another pointer or directly
346 : : specify the pointed-to set. */
347 : 59471123 : if (TREE_CODE (ptr1) == ADDR_EXPR)
348 : : {
349 : 743009 : tree base = get_base_address (TREE_OPERAND (ptr1, 0));
350 : 743009 : if (base
351 : 743009 : && (TREE_CODE (base) == MEM_REF
352 : 743009 : || TREE_CODE (base) == TARGET_MEM_REF))
353 : 43171 : return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
354 : 699838 : else if (base
355 : 699838 : && DECL_P (base))
356 : 697820 : return ptr_deref_may_alias_decl_p (ptr2, base);
357 : : /* Try ptr2 when ptr1 points to a constant. */
358 : : else if (base
359 : 2018 : && !CONSTANT_CLASS_P (base))
360 : : return true;
361 : : }
362 : 58730132 : if (TREE_CODE (ptr2) == ADDR_EXPR)
363 : : {
364 : 262118 : tree base = get_base_address (TREE_OPERAND (ptr2, 0));
365 : 262118 : if (base
366 : 262118 : && (TREE_CODE (base) == MEM_REF
367 : 262118 : || TREE_CODE (base) == TARGET_MEM_REF))
368 : 44321 : return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
369 : 217797 : else if (base
370 : 217797 : && DECL_P (base))
371 : 215874 : return ptr_deref_may_alias_decl_p (ptr1, base);
372 : : else
373 : : return true;
374 : : }
375 : :
376 : : /* From here we require SSA name pointers. Anything else aliases. */
377 : 58468014 : if (TREE_CODE (ptr1) != SSA_NAME
378 : 58340770 : || TREE_CODE (ptr2) != SSA_NAME
379 : 58325246 : || !POINTER_TYPE_P (TREE_TYPE (ptr1))
380 : 116783492 : || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
381 : : return true;
382 : :
383 : : /* We may end up with two empty points-to solutions when both pointers
384 : : are the same. In this case we still want to say the pointers alias, so shortcut
385 : : that here. */
386 : 58315363 : if (ptr1 == ptr2)
387 : : return true;
388 : :
389 : : /* If we do not have useful points-to information for either pointer
390 : : we cannot disambiguate anything else. */
391 : 54359961 : pi1 = SSA_NAME_PTR_INFO (ptr1);
392 : 54359961 : pi2 = SSA_NAME_PTR_INFO (ptr2);
393 : 54359961 : if (!pi1 || !pi2)
394 : : return true;
395 : :
396 : : /* ??? This does not use TBAA to prune decls from the intersection
397 : : that not both pointers may access. */
398 : 51802155 : return pt_solutions_intersect (&pi1->pt, &pi2->pt);
399 : : }
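     : :
     : : /* For example, in the illustrative fragment
     : :
     : :      int a, b;
     : :      int *p = &a, *q = &b;
     : :
     : :    the points-to sets of P and Q are {a} and {b}, which do not
     : :    intersect, so ptr_derefs_may_alias_p (p, q) can return false,
     : :    while ptr_derefs_may_alias_p (p, p) returns true via the
     : :    ptr1 == ptr2 shortcut even if the points-to solution is empty.  */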
400 : :
401 : : /* Return true if dereferencing PTR may alias *REF.
402 : : The caller is responsible for applying TBAA to see if PTR
403 : : may access *REF at all. */
404 : :
405 : : static bool
406 : 1690647 : ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
407 : : {
408 : 1690647 : tree base = ao_ref_base (ref);
409 : :
410 : 1690647 : if (TREE_CODE (base) == MEM_REF
411 : 1690647 : || TREE_CODE (base) == TARGET_MEM_REF)
412 : 172431 : return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
413 : 1518216 : else if (DECL_P (base))
414 : 1516442 : return ptr_deref_may_alias_decl_p (ptr, base);
415 : :
416 : : return true;
417 : : }
418 : :
419 : : /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
420 : :
421 : : bool
422 : 50509653 : ptrs_compare_unequal (tree ptr1, tree ptr2)
423 : : {
424 : : /* First resolve the pointers down to an SSA name pointer base or
425 : : a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
426 : : not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
427 : : or STRING_CSTs, which need points-to adjustments to track them
428 : : in the points-to sets. */
429 : 50509653 : tree obj1 = NULL_TREE;
430 : 50509653 : tree obj2 = NULL_TREE;
431 : 50509653 : if (TREE_CODE (ptr1) == ADDR_EXPR)
432 : : {
433 : 4022432 : tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
434 : 4022432 : if (! tem)
435 : : return false;
436 : 4022432 : if (VAR_P (tem)
437 : : || TREE_CODE (tem) == PARM_DECL
438 : : || TREE_CODE (tem) == RESULT_DECL)
439 : : obj1 = tem;
440 : : else if (TREE_CODE (tem) == MEM_REF)
441 : 107210 : ptr1 = TREE_OPERAND (tem, 0);
442 : : }
443 : 50509653 : if (TREE_CODE (ptr2) == ADDR_EXPR)
444 : : {
445 : 4331985 : tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
446 : 4331985 : if (! tem)
447 : : return false;
448 : 4331985 : if (VAR_P (tem)
449 : : || TREE_CODE (tem) == PARM_DECL
450 : : || TREE_CODE (tem) == RESULT_DECL)
451 : : obj2 = tem;
452 : : else if (TREE_CODE (tem) == MEM_REF)
453 : 1945 : ptr2 = TREE_OPERAND (tem, 0);
454 : : }
455 : :
456 : : /* Canonicalize ptr vs. object. */
457 : 50509653 : if (TREE_CODE (ptr1) == SSA_NAME && obj2)
458 : : {
459 : : std::swap (ptr1, ptr2);
460 : : std::swap (obj1, obj2);
461 : : }
462 : :
463 : 50509653 : if (obj1 && obj2)
464 : : /* Other code handles this correctly, no need to duplicate it here. */;
465 : 5701918 : else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
466 : : {
467 : 5673477 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
468 : : /* We may not use restrict to optimize pointer comparisons.
469 : : See PR71062. So we have to assume that restrict-pointed-to
470 : : may in fact be obj1. */
471 : 5673477 : if (!pi
472 : : || pi->pt.vars_contains_restrict
473 : 4576297 : || pi->pt.vars_contains_interposable)
474 : : return false;
475 : 3940697 : if (VAR_P (obj1)
476 : 3940697 : && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
477 : : {
478 : 1166327 : varpool_node *node = varpool_node::get (obj1);
479 : : /* If obj1 may bind to NULL give up (see below). */
480 : 1166327 : if (! node
481 : 1166327 : || ! node->nonzero_address ()
482 : 2332654 : || ! decl_binds_to_current_def_p (obj1))
483 : 877728 : return false;
484 : : }
485 : 3062969 : return !pt_solution_includes (&pi->pt, obj1);
486 : : }
487 : 44823950 : else if (TREE_CODE (ptr1) == SSA_NAME)
488 : : {
489 : 39522366 : struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
490 : 39522366 : if (!pi1
491 : : || pi1->pt.vars_contains_restrict
492 : 30452239 : || pi1->pt.vars_contains_interposable)
493 : : return false;
494 : 27464916 : if (integer_zerop (ptr2) && !pi1->pt.null)
495 : : return true;
496 : 27447990 : if (TREE_CODE (ptr2) == SSA_NAME)
497 : : {
498 : 8527022 : struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);
499 : 8527022 : if (!pi2
500 : : || pi2->pt.vars_contains_restrict
501 : 8003445 : || pi2->pt.vars_contains_interposable)
502 : : return false;
503 : 6819470 : if ((!pi1->pt.null || !pi2->pt.null)
504 : : /* ??? We do not represent FUNCTION_DECL and LABEL_DECL
505 : : in pt.vars but only set pt.vars_contains_nonlocal. This
506 : : makes compares involving those and other nonlocals
507 : : imprecise. */
508 : 2915585 : && (!pi1->pt.vars_contains_nonlocal
509 : 63807 : || !pi2->pt.vars_contains_nonlocal)
510 : 10825670 : && (!pt_solution_includes_const_pool (&pi1->pt)
511 : 2592130 : || !pt_solution_includes_const_pool (&pi2->pt)))
512 : 335038 : return !pt_solutions_intersect (&pi1->pt, &pi2->pt);
513 : : }
514 : : }
515 : :
516 : : return false;
517 : : }
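     : :
     : : /* As an illustrative example, in
     : :
     : :      void f (void)
     : :      {
     : :        int a, b;
     : :        int *p = &a;
     : :        ... if (p == &b) ...
     : :      }
     : :
     : :    the points-to set of P is {a} and does not include B, so
     : :    ptrs_compare_unequal (p, &b) can return true and the comparison
     : :    can be folded to false (the restrict, interposition and NULL
     : :    caveats above do not apply to such local variables).  */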
518 : :
519 : : /* Return whether a reference with base BASE may refer to global memory.
520 : : When ESCAPED_LOCAL_P is true, escaped local memory is also considered
521 : : global. */
522 : :
523 : : static bool
524 : 48815849 : ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
525 : : {
526 : 48815849 : if (DECL_P (base))
527 : 38459604 : return (is_global_var (base)
528 : 38459604 : || (escaped_local_p
529 : 822267 : && pt_solution_includes (&cfun->gimple_df->escaped_return,
530 : : base)));
531 : 10356245 : else if (TREE_CODE (base) == MEM_REF
532 : 10356245 : || TREE_CODE (base) == TARGET_MEM_REF)
533 : 10347163 : return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
534 : 10347163 : escaped_local_p);
535 : : return true;
536 : : }
537 : :
538 : : bool
539 : 7926618 : ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
540 : : {
541 : 7926618 : tree base = ao_ref_base (ref);
542 : 7926618 : return ref_may_alias_global_p_1 (base, escaped_local_p);
543 : : }
544 : :
545 : : bool
546 : 40889231 : ref_may_alias_global_p (tree ref, bool escaped_local_p)
547 : : {
548 : 40889231 : tree base = get_base_address (ref);
549 : 40889231 : return ref_may_alias_global_p_1 (base, escaped_local_p);
550 : : }
551 : :
552 : : /* Return true if STMT may clobber global memory.
553 : : When ESCAPED_LOCAL_P is true, escaped local memory is also considered
554 : : global. */
555 : :
556 : : bool
557 : 166986533 : stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
558 : : {
559 : 166986533 : tree lhs;
560 : :
561 : 334034680 : if (!gimple_vdef (stmt))
562 : : return false;
563 : :
564 : : /* ??? We can ask the oracle whether an artificial pointer
565 : : dereference with a pointer whose points-to information covers
566 : : all global memory (what about non-address-taken memory?) may be
567 : : clobbered by this call. As there is at the moment no convenient
568 : : way of doing that without generating garbage, do some manual
569 : : checking instead.
570 : : ??? We could make a NULL ao_ref argument to the various
571 : : predicates special, meaning any global memory. */
572 : :
573 : 40964754 : switch (gimple_code (stmt))
574 : : {
575 : 40889231 : case GIMPLE_ASSIGN:
576 : 40889231 : lhs = gimple_assign_lhs (stmt);
577 : 40889231 : return (TREE_CODE (lhs) != SSA_NAME
578 : 40889231 : && ref_may_alias_global_p (lhs, escaped_local_p));
579 : : case GIMPLE_CALL:
580 : : return true;
581 : : default:
582 : : return true;
583 : : }
584 : : }
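     : :
     : : /* For instance, an assignment "g = 1" to a global G carries a virtual
     : :    definition and a non-SSA lhs referring to global memory, so this
     : :    returns true; "l = 1" for a non-aliased local L has no VDEF and
     : :    returns false; and any call with a VDEF is conservatively assumed
     : :    to clobber global memory.  (The statement forms are illustrative.)  */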
585 : :
586 : :
587 : : /* Dump alias information on FILE. */
588 : :
589 : : void
590 : 282 : dump_alias_info (FILE *file)
591 : : {
592 : 282 : unsigned i;
593 : 282 : tree ptr;
594 : 282 : const char *funcname
595 : 282 : = lang_hooks.decl_printable_name (current_function_decl, 2);
596 : 282 : tree var;
597 : :
598 : 282 : fprintf (file, "\n\nAlias information for %s\n\n", funcname);
599 : :
600 : 282 : fprintf (file, "Aliased symbols\n\n");
601 : :
602 : 1073 : FOR_EACH_LOCAL_DECL (cfun, i, var)
603 : : {
604 : 540 : if (may_be_aliased (var))
605 : 326 : dump_variable (file, var);
606 : : }
607 : :
608 : 282 : fprintf (file, "\nCall clobber information\n");
609 : :
610 : 282 : fprintf (file, "\nESCAPED");
611 : 282 : dump_points_to_solution (file, &cfun->gimple_df->escaped);
612 : :
613 : 282 : fprintf (file, "\nESCAPED_RETURN");
614 : 282 : dump_points_to_solution (file, &cfun->gimple_df->escaped_return);
615 : :
616 : 282 : fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
617 : :
618 : 2908 : FOR_EACH_SSA_NAME (i, ptr, cfun)
619 : : {
620 : 2427 : struct ptr_info_def *pi;
621 : :
622 : 4434 : if (!POINTER_TYPE_P (TREE_TYPE (ptr))
623 : 2474 : || SSA_NAME_IN_FREE_LIST (ptr))
624 : 1960 : continue;
625 : :
626 : 467 : pi = SSA_NAME_PTR_INFO (ptr);
627 : 467 : if (pi)
628 : 461 : dump_points_to_info_for (file, ptr);
629 : : }
630 : :
631 : 282 : fprintf (file, "\n");
632 : 282 : }
633 : :
634 : :
635 : : /* Dump alias information on stderr. */
636 : :
637 : : DEBUG_FUNCTION void
638 : 0 : debug_alias_info (void)
639 : : {
640 : 0 : dump_alias_info (stderr);
641 : 0 : }
642 : :
643 : :
644 : : /* Dump the points-to set *PT into FILE. */
645 : :
646 : : void
647 : 1025 : dump_points_to_solution (FILE *file, struct pt_solution *pt)
648 : : {
649 : 1025 : if (pt->anything)
650 : 3 : fprintf (file, ", points-to anything");
651 : :
652 : 1025 : if (pt->nonlocal)
653 : 610 : fprintf (file, ", points-to non-local");
654 : :
655 : 1025 : if (pt->escaped)
656 : 408 : fprintf (file, ", points-to escaped");
657 : :
658 : 1025 : if (pt->ipa_escaped)
659 : 0 : fprintf (file, ", points-to unit escaped");
660 : :
661 : 1025 : if (pt->null)
662 : 587 : fprintf (file, ", points-to NULL");
663 : :
664 : 1025 : if (pt->const_pool)
665 : 0 : fprintf (file, ", points-to const-pool");
666 : :
667 : 1025 : if (pt->vars)
668 : : {
669 : 1022 : fprintf (file, ", points-to vars: ");
670 : 1022 : dump_decl_set (file, pt->vars);
671 : 1022 : if (pt->vars_contains_nonlocal
672 : : || pt->vars_contains_escaped
673 : : || pt->vars_contains_escaped_heap
674 : : || pt->vars_contains_restrict
675 : 1022 : || pt->vars_contains_interposable)
676 : : {
677 : 197 : const char *comma = "";
678 : 197 : fprintf (file, " (");
679 : 197 : if (pt->vars_contains_nonlocal)
680 : : {
681 : 134 : fprintf (file, "nonlocal");
682 : 134 : comma = ", ";
683 : : }
684 : 197 : if (pt->vars_contains_escaped)
685 : : {
686 : 125 : fprintf (file, "%sescaped", comma);
687 : 125 : comma = ", ";
688 : : }
689 : 197 : if (pt->vars_contains_escaped_heap)
690 : : {
691 : 0 : fprintf (file, "%sescaped heap", comma);
692 : 0 : comma = ", ";
693 : : }
694 : 197 : if (pt->vars_contains_restrict)
695 : : {
696 : 58 : fprintf (file, "%srestrict", comma);
697 : 58 : comma = ", ";
698 : : }
699 : 197 : if (pt->vars_contains_interposable)
700 : 0 : fprintf (file, "%sinterposable", comma);
701 : 197 : fprintf (file, ")");
702 : : }
703 : : }
704 : 1025 : }
705 : :
706 : :
707 : : /* Unified dump function for pt_solution. */
708 : :
709 : : DEBUG_FUNCTION void
710 : 0 : debug (pt_solution &ref)
711 : : {
712 : 0 : dump_points_to_solution (stderr, &ref);
713 : 0 : }
714 : :
715 : : DEBUG_FUNCTION void
716 : 0 : debug (pt_solution *ptr)
717 : : {
718 : 0 : if (ptr)
719 : 0 : debug (*ptr);
720 : : else
721 : 0 : fprintf (stderr, "<nil>\n");
722 : 0 : }
723 : :
724 : :
725 : : /* Dump points-to information for SSA_NAME PTR into FILE. */
726 : :
727 : : void
728 : 461 : dump_points_to_info_for (FILE *file, tree ptr)
729 : : {
730 : 461 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
731 : :
732 : 461 : print_generic_expr (file, ptr, dump_flags);
733 : :
734 : 461 : if (pi)
735 : 461 : dump_points_to_solution (file, &pi->pt);
736 : : else
737 : 0 : fprintf (file, ", points-to anything");
738 : :
739 : 461 : fprintf (file, "\n");
740 : 461 : }
741 : :
742 : :
743 : : /* Dump points-to information for VAR into stderr. */
744 : :
745 : : DEBUG_FUNCTION void
746 : 0 : debug_points_to_info_for (tree var)
747 : : {
748 : 0 : dump_points_to_info_for (stderr, var);
749 : 0 : }
750 : :
751 : :
752 : : /* Initializes the alias-oracle reference representation *R from REF. */
753 : :
754 : : void
755 : 2485751591 : ao_ref_init (ao_ref *r, tree ref)
756 : : {
757 : 2485751591 : r->ref = ref;
758 : 2485751591 : r->base = NULL_TREE;
759 : 2485751591 : r->offset = 0;
760 : 2485751591 : r->size = -1;
761 : 2485751591 : r->max_size = -1;
762 : 2485751591 : r->ref_alias_set = -1;
763 : 2485751591 : r->base_alias_set = -1;
764 : 2485751591 : r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
765 : 2485751591 : }
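     : :
     : : /* A minimal usage sketch (the local names are hypothetical): a client
     : :    typically wraps a reference tree once and lets the remaining fields
     : :    be computed lazily,
     : :
     : :      ao_ref r;
     : :      ao_ref_init (&r, ref);          // base/offset/size still unknown
     : :      tree base = ao_ref_base (&r);   // fills and caches base, offset,
     : :                                      // size and max_size
     : :
     : :    so that repeated oracle queries on the same ao_ref do not recompute
     : :    get_ref_base_and_extent.  */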
766 : :
767 : : /* Returns the base object of the memory reference *REF. */
768 : :
769 : : tree
770 : 4825955664 : ao_ref_base (ao_ref *ref)
771 : : {
772 : 4825955664 : bool reverse;
773 : :
774 : 4825955664 : if (ref->base)
775 : : return ref->base;
776 : 2237303914 : ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
777 : : &ref->max_size, &reverse);
778 : 2237303914 : return ref->base;
779 : : }
780 : :
781 : : /* Returns the base object alias set of the memory reference *REF. */
782 : :
783 : : alias_set_type
784 : 912959729 : ao_ref_base_alias_set (ao_ref *ref)
785 : : {
786 : 912959729 : tree base_ref;
787 : 912959729 : if (ref->base_alias_set != -1)
788 : : return ref->base_alias_set;
789 : 716096698 : if (!ref->ref)
790 : : return 0;
791 : 682577528 : base_ref = ref->ref;
792 : 682577528 : if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
793 : 4 : base_ref = TREE_OPERAND (base_ref, 0);
794 : 1119308331 : while (handled_component_p (base_ref))
795 : 436730803 : base_ref = TREE_OPERAND (base_ref, 0);
796 : 682577528 : ref->base_alias_set = get_alias_set (base_ref);
797 : 682577528 : return ref->base_alias_set;
798 : : }
799 : :
800 : : /* Returns the reference alias set of the memory reference *REF. */
801 : :
802 : : alias_set_type
803 : 1141320123 : ao_ref_alias_set (ao_ref *ref)
804 : : {
805 : 1141320123 : if (ref->ref_alias_set != -1)
806 : : return ref->ref_alias_set;
807 : 456808487 : if (!ref->ref)
808 : : return 0;
809 : 456808485 : ref->ref_alias_set = get_alias_set (ref->ref);
810 : 456808485 : return ref->ref_alias_set;
811 : : }
812 : :
813 : : /* Returns a type satisfying
814 : : get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
815 : :
816 : : tree
817 : 313667 : ao_ref_base_alias_ptr_type (ao_ref *ref)
818 : : {
819 : 313667 : tree base_ref;
820 : :
821 : 313667 : if (!ref->ref)
822 : : return NULL_TREE;
823 : 313667 : base_ref = ref->ref;
824 : 313667 : if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
825 : 0 : base_ref = TREE_OPERAND (base_ref, 0);
826 : 418159 : while (handled_component_p (base_ref))
827 : 104492 : base_ref = TREE_OPERAND (base_ref, 0);
828 : 313667 : tree ret = reference_alias_ptr_type (base_ref);
829 : 313667 : return ret;
830 : : }
831 : :
832 : : /* Returns a type satisfying
833 : : get_deref_alias_set (type) == ao_ref_alias_set (REF). */
834 : :
835 : : tree
836 : 313667 : ao_ref_alias_ptr_type (ao_ref *ref)
837 : : {
838 : 313667 : if (!ref->ref)
839 : : return NULL_TREE;
840 : 313667 : tree ret = reference_alias_ptr_type (ref->ref);
841 : 313667 : return ret;
842 : : }
843 : :
844 : : /* Return the alignment of the access *REF and store it in the *ALIGN
845 : : and *BITPOS pair. Returns false if no alignment could be determined.
846 : : See get_object_alignment_2 for details. */
847 : :
848 : : bool
849 : 90930 : ao_ref_alignment (ao_ref *ref, unsigned int *align,
850 : : unsigned HOST_WIDE_INT *bitpos)
851 : : {
852 : 90930 : if (ref->ref)
853 : 89402 : return get_object_alignment_1 (ref->ref, align, bitpos);
854 : :
855 : : /* When we just have ref->base we cannot use get_object_alignment since
856 : : that will eventually use the type of the apparent access while for
857 : : example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
858 : 1528 : *align = BITS_PER_UNIT;
859 : 1528 : HOST_WIDE_INT offset;
860 : 1528 : if (!ref->offset.is_constant (&offset)
861 : 1528 : || !get_object_alignment_2 (ref->base, align, bitpos, true))
862 : : return false;
863 : 1324 : *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
864 : 1324 : *bitpos = *bitpos & (*align - 1);
865 : 1324 : return true;
866 : : }
867 : :
868 : : /* Init an alias-oracle reference representation from a gimple pointer
869 : : PTR a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
870 : : PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
871 : :
872 : : The access is assumed to be only to or after of the pointer target adjusted
873 : : The access is assumed to be only to or after the pointer target adjusted
874 : :
875 : : void
876 : 33800744 : ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
877 : : bool range_known,
878 : : poly_int64 offset,
879 : : poly_int64 size,
880 : : poly_int64 max_size)
881 : : {
882 : 33800744 : poly_int64 t, extra_offset = 0;
883 : :
884 : 33800744 : ref->ref = NULL_TREE;
885 : 33800744 : if (TREE_CODE (ptr) == SSA_NAME)
886 : : {
887 : 23047248 : gimple *stmt = SSA_NAME_DEF_STMT (ptr);
888 : 23047248 : if (gimple_assign_single_p (stmt)
889 : 23047248 : && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
890 : 3745184 : ptr = gimple_assign_rhs1 (stmt);
891 : 19302064 : else if (is_gimple_assign (stmt)
892 : 13133144 : && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
893 : 24696735 : && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
894 : : {
895 : 251117 : ptr = gimple_assign_rhs1 (stmt);
896 : 251117 : extra_offset *= BITS_PER_UNIT;
897 : : }
898 : : }
899 : :
900 : 33800744 : if (TREE_CODE (ptr) == ADDR_EXPR)
901 : : {
902 : 14448410 : ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
903 : 14448410 : if (ref->base)
904 : 11749393 : ref->offset = BITS_PER_UNIT * t;
905 : : else
906 : : {
907 : 2699017 : range_known = false;
908 : 2699017 : ref->offset = 0;
909 : 2699017 : ref->base = get_base_address (TREE_OPERAND (ptr, 0));
910 : : }
911 : : }
912 : : else
913 : : {
914 : 19352334 : gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
915 : 19352334 : ref->base = build2 (MEM_REF, char_type_node,
916 : : ptr, null_pointer_node);
917 : 19352334 : ref->offset = 0;
918 : : }
919 : 33800744 : ref->offset += extra_offset + offset;
920 : 33800744 : if (range_known)
921 : : {
922 : 17647213 : ref->max_size = max_size;
923 : 17647213 : ref->size = size;
924 : : }
925 : : else
926 : 16153531 : ref->max_size = ref->size = -1;
927 : 33800744 : ref->ref_alias_set = 0;
928 : 33800744 : ref->base_alias_set = 0;
929 : 33800744 : ref->volatile_p = false;
930 : 33800744 : }
931 : :
932 : : /* Init an alias-oracle reference representation from a gimple pointer
933 : : PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
934 : : size is assumed to be unknown. The access is assumed to be only
935 : : to or after the pointer target, not before it. */
936 : :
937 : : void
938 : 9289848 : ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
939 : : {
940 : 9289848 : poly_int64 size_hwi;
941 : 9289848 : if (size
942 : 4552499 : && poly_int_tree_p (size, &size_hwi)
943 : 13282524 : && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
944 : : {
945 : 3992002 : size_hwi = size_hwi * BITS_PER_UNIT;
946 : 3992002 : ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
947 : : }
948 : : else
949 : 5297846 : ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
950 : 9289848 : }
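     : :
     : : /* As an illustrative use (PTR and LEN stand for hypothetical call
     : :    arguments), the memory written by a call like memset (ptr, 0, len)
     : :    can be described as
     : :
     : :      ao_ref dref;
     : :      ao_ref_init_from_ptr_and_size (&dref, ptr, len);
     : :
     : :    which yields a known access size only when LEN is a suitable
     : :    compile-time constant and an unknown extent otherwise.  */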
951 : :
952 : : /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
953 : : Return -1 if S1 < S2
954 : : Return 1 if S1 > S2
955 : : Return 0 if equal or incomparable. */
956 : :
957 : : static int
958 : 8199717 : compare_sizes (tree s1, tree s2)
959 : : {
960 : 8199717 : if (!s1 || !s2)
961 : : return 0;
962 : :
963 : 8192824 : poly_uint64 size1;
964 : 8192824 : poly_uint64 size2;
965 : :
966 : 8192824 : if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
967 : 375 : return 0;
968 : 8192449 : if (known_lt (size1, size2))
969 : : return -1;
970 : 5858129 : if (known_lt (size2, size1))
971 : : return 1;
972 : : return 0;
973 : : }
974 : :
975 : : /* Compare TYPE1 and TYPE2 by their size.
976 : : Return -1 if size of TYPE1 < size of TYPE2
977 : : Return 1 if size of TYPE1 > size of TYPE2
978 : : Return 0 if the types are of equal size or we cannot compare them. */
979 : :
980 : : static int
981 : 7059412 : compare_type_sizes (tree type1, tree type2)
982 : : {
983 : : /* Be conservative for arrays and vectors. We want to support partial
984 : : overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
985 : 7059412 : while (TREE_CODE (type1) == ARRAY_TYPE
986 : 7972308 : || VECTOR_TYPE_P (type1))
987 : 912896 : type1 = TREE_TYPE (type1);
988 : 7345066 : while (TREE_CODE (type2) == ARRAY_TYPE
989 : 7345066 : || VECTOR_TYPE_P (type2))
990 : 285654 : type2 = TREE_TYPE (type2);
991 : 7059412 : return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
992 : : }
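     : :
     : : /* For example, compare_type_sizes (int[3], int) and
     : :    compare_type_sizes (int[3], int[5]) both return 0 because arrays and
     : :    vectors are stripped down to their element types before comparing,
     : :    matching the partial-overlap assumption mentioned above, while
     : :    compare_type_sizes (char, int) returns -1 on typical targets.  */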
993 : :
994 : : /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
995 : : purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
996 : : decide. */
997 : :
998 : : static inline int
999 : 748272746 : same_type_for_tbaa (tree type1, tree type2)
1000 : : {
1001 : 748272746 : type1 = TYPE_MAIN_VARIANT (type1);
1002 : 748272746 : type2 = TYPE_MAIN_VARIANT (type2);
1003 : :
1004 : : /* Handle the most common case first. */
1005 : 748272746 : if (type1 == type2)
1006 : : return 1;
1007 : :
1008 : : /* If we would have to do structural comparison bail out. */
1009 : 77920511 : if (TYPE_STRUCTURAL_EQUALITY_P (type1)
1010 : 77920511 : || TYPE_STRUCTURAL_EQUALITY_P (type2))
1011 : : return -1;
1012 : :
1013 : : /* Compare the canonical types. */
1014 : 56580051 : if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
1015 : : return 1;
1016 : :
1017 : : /* ??? Array types are not properly unified in all cases as we have
1018 : : spurious changes in the index types for example. Removing this
1019 : : causes all sorts of problems with the Fortran frontend. */
1020 : 55986046 : if (TREE_CODE (type1) == ARRAY_TYPE
1021 : 4439385 : && TREE_CODE (type2) == ARRAY_TYPE)
1022 : : return -1;
1023 : :
1024 : : /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
1025 : : object of one of its constrained subtypes, e.g. when a function with an
1026 : : unconstrained parameter passed by reference is called on an object and
1027 : : inlined. But, even in the case of a fixed size, type and subtypes are
1028 : : not equivalent enough as to share the same TYPE_CANONICAL, since this
1029 : : would mean that conversions between them are useless, whereas they are
1030 : : not (e.g. type and subtypes can have different modes). So, in the end,
1031 : : they are only guaranteed to have the same alias set. */
1032 : 55635390 : alias_set_type set1 = get_alias_set (type1);
1033 : 55635390 : alias_set_type set2 = get_alias_set (type2);
1034 : 55635390 : if (set1 == set2)
1035 : : return -1;
1036 : :
1037 : : /* Pointers to void are considered compatible with all other pointers,
1038 : : so for two pointers see what the alias set resolution thinks. */
1039 : 23169931 : if (POINTER_TYPE_P (type1)
1040 : 5363823 : && POINTER_TYPE_P (type2)
1041 : 23334841 : && alias_sets_conflict_p (set1, set2))
1042 : : return -1;
1043 : :
1044 : : /* The types are known to be not equal. */
1045 : : return 0;
1046 : : }
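     : :
     : : /* For example, same_type_for_tbaa (int, const int) returns 1 because
     : :    both share the main variant "int"; two unrelated record types with
     : :    distinct canonical types and distinct alias sets yield 0; and cases
     : :    that would require structural comparison (or two array types) yield
     : :    the conservative -1.  */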
1047 : :
1048 : : /* Return true if TYPE is a composite type (i.e. we may apply one of the
1049 : : handled components to it). */
1050 : :
1051 : : static bool
1052 : 1912668 : type_has_components_p (tree type)
1053 : : {
1054 : 1912668 : return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1055 : 1912668 : || TREE_CODE (type) == COMPLEX_TYPE;
1056 : : }
1057 : :
1058 : : /* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
1059 : : respectively, either point to the same address or are completely
1060 : : disjoint. If PARTIAL_OVERLAP is true, assume that the outermost arrays may
1061 : : just partly overlap.
1062 : :
1063 : : Try to disambiguate using the access path starting from the match
1064 : : and return false if there is no conflict.
1065 : :
1066 : : Helper for aliasing_component_refs_p. */
1067 : :
1068 : : static bool
1069 : 800930 : aliasing_matching_component_refs_p (tree match1, tree ref1,
1070 : : poly_int64 offset1, poly_int64 max_size1,
1071 : : tree match2, tree ref2,
1072 : : poly_int64 offset2, poly_int64 max_size2,
1073 : : bool partial_overlap)
1074 : : {
1075 : 800930 : poly_int64 offadj, sztmp, msztmp;
1076 : 800930 : bool reverse;
1077 : :
1078 : 800930 : if (!partial_overlap)
1079 : : {
1080 : 800926 : get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1081 : 800926 : offset2 -= offadj;
1082 : 800926 : get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1083 : 800926 : offset1 -= offadj;
1084 : 800926 : if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1085 : : {
1086 : 66832 : ++alias_stats.aliasing_component_refs_p_no_alias;
1087 : 66832 : return false;
1088 : : }
1089 : : }
1090 : :
1091 : 734098 : int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1092 : : partial_overlap);
1093 : 734098 : if (cmp == 1
1094 : 734098 : || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1095 : : {
1096 : 821 : ++alias_stats.aliasing_component_refs_p_no_alias;
1097 : 821 : return false;
1098 : : }
1099 : 733277 : ++alias_stats.aliasing_component_refs_p_may_alias;
1100 : 733277 : return true;
1101 : : }
1102 : :
1103 : : /* Return true if REF is a reference to a zero-sized trailing array. I.e.
1104 : : struct foo {int bar; int array[0];} *fooptr;
1105 : : fooptr->array. */
1106 : :
1107 : : static bool
1108 : 5836636 : component_ref_to_zero_sized_trailing_array_p (tree ref)
1109 : : {
1110 : 5836636 : return (TREE_CODE (ref) == COMPONENT_REF
1111 : 5195456 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1112 : 117616 : && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1113 : 42066 : || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1114 : 5912263 : && array_ref_flexible_size_p (ref));
1115 : : }
1116 : :
1117 : : /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1118 : : aliasing_component_refs_p.
1119 : :
1120 : : Walk access path REF2 and try to find type matching TYPE1
1121 : : (which is a start of possibly aliasing access path REF1).
1122 : : If match is found, try to disambiguate.
1123 : :
1124 : : Return 0 for successful disambiguation.
1125 : : Return 1 if a match was found but disambiguation failed.
1126 : : Return -1 if there is no match.
1127 : : In this case *MAYBE_MATCH is set to false if there is no type matching TYPE1
1128 : : in the access path of REF2 and to true if we are not sure. */
1129 : :
1130 : : static int
1131 : 2401770 : aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1132 : : poly_int64 offset1, poly_int64 max_size1,
1133 : : tree end_struct_ref1,
1134 : : tree ref2, tree base2,
1135 : : poly_int64 offset2, poly_int64 max_size2,
1136 : : bool *maybe_match)
1137 : : {
1138 : 2401770 : tree ref = ref2;
1139 : 2401770 : int same_p = 0;
1140 : :
1141 : 7090370 : while (true)
1142 : : {
1143 : : /* We walk from inner type to the outer types. If type we see is
1144 : : /* We walk from the inner type to the outer types. If the type we see is
1145 : 4746070 : int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1146 : :
1147 : 4746070 : if (cmp < 0
1148 : 4746070 : && (!end_struct_ref1
1149 : 60 : || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1150 : 60 : TREE_TYPE (ref)) < 0))
1151 : : break;
1152 : : /* If the types may be of the same size, see if we can decide about their
1153 : : equality. */
1154 : 3527098 : if (cmp == 0)
1155 : : {
1156 : 2489349 : same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
1157 : 2489349 : if (same_p == 1)
1158 : : break;
1159 : : /* In case we can't decide whether the types are the same, try to
1160 : : continue looking for an exact match.
1161 : : Remember, however, that we possibly saw a match so that we can
1162 : : bypass the access path continuation tests we do later. */
1163 : 1688419 : if (same_p == -1)
1164 : 623222 : *maybe_match = true;
1165 : : }
1166 : 2726168 : if (!handled_component_p (ref))
1167 : : break;
1168 : 2344300 : ref = TREE_OPERAND (ref, 0);
1169 : 2344300 : }
1170 : 2401770 : if (same_p == 1)
1171 : : {
1172 : 800930 : bool partial_overlap = false;
1173 : :
1174 : : /* We assume that arrays can overlap by a multiple of their element
1175 : : size as tested in gcc.dg/torture/alias-2.c.
1176 : : This partial overlap happens only when both arrays are bases of
1177 : : the access and not contained within another component ref.
1178 : : To be safe we also assume partial overlap for VLAs. */
1179 : 800930 : if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1180 : 800930 : && (!TYPE_SIZE (TREE_TYPE (base1))
1181 : 4344 : || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1182 : 4344 : || ref == base2))
1183 : : {
1184 : : /* Setting maybe_match to true triggers
1185 : : nonoverlapping_component_refs_p test later that still may do
1186 : : useful disambiguation. */
1187 : 4 : *maybe_match = true;
1188 : 4 : partial_overlap = true;
1189 : : }
1190 : 800930 : return aliasing_matching_component_refs_p (base1, ref1,
1191 : : offset1, max_size1,
1192 : : ref, ref2,
1193 : : offset2, max_size2,
1194 : 800930 : partial_overlap);
1195 : : }
1196 : : return -1;
1197 : : }
1198 : :
1199 : : /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1200 : : Return true if they can be composed into a single access path
1201 : : base1...ref1...base2...ref2.
1202 : :
1203 : : REF_TYPE1 is the type of REF1. END_STRUCT_PAST_END1 is true if there is
1204 : : a trailing array access after REF1 in the non-TBAA part of the access.
1205 : : REF1_ALIAS_SET is the alias set of REF1.
1206 : :
1207 : : BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1208 : : a trailing array access in the TBAA part of access path2.
1209 : : BASE2_ALIAS_SET is the alias set of base2. */
1210 : :
1211 : : bool
1212 : 1912668 : access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1213 : : alias_set_type ref1_alias_set,
1214 : : tree base_type2, tree end_struct_ref2,
1215 : : alias_set_type base2_alias_set)
1216 : : {
1217 : : /* Access path can not continue past types with no components. */
1218 : 1912668 : if (!type_has_components_p (ref_type1))
1219 : : return false;
1220 : :
1221 : : /* If the first access path ends with a type too small to hold the base
1222 : : of the second access path, the paths typically cannot continue.
1223 : :
1224 : : Punt if end_struct_past_end1 is true. We want to support arbitrary
1225 : : type punning past the first COMPONENT_REF to a union because redundant store
1226 : : elimination depends on this, see PR92152. For this reason we cannot
1227 : : check the size of the reference because types may partially overlap. */
1228 : 160472 : if (!end_struct_past_end1)
1229 : : {
1230 : 160423 : if (compare_type_sizes (ref_type1, base_type2) < 0)
1231 : : return false;
1232 : : /* If path2 contains a trailing array access we can strengthen the check
1233 : : to verify that the size of the element of the trailing array fits as well.
1234 : : In fact we could check for offset + type_size, but we do not track
1235 : : offsets and this is quite a corner case. */
1236 : 139018 : if (end_struct_ref2
1237 : 139018 : && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1238 : : return false;
1239 : : }
1240 : 139067 : return (base2_alias_set == ref1_alias_set
1241 : 139067 : || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1242 : : }
1243 : :
1244 : : /* Determine if the two component references REF1 and REF2 which are
1245 : : based on access types TYPE1 and TYPE2 and of which at least one is based
1246 : : on an indirect reference may alias.
1247 : : REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1248 : : are the respective alias sets. */
1249 : :
1250 : : static bool
1251 : 2152798 : aliasing_component_refs_p (tree ref1,
1252 : : alias_set_type ref1_alias_set,
1253 : : alias_set_type base1_alias_set,
1254 : : poly_int64 offset1, poly_int64 max_size1,
1255 : : tree ref2,
1256 : : alias_set_type ref2_alias_set,
1257 : : alias_set_type base2_alias_set,
1258 : : poly_int64 offset2, poly_int64 max_size2)
1259 : : {
1260 : : /* If one reference is a component references through pointers try to find a
1261 : : common base and apply offset based disambiguation. This handles
1262 : : for example
1263 : : struct A { int i; int j; } *q;
1264 : : struct B { struct A a; int k; } *p;
1265 : : disambiguating q->i and p->a.j. */
1266 : 2152798 : tree base1, base2;
1267 : 2152798 : tree type1, type2;
1268 : 2152798 : bool maybe_match = false;
1269 : 2152798 : tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1270 : 2152798 : bool end_struct_past_end1 = false;
1271 : 2152798 : bool end_struct_past_end2 = false;
1272 : :
1273 : : /* Choose bases and base types to search for.
1274 : : The access path is as follows:
1275 : : base....end_of_tbaa_ref...actual_ref
1276 : : At one place in the access path there may be a reference to a zero-sized
1277 : : or trailing array.
1278 : :
1279 : : We generally discard the segment after end_of_tbaa_ref; however,
1280 : : we need to be careful in case it contains a zero-sized or trailing array.
1281 : : These may happen after a reference to a union, and in this case we must
1282 : : not disambiguate type punning scenarios.
1283 : :
1284 : : We set:
1285 : : base1 to point to base
1286 : :
1287 : : ref1 to point to end_of_tbaa_ref
1288 : :
1289 : : end_struct_ref1 to point to the trailing reference (if it exists
1290 : : in the range base....end_of_tbaa_ref)
1291 : :
1292 : : end_struct_past_end1 is true if this trailing reference occurs in
1293 : : end_of_tbaa_ref...actual_ref. */
1294 : 2152798 : base1 = ref1;
1295 : 4785807 : while (handled_component_p (base1))
1296 : : {
1297 : : /* Generally access paths are monotonic in the size of the object. The
1298 : : exception is trailing arrays of structures, i.e.
1299 : : struct a {int array[0];};
1300 : : or
1301 : : struct a {int array1[0]; int array[];};
1302 : : Such a struct has size 0 but accesses to a.array may have non-zero size.
1303 : : In this case the size of TREE_TYPE (base1) is smaller than the
1304 : : size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1305 : :
1306 : : Because we compare the sizes of arrays just by the sizes of their elements,
1307 : : we only need to care about zero-sized array fields here. */
1308 : 2633009 : if (component_ref_to_zero_sized_trailing_array_p (base1))
1309 : : {
1310 : 72158 : gcc_checking_assert (!end_struct_ref1);
1311 : : end_struct_ref1 = base1;
1312 : : }
1313 : 2633009 : if (ends_tbaa_access_path_p (base1))
1314 : : {
1315 : 21896 : ref1 = TREE_OPERAND (base1, 0);
1316 : 21896 : if (end_struct_ref1)
1317 : : {
1318 : 1 : end_struct_past_end1 = true;
1319 : 1 : end_struct_ref1 = NULL;
1320 : : }
1321 : : }
1322 : 2633009 : base1 = TREE_OPERAND (base1, 0);
1323 : : }
1324 : 2152798 : type1 = TREE_TYPE (base1);
1325 : 2152798 : base2 = ref2;
1326 : 4980369 : while (handled_component_p (base2))
1327 : : {
1328 : 2827571 : if (component_ref_to_zero_sized_trailing_array_p (base2))
1329 : : {
1330 : 2952 : gcc_checking_assert (!end_struct_ref2);
1331 : : end_struct_ref2 = base2;
1332 : : }
1333 : 2827571 : if (ends_tbaa_access_path_p (base2))
1334 : : {
1335 : 58891 : ref2 = TREE_OPERAND (base2, 0);
1336 : 58891 : if (end_struct_ref2)
1337 : : {
1338 : 48 : end_struct_past_end2 = true;
1339 : 48 : end_struct_ref2 = NULL;
1340 : : }
1341 : : }
1342 : 2827571 : base2 = TREE_OPERAND (base2, 0);
1343 : : }
1344 : 2152798 : type2 = TREE_TYPE (base2);
1345 : :
1346 : : /* Now search for the type1 in the access path of ref2. This
1347 : : would be a common base for doing offset based disambiguation on.
1348 : : This however only makes sense if type2 is big enough to hold type1. */
1349 : 2152798 : int cmp_outer = compare_type_sizes (type2, type1);
1350 : :
1351 : : /* If type2 is big enough to contain type1 walk its access path.
1352 : : We also need to care of arrays at the end of structs that may extend
1353 : : beyond the end of structure. If this occurs in the TBAA part of the
1354 : : access path, we need to consider the increased type as well. */
1355 : 2152798 : if (cmp_outer >= 0
1356 : 2152798 : || (end_struct_ref2
1357 : 1 : && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1358 : : {
1359 : 1084720 : int res = aliasing_component_refs_walk (ref1, type1, base1,
1360 : : offset1, max_size1,
1361 : : end_struct_ref1,
1362 : : ref2, base2, offset2, max_size2,
1363 : : &maybe_match);
1364 : 1084720 : if (res != -1)
1365 : 493480 : return res;
1366 : : }
1367 : :
1368 : : /* If we didn't find a common base, try the other way around. */
1369 : 1659318 : if (cmp_outer <= 0
1370 : 1659318 : || (end_struct_ref1
1371 : 60 : && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
1372 : : {
1373 : 1317050 : int res = aliasing_component_refs_walk (ref2, type2, base2,
1374 : : offset2, max_size2,
1375 : : end_struct_ref2,
1376 : : ref1, base1, offset1, max_size1,
1377 : : &maybe_match);
1378 : 1317050 : if (res != -1)
1379 : 307450 : return res;
1380 : : }
1381 : :
1382 : : /* In the following code we assume that the types in the access
1383 : : paths do not overlap and thus accesses alias only if one path can be
1384 : : a continuation of another. If we were not able to decide about equivalence,
1385 : : we need to give up. */
1386 : 1351868 : if (maybe_match)
1387 : : {
1388 : 383501 : if (!nonoverlapping_component_refs_p (ref1, ref2))
1389 : : {
1390 : 366352 : ++alias_stats.aliasing_component_refs_p_may_alias;
1391 : 366352 : return true;
1392 : : }
1393 : 17149 : ++alias_stats.aliasing_component_refs_p_no_alias;
1394 : 17149 : return false;
1395 : : }
1396 : :
1397 : 968367 : if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1398 : : ref1_alias_set,
1399 : : type2, end_struct_ref2,
1400 : : base2_alias_set)
1401 : 968367 : || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1402 : : ref2_alias_set,
1403 : : type1, end_struct_ref1,
1404 : : base1_alias_set))
1405 : : {
1406 : 138857 : ++alias_stats.aliasing_component_refs_p_may_alias;
1407 : 138857 : return true;
1408 : : }
1409 : 829510 : ++alias_stats.aliasing_component_refs_p_no_alias;
1410 : 829510 : return false;
1411 : : }
1412 : :
1413 : : /* FIELD1 and FIELD2 are two fields of component refs. We assume
1414 : : that bases of both component refs are either equivalent or nonoverlapping.
1415 : : We do not assume that the containers of FIELD1 and FIELD2 are of the
1416 : : same type or size.
1417 : :
1418 : : Return 0 if FIELD1 and FIELD2 have the same address whenever the base
1419 : : addresses of the component refs are the same. Note that FIELD1 and
1420 : : FIELD2 may not be of the same type or size.
1421 : :
1422 : : Return 1 if FIELD1 and FIELD2 are non-overlapping.
1423 : :
1424 : : Return -1 otherwise.
1425 : :
1426 : : The main difference between 0 and -1 is to let
1427 : : nonoverlapping_refs_since_match_p discover the semantically
1428 : : equivalent part of the access path.
1429 : :
1430 : : Note that this function is used even with -fno-strict-aliasing
1431 : : and makes use of no TBAA assumptions. */
1432 : :
1433 : : static int
1434 : 4604408 : nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1435 : : {
1436 : : /* If both fields are of the same type, we could save hard work of
1437 : : comparing offsets. */
1438 : 4604408 : tree type1 = DECL_CONTEXT (field1);
1439 : 4604408 : tree type2 = DECL_CONTEXT (field2);
1440 : :
1441 : 4604408 : if (TREE_CODE (type1) == RECORD_TYPE
1442 : 9078868 : && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1443 : : field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1444 : 4604408 : if (TREE_CODE (type2) == RECORD_TYPE
1445 : 9078868 : && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1446 : : field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1447 : :
1448 : : /* ??? Bitfields can overlap at RTL level so punt on them.
1449 : : FIXME: RTL expansion should be fixed by adjusting the access path
1450 : : when producing MEM_ATTRs for MEMs which are wider than
1451 : : the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1452 : 4604408 : if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1453 : : return -1;
1454 : :
1455 : : /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1456 : 4604408 : if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1457 : 4438562 : return field1 != field2;
1458 : :
1459 : : /* In the common case the offsets and bit offsets will be the same.
1460 : : However if frontends do not agree on the alignment, they may be
1461 : : different even if they actually represent the same address.
1462 : : Try the common case first and if that fails calculate the
1463 : : actual bit offset. */
1464 : 165846 : if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1465 : 165846 : DECL_FIELD_OFFSET (field2))
1466 : 289011 : && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1467 : 123165 : DECL_FIELD_BIT_OFFSET (field2)))
1468 : : return 0;
1469 : :
1470 : : /* Note that it may be possible to use component_ref_field_offset
1471 : : which would provide offsets as trees. However constructing and folding
1472 : : trees is expensive and does not seem to be worth the compile time
1473 : : cost. */
1474 : :
1475 : 42734 : poly_uint64 offset1, offset2;
1476 : 42734 : poly_uint64 bit_offset1, bit_offset2;
1477 : :
1478 : 42734 : if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1479 : 42734 : && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1480 : 42734 : && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1481 : 85468 : && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1482 : : {
1483 : 42734 : offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1484 : 42734 : offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1485 : :
1486 : 42734 : if (known_eq (offset1, offset2))
1487 : 40022 : return 0;
1488 : :
1489 : 42734 : poly_uint64 size1, size2;
1490 : :
1491 : 42734 : if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1492 : 42734 : && poly_int_tree_p (DECL_SIZE (field2), &size2)
1493 : 85468 : && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1494 : : return 1;
1495 : : }
1496 : : /* Resort to slower overlap checking by looking for matching types in
1497 : : the middle of access path. */
1498 : : return -1;
1499 : : }
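
/* Illustrative sketch, not part of the instrumented sources (the struct and
   function names are made up): the kind of field pair
   nonoverlapping_component_refs_p_1 above is asked about.  Given two
   COMPONENT_REFs whose bases are known to be equal or disjoint, distinct
   FIELD_DECLs of one RECORD_TYPE yield 1 (no overlap), the same FIELD_DECL
   yields 0 (same address), and a pair of bit-fields yields -1 (punt).  */

struct pair { int first; int second; };

int
example_disjoint_fields (struct pair *p)
{
  p->first = 1;
  p->second = 2;      /* different FIELD_DECLs of the same RECORD_TYPE */
  return p->first;    /* the store to 'second' cannot clobber 'first'  */
}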
1500 : :
1501 : : /* Return the low bound of an array ref. Do not produce new trees
1502 : : and thus do not care about the particular type of integer constants
1503 : : and placeholder exprs. */
1504 : :
1505 : : static tree
1506 : 15608085 : cheap_array_ref_low_bound (tree ref)
1507 : : {
1508 : 15608085 : tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1509 : :
1510 : : /* Avoid the expensive array_ref_low_bound.
1511 : : The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE of
1512 : : the domain type, or it is zero. */
1513 : 15608085 : if (TREE_OPERAND (ref, 2))
1514 : 103111 : return TREE_OPERAND (ref, 2);
1515 : 15504974 : else if (domain_type && TYPE_MIN_VALUE (domain_type))
1516 : 15498264 : return TYPE_MIN_VALUE (domain_type);
1517 : : else
1518 : 6710 : return integer_zero_node;
1519 : : }
1520 : :
1521 : : /* REF1 and REF2 are ARRAY_REFs which either have the same base address or
1522 : : are completely disjoint.
1523 : :
1524 : : Return 1 if the refs are non-overlapping.
1525 : : Return 0 if they are possibly overlapping but if so the overlap again
1526 : : starts on the same address.
1527 : : Return -1 otherwise. */
1528 : :
1529 : : int
1530 : 7784604 : nonoverlapping_array_refs_p (tree ref1, tree ref2)
1531 : : {
1532 : 7784604 : tree index1 = TREE_OPERAND (ref1, 1);
1533 : 7784604 : tree index2 = TREE_OPERAND (ref2, 1);
1534 : 7784604 : tree low_bound1 = cheap_array_ref_low_bound (ref1);
1535 : 7784604 : tree low_bound2 = cheap_array_ref_low_bound (ref2);
1536 : :
1537 : : /* Handle zero offsets first: we do not need to match type size in this
1538 : : case. */
1539 : 7784604 : if (operand_equal_p (index1, low_bound1, 0)
1540 : 7784604 : && operand_equal_p (index2, low_bound2, 0))
1541 : : return 0;
1542 : :
1543 : : /* If type sizes are different, give up.
1544 : :
1545 : : Avoid expensive array_ref_element_size.
1546 : : If operand 3 is present it denotes the size in alignment units.
1547 : : Otherwise the size is TYPE_SIZE of the element type.
1548 : : Handle only common cases where types are of the same "kind". */
1549 : 7709738 : if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1550 : : return -1;
1551 : :
1552 : 7709738 : tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1553 : 7709738 : tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1554 : :
1555 : 7709738 : if (TREE_OPERAND (ref1, 3))
1556 : : {
1557 : 4002 : if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1558 : 8004 : || !operand_equal_p (TREE_OPERAND (ref1, 3),
1559 : 4002 : TREE_OPERAND (ref2, 3), 0))
1560 : 646 : return -1;
1561 : : }
1562 : : else
1563 : : {
1564 : 7705736 : if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1565 : 7705736 : TYPE_SIZE_UNIT (elmt_type2), 0))
1566 : : return -1;
1567 : : }
1568 : :
1569 : : /* Since we know that type sizes are the same, there is no need to return
1570 : : -1 after this point. Partial overlap cannot be introduced. */
1571 : :
1572 : : /* We may need to fold trees in this case.
1573 : : TODO: Handle integer constant case at least. */
1574 : 7700491 : if (!operand_equal_p (low_bound1, low_bound2, 0))
1575 : : return 0;
1576 : :
1577 : 7700491 : if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1578 : : {
1579 : 353831 : if (tree_int_cst_equal (index1, index2))
1580 : : return 0;
1581 : : return 1;
1582 : : }
1583 : : /* TODO: We can use VRP to further disambiguate here. */
1584 : : return 0;
1585 : : }
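
/* Illustrative sketch, not part of the instrumented sources (names are made
   up).  Given ARRAY_REFs like the ones below, nonoverlapping_array_refs_p
   returns 1 for the pair a[2] / a[3] (same element size, distinct constant
   indices), 0 for a[2] / a[2] (same starting address), and 0 for
   a[2] / a[i] (they may overlap, but if so they start at the same address).  */

int a[10];

int
example_array_refs (int i)
{
  a[2] = 1;
  a[3] = 2;     /* disjoint from a[2]                      */
  a[i] = 3;     /* may be the same element as a[2] or a[3] */
  return a[2];
}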
1586 : :
1587 : : /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1588 : : MATCH2 either point to the same address or are disjoint.
1589 : : MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
1590 : : REF2 respectively, or NULL in case we established equivalence of the bases.
1591 : : If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1592 : : overlap by an exact multiple of their element size.
1593 : :
1594 : : This test works by matching the initial segment of the access path
1595 : : and does not rely on TBAA, thus it is safe for !flag_strict_aliasing if
1596 : : the match was determined without use of the TBAA oracle.
1597 : :
1598 : : Return 1 if we can determine that component references REF1 and REF2,
1599 : : that are within a common DECL, cannot overlap.
1600 : :
1601 : : Return 0 if the paths are the same and thus there is nothing more to
1602 : : disambiguate (i.e. there is must-alias assuming there is must-alias
1603 : : between MATCH1 and MATCH2).
1604 : :
1605 : : Return -1 if we cannot determine 0 or 1; this happens when non-matching
1606 : : types were met in the path.
1607 : : In this case it may make sense to continue with other disambiguation
1608 : : oracles. */
1609 : :
1610 : : static int
1611 : 8102781 : nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1612 : : tree match2, tree ref2,
1613 : : bool partial_overlap)
1614 : : {
1615 : 8102781 : int ntbaa1 = 0, ntbaa2 = 0;
1616 : : /* Return early if there are no references to match; we do not need
1617 : : to walk the access paths.
1618 : :
1619 : : Do not count this as may-alias in the stats - it is more useful
1620 : : to know how many disambiguations happened provided that
1621 : : the query was meaningful. */
1622 : :
1623 : 7410400 : if (match1 == ref1 || !handled_component_p (ref1)
1624 : 15511800 : || match2 == ref2 || !handled_component_p (ref2))
1625 : : return -1;
1626 : :
1627 : 7394703 : auto_vec<tree, 16> component_refs1;
1628 : 7394703 : auto_vec<tree, 16> component_refs2;
1629 : :
1630 : : /* Create the stack of handled components for REF1. */
1631 : 21072223 : while (handled_component_p (ref1) && ref1 != match1)
1632 : : {
1633 : : /* We use TBAA only to re-synchronize after mismatched refs. So we
1634 : : do not need to truncate the access path after the TBAA part ends. */
1635 : 13677520 : if (ends_tbaa_access_path_p (ref1))
1636 : : ntbaa1 = 0;
1637 : : else
1638 : 13468764 : ntbaa1++;
1639 : 13677520 : component_refs1.safe_push (ref1);
1640 : 13677520 : ref1 = TREE_OPERAND (ref1, 0);
1641 : : }
1642 : :
1643 : : /* Create the stack of handled components for REF2. */
1644 : 21564722 : while (handled_component_p (ref2) && ref2 != match2)
1645 : : {
1646 : 14170019 : if (ends_tbaa_access_path_p (ref2))
1647 : : ntbaa2 = 0;
1648 : : else
1649 : 13937595 : ntbaa2++;
1650 : 14170019 : component_refs2.safe_push (ref2);
1651 : 14170019 : ref2 = TREE_OPERAND (ref2, 0);
1652 : : }
1653 : :
1654 : 7394703 : if (!flag_strict_aliasing)
1655 : : {
1656 : 542476 : ntbaa1 = 0;
1657 : 542476 : ntbaa2 = 0;
1658 : : }
1659 : :
1660 : 7394703 : bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1661 : 7394703 : bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1662 : :
1663 : : /* If only one of the access paths starts with a MEM_REF, check that the
1664 : : offset is 0 so the address stays the same after stripping it.
1665 : : TODO: In this case we may walk the other access path until we get the
1666 : : same offset.
1667 : :
1668 : : If both start with a MEM_REF, the offsets have to be the same. */
1669 : 58083 : if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1670 : 7392283 : || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1671 : 14771900 : || (mem_ref1 && mem_ref2
1672 : 1786218 : && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1673 : 1786218 : TREE_OPERAND (ref2, 1))))
1674 : : {
1675 : 58383 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1676 : 58383 : return -1;
1677 : : }
1678 : :
1679 : : /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1680 : : to handle them here at all. */
1681 : 7336320 : gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1682 : : && TREE_CODE (ref2) != TARGET_MEM_REF);
1683 : :
1684 : : /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1685 : : rank. This is sufficient because we start from the same DECL and you
1686 : : cannot reference several fields at a time with COMPONENT_REFs (unlike
1687 : : with ARRAY_RANGE_REFs for arrays) so you always need the same number
1688 : : of them to access a sub-component, unless you're in a union, in which
1689 : : case the return value will precisely be false. */
1690 : 10296598 : while (true)
1691 : : {
1692 : : /* Track whether we have seen an unmatched ref with a non-zero offset.
1693 : : In this case we must look for partial overlaps. */
1694 : 10296598 : bool seen_unmatched_ref_p = false;
1695 : :
1696 : : /* First match ARRAY_REFs and try to disambiguate. */
1697 : 19849396 : if (!component_refs1.is_empty ()
1698 : 9980250 : && !component_refs2.is_empty ())
1699 : : {
1700 : 17700724 : unsigned int narray_refs1=0, narray_refs2=0;
1701 : :
1702 : : /* We generally assume that both access paths start with the same sequence
1703 : : of refs. However if the number of array refs is not in sync, try
1704 : : to recover and pop elts until the numbers match. This helps the case
1705 : : where one access path starts with an array and the other with an element. */
1706 : 17700724 : for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1707 : : narray_refs1++)
1708 : 12914301 : if (TREE_CODE (component_refs1 [component_refs1.length()
1709 : : - 1 - narray_refs1]) != ARRAY_REF)
1710 : : break;
1711 : :
1712 : 17699903 : for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1713 : : narray_refs2++)
1714 : 12927343 : if (TREE_CODE (component_refs2 [component_refs2.length()
1715 : : - 1 - narray_refs2]) != ARRAY_REF)
1716 : : break;
1717 : 9528583 : for (; narray_refs1 > narray_refs2; narray_refs1--)
1718 : : {
1719 : 19849 : ref1 = component_refs1.pop ();
1720 : 19849 : ntbaa1--;
1721 : :
1722 : : /* If index is non-zero we need to check whether the reference
1723 : : does not break the main invariant that bases are either
1724 : : disjoint or equal. Consider the example:
1725 : :
1726 : : unsigned char out[][1];
1727 : : out[1]="a";
1728 : : out[i][0];
1729 : :
1730 : : Here the bases of both refs are the same array OUT, but after removing
1731 : : the [i] index, this invariant no longer holds, because
1732 : : out[i] points to the middle of the array out.
1733 : :
1734 : : TODO: If the size of the type of the skipped reference is an integer
1735 : : multiple of the size of the type of the other reference this
1736 : : invariant can be verified, but even then it is not completely
1737 : : safe with !flag_strict_aliasing if the other reference contains
1738 : : unbounded array accesses.
1739 : : See */
1740 : :
1741 : 19849 : if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1742 : 19849 : cheap_array_ref_low_bound (ref1), 0))
1743 : : return 0;
1744 : : }
1745 : 9508853 : for (; narray_refs2 > narray_refs1; narray_refs2--)
1746 : : {
1747 : 19028 : ref2 = component_refs2.pop ();
1748 : 19028 : ntbaa2--;
1749 : 19028 : if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1750 : 19028 : cheap_array_ref_low_bound (ref2), 0))
1751 : : return 0;
1752 : : }
1753 : : /* Try to disambiguate matched arrays. */
1754 : 16995422 : for (unsigned int i = 0; i < narray_refs1; i++)
1755 : : {
1756 : 15569208 : int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1757 : 7784604 : component_refs2.pop ());
1758 : 7784604 : ntbaa1--;
1759 : 7784604 : ntbaa2--;
1760 : 7784604 : if (cmp == 1 && !partial_overlap)
1761 : : {
1762 : 269760 : ++alias_stats
1763 : 269760 : .nonoverlapping_refs_since_match_p_no_alias;
1764 : 269760 : return 1;
1765 : : }
1766 : 7514844 : if (cmp == -1)
1767 : : {
1768 : 9247 : seen_unmatched_ref_p = true;
1769 : : /* We cannot maintain the invariant that bases are either
1770 : : the same or completely disjoint. However we can still recover
1771 : : via type-based alias analysis if we reach references of the
1772 : : same sizes. We do not attempt to match array sizes, so
1773 : : just finish the array walk and look for component refs. */
1774 : 9247 : if (ntbaa1 < 0 || ntbaa2 < 0)
1775 : : {
1776 : 8099 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1777 : 8099 : return -1;
1778 : : }
1779 : 2087 : for (i++; i < narray_refs1; i++)
1780 : : {
1781 : 939 : component_refs1.pop ();
1782 : 939 : component_refs2.pop ();
1783 : 939 : ntbaa1--;
1784 : 939 : ntbaa2--;
1785 : : }
1786 : : break;
1787 : : }
1788 : 7505597 : partial_overlap = false;
1789 : : }
1790 : : }
1791 : :
1792 : : /* Next look for component_refs. */
1793 : 10050054 : do
1794 : : {
1795 : 10050054 : if (component_refs1.is_empty ())
1796 : : {
1797 : 5339501 : ++alias_stats
1798 : 5339501 : .nonoverlapping_refs_since_match_p_must_overlap;
1799 : 5339501 : return 0;
1800 : : }
1801 : 4710553 : ref1 = component_refs1.pop ();
1802 : 4710553 : ntbaa1--;
1803 : 4710553 : if (TREE_CODE (ref1) != COMPONENT_REF)
1804 : : {
1805 : 108433 : seen_unmatched_ref_p = true;
1806 : 108433 : if (ntbaa1 < 0 || ntbaa2 < 0)
1807 : : {
1808 : 38629 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1809 : 38629 : return -1;
1810 : : }
1811 : : }
1812 : : }
1813 : 9274044 : while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1814 : :
1815 : 4602120 : do
1816 : : {
1817 : 4602120 : if (component_refs2.is_empty ())
1818 : : {
1819 : 18418 : ++alias_stats
1820 : 18418 : .nonoverlapping_refs_since_match_p_must_overlap;
1821 : 18418 : return 0;
1822 : : }
1823 : 4583702 : ref2 = component_refs2.pop ();
1824 : 4583702 : ntbaa2--;
1825 : 4583702 : if (TREE_CODE (ref2) != COMPONENT_REF)
1826 : : {
1827 : 8 : if (ntbaa1 < 0 || ntbaa2 < 0)
1828 : : {
1829 : 8 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1830 : 8 : return -1;
1831 : : }
1832 : : seen_unmatched_ref_p = true;
1833 : : }
1834 : : }
1835 : 9167388 : while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1836 : :
1837 : : /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1838 : : earlier. */
1839 : 4583694 : gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1840 : : && TREE_CODE (ref2) == COMPONENT_REF);
1841 : :
1842 : 4583694 : tree field1 = TREE_OPERAND (ref1, 1);
1843 : 4583694 : tree field2 = TREE_OPERAND (ref2, 1);
1844 : :
1845 : : /* ??? We cannot simply use the type of operand #0 of the refs here
1846 : : as the Fortran compiler smuggles type punning into COMPONENT_REFs
1847 : : for common blocks instead of using unions like everyone else. */
1848 : 4583694 : tree type1 = DECL_CONTEXT (field1);
1849 : 4583694 : tree type2 = DECL_CONTEXT (field2);
1850 : :
1851 : 4583694 : partial_overlap = false;
1852 : :
1853 : : /* If we skipped array refs on types of different sizes, we can
1854 : : no longer be sure that there are no partial overlaps. */
1855 : 432 : if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1856 : 4584126 : && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1857 : : {
1858 : 0 : ++alias_stats
1859 : 0 : .nonoverlapping_refs_since_match_p_may_alias;
1860 : 0 : return -1;
1861 : : }
1862 : :
1863 : 4583694 : int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1864 : 4583694 : if (cmp == -1)
1865 : : {
1866 : 2712 : ++alias_stats
1867 : 2712 : .nonoverlapping_refs_since_match_p_may_alias;
1868 : 2712 : return -1;
1869 : : }
1870 : 4580982 : else if (cmp == 1)
1871 : : {
1872 : 1620704 : ++alias_stats
1873 : 1620704 : .nonoverlapping_refs_since_match_p_no_alias;
1874 : 1620704 : return 1;
1875 : : }
1876 : : }
1877 : 7394703 : }
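
/* Illustrative sketch, not part of the instrumented sources (names are made
   up): an access-path pair nonoverlapping_refs_since_match_p above can
   handle.  Both paths start from the same decl V, the leading ARRAY_REFs use
   the same symbolic index, and the trailing COMPONENT_REFs select different
   fields, so the refs can be proven disjoint even though the index is
   unknown.  */

struct elt { int lo; int hi; };
struct elt v[8];

int
example_matched_paths (int i)
{
  v[i].lo = 1;
  v[i].hi = 2;       /* same ARRAY_REF step, different trailing fields */
  return v[i].lo;    /* the store to v[i].hi cannot clobber this load  */
}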
1878 : :
1879 : : /* Return a TYPE_UID which can be used to match record types we consider
1880 : : the same for TBAA purposes. */
1881 : :
1882 : : static inline int
1883 : 118140 : ncr_type_uid (const_tree field)
1884 : : {
1885 : : /* ??? We cannot simply use the type of operand #0 of the refs here
1886 : : as the Fortran compiler smuggles type punning into COMPONENT_REFs
1887 : : for common blocks instead of using unions like everyone else. */
1888 : 118140 : tree type = DECL_FIELD_CONTEXT (field);
1889 : : /* With LTO, types considered the same by same_type_for_tbaa_p
1890 : : but coming from different translation units may not have the same
1891 : : main variant. They do, however, have the same TYPE_CANONICAL. */
1892 : 118140 : if (TYPE_CANONICAL (type))
1893 : 118140 : return TYPE_UID (TYPE_CANONICAL (type));
1894 : 0 : return TYPE_UID (type);
1895 : : }
1896 : :
1897 : : /* qsort compare function to sort FIELD_DECLs by the TYPE_UID of their
1898 : : DECL_FIELD_CONTEXT. */
1899 : :
1900 : : static inline int
1901 : 51603 : ncr_compar (const void *field1_, const void *field2_)
1902 : : {
1903 : 51603 : const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1904 : 51603 : const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1905 : 51603 : unsigned int uid1 = ncr_type_uid (field1);
1906 : 51603 : unsigned int uid2 = ncr_type_uid (field2);
1907 : :
1908 : 51603 : if (uid1 < uid2)
1909 : : return -1;
1910 : 20257 : else if (uid1 > uid2)
1911 : 20257 : return 1;
1912 : : return 0;
1913 : : }
1914 : :
1915 : : /* Return true if we can determine that the fields referenced cannot
1916 : : overlap for any pair of objects. This relies on TBAA. */
1917 : :
1918 : : static bool
1919 : 1090965 : nonoverlapping_component_refs_p (const_tree x, const_tree y)
1920 : : {
1921 : : /* Early return if we have nothing to do.
1922 : :
1923 : : Do not count this as may-alias in the stats - it is more useful
1924 : : to know how many disambiguations happened provided that
1925 : : the query was meaningful. */
1926 : 1090965 : if (!flag_strict_aliasing
1927 : 1090965 : || !x || !y
1928 : 156467 : || !handled_component_p (x)
1929 : 1090965 : || !handled_component_p (y))
1930 : : return false;
1931 : :
1932 : 117628 : auto_vec<const_tree, 16> fieldsx;
1933 : 369757 : while (handled_component_p (x))
1934 : : {
1935 : 252129 : if (TREE_CODE (x) == COMPONENT_REF)
1936 : : {
1937 : 141059 : tree field = TREE_OPERAND (x, 1);
1938 : 141059 : tree type = DECL_FIELD_CONTEXT (field);
1939 : 141059 : if (TREE_CODE (type) == RECORD_TYPE)
1940 : 140773 : fieldsx.safe_push (field);
1941 : : }
1942 : 111070 : else if (ends_tbaa_access_path_p (x))
1943 : 2481 : fieldsx.truncate (0);
1944 : 252129 : x = TREE_OPERAND (x, 0);
1945 : : }
1946 : 201637 : if (fieldsx.length () == 0)
1947 : : return false;
1948 : 84009 : auto_vec<const_tree, 16> fieldsy;
1949 : 198971 : while (handled_component_p (y))
1950 : : {
1951 : 114962 : if (TREE_CODE (y) == COMPONENT_REF)
1952 : : {
1953 : 31623 : tree field = TREE_OPERAND (y, 1);
1954 : 31623 : tree type = DECL_FIELD_CONTEXT (field);
1955 : 31623 : if (TREE_CODE (type) == RECORD_TYPE)
1956 : 31337 : fieldsy.safe_push (TREE_OPERAND (y, 1));
1957 : : }
1958 : 83339 : else if (ends_tbaa_access_path_p (y))
1959 : 115 : fieldsy.truncate (0);
1960 : 114962 : y = TREE_OPERAND (y, 0);
1961 : : }
1962 : 84009 : if (fieldsy.length () == 0)
1963 : : {
1964 : 59580 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1965 : 59580 : return false;
1966 : : }
1967 : :
1968 : : /* Most common case first. */
1969 : 24429 : if (fieldsx.length () == 1
1970 : 24429 : && fieldsy.length () == 1)
1971 : : {
1972 : 42880 : if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1973 : 21440 : DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1974 : 41964 : && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1975 : : {
1976 : 17149 : ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1977 : 17149 : return true;
1978 : : }
1979 : : else
1980 : : {
1981 : 4291 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1982 : 4291 : return false;
1983 : : }
1984 : : }
1985 : :
1986 : 2989 : if (fieldsx.length () == 2)
1987 : : {
1988 : 138 : if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1989 : 132 : std::swap (fieldsx[0], fieldsx[1]);
1990 : : }
1991 : : else
1992 : 2851 : fieldsx.qsort (ncr_compar);
1993 : :
1994 : 2989 : if (fieldsy.length () == 2)
1995 : : {
1996 : 174 : if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1997 : 110 : std::swap (fieldsy[0], fieldsy[1]);
1998 : : }
1999 : : else
2000 : 2815 : fieldsy.qsort (ncr_compar);
2001 : :
2002 : : unsigned i = 0, j = 0;
2003 : 7467 : do
2004 : : {
2005 : 7467 : const_tree fieldx = fieldsx[i];
2006 : 7467 : const_tree fieldy = fieldsy[j];
2007 : :
2008 : : /* We're left with accessing different fields of a structure,
2009 : : no possible overlap. */
2010 : 14934 : if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
2011 : 7467 : DECL_FIELD_CONTEXT (fieldy)) == 1
2012 : 7467 : && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
2013 : : {
2014 : 0 : ++alias_stats.nonoverlapping_component_refs_p_no_alias;
2015 : 0 : return true;
2016 : : }
2017 : :
2018 : 7467 : if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
2019 : : {
2020 : 2392 : i++;
2021 : 4784 : if (i == fieldsx.length ())
2022 : : break;
2023 : : }
2024 : : else
2025 : : {
2026 : 5075 : j++;
2027 : 10150 : if (j == fieldsy.length ())
2028 : : break;
2029 : : }
2030 : : }
2031 : : while (1);
2032 : :
2033 : 2989 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
2034 : 2989 : return false;
2035 : 201637 : }
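
/* Illustrative sketch, not part of the instrumented sources (names are made
   up): the style of TBAA reasoning nonoverlapping_component_refs_p above
   implements.  Whatever objects P and Q point to, both accesses go through
   'struct node', and the FIELD_DECLs 'key' and 'val' can never overlap
   within any object of that type, so the two references cannot overlap.
   Whether a given query actually reaches this routine depends on the
   cheaper disambiguators tried before it.  */

struct node { int key; int val; };

int
example_tbaa_fields (struct node *p, struct node *q)
{
  p->key = 1;
  q->val = 2;        /* even if p == q, 'val' cannot overlap 'key' */
  return p->key;
}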
2036 : :
2037 : :
2038 : : /* Return true if two memory references based on the variables BASE1
2039 : : and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2040 : : [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2041 : : if non-NULL are the complete memory reference trees. */
2042 : :
2043 : : static bool
2044 : 1319681845 : decl_refs_may_alias_p (tree ref1, tree base1,
2045 : : poly_int64 offset1, poly_int64 max_size1,
2046 : : poly_int64 size1,
2047 : : tree ref2, tree base2,
2048 : : poly_int64 offset2, poly_int64 max_size2,
2049 : : poly_int64 size2)
2050 : : {
2051 : 1319681845 : gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2052 : :
2053 : : /* If both references are based on different variables, they cannot alias. */
2054 : 1319681845 : if (compare_base_decls (base1, base2) == 0)
2055 : : return false;
2056 : :
2057 : : /* If both references are based on the same variable, they cannot alias if
2058 : : the accesses do not overlap. */
2059 : 180417486 : if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2060 : : return false;
2061 : :
2062 : : /* If there is must alias, there is no use disambiguating further. */
2063 : 56002585 : if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2064 : : return true;
2065 : :
2066 : : /* For components with variable position, the above test isn't sufficient,
2067 : : so we disambiguate component references manually. */
2068 : 8383222 : if (ref1 && ref2
2069 : 6477497 : && handled_component_p (ref1) && handled_component_p (ref2)
2070 : 14038507 : && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
2071 : : return false;
2072 : :
2073 : : return true;
2074 : : }
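
/* Illustrative sketch, not part of the instrumented sources (names are made
   up): decl-vs-decl queries of the kind decl_refs_may_alias_p above resolves.
   References rooted in different variables never alias; references rooted in
   the same variable do not alias when their offset ranges are disjoint.  */

int x, y;
int buf[16];

int
example_decl_refs (void)
{
  x = 1;
  y = 2;           /* different base decls: no alias with the store to x */
  buf[0] = 3;
  buf[5] = 4;      /* same decl, disjoint [offset, offset + size) ranges */
  return x + buf[0];
}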
2075 : :
2076 : : /* Return true if an access with BASE is view converted.
2077 : : The inner MEM_REF (&decl) must not have been stripped from BASE
2078 : : (that stripping is done by ao_ref_base), and thus one extra walk
2079 : : of handled components is needed. */
2080 : :
2081 : : bool
2082 : 1121768025 : view_converted_memref_p (tree base)
2083 : : {
2084 : 1121768025 : if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2085 : : return false;
2086 : 723817441 : return (same_type_for_tbaa (TREE_TYPE (base),
2087 : 723817441 : TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1))))
2088 : 723817441 : != 1);
2089 : : }
2090 : :
2091 : : /* Return true if an indirect reference based on *PTR1 constrained
2092 : : to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2093 : : constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2094 : : the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2095 : : in which case they are computed on-demand. REF1 and REF2
2096 : : if non-NULL are the complete memory reference trees. */
2097 : :
2098 : : static bool
2099 : 274840402 : indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2100 : : poly_int64 offset1, poly_int64 max_size1,
2101 : : poly_int64 size1,
2102 : : alias_set_type ref1_alias_set,
2103 : : alias_set_type base1_alias_set,
2104 : : tree ref2 ATTRIBUTE_UNUSED, tree base2,
2105 : : poly_int64 offset2, poly_int64 max_size2,
2106 : : poly_int64 size2,
2107 : : alias_set_type ref2_alias_set,
2108 : : alias_set_type base2_alias_set, bool tbaa_p)
2109 : : {
2110 : 274840402 : tree ptr1;
2111 : 274840402 : tree ptrtype1, dbase2;
2112 : :
2113 : 274840402 : gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2114 : : || TREE_CODE (base1) == TARGET_MEM_REF)
2115 : : && DECL_P (base2));
2116 : :
2117 : 274840402 : ptr1 = TREE_OPERAND (base1, 0);
2118 : 274840402 : poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2119 : :
2120 : : /* If only one reference is based on a variable, they cannot alias if
2121 : : the pointer access is beyond the extent of the variable access.
2122 : : (the pointer base cannot validly point to an offset less than zero
2123 : : of the variable).
2124 : : ??? IVOPTs creates bases that do not honor this restriction,
2125 : : so do not apply this optimization for TARGET_MEM_REFs. */
2126 : 274840402 : if (TREE_CODE (base1) != TARGET_MEM_REF
2127 : 274840402 : && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2128 : 67241748 : return false;
2129 : :
2130 : : /* If the pointer based access is bigger than the variable they cannot
2131 : : alias. This is similar to the check below where we use TBAA to
2132 : : increase the size of the pointer based access based on the dynamic
2133 : : type of a containing object we can infer from it. */
2134 : 207598654 : poly_int64 dsize2;
2135 : 207598654 : if (known_size_p (size1)
2136 : 193676898 : && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2137 : 367915117 : && known_lt (dsize2, size1))
2138 : : return false;
2139 : :
2140 : : /* They also cannot alias if the pointer may not point to the decl. */
2141 : 188292613 : if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2142 : : return false;
2143 : :
2144 : : /* Disambiguations that rely on strict aliasing rules follow. */
2145 : 30332425 : if (!flag_strict_aliasing || !tbaa_p)
2146 : : return true;
2147 : :
2148 : : /* If the alias set for a pointer access is zero all bets are off. */
2149 : 5545731 : if (base1_alias_set == 0 || base2_alias_set == 0)
2150 : : return true;
2151 : :
2152 : : /* When we are trying to disambiguate an access with a pointer dereference
2153 : : as base versus one with a decl as base we can use both the size
2154 : : of the decl and its dynamic type for extra disambiguation.
2155 : : ??? We do not know anything about the dynamic type of the decl
2156 : : other than that its alias-set contains base2_alias_set as a subset
2157 : : which does not help us here. */
2158 : : /* As we know nothing useful about the dynamic type of the decl just
2159 : : use the usual conflict check rather than a subset test.
2160 : : ??? We could introduce -fvery-strict-aliasing when the language
2161 : : does not allow decls to have a dynamic type that differs from their
2162 : : static type. Then we can check
2163 : : !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2164 : 1327528 : if (base1_alias_set != base2_alias_set
2165 : 1327528 : && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2166 : : return false;
2167 : :
2168 : 1156501 : ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2169 : :
2170 : : /* If the size of the access relevant for TBAA through the pointer
2171 : : is bigger than the size of the decl we can't possibly access the
2172 : : decl via that pointer. */
2173 : 1156501 : if (/* ??? This in turn may run afoul when a decl of type T which is
2174 : : a member of union type U is accessed through a pointer to
2175 : : type U and sizeof T is smaller than sizeof U. */
2176 : 1156501 : TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2177 : 1140305 : && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2178 : 2296806 : && compare_sizes (DECL_SIZE (base2),
2179 : 1140305 : TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2180 : : return false;
2181 : :
2182 : 1130697 : if (!ref2)
2183 : : return true;
2184 : :
2185 : : /* If the decl is accessed via a MEM_REF, reconstruct the base
2186 : : we can use for TBAA and an appropriately adjusted offset. */
2187 : : dbase2 = ref2;
2188 : 1800405 : while (handled_component_p (dbase2))
2189 : 732835 : dbase2 = TREE_OPERAND (dbase2, 0);
2190 : 1067570 : poly_int64 doffset1 = offset1;
2191 : 1067570 : poly_offset_int doffset2 = offset2;
2192 : 1067570 : if (TREE_CODE (dbase2) == MEM_REF
2193 : 1067570 : || TREE_CODE (dbase2) == TARGET_MEM_REF)
2194 : : {
2195 : 746342 : doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2196 : 373171 : tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2197 : : /* If second reference is view-converted, give up now. */
2198 : 373171 : if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2199 : : return true;
2200 : : }
2201 : :
2202 : : /* If first reference is view-converted, give up now. */
2203 : 979399 : if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2204 : : return true;
2205 : :
2206 : : /* If both references are through the same type, they do not alias
2207 : : if the accesses do not overlap. This does extra disambiguation
2208 : : for mixed/pointer accesses but requires strict aliasing.
2209 : : For MEM_REFs we require that the component-ref offset we computed
2210 : : is relative to the start of the type which we ensure by
2211 : : comparing rvalue and access type and disregarding the constant
2212 : : pointer offset.
2213 : :
2214 : : But avoid treating variable length arrays as "objects", instead assume they
2215 : : can overlap by an exact multiple of their element size.
2216 : : See gcc.dg/torture/alias-2.c. */
2217 : 891339 : if (((TREE_CODE (base1) != TARGET_MEM_REF
2218 : 151815 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2219 : 862482 : && (TREE_CODE (dbase2) != TARGET_MEM_REF
2220 : 22365 : || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2221 : 1731583 : && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2222 : : {
2223 : 298141 : bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2224 : 298141 : && (TYPE_SIZE (TREE_TYPE (base1))
2225 : 2268 : && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2226 : 580606 : != INTEGER_CST));
2227 : 298141 : if (!partial_overlap
2228 : 298141 : && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2229 : : return false;
2230 : 282465 : if (!ref1 || !ref2
2231 : : /* If there is must alias, there is no use disambiguating further. */
2232 : 282465 : || (!partial_overlap
2233 : 270877 : && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2234 : : return true;
2235 : 2777 : int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2236 : : partial_overlap);
2237 : 2777 : if (res == -1)
2238 : 2682 : return !nonoverlapping_component_refs_p (ref1, ref2);
2239 : 95 : return !res;
2240 : : }
2241 : :
2242 : : /* Do access-path based disambiguation. */
2243 : 593198 : if (ref1 && ref2
2244 : 983022 : && (handled_component_p (ref1) || handled_component_p (ref2)))
2245 : 426379 : return aliasing_component_refs_p (ref1,
2246 : : ref1_alias_set, base1_alias_set,
2247 : : offset1, max_size1,
2248 : : ref2,
2249 : : ref2_alias_set, base2_alias_set,
2250 : 426379 : offset2, max_size2);
2251 : :
2252 : : return true;
2253 : : }
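
/* Illustrative sketch, not part of the instrumented sources (names are made
   up): a pointer-vs-decl query of the kind indirect_ref_may_alias_decl_p
   above handles.  Under -fstrict-aliasing the alias set of 'float' does not
   conflict with that of the 'int' decl G, so the store through P cannot
   clobber G; the decl's size is also usable, e.g. an access wider than the
   decl can never land on it.  */

int g;

int
example_ptr_vs_decl (float *p)
{
  g = 1;
  *p = 2.0f;       /* cannot legally access the int object 'g' */
  return g;
}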
2254 : :
2255 : : /* Return true if two indirect references based on *PTR1
2256 : : and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2257 : : [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2258 : : the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2259 : : in which case they are computed on-demand. REF1 and REF2
2260 : : if non-NULL are the complete memory reference trees. */
2261 : :
2262 : : static bool
2263 : 82552488 : indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2264 : : poly_int64 offset1, poly_int64 max_size1,
2265 : : poly_int64 size1,
2266 : : alias_set_type ref1_alias_set,
2267 : : alias_set_type base1_alias_set,
2268 : : tree ref2 ATTRIBUTE_UNUSED, tree base2,
2269 : : poly_int64 offset2, poly_int64 max_size2,
2270 : : poly_int64 size2,
2271 : : alias_set_type ref2_alias_set,
2272 : : alias_set_type base2_alias_set, bool tbaa_p)
2273 : : {
2274 : 82552488 : tree ptr1;
2275 : 82552488 : tree ptr2;
2276 : 82552488 : tree ptrtype1, ptrtype2;
2277 : :
2278 : 82552488 : gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2279 : : || TREE_CODE (base1) == TARGET_MEM_REF)
2280 : : && (TREE_CODE (base2) == MEM_REF
2281 : : || TREE_CODE (base2) == TARGET_MEM_REF));
2282 : :
2283 : 82552488 : ptr1 = TREE_OPERAND (base1, 0);
2284 : 82552488 : ptr2 = TREE_OPERAND (base2, 0);
2285 : :
2286 : : /* If both bases are based on pointers they cannot alias if they may not
2287 : : point to the same memory object or if they point to the same object
2288 : : and the accesses do not overlap. */
2289 : 82552488 : if ((!cfun || gimple_in_ssa_p (cfun))
2290 : 51644968 : && operand_equal_p (ptr1, ptr2, 0)
2291 : 110600152 : && (((TREE_CODE (base1) != TARGET_MEM_REF
2292 : 858328 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2293 : 27970686 : && (TREE_CODE (base2) != TARGET_MEM_REF
2294 : 771688 : || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2295 : 91435 : || (TREE_CODE (base1) == TARGET_MEM_REF
2296 : 78162 : && TREE_CODE (base2) == TARGET_MEM_REF
2297 : 73333 : && (TMR_STEP (base1) == TMR_STEP (base2)
2298 : 9034 : || (TMR_STEP (base1) && TMR_STEP (base2)
2299 : 1910 : && operand_equal_p (TMR_STEP (base1),
2300 : 1910 : TMR_STEP (base2), 0)))
2301 : 64299 : && (TMR_INDEX (base1) == TMR_INDEX (base2)
2302 : 11920 : || (TMR_INDEX (base1) && TMR_INDEX (base2)
2303 : 10646 : && operand_equal_p (TMR_INDEX (base1),
2304 : 10646 : TMR_INDEX (base2), 0)))
2305 : 52379 : && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2306 : 0 : || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2307 : 0 : && operand_equal_p (TMR_INDEX2 (base1),
2308 : 0 : TMR_INDEX2 (base2), 0))))))
2309 : : {
2310 : 28008608 : poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2311 : 28008608 : poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2312 : 28008608 : if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2313 : 28008608 : offset2 + moff2, max_size2))
2314 : 27729008 : return false;
2315 : : /* If there is must alias, there is no use disambiguating further. */
2316 : 4789471 : if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2317 : : return true;
2318 : 1799927 : if (ref1 && ref2)
2319 : : {
2320 : 1527787 : int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2321 : : false);
2322 : 1527787 : if (res != -1)
2323 : 1520327 : return !res;
2324 : : }
2325 : : }
2326 : 54823480 : if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2327 : : return false;
2328 : :
2329 : : /* Disambiguations that rely on strict aliasing rules follow. */
2330 : 33747690 : if (!flag_strict_aliasing || !tbaa_p)
2331 : : return true;
2332 : :
2333 : 12753501 : ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2334 : 12753501 : ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2335 : :
2336 : : /* If the alias set for a pointer access is zero all bets are off. */
2337 : 12753501 : if (base1_alias_set == 0
2338 : 12753501 : || base2_alias_set == 0)
2339 : : return true;
2340 : :
2341 : : /* Do type-based disambiguation. */
2342 : 8777098 : if (base1_alias_set != base2_alias_set
2343 : 8777098 : && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2344 : : return false;
2345 : :
2346 : : /* If either reference is view-converted, give up now. */
2347 : 8155786 : if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2348 : 8155786 : || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2349 : 2517541 : return true;
2350 : :
2351 : : /* If both references are through the same type, they do not alias
2352 : : if the accesses do not overlap. This does extra disambiguation
2353 : : for mixed/pointer accesses but requires strict aliasing. */
2354 : 5638245 : if ((TREE_CODE (base1) != TARGET_MEM_REF
2355 : 955178 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2356 : 5362242 : && (TREE_CODE (base2) != TARGET_MEM_REF
2357 : 828734 : || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2358 : 10863537 : && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2359 : 5225292 : TREE_TYPE (ptrtype2)) == 1)
2360 : : {
2361 : : /* But avoid treating arrays as "objects", instead assume they
2362 : : can overlap by an exact multiple of their element size.
2363 : : See gcc.dg/torture/alias-2.c. */
2364 : 3339627 : bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2365 : :
2366 : 3339627 : if (!partial_overlap
2367 : 3339627 : && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2368 : : return false;
2369 : 3025626 : if (!ref1 || !ref2
2370 : 3025626 : || (!partial_overlap
2371 : 2474726 : && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2372 : : return true;
2373 : 182834 : int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2374 : : partial_overlap);
2375 : 182834 : if (res == -1)
2376 : 61985 : return !nonoverlapping_component_refs_p (ref1, ref2);
2377 : 120849 : return !res;
2378 : : }
2379 : :
2380 : : /* Do access-path based disambiguation. */
2381 : 2298618 : if (ref1 && ref2
2382 : 3230205 : && (handled_component_p (ref1) || handled_component_p (ref2)))
2383 : 1726419 : return aliasing_component_refs_p (ref1,
2384 : : ref1_alias_set, base1_alias_set,
2385 : : offset1, max_size1,
2386 : : ref2,
2387 : : ref2_alias_set, base2_alias_set,
2388 : 1726419 : offset2, max_size2);
2389 : :
2390 : : return true;
2391 : : }
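
/* Illustrative sketch, not part of the instrumented sources (names are made
   up): pointer-vs-pointer queries of the kind indirect_refs_may_alias_p
   above handles.  The first pair of stores goes through alias sets that do
   not conflict, so with -fstrict-aliasing they are independent; the second
   pair uses the same base pointer with disjoint constant offsets, so the
   offset-range check alone is enough.  */

struct two { int a; int b; };

int
example_ptr_vs_ptr (int *p, float *q, struct two *r)
{
  *p = 1;
  *q = 2.0f;       /* int and float alias sets do not conflict    */
  r->a = 3;
  r->b = 4;        /* same base pointer, disjoint constant offsets */
  return *p + r->a;
}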
2392 : :
2393 : : /* Return true, if the two memory references REF1 and REF2 may alias. */
2394 : :
2395 : : static bool
2396 : 1747050516 : refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2397 : : {
2398 : 1747050516 : tree base1, base2;
2399 : 1747050516 : poly_int64 offset1 = 0, offset2 = 0;
2400 : 1747050516 : poly_int64 max_size1 = -1, max_size2 = -1;
2401 : 1747050516 : bool var1_p, var2_p, ind1_p, ind2_p;
2402 : :
2403 : 1747050516 : gcc_checking_assert ((!ref1->ref
2404 : : || TREE_CODE (ref1->ref) == SSA_NAME
2405 : : || DECL_P (ref1->ref)
2406 : : || TREE_CODE (ref1->ref) == STRING_CST
2407 : : || handled_component_p (ref1->ref)
2408 : : || TREE_CODE (ref1->ref) == MEM_REF
2409 : : || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2410 : : || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2411 : : && (!ref2->ref
2412 : : || TREE_CODE (ref2->ref) == SSA_NAME
2413 : : || DECL_P (ref2->ref)
2414 : : || TREE_CODE (ref2->ref) == STRING_CST
2415 : : || handled_component_p (ref2->ref)
2416 : : || TREE_CODE (ref2->ref) == MEM_REF
2417 : : || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2418 : : || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2419 : :
2420 : : /* Decompose the references into their base objects and the access. */
2421 : 1747050516 : base1 = ao_ref_base (ref1);
2422 : 1747050516 : offset1 = ref1->offset;
2423 : 1747050516 : max_size1 = ref1->max_size;
2424 : 1747050516 : base2 = ao_ref_base (ref2);
2425 : 1747050516 : offset2 = ref2->offset;
2426 : 1747050516 : max_size2 = ref2->max_size;
2427 : :
2428 : : /* We can end up with registers or constants as bases for example from
2429 : : *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2430 : : which is seen as a struct copy. */
2431 : 1747050516 : if (TREE_CODE (base1) == SSA_NAME
2432 : 1747049369 : || TREE_CODE (base1) == CONST_DECL
2433 : 1745177434 : || TREE_CODE (base1) == CONSTRUCTOR
2434 : 1745177434 : || TREE_CODE (base1) == ADDR_EXPR
2435 : 1745177434 : || CONSTANT_CLASS_P (base1)
2436 : 1739246715 : || TREE_CODE (base2) == SSA_NAME
2437 : 1739246715 : || TREE_CODE (base2) == CONST_DECL
2438 : 1739152709 : || TREE_CODE (base2) == CONSTRUCTOR
2439 : 1739152709 : || TREE_CODE (base2) == ADDR_EXPR
2440 : 1739152709 : || CONSTANT_CLASS_P (base2))
2441 : : return false;
2442 : :
2443 : : /* Two volatile accesses always conflict. */
2444 : 1739110202 : if (ref1->volatile_p
2445 : 5697401 : && ref2->volatile_p)
2446 : : return true;
2447 : :
2448 : : /* refN->ref may convey size information, do not confuse our workers
2449 : : with that but strip it - ao_ref_base took it into account already. */
2450 : 1735559359 : tree ref1ref = ref1->ref;
2451 : 1735559359 : if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2452 : 162 : ref1ref = TREE_OPERAND (ref1ref, 0);
2453 : 1735559359 : tree ref2ref = ref2->ref;
2454 : 1735559359 : if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2455 : 0 : ref2ref = TREE_OPERAND (ref2ref, 0);
2456 : :
2457 : : /* Defer to simple offset based disambiguation if we have
2458 : : references based on two decls. Do this before deferring to
2459 : : TBAA to handle must-alias cases in conformance with the
2460 : : GCC extension of allowing type-punning through unions. */
2461 : 1735559359 : var1_p = DECL_P (base1);
2462 : 1735559359 : var2_p = DECL_P (base2);
2463 : 1735559359 : if (var1_p && var2_p)
2464 : 1319681845 : return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2465 : : ref1->size,
2466 : : ref2ref, base2, offset2, max_size2,
2467 : 1319681845 : ref2->size);
2468 : :
2469 : : /* We can end up referring to code via function and label decls.
2470 : : As we likely do not properly track code aliases, conservatively
2471 : : bail out. */
2472 : 415877514 : if (TREE_CODE (base1) == FUNCTION_DECL
2473 : 415877514 : || TREE_CODE (base1) == LABEL_DECL
2474 : 414989367 : || TREE_CODE (base2) == FUNCTION_DECL
2475 : 414977578 : || TREE_CODE (base2) == LABEL_DECL)
2476 : : return true;
2477 : :
2478 : : /* Handle restrict based accesses.
2479 : : ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2480 : : here. */
2481 : 414977578 : tree rbase1 = base1;
2482 : 414977578 : tree rbase2 = base2;
2483 : 414977578 : if (var1_p)
2484 : : {
2485 : 209327650 : rbase1 = ref1ref;
2486 : 209327650 : if (rbase1)
2487 : 285016729 : while (handled_component_p (rbase1))
2488 : 109888406 : rbase1 = TREE_OPERAND (rbase1, 0);
2489 : : }
2490 : 414977578 : if (var2_p)
2491 : : {
2492 : 101295436 : rbase2 = ref2ref;
2493 : 101295436 : if (rbase2)
2494 : 172908068 : while (handled_component_p (rbase2))
2495 : 78180284 : rbase2 = TREE_OPERAND (rbase2, 0);
2496 : : }
2497 : 414977578 : if (rbase1 && rbase2
2498 : 374210599 : && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2499 : 221852211 : && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2500 : : /* If the accesses are in the same restrict clique... */
2501 : 152411485 : && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2502 : : /* But based on different pointers they do not alias. */
2503 : 540436246 : && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2504 : : return false;
2505 : :
2506 : 399379791 : ind1_p = (TREE_CODE (base1) == MEM_REF
2507 : 399379791 : || TREE_CODE (base1) == TARGET_MEM_REF);
2508 : 399379791 : ind2_p = (TREE_CODE (base2) == MEM_REF
2509 : 399379791 : || TREE_CODE (base2) == TARGET_MEM_REF);
2510 : :
2511 : : /* Canonicalize the pointer-vs-decl case. */
2512 : 399379791 : if (ind1_p && var2_p)
2513 : : {
2514 : 99837157 : std::swap (offset1, offset2);
2515 : 99837157 : std::swap (max_size1, max_size2);
2516 : 99837157 : std::swap (base1, base2);
2517 : 99837157 : std::swap (ref1, ref2);
2518 : 99837157 : std::swap (ref1ref, ref2ref);
2519 : 99837157 : var1_p = true;
2520 : 99837157 : ind1_p = false;
2521 : 99837157 : var2_p = false;
2522 : 99837157 : ind2_p = true;
2523 : : }
2524 : :
2525 : : /* First defer to TBAA if possible. */
2526 : 399379791 : if (tbaa_p
2527 : 175927962 : && flag_strict_aliasing
2528 : 511103690 : && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2529 : : ao_ref_alias_set (ref2)))
2530 : : return false;
2531 : :
2532 : : /* If the reference is based on a pointer that points to memory
2533 : : that may not be written to then the other reference cannot possibly
2534 : : clobber it. */
2535 : 357911694 : if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2536 : 356869524 : && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2537 : 714392964 : || (ind1_p
2538 : 82683038 : && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2539 : 82520477 : && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2540 : : return false;
2541 : :
2542 : : /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2543 : 357392890 : if (var1_p && ind2_p)
2544 : 274840402 : return indirect_ref_may_alias_decl_p (ref2ref, base2,
2545 : : offset2, max_size2, ref2->size,
2546 : : ao_ref_alias_set (ref2),
2547 : : ao_ref_base_alias_set (ref2),
2548 : : ref1ref, base1,
2549 : : offset1, max_size1, ref1->size,
2550 : : ao_ref_alias_set (ref1),
2551 : : ao_ref_base_alias_set (ref1),
2552 : 274840402 : tbaa_p);
2553 : 82552488 : else if (ind1_p && ind2_p)
2554 : 82552488 : return indirect_refs_may_alias_p (ref1ref, base1,
2555 : : offset1, max_size1, ref1->size,
2556 : : ao_ref_alias_set (ref1),
2557 : : ao_ref_base_alias_set (ref1),
2558 : : ref2ref, base2,
2559 : : offset2, max_size2, ref2->size,
2560 : : ao_ref_alias_set (ref2),
2561 : : ao_ref_base_alias_set (ref2),
2562 : 82552488 : tbaa_p);
2563 : :
2564 : 0 : gcc_unreachable ();
2565 : : }
2566 : :
2567 : : /* Return true, if the two memory references REF1 and REF2 may alias
2568 : : and update statistics. */
2569 : :
2570 : : bool
2571 : 1747050516 : refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2572 : : {
2573 : 1747050516 : bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2574 : 1747050516 : if (res)
2575 : 125091575 : ++alias_stats.refs_may_alias_p_may_alias;
2576 : : else
2577 : 1621958941 : ++alias_stats.refs_may_alias_p_no_alias;
2578 : 1747050516 : return res;
2579 : : }
2580 : :
2581 : : static bool
2582 : 47058602 : refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2583 : : {
2584 : 47058602 : ao_ref r1;
2585 : 47058602 : ao_ref_init (&r1, ref1);
2586 : 47058602 : return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2587 : : }
2588 : :
2589 : : bool
2590 : 1078024 : refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2591 : : {
2592 : 1078024 : ao_ref r1, r2;
2593 : 1078024 : ao_ref_init (&r1, ref1);
2594 : 1078024 : ao_ref_init (&r2, ref2);
2595 : 1078024 : return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2596 : : }
2597 : :
2598 : : /* Returns true if there is an anti-dependence for the STORE that
2599 : : executes after the LOAD. */
2600 : :
2601 : : bool
2602 : 464067 : refs_anti_dependent_p (tree load, tree store)
2603 : : {
2604 : 464067 : ao_ref r1, r2;
2605 : 464067 : ao_ref_init (&r1, load);
2606 : 464067 : ao_ref_init (&r2, store);
2607 : 464067 : return refs_may_alias_p_1 (&r1, &r2, false);
2608 : : }
2609 : :
2610 : : /* Returns true if there is an output dependence for the stores
2611 : : STORE1 and STORE2. */
2612 : :
2613 : : bool
2614 : 2798885 : refs_output_dependent_p (tree store1, tree store2)
2615 : : {
2616 : 2798885 : ao_ref r1, r2;
2617 : 2798885 : ao_ref_init (&r1, store1);
2618 : 2798885 : ao_ref_init (&r2, store2);
2619 : 2798885 : return refs_may_alias_p_1 (&r1, &r2, false);
2620 : : }
2621 : :
2622 : : /* Returns true if and only if REF may alias any access stored in TT.
2623 : : If TBAA_P is true, use the TBAA oracle. */
2624 : :
2625 : : static bool
2626 : 36869781 : modref_may_conflict (const gcall *stmt,
2627 : : modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2628 : : {
2629 : 36869781 : alias_set_type base_set, ref_set;
2630 : 36869781 : bool global_memory_ok = false;
2631 : :
2632 : 36869781 : if (tt->every_base)
2633 : : return true;
2634 : :
2635 : 6523100 : if (!dbg_cnt (ipa_mod_ref))
2636 : : return true;
2637 : :
2638 : 6523100 : base_set = ao_ref_base_alias_set (ref);
2639 : :
2640 : 6523100 : ref_set = ao_ref_alias_set (ref);
2641 : :
2642 : 6523100 : int num_tests = 0, max_tests = param_modref_max_tests;
2643 : 26796647 : for (auto base_node : tt->bases)
2644 : : {
2645 : 10436906 : if (tbaa_p && flag_strict_aliasing)
2646 : : {
2647 : 8376589 : if (num_tests >= max_tests)
2648 : : return true;
2649 : 8376589 : alias_stats.modref_tests++;
2650 : 8376589 : if (!alias_sets_conflict_p (base_set, base_node->base))
2651 : 2848726 : continue;
2652 : 5527863 : num_tests++;
2653 : : }
2654 : :
2655 : 7588180 : if (base_node->every_ref)
2656 : : return true;
2657 : :
2658 : 28727678 : for (auto ref_node : base_node->refs)
2659 : : {
2660 : : /* Do not repeat the same test as before. */
2661 : 8797835 : if ((ref_set != base_set || base_node->base != ref_node->ref)
2662 : 5240876 : && tbaa_p && flag_strict_aliasing)
2663 : : {
2664 : 3664524 : if (num_tests >= max_tests)
2665 : : return true;
2666 : 3633252 : alias_stats.modref_tests++;
2667 : 3633252 : if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2668 : 987942 : continue;
2669 : 2645310 : num_tests++;
2670 : : }
2671 : :
2672 : 7778621 : if (ref_node->every_access)
2673 : : return true;
2674 : :
2675 : : /* TBAA checks did not disambiguate, try individual accesses. */
2676 : 24820336 : for (auto access_node : ref_node->accesses)
2677 : : {
2678 : 7107176 : if (num_tests >= max_tests)
2679 : 1385182 : return true;
2680 : :
2681 : 7107176 : if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2682 : : {
2683 : 1224842 : if (global_memory_ok)
2684 : 682003 : continue;
2685 : 1224842 : if (ref_may_alias_global_p (ref, true))
2686 : : return true;
2687 : 657464 : global_memory_ok = true;
2688 : 657464 : num_tests++;
2689 : 657464 : continue;
2690 : : }
2691 : :
2692 : 5882334 : tree arg = access_node.get_call_arg (stmt);
2693 : 5882334 : if (!arg)
2694 : : return true;
2695 : :
2696 : 5881316 : alias_stats.modref_baseptr_tests++;
2697 : :
2698 : 5881316 : if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2699 : 24539 : continue;
2700 : :
2701 : : /* PTA oracle will be unhapy of arg is not an pointer. */
2702 : 5856777 : if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2703 : : return true;
2704 : :
2705 : : /* If we don't have a base pointer, give up. */
2706 : 5856777 : if (!ref->ref && !ref->base)
2707 : 0 : continue;
2708 : :
2709 : 5856777 : ao_ref ref2;
2710 : 5856777 : if (access_node.get_ao_ref (stmt, &ref2))
2711 : : {
2712 : 4166130 : ref2.ref_alias_set = ref_node->ref;
2713 : 4166130 : ref2.base_alias_set = base_node->base;
2714 : 4166130 : if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2715 : : return true;
2716 : : }
2717 : 1690647 : else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2718 : : return true;
2719 : :
2720 : 5039991 : num_tests++;
2721 : : }
2722 : : }
2723 : : }
2724 : : return false;
2725 : : }
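
/* Illustrative sketch, not part of the instrumented sources (names are made
   up, and the outcome assumes an ipa-modref summary was computed for
   'store_to' and the call is not inlined): the kind of call
   modref_may_conflict above can disambiguate.  The summary records that
   'store_to' only writes through its first parameter, and the argument here
   cannot point to the global G, so the call does not clobber G.  */

int g;

static void __attribute__ ((noinline))
store_to (int *p)
{
  *p = 5;              /* summary: stores only through parameter 0 */
}

int
example_modref (void)
{
  int local = 0;
  g = 1;
  store_to (&local);   /* &local can never equal &g                */
  return g + local;    /* the call does not clobber the store to G */
}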
2726 : :
2727 : : /* Check if REF conflicts with a call using the "fn spec" attribute.
2728 : : If CLOBBER is true we are checking for writes, otherwise we check loads.
2729 : :
2730 : : Return 0 if there are no conflicts (except for possible function call
2731 : : argument reads), 1 if there are conflicts, and -1 if we cannot decide
2732 : : by fn spec. */
2733 : :
2734 : : static int
2735 : 132201483 : check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2736 : : {
2737 : 132201483 : attr_fnspec fnspec = gimple_call_fnspec (call);
2738 : 132201483 : if (fnspec.known_p ())
2739 : : {
2740 : 67445356 : if (clobber
2741 : 67445356 : ? !fnspec.global_memory_written_p ()
2742 : 7383593 : : !fnspec.global_memory_read_p ())
2743 : : {
2744 : 79451699 : for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2745 : 77234470 : if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2746 : 54988466 : && (!fnspec.arg_specified_p (i)
2747 : 32687537 : || (clobber ? fnspec.arg_maybe_written_p (i)
2748 : 2736184 : : fnspec.arg_maybe_read_p (i))))
2749 : : {
2750 : 20081659 : ao_ref dref;
2751 : 20081659 : tree size = NULL_TREE;
2752 : 20081659 : unsigned int size_arg;
2753 : :
2754 : 20081659 : if (!fnspec.arg_specified_p (i))
2755 : : ;
2756 : 20078691 : else if (fnspec.arg_max_access_size_given_by_arg_p
2757 : 20078691 : (i, &size_arg))
2758 : 13839095 : size = gimple_call_arg (call, size_arg);
2759 : 6239596 : else if (fnspec.arg_access_size_given_by_type_p (i))
2760 : : {
2761 : 32569 : tree callee = gimple_call_fndecl (call);
2762 : 32569 : tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2763 : :
2764 : 66132 : for (unsigned int p = 0; p < i; p++)
2765 : 33563 : t = TREE_CHAIN (t);
2766 : 32569 : size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2767 : : }
2768 : 13871664 : poly_int64 size_hwi;
2769 : 13871664 : if (size
2770 : 13871664 : && poly_int_tree_p (size, &size_hwi)
2771 : 25722430 : && coeffs_in_range_p (size_hwi, 0,
2772 : : HOST_WIDE_INT_MAX / BITS_PER_UNIT))
2773 : : {
2774 : 11849344 : size_hwi = size_hwi * BITS_PER_UNIT;
2775 : 11849344 : ao_ref_init_from_ptr_and_range (&dref,
2776 : : gimple_call_arg (call, i),
2777 : 11849344 : true, 0, -1, size_hwi);
2778 : : }
2779 : : else
2780 : 8232315 : ao_ref_init_from_ptr_and_range (&dref,
2781 : : gimple_call_arg (call, i),
2782 : 8232315 : false, 0, -1, -1);
2783 : 20081659 : if (refs_may_alias_p_1 (&dref, ref, false))
2784 : 3901899 : return 1;
2785 : : }
2786 : 24515190 : if (clobber
2787 : 21699504 : && fnspec.errno_maybe_written_p ()
2788 : 6095380 : && flag_errno_math
2789 : 25424394 : && targetm.ref_may_alias_errno (ref))
2790 : : return 1;
2791 : 24502084 : return 0;
2792 : : }
2793 : : }
2794 : :
2795 : : /* FIXME: we should handle barriers more consistently, but for now leave the
2796 : : check here. */
2797 : 103784394 : if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2798 : 5635253 : switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2799 : : {
2800 : : /* __sync_* builtins and some OpenMP builtins act as threading
2801 : : barriers. */
2802 : : #undef DEF_SYNC_BUILTIN
2803 : : #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2804 : : #include "sync-builtins.def"
2805 : : #undef DEF_SYNC_BUILTIN
2806 : : case BUILT_IN_GOMP_ATOMIC_START:
2807 : : case BUILT_IN_GOMP_ATOMIC_END:
2808 : : case BUILT_IN_GOMP_BARRIER:
2809 : : case BUILT_IN_GOMP_BARRIER_CANCEL:
2810 : : case BUILT_IN_GOMP_TASKWAIT:
2811 : : case BUILT_IN_GOMP_TASKGROUP_END:
2812 : : case BUILT_IN_GOMP_CRITICAL_START:
2813 : : case BUILT_IN_GOMP_CRITICAL_END:
2814 : : case BUILT_IN_GOMP_CRITICAL_NAME_START:
2815 : : case BUILT_IN_GOMP_CRITICAL_NAME_END:
2816 : : case BUILT_IN_GOMP_LOOP_END:
2817 : : case BUILT_IN_GOMP_LOOP_END_CANCEL:
2818 : : case BUILT_IN_GOMP_ORDERED_START:
2819 : : case BUILT_IN_GOMP_ORDERED_END:
2820 : : case BUILT_IN_GOMP_SECTIONS_END:
2821 : : case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2822 : : case BUILT_IN_GOMP_SINGLE_COPY_START:
2823 : : case BUILT_IN_GOMP_SINGLE_COPY_END:
2824 : : return 1;
2825 : :
2826 : : default:
2827 : : return -1;
2828 : : }
2829 : : return -1;
2830 : : }
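/* Editorial note (illustrative, not part of the original source): as a
   worked example of the fnspec handling above, consider a call whose
   fnspec marks a pointer argument as possibly written and gives its
   maximum access size by another argument (the
   arg_max_access_size_given_by_arg_p case), roughly the way a
   memset-like builtin is described.  For such a call the loop above
   builds an ao_ref starting at that pointer argument, bounded by the
   size argument when it is a suitable compile-time constant, and hands
   it to refs_may_alias_p_1; only when that oracle reports a possible
   overlap (or when errno handling applies) does check_fnspec return 1,
   otherwise it returns 0 without consulting the more general oracles
   below.  */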
2831 : :
2832 : : /* If the call CALL may use the memory reference REF return true,
2833 : : otherwise return false. */
2834 : :
2835 : : static bool
2836 : 34733408 : ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2837 : : {
2838 : 34733408 : tree base, callee;
2839 : 34733408 : unsigned i;
2840 : 34733408 : int flags = gimple_call_flags (call);
2841 : :
2842 : 34733408 : if (flags & (ECF_CONST|ECF_NOVOPS))
2843 : 46195 : goto process_args;
2844 : :
2845 : : /* A call that is not without side-effects might involve volatile
2846 : : accesses and thus conflict with all other volatile accesses. */
2847 : 34687213 : if (ref->volatile_p)
2848 : : return true;
2849 : :
2850 : 34687007 : if (gimple_call_internal_p (call))
2851 : 42736 : switch (gimple_call_internal_fn (call))
2852 : : {
2853 : : case IFN_MASK_STORE:
2854 : : case IFN_SCATTER_STORE:
2855 : : case IFN_MASK_SCATTER_STORE:
2856 : : case IFN_LEN_STORE:
2857 : : case IFN_MASK_LEN_STORE:
2858 : : return false;
2859 : 0 : case IFN_MASK_STORE_LANES:
2860 : 0 : case IFN_MASK_LEN_STORE_LANES:
2861 : 0 : goto process_args;
2862 : 600 : case IFN_MASK_LOAD:
2863 : 600 : case IFN_LEN_LOAD:
2864 : 600 : case IFN_MASK_LEN_LOAD:
2865 : 600 : case IFN_MASK_LOAD_LANES:
2866 : 600 : case IFN_MASK_LEN_LOAD_LANES:
2867 : 600 : {
2868 : 600 : ao_ref rhs_ref;
2869 : 600 : tree lhs = gimple_call_lhs (call);
2870 : 600 : if (lhs)
2871 : : {
2872 : 600 : ao_ref_init_from_ptr_and_size (&rhs_ref,
2873 : : gimple_call_arg (call, 0),
2874 : 600 : TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
2875 : : /* We cannot make this a known-size access since otherwise
2876 : : we disambiguate against refs to decls that are smaller. */
2877 : 600 : rhs_ref.size = -1;
2878 : 1200 : rhs_ref.ref_alias_set = rhs_ref.base_alias_set
2879 : 1200 : = tbaa_p ? get_deref_alias_set (TREE_TYPE
2880 : : (gimple_call_arg (call, 1))) : 0;
2881 : 600 : return refs_may_alias_p_1 (ref, &rhs_ref, tbaa_p);
2882 : : }
2883 : 0 : break;
2884 : : }
2885 : : default:;
2886 : : }
2887 : :
2888 : 34684844 : callee = gimple_call_fndecl (call);
2889 : 34684844 : if (callee != NULL_TREE)
2890 : : {
2891 : 33515386 : struct cgraph_node *node = cgraph_node::get (callee);
2892 : : /* We cannot safely optimize based on the summary of the callee if it
2893 : : does not always bind to the current def: it is possible that a memory
2894 : : load was optimized out earlier and the interposed variant may not be
2895 : : optimized this way. */
2896 : 33515386 : if (node && node->binds_to_current_def_p ())
2897 : : {
2898 : 4166086 : modref_summary *summary = get_modref_function_summary (node);
2899 : 4166086 : if (summary && !summary->calls_interposable)
2900 : : {
2901 : 2386159 : if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2902 : : {
2903 : 482193 : alias_stats.modref_use_no_alias++;
2904 : 482193 : if (dump_file && (dump_flags & TDF_DETAILS))
2905 : : {
2906 : 24 : fprintf (dump_file,
2907 : : "ipa-modref: call stmt ");
2908 : 24 : print_gimple_stmt (dump_file, call, 0);
2909 : 24 : fprintf (dump_file,
2910 : : "ipa-modref: call to %s does not use ",
2911 : : node->dump_name ());
2912 : 24 : if (!ref->ref && ref->base)
2913 : : {
2914 : 3 : fprintf (dump_file, "base: ");
2915 : 3 : print_generic_expr (dump_file, ref->base);
2916 : : }
2917 : 21 : else if (ref->ref)
2918 : : {
2919 : 21 : fprintf (dump_file, "ref: ");
2920 : 21 : print_generic_expr (dump_file, ref->ref);
2921 : : }
2922 : 24 : fprintf (dump_file, " alias sets: %i->%i\n",
2923 : : ao_ref_base_alias_set (ref),
2924 : : ao_ref_alias_set (ref));
2925 : : }
2926 : 482193 : goto process_args;
2927 : : }
2928 : 1903966 : alias_stats.modref_use_may_alias++;
2929 : : }
2930 : : }
2931 : : }
2932 : :
2933 : 34202651 : base = ao_ref_base (ref);
2934 : 34202651 : if (!base)
2935 : : return true;
2936 : :
2937 : : /* If the reference is based on a decl that is not aliased the call
2938 : : cannot possibly use it. */
2939 : 34202651 : if (DECL_P (base)
2940 : 30171290 : && !may_be_aliased (base)
2941 : : /* But local statics can be used through recursion. */
2942 : 49739492 : && !is_global_var (base))
2943 : 15311353 : goto process_args;
2944 : :
2945 : 18891298 : if (int res = check_fnspec (call, ref, false))
2946 : : {
2947 : 16075612 : if (res == 1)
2948 : : return true;
2949 : : }
2950 : : else
2951 : 2815686 : goto process_args;
2952 : :
2953 : : /* Check if base is a global static variable that is not read
2954 : : by the function. */
2955 : 15657972 : if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2956 : : {
2957 : 780921 : struct cgraph_node *node = cgraph_node::get (callee);
2958 : 780921 : bitmap read;
2959 : 780921 : int id;
2960 : :
2961 : : /* FIXME: Callee can be an OMP builtin that does not have a call graph
2962 : : node yet. We should enforce that there are nodes for all decls in the
2963 : : IL and remove this check instead. */
2964 : 780921 : if (node
2965 : 780377 : && (id = ipa_reference_var_uid (base)) != -1
2966 : 110288 : && (read = ipa_reference_get_read_global (node))
2967 : 793812 : && !bitmap_bit_p (read, id))
2968 : 9441 : goto process_args;
2969 : : }
2970 : :
2971 : : /* Check if the base variable is call-used. */
2972 : 15648531 : if (DECL_P (base))
2973 : : {
2974 : 12627240 : if (pt_solution_includes (gimple_call_use_set (call), base))
2975 : : return true;
2976 : : }
2977 : 3021291 : else if ((TREE_CODE (base) == MEM_REF
2978 : 3021291 : || TREE_CODE (base) == TARGET_MEM_REF)
2979 : 3021291 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2980 : : {
2981 : 3021068 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2982 : 3021068 : if (!pi)
2983 : : return true;
2984 : :
2985 : 3012518 : if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2986 : : return true;
2987 : : }
2988 : : else
2989 : : return true;
2990 : :
2991 : : /* Inspect call arguments for passed-by-value aliases. */
2992 : : process_args:
2993 : 61760416 : for (i = 0; i < gimple_call_num_args (call); ++i)
2994 : : {
2995 : 43896366 : tree op = gimple_call_arg (call, i);
2996 : 43896366 : int flags = gimple_call_arg_flags (call, i);
2997 : :
2998 : 43896366 : if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2999 : 582777 : continue;
3000 : :
3001 : 43313589 : if (TREE_CODE (op) == WITH_SIZE_EXPR)
3002 : 458 : op = TREE_OPERAND (op, 0);
3003 : :
3004 : 43313589 : if (TREE_CODE (op) != SSA_NAME
3005 : 43313589 : && !is_gimple_min_invariant (op))
3006 : : {
3007 : 7564781 : ao_ref r;
3008 : 7564781 : ao_ref_init (&r, op);
3009 : 7564781 : if (refs_may_alias_p_1 (&r, ref, tbaa_p))
3010 : 3687102 : return true;
3011 : : }
3012 : : }
3013 : :
3014 : : return false;
3015 : : }
3016 : :
3017 : : static bool
3018 : 34727086 : ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
3019 : : {
3020 : 34727086 : bool res;
3021 : 34727086 : res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
3022 : 34727086 : if (res)
3023 : 16864926 : ++alias_stats.ref_maybe_used_by_call_p_may_alias;
3024 : : else
3025 : 17862160 : ++alias_stats.ref_maybe_used_by_call_p_no_alias;
3026 : 34727086 : return res;
3027 : : }
3028 : :
3029 : :
3030 : : /* If the statement STMT may use the memory reference REF return
3031 : : true, otherwise return false. */
3032 : :
3033 : : bool
3034 : 283254620 : ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
3035 : : {
3036 : 283254620 : if (is_gimple_assign (stmt))
3037 : : {
3038 : 243235180 : tree rhs;
3039 : :
3040 : : /* All memory assign statements are single. */
3041 : 243235180 : if (!gimple_assign_single_p (stmt))
3042 : : return false;
3043 : :
3044 : 243235180 : rhs = gimple_assign_rhs1 (stmt);
3045 : 243235180 : if (is_gimple_reg (rhs)
3046 : 199578869 : || is_gimple_min_invariant (rhs)
3047 : 311048438 : || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
3048 : 198044184 : return false;
3049 : :
3050 : 45190996 : return refs_may_alias_p (rhs, ref, tbaa_p);
3051 : : }
3052 : 40019440 : else if (is_gimple_call (stmt))
3053 : 34727086 : return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
3054 : 5292354 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
3055 : : {
3056 : 4631867 : tree retval = gimple_return_retval (return_stmt);
3057 : 4631867 : if (retval
3058 : 2502770 : && TREE_CODE (retval) != SSA_NAME
3059 : 2013937 : && !is_gimple_min_invariant (retval)
3060 : 6499473 : && refs_may_alias_p (retval, ref, tbaa_p))
3061 : : return true;
3062 : : /* If ref escapes the function then the return acts as a use. */
3063 : 3065723 : tree base = ao_ref_base (ref);
3064 : 3065723 : if (!base)
3065 : : ;
3066 : 3065723 : else if (DECL_P (base))
3067 : 911835 : return is_global_var (base);
3068 : 2153888 : else if (TREE_CODE (base) == MEM_REF
3069 : 2153888 : || TREE_CODE (base) == TARGET_MEM_REF)
3070 : 2153830 : return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), false);
3071 : : return false;
3072 : : }
3073 : :
3074 : : return true;
3075 : : }
3076 : :
3077 : : bool
3078 : 742511 : ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3079 : : {
3080 : 742511 : ao_ref r;
3081 : 742511 : ao_ref_init (&r, ref);
3082 : 742511 : return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
3083 : : }
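/* Illustrative sketch (editorial addition, not part of the original
   file; the helper name is hypothetical): a typical client builds an
   ao_ref once and then queries the use oracle for individual
   statements.  */

static bool
example_stmt_reads_decl_p (gimple *stmt, tree decl)
{
  ao_ref r;
  ao_ref_init (&r, decl);
  /* True whenever STMT may need (parts of) the memory of DECL.  */
  return ref_maybe_used_by_stmt_p (stmt, &r, true);
}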
3084 : :
3085 : : /* If the call in statement CALL may clobber the memory reference REF
3086 : : return true, otherwise return false. */
3087 : :
3088 : : bool
3089 : 313175466 : call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3090 : : {
3091 : 313175466 : tree base;
3092 : 313175466 : tree callee;
3093 : :
3094 : : /* If the call is pure or const it cannot clobber anything. */
3095 : 313175466 : if (gimple_call_flags (call)
3096 : 313175466 : & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3097 : : return false;
3098 : 310240161 : if (gimple_call_internal_p (call))
3099 : 548906 : switch (auto fn = gimple_call_internal_fn (call))
3100 : : {
3101 : : /* Treat these internal calls like ECF_PURE for aliasing:
3102 : : they don't write to any memory the program should care about.
3103 : : They have important other side-effects, and read memory,
3104 : : so can't be ECF_NOVOPS. */
3105 : : case IFN_UBSAN_NULL:
3106 : : case IFN_UBSAN_BOUNDS:
3107 : : case IFN_UBSAN_VPTR:
3108 : : case IFN_UBSAN_OBJECT_SIZE:
3109 : : case IFN_UBSAN_PTR:
3110 : : case IFN_ASAN_CHECK:
3111 : : return false;
3112 : 7167 : case IFN_MASK_STORE:
3113 : 7167 : case IFN_LEN_STORE:
3114 : 7167 : case IFN_MASK_LEN_STORE:
3115 : 7167 : case IFN_MASK_STORE_LANES:
3116 : 7167 : case IFN_MASK_LEN_STORE_LANES:
3117 : 7167 : {
3118 : 7167 : tree rhs = gimple_call_arg (call,
3119 : 7167 : internal_fn_stored_value_index (fn));
3120 : 7167 : ao_ref lhs_ref;
3121 : 7167 : ao_ref_init_from_ptr_and_size (&lhs_ref, gimple_call_arg (call, 0),
3122 : 7167 : TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
3123 : : /* We cannot make this a known-size access since otherwise
3124 : : we disambiguate against refs to decls that are smaller. */
3125 : 7167 : lhs_ref.size = -1;
3126 : 14334 : lhs_ref.ref_alias_set = lhs_ref.base_alias_set
3127 : 7167 : = tbaa_p ? get_deref_alias_set
3128 : 6814 : (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
3129 : 7167 : return refs_may_alias_p_1 (ref, &lhs_ref, tbaa_p);
3130 : : }
3131 : : default:
3132 : : break;
3133 : : }
3134 : :
3135 : 310008164 : callee = gimple_call_fndecl (call);
3136 : :
3137 : 310008164 : if (callee != NULL_TREE && !ref->volatile_p)
3138 : : {
3139 : 290749402 : struct cgraph_node *node = cgraph_node::get (callee);
3140 : 290749402 : if (node)
3141 : : {
3142 : 290480115 : modref_summary *summary = get_modref_function_summary (node);
3143 : 290480115 : if (summary)
3144 : : {
3145 : 34483622 : if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
3146 : 34483622 : && (!summary->writes_errno
3147 : 67401 : || !targetm.ref_may_alias_errno (ref)))
3148 : : {
3149 : 3209846 : alias_stats.modref_clobber_no_alias++;
3150 : 3209846 : if (dump_file && (dump_flags & TDF_DETAILS))
3151 : : {
3152 : 52 : fprintf (dump_file,
3153 : : "ipa-modref: call stmt ");
3154 : 52 : print_gimple_stmt (dump_file, call, 0);
3155 : 52 : fprintf (dump_file,
3156 : : "ipa-modref: call to %s does not clobber ",
3157 : : node->dump_name ());
3158 : 52 : if (!ref->ref && ref->base)
3159 : : {
3160 : 32 : fprintf (dump_file, "base: ");
3161 : 32 : print_generic_expr (dump_file, ref->base);
3162 : : }
3163 : 20 : else if (ref->ref)
3164 : : {
3165 : 20 : fprintf (dump_file, "ref: ");
3166 : 20 : print_generic_expr (dump_file, ref->ref);
3167 : : }
3168 : 52 : fprintf (dump_file, " alias sets: %i->%i\n",
3169 : : ao_ref_base_alias_set (ref),
3170 : : ao_ref_alias_set (ref));
3171 : : }
3172 : 3209846 : return false;
3173 : : }
3174 : 31273776 : alias_stats.modref_clobber_may_alias++;
3175 : : }
3176 : : }
3177 : : }
3178 : :
3179 : 306798318 : base = ao_ref_base (ref);
3180 : 306798318 : if (!base)
3181 : : return true;
3182 : :
3183 : 306798318 : if (TREE_CODE (base) == SSA_NAME
3184 : 306798028 : || CONSTANT_CLASS_P (base))
3185 : : return false;
3186 : :
3187 : : /* A call that is not without side-effects might involve volatile
3188 : : accesses and thus conflict with all other volatile accesses. */
3189 : 298950573 : if (ref->volatile_p)
3190 : : return true;
3191 : :
3192 : : /* If the reference is based on a decl that is not aliased the call
3193 : : cannot possibly clobber it. */
3194 : 297499990 : if (DECL_P (base)
3195 : 271874791 : && !may_be_aliased (base)
3196 : : /* But local non-readonly statics can be modified through recursion
3197 : : or the call may implement a threading barrier which we must
3198 : : treat as may-def. */
3199 : 489362592 : && (TREE_READONLY (base)
3200 : 184871303 : || !is_global_var (base)))
3201 : : return false;
3202 : :
3203 : : /* If the reference is based on a pointer that points to memory
3204 : : that may not be written to then the call cannot possibly clobber it. */
3205 : 113651475 : if ((TREE_CODE (base) == MEM_REF
3206 : 113651475 : || TREE_CODE (base) == TARGET_MEM_REF)
3207 : 25625199 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3208 : 139035624 : && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3209 : : return false;
3210 : :
3211 : 113310185 : if (int res = check_fnspec (call, ref, true))
3212 : : {
3213 : 91623787 : if (res == 1)
3214 : : return true;
3215 : : }
3216 : : else
3217 : : return false;
3218 : :
3219 : : /* Check if base is a global static variable that is not written
3220 : : by the function. */
3221 : 86822723 : if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3222 : : {
3223 : 8994909 : struct cgraph_node *node = cgraph_node::get (callee);
3224 : 8994909 : bitmap written;
3225 : 8994909 : int id;
3226 : :
3227 : 8994909 : if (node
3228 : 8994453 : && (id = ipa_reference_var_uid (base)) != -1
3229 : 3557796 : && (written = ipa_reference_get_written_global (node))
3230 : 9061373 : && !bitmap_bit_p (written, id))
3231 : : return false;
3232 : : }
3233 : :
3234 : : /* Check if the base variable is call-clobbered. */
3235 : 86772591 : if (DECL_P (base))
3236 : 67336165 : return pt_solution_includes (gimple_call_clobber_set (call), base);
3237 : 19436426 : else if ((TREE_CODE (base) == MEM_REF
3238 : 19436426 : || TREE_CODE (base) == TARGET_MEM_REF)
3239 : 19436426 : && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3240 : : {
3241 : 19242325 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3242 : 19242325 : if (!pi)
3243 : : return true;
3244 : :
3245 : 18636553 : return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3246 : : }
3247 : :
3248 : : return true;
3249 : : }
3250 : :
3251 : : /* If the call in statement CALL may clobber the memory reference REF
3252 : : return true, otherwise return false. */
3253 : :
3254 : : bool
3255 : 316978 : call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3256 : : {
3257 : 316978 : bool res;
3258 : 316978 : ao_ref r;
3259 : 316978 : ao_ref_init (&r, ref);
3260 : 316978 : res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3261 : 316978 : if (res)
3262 : 58851 : ++alias_stats.call_may_clobber_ref_p_may_alias;
3263 : : else
3264 : 258127 : ++alias_stats.call_may_clobber_ref_p_no_alias;
3265 : 316978 : return res;
3266 : : }
3267 : :
3268 : :
3269 : : /* If the statement STMT may clobber the memory reference REF return true,
3270 : : otherwise return false. */
3271 : :
3272 : : bool
3273 : 1737832980 : stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3274 : : {
3275 : 1737832980 : if (is_gimple_call (stmt))
3276 : : {
3277 : 318812069 : tree lhs = gimple_call_lhs (stmt);
3278 : 318812069 : if (lhs
3279 : 153105320 : && TREE_CODE (lhs) != SSA_NAME)
3280 : : {
3281 : 59981682 : ao_ref r;
3282 : 59981682 : ao_ref_init (&r, lhs);
3283 : 59981682 : if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3284 : 6039366 : return true;
3285 : : }
3286 : :
3287 : 312772703 : return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3288 : : }
3289 : 1419020911 : else if (gimple_assign_single_p (stmt))
3290 : : {
3291 : 1411188828 : tree lhs = gimple_assign_lhs (stmt);
3292 : 1411188828 : if (TREE_CODE (lhs) != SSA_NAME)
3293 : : {
3294 : 1410334423 : ao_ref r;
3295 : 1410334423 : ao_ref_init (&r, lhs);
3296 : 1410334423 : return refs_may_alias_p_1 (ref, &r, tbaa_p);
3297 : : }
3298 : : }
3299 : 7832083 : else if (gimple_code (stmt) == GIMPLE_ASM)
3300 : : return true;
3301 : :
3302 : : return false;
3303 : : }
3304 : :
3305 : : bool
3306 : 4270610 : stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3307 : : {
3308 : 4270610 : ao_ref r;
3309 : 4270610 : ao_ref_init (&r, ref);
3310 : 4270610 : return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3311 : : }
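/* Illustrative sketch (editorial addition; names are hypothetical): the
   clobber oracle is typically queried while walking the virtual use-def
   chain backwards from a use.  A deliberately conservative fragment,
   which gives up at PHI nodes and at the function entry (the walkers
   further below handle those cases properly):  */

static bool
example_ref_clobbered_between_p (gimple *def_stmt, gimple *use_stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  tree stop_at = gimple_vdef (def_stmt);
  tree vuse = gimple_vuse (use_stmt);
  while (vuse && vuse != stop_at)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (vuse);
      /* Conservatively treat merge points and the function entry as
         possible clobbers.  */
      if (gimple_nop_p (stmt) || gimple_code (stmt) == GIMPLE_PHI)
        return true;
      if (stmt_may_clobber_ref_p_1 (stmt, &r, true))
        return true;
      vuse = gimple_vuse (stmt);
    }
  return false;
}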
3312 : :
3313 : : /* Return true if store1 and store2 described by corresponding tuples
3314 : : <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3315 : : address. */
3316 : :
3317 : : static bool
3318 : 111439246 : same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3319 : : poly_int64 max_size1,
3320 : : tree base2, poly_int64 offset2, poly_int64 size2,
3321 : : poly_int64 max_size2)
3322 : : {
3323 : : /* Offsets need to be 0. */
3324 : 111439246 : if (maybe_ne (offset1, 0)
3325 : 111439246 : || maybe_ne (offset2, 0))
3326 : : return false;
3327 : :
3328 : 30260871 : bool base1_obj_p = SSA_VAR_P (base1);
3329 : 30260871 : bool base2_obj_p = SSA_VAR_P (base2);
3330 : :
3331 : : /* We need one object. */
3332 : 30260871 : if (base1_obj_p == base2_obj_p)
3333 : : return false;
3334 : 3882190 : tree obj = base1_obj_p ? base1 : base2;
3335 : :
3336 : : /* And we need one MEM_REF. */
3337 : 3882190 : bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3338 : 3882190 : bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3339 : 3882190 : if (base1_memref_p == base2_memref_p)
3340 : : return false;
3341 : 3792917 : tree memref = base1_memref_p ? base1 : base2;
3342 : :
3343 : : /* Sizes need to be valid. */
3344 : 3792917 : if (!known_size_p (max_size1)
3345 : 3771884 : || !known_size_p (max_size2)
3346 : 3771482 : || !known_size_p (size1)
3347 : 7564399 : || !known_size_p (size2))
3348 : : return false;
3349 : :
3350 : : /* Max_size needs to match size. */
3351 : 3771482 : if (maybe_ne (max_size1, size1)
3352 : 3771482 : || maybe_ne (max_size2, size2))
3353 : : return false;
3354 : :
3355 : : /* Sizes need to match. */
3356 : 3731474 : if (maybe_ne (size1, size2))
3357 : : return false;
3358 : :
3359 : :
3360 : : /* Check that memref is a store to pointer with singleton points-to info. */
3361 : 901715 : if (!integer_zerop (TREE_OPERAND (memref, 1)))
3362 : : return false;
3363 : 701181 : tree ptr = TREE_OPERAND (memref, 0);
3364 : 701181 : if (TREE_CODE (ptr) != SSA_NAME)
3365 : : return false;
3366 : 700893 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3367 : 700893 : unsigned int pt_uid;
3368 : 700893 : if (pi == NULL
3369 : 700893 : || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3370 : 538573 : return false;
3371 : :
3372 : : /* Be conservative with non-call exceptions when the address might
3373 : : be NULL. */
3374 : 162320 : if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3375 : : return false;
3376 : :
3377 : : /* Check that ptr points relative to obj. */
3378 : 162309 : unsigned int obj_uid = DECL_PT_UID (obj);
3379 : 162309 : if (obj_uid != pt_uid)
3380 : : return false;
3381 : :
3382 : : /* Check that the object size is the same as the store size. That ensures
3383 : : that ptr points to the start of obj. */
3384 : 34920 : return (DECL_SIZE (obj)
3385 : 34920 : && poly_int_tree_p (DECL_SIZE (obj))
3386 : 104676 : && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3387 : : }
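/* Editorial note (illustrative, not part of the original source): the
   situation same_addr_size_stores_p recognizes corresponds at the source
   level to something like

     struct S obj;
     struct S *p = ...;   // points-to info proves p can only point to obj
     *p = x;              // store through MEM_REF[p_1] at offset 0
     obj = y;             // store to the decl itself

   where both stores have offset zero, equal known sizes and the size
   matches DECL_SIZE (obj), so they are known to write the same bytes even
   though their bases differ syntactically.  */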
3388 : :
3389 : : /* Return true if REF is killed by a store described by
3390 : : BASE, OFFSET, SIZE and MAX_SIZE. */
3391 : :
3392 : : static bool
3393 : 199841296 : store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3394 : : poly_int64 max_size, ao_ref *ref)
3395 : : {
3396 : 199841296 : poly_int64 ref_offset = ref->offset;
3397 : : /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3398 : : so base == ref->base does not always hold. */
3399 : 199841296 : if (base != ref->base)
3400 : : {
3401 : : /* Try using points-to info. */
3402 : 111439246 : if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3403 : : ref->offset, ref->size, ref->max_size))
3404 : : return true;
3405 : :
3406 : : /* If both base and ref->base are MEM_REFs, only compare the
3407 : : first operand, and if the second operand isn't equal constant,
3408 : : try to add the offsets into offset and ref_offset. */
3409 : 31073508 : if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3410 : 135569879 : && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3411 : : {
3412 : 19209091 : if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3413 : 19209091 : TREE_OPERAND (ref->base, 1)))
3414 : : {
3415 : 6363804 : poly_offset_int off1 = mem_ref_offset (base);
3416 : 6363804 : off1 <<= LOG2_BITS_PER_UNIT;
3417 : 6363804 : off1 += offset;
3418 : 6363804 : poly_offset_int off2 = mem_ref_offset (ref->base);
3419 : 6363804 : off2 <<= LOG2_BITS_PER_UNIT;
3420 : 6363804 : off2 += ref_offset;
3421 : 6363804 : if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3422 : 0 : size = -1;
3423 : : }
3424 : : }
3425 : : else
3426 : 92230071 : size = -1;
3427 : : }
3428 : : /* For a must-alias check we need to be able to constrain
3429 : : the access properly. */
3430 : 199841212 : return (known_eq (size, max_size)
3431 : 199841212 : && known_subrange_p (ref_offset, ref->max_size, offset, size));
3432 : : }
3433 : :
3434 : : /* If STMT kills the memory reference REF return true, otherwise
3435 : : return false. */
3436 : :
3437 : : bool
3438 : 240282789 : stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3439 : : {
3440 : 240282789 : if (!ao_ref_base (ref))
3441 : : return false;
3442 : :
3443 : 240282789 : if (gimple_has_lhs (stmt)
3444 : 211320316 : && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3445 : : /* The assignment is not necessarily carried out if it can throw
3446 : : and we can catch it in the current function where we could inspect
3447 : : the previous value. Similarly if the function can throw externally
3448 : : and the ref does not die on the function return.
3449 : : ??? We only need to care about the RHS throwing. For aggregate
3450 : : assignments or similar calls and non-call exceptions the LHS
3451 : : might throw as well.
3452 : : ??? We also should care about possible longjmp, but since we
3453 : : do not understand that longjmp is not using global memory we will
3454 : : not consider a kill here since the function call will be considered
3455 : : as possibly using REF. */
3456 : 206395363 : && !stmt_can_throw_internal (cfun, stmt)
3457 : 220890477 : && (!stmt_can_throw_external (cfun, stmt)
3458 : 3002647 : || !ref_may_alias_global_p (ref, false)))
3459 : : {
3460 : 203818107 : tree lhs = gimple_get_lhs (stmt);
3461 : : /* If LHS is literally a base of the access we are done. */
3462 : 203818107 : if (ref->ref)
3463 : : {
3464 : 202702825 : tree base = ref->ref;
3465 : 202702825 : tree innermost_dropped_array_ref = NULL_TREE;
3466 : 202702825 : if (handled_component_p (base))
3467 : : {
3468 : 154829653 : tree saved_lhs0 = NULL_TREE;
3469 : 287471387 : if (handled_component_p (lhs))
3470 : : {
3471 : 132641734 : saved_lhs0 = TREE_OPERAND (lhs, 0);
3472 : 132641734 : TREE_OPERAND (lhs, 0) = integer_zero_node;
3473 : : }
3474 : 276870412 : do
3475 : : {
3476 : : /* Just compare the outermost handled component, if
3477 : : they are equal we have found a possible common
3478 : : base. */
3479 : 276870412 : tree saved_base0 = TREE_OPERAND (base, 0);
3480 : 276870412 : TREE_OPERAND (base, 0) = integer_zero_node;
3481 : 276870412 : bool res = operand_equal_p (lhs, base, 0);
3482 : 276870412 : TREE_OPERAND (base, 0) = saved_base0;
3483 : 276870412 : if (res)
3484 : : break;
3485 : : /* Remember if we drop an array-ref that we need to
3486 : : double-check not being at struct end. */
3487 : 257910812 : if (TREE_CODE (base) == ARRAY_REF
3488 : 257910812 : || TREE_CODE (base) == ARRAY_RANGE_REF)
3489 : 72433159 : innermost_dropped_array_ref = base;
3490 : : /* Otherwise drop handled components of the access. */
3491 : 257910812 : base = saved_base0;
3492 : : }
3493 : 393780865 : while (handled_component_p (base));
3494 : 154829653 : if (saved_lhs0)
3495 : 132641734 : TREE_OPERAND (lhs, 0) = saved_lhs0;
3496 : : }
3497 : : /* Finally check if the lhs has the same address and size as the
3498 : : base candidate of the access. Watch out if we have dropped
3499 : : an array-ref that might have flexible size; this means ref->ref
3500 : : may be outside of the TYPE_SIZE of its base. */
3501 : 154829653 : if ((! innermost_dropped_array_ref
3502 : 71312862 : || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
3503 : 345964678 : && (lhs == base
3504 : 188961318 : || (((TYPE_SIZE (TREE_TYPE (lhs))
3505 : 188961318 : == TYPE_SIZE (TREE_TYPE (base)))
3506 : 143334742 : || (TYPE_SIZE (TREE_TYPE (lhs))
3507 : 143334679 : && TYPE_SIZE (TREE_TYPE (base))
3508 : 143334585 : && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3509 : 143334585 : TYPE_SIZE (TREE_TYPE (base)),
3510 : : 0)))
3511 : 45626576 : && operand_equal_p (lhs, base,
3512 : : OEP_ADDRESS_OF
3513 : : | OEP_MATCH_SIDE_EFFECTS))))
3514 : : {
3515 : 2847211 : ++alias_stats.stmt_kills_ref_p_yes;
3516 : 9017909 : return true;
3517 : : }
3518 : : }
3519 : :
3520 : : /* Now look for non-literal equal bases with the restriction of
3521 : : handling constant offset and size. */
3522 : : /* For a must-alias check we need to be able to constrain
3523 : : the access properly. */
3524 : 200970896 : if (!ref->max_size_known_p ())
3525 : : {
3526 : 2004434 : ++alias_stats.stmt_kills_ref_p_no;
3527 : 2004434 : return false;
3528 : : }
3529 : 198966462 : poly_int64 size, offset, max_size;
3530 : 198966462 : bool reverse;
3531 : 198966462 : tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3532 : : &reverse);
3533 : 198966462 : if (store_kills_ref_p (base, offset, size, max_size, ref))
3534 : : {
3535 : 1319053 : ++alias_stats.stmt_kills_ref_p_yes;
3536 : 1319053 : return true;
3537 : : }
3538 : : }
3539 : :
3540 : 234112091 : if (is_gimple_call (stmt))
3541 : : {
3542 : 16632467 : tree callee = gimple_call_fndecl (stmt);
3543 : 16632467 : struct cgraph_node *node;
3544 : 16632467 : modref_summary *summary;
3545 : :
3546 : : /* Try to disambiguate using modref summary. Modref records a vector
3547 : : of stores with known offsets relative to function parameters that must
3548 : : happen on every execution of the function. Find if we have a matching
3549 : : store and verify that the function cannot use the value. */
3550 : 16632467 : if (callee != NULL_TREE
3551 : 15996160 : && (node = cgraph_node::get (callee)) != NULL
3552 : 15953527 : && node->binds_to_current_def_p ()
3553 : 1484165 : && (summary = get_modref_function_summary (node)) != NULL
3554 : 851692 : && summary->kills.length ()
3555 : : /* Check that we cannot trap while evaluating function
3556 : : parameters. This check is overly conservative. */
3557 : 16710287 : && (!cfun->can_throw_non_call_exceptions
3558 : 0 : || (!stmt_can_throw_internal (cfun, stmt)
3559 : 0 : && (!stmt_can_throw_external (cfun, stmt)
3560 : 0 : || !ref_may_alias_global_p (ref, false)))))
3561 : : {
3562 : 320643 : for (auto kill : summary->kills)
3563 : : {
3564 : 93505 : ao_ref dref;
3565 : :
3566 : : /* We only can do useful compares if we know the access range
3567 : : precisely. */
3568 : 93505 : if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3569 : 24 : continue;
3570 : 93481 : if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3571 : : dref.size, dref.max_size, ref))
3572 : : {
3573 : : /* For the store to be killed it must not be used
3574 : : earlier. */
3575 : 6322 : if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3576 : : true)
3577 : 6322 : || !dbg_cnt (ipa_mod_ref))
3578 : : break;
3579 : 3980 : if (dump_file && (dump_flags & TDF_DETAILS))
3580 : : {
3581 : 2 : fprintf (dump_file,
3582 : : "ipa-modref: call stmt ");
3583 : 2 : print_gimple_stmt (dump_file, stmt, 0);
3584 : 2 : fprintf (dump_file,
3585 : : "ipa-modref: call to %s kills ",
3586 : : node->dump_name ());
3587 : 2 : print_generic_expr (dump_file, ref->base);
3588 : 2 : fprintf (dump_file, "\n");
3589 : : }
3590 : 3980 : ++alias_stats.modref_kill_yes;
3591 : 3980 : return true;
3592 : : }
3593 : : }
3594 : 73840 : ++alias_stats.modref_kill_no;
3595 : : }
3596 : 16628487 : if (callee != NULL_TREE
3597 : 16628487 : && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3598 : 3887851 : switch (DECL_FUNCTION_CODE (callee))
3599 : : {
3600 : 505198 : case BUILT_IN_FREE:
3601 : 505198 : {
3602 : 505198 : tree ptr = gimple_call_arg (stmt, 0);
3603 : 505198 : tree base = ao_ref_base (ref);
3604 : 505198 : if (base && TREE_CODE (base) == MEM_REF
3605 : 588157 : && TREE_OPERAND (base, 0) == ptr)
3606 : : {
3607 : 19123 : ++alias_stats.stmt_kills_ref_p_yes;
3608 : 19123 : return true;
3609 : : }
3610 : : break;
3611 : : }
3612 : :
3613 : 1111033 : case BUILT_IN_MEMCPY:
3614 : 1111033 : case BUILT_IN_MEMPCPY:
3615 : 1111033 : case BUILT_IN_MEMMOVE:
3616 : 1111033 : case BUILT_IN_MEMSET:
3617 : 1111033 : case BUILT_IN_MEMCPY_CHK:
3618 : 1111033 : case BUILT_IN_MEMPCPY_CHK:
3619 : 1111033 : case BUILT_IN_MEMMOVE_CHK:
3620 : 1111033 : case BUILT_IN_MEMSET_CHK:
3621 : 1111033 : case BUILT_IN_STRNCPY:
3622 : 1111033 : case BUILT_IN_STPNCPY:
3623 : 1111033 : case BUILT_IN_CALLOC:
3624 : 1111033 : {
3625 : : /* For a must-alias check we need to be able to constrain
3626 : : the access properly. */
3627 : 1111033 : if (!ref->max_size_known_p ())
3628 : : {
3629 : 56204 : ++alias_stats.stmt_kills_ref_p_no;
3630 : 390874 : return false;
3631 : : }
3632 : 1054829 : tree dest;
3633 : 1054829 : tree len;
3634 : :
3635 : : /* In execution order a calloc call will never kill
3636 : : anything. However, DSE will (ab)use this interface
3637 : : to ask if a calloc call writes the same memory locations
3638 : : as a later assignment, memset, etc. So handle calloc
3639 : : in the expected way. */
3640 : 1054829 : if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3641 : : {
3642 : 1225 : tree arg0 = gimple_call_arg (stmt, 0);
3643 : 1225 : tree arg1 = gimple_call_arg (stmt, 1);
3644 : 1225 : if (TREE_CODE (arg0) != INTEGER_CST
3645 : 1100 : || TREE_CODE (arg1) != INTEGER_CST)
3646 : : {
3647 : 125 : ++alias_stats.stmt_kills_ref_p_no;
3648 : 125 : return false;
3649 : : }
3650 : :
3651 : 1100 : dest = gimple_call_lhs (stmt);
3652 : 1100 : if (!dest)
3653 : : {
3654 : 1 : ++alias_stats.stmt_kills_ref_p_no;
3655 : 1 : return false;
3656 : : }
3657 : 1099 : len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3658 : : }
3659 : : else
3660 : : {
3661 : 1053604 : dest = gimple_call_arg (stmt, 0);
3662 : 1053604 : len = gimple_call_arg (stmt, 2);
3663 : : }
3664 : 1054703 : if (!poly_int_tree_p (len))
3665 : : return false;
3666 : 781353 : ao_ref dref;
3667 : 781353 : ao_ref_init_from_ptr_and_size (&dref, dest, len);
3668 : 781353 : if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3669 : : dref.size, dref.max_size, ref))
3670 : : {
3671 : 4990 : ++alias_stats.stmt_kills_ref_p_yes;
3672 : 4990 : return true;
3673 : : }
3674 : 776363 : break;
3675 : : }
3676 : :
3677 : 11399 : case BUILT_IN_VA_END:
3678 : 11399 : {
3679 : 11399 : tree ptr = gimple_call_arg (stmt, 0);
3680 : 11399 : if (TREE_CODE (ptr) == ADDR_EXPR)
3681 : : {
3682 : 11348 : tree base = ao_ref_base (ref);
3683 : 11348 : if (TREE_OPERAND (ptr, 0) == base)
3684 : : {
3685 : 6668 : ++alias_stats.stmt_kills_ref_p_yes;
3686 : 6668 : return true;
3687 : : }
3688 : : }
3689 : : break;
3690 : : }
3691 : :
3692 : : default:;
3693 : : }
3694 : : }
3695 : 233747650 : ++alias_stats.stmt_kills_ref_p_no;
3696 : 233747650 : return false;
3697 : : }
3698 : :
3699 : : bool
3700 : 8023435 : stmt_kills_ref_p (gimple *stmt, tree ref)
3701 : : {
3702 : 8023435 : ao_ref r;
3703 : 8023435 : ao_ref_init (&r, ref);
3704 : 8023435 : return stmt_kills_ref_p (stmt, &r);
3705 : : }
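/* Illustrative sketch (editorial addition; the helper name is
   hypothetical): a dead-store-elimination style client combines the kill
   oracle with the use oracle, e.g. a store to STORED_REF is made
   redundant by LATER_STMT if LATER_STMT overwrites all of STORED_REF and
   does not itself read it (and, in a real client, nothing in between
   reads it either).  */

static bool
example_store_overwritten_by_p (gimple *later_stmt, tree stored_ref)
{
  ao_ref r;
  ao_ref_init (&r, stored_ref);
  return (stmt_kills_ref_p (later_stmt, &r)
          && !ref_maybe_used_by_stmt_p (later_stmt, &r, true));
}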
3706 : :
3707 : : /* Return whether REF can be subject to store data races. */
3708 : :
3709 : : bool
3710 : 22852 : ref_can_have_store_data_races (tree ref)
3711 : : {
3712 : : /* With -fallow-store-data-races we do not care about them. */
3713 : 22852 : if (flag_store_data_races)
3714 : : return false;
3715 : :
3716 : 22762 : tree base = get_base_address (ref);
3717 : 22762 : if (auto_var_p (base)
3718 : 22762 : && ! may_be_aliased (base))
3719 : : /* Automatic variables not aliased are not subject to
3720 : : data races. */
3721 : : return false;
3722 : :
3723 : : return true;
3724 : : }
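/* Illustrative sketch (editorial addition; the helper name is
   hypothetical): a transform that would execute a store on paths where
   the original program did not store, e.g. unconditional store motion
   out of a loop, must first make sure no other thread can observe the
   difference.  */

static bool
example_may_execute_store_speculatively_p (tree lhs)
{
  return !ref_can_have_store_data_races (lhs);
}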
3725 : :
3726 : :
3727 : : /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3728 : : TARGET or a statement clobbering the memory reference REF, in which
3729 : : case false is returned. The walk starts with VUSE, one argument of PHI. */
3730 : :
3731 : : static bool
3732 : 116513449 : maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3733 : : ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3734 : : bitmap *visited, bool abort_on_visited,
3735 : : void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3736 : : translate_flags disambiguate_only,
3737 : : void *data)
3738 : : {
3739 : 116513449 : basic_block bb = gimple_bb (phi);
3740 : :
3741 : 116513449 : if (!*visited)
3742 : : {
3743 : 21370055 : *visited = BITMAP_ALLOC (NULL);
3744 : 21370055 : bitmap_tree_view (*visited);
3745 : : }
3746 : :
3747 : 116513449 : bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3748 : :
3749 : : /* Walk until we hit the target. */
3750 : 116513449 : while (vuse != target)
3751 : : {
3752 : 353657287 : gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3753 : : /* If we are searching for the target VUSE by walking up to
3754 : : TARGET_BB dominating the original PHI we are finished once
3755 : : we reach a default def or a definition in a block dominating
3756 : : that block. Update TARGET and return. */
3757 : 353657287 : if (!target
3758 : 353657287 : && (gimple_nop_p (def_stmt)
3759 : 66290833 : || dominated_by_p (CDI_DOMINATORS,
3760 : 66290833 : target_bb, gimple_bb (def_stmt))))
3761 : : {
3762 : 18676847 : target = vuse;
3763 : 18676847 : return true;
3764 : : }
3765 : :
3766 : : /* Recurse for PHI nodes. */
3767 : 334980440 : if (gimple_code (def_stmt) == GIMPLE_PHI)
3768 : : {
3769 : : /* An already visited PHI node ends the walk successfully. */
3770 : 67170329 : if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3771 : 31525991 : return !abort_on_visited;
3772 : 35644338 : vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3773 : : visited, abort_on_visited,
3774 : : translate, data, disambiguate_only);
3775 : 35644338 : if (!vuse)
3776 : : return false;
3777 : 31292998 : continue;
3778 : : }
3779 : 267810111 : else if (gimple_nop_p (def_stmt))
3780 : : return false;
3781 : : else
3782 : : {
3783 : : /* A clobbering statement or the end of the IL ends the walk unsuccessfully. */
3784 : 267810111 : if ((int)limit <= 0)
3785 : : return false;
3786 : 267778525 : --limit;
3787 : 267778525 : if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3788 : : {
3789 : 15832041 : translate_flags tf = disambiguate_only;
3790 : 15832041 : if (translate
3791 : 15832041 : && (*translate) (ref, vuse, data, &tf) == NULL)
3792 : : ;
3793 : : else
3794 : 13741840 : return false;
3795 : : }
3796 : : }
3797 : : /* If we reach a new basic-block see if we already skipped it
3798 : : in a previous walk that ended successfully. */
3799 : 254036685 : if (gimple_bb (def_stmt) != bb)
3800 : : {
3801 : 116518105 : if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3802 : 8369607 : return !abort_on_visited;
3803 : 108148498 : bb = gimple_bb (def_stmt);
3804 : : }
3805 : 639140603 : vuse = gimple_vuse (def_stmt);
3806 : : }
3807 : : return true;
3808 : : }
3809 : :
3810 : :
3811 : : /* Starting from a PHI node for the virtual operand of the memory reference
3812 : : REF, find a continuation virtual operand that allows us to continue walking
3813 : : statements dominating PHI, skipping only statements that cannot possibly
3814 : : clobber REF. Decrements LIMIT for each alias disambiguation done
3815 : : and aborts the walk, returning NULL_TREE if it reaches zero.
3816 : : Returns NULL_TREE if no suitable virtual operand can be found. */
3817 : :
3818 : : tree
3819 : 92168120 : get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3820 : : unsigned int &limit, bitmap *visited,
3821 : : bool abort_on_visited,
3822 : : void *(*translate)(ao_ref *, tree, void *,
3823 : : translate_flags *),
3824 : : void *data,
3825 : : translate_flags disambiguate_only)
3826 : : {
3827 : 92168120 : unsigned nargs = gimple_phi_num_args (phi);
3828 : :
3829 : : /* Through a single-argument PHI we can simply look through. */
3830 : 92168120 : if (nargs == 1)
3831 : 3376041 : return PHI_ARG_DEF (phi, 0);
3832 : :
3833 : : /* For two or more arguments try to pairwise skip non-aliasing code
3834 : : until we hit the phi argument definition that dominates the other one. */
3835 : 88792079 : basic_block phi_bb = gimple_bb (phi);
3836 : 88792079 : tree arg0, arg1;
3837 : 88792079 : unsigned i;
3838 : :
3839 : : /* Find a candidate for the virtual operand whose definition
3840 : : dominates those of all others. */
3841 : : /* First look if any of the args themselves satisfy this. */
3842 : 163582652 : for (i = 0; i < nargs; ++i)
3843 : : {
3844 : 140696600 : arg0 = PHI_ARG_DEF (phi, i);
3845 : 140696600 : if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3846 : : break;
3847 : 137838712 : basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3848 : 137838712 : if (def_bb != phi_bb
3849 : 137838712 : && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3850 : : break;
3851 : 74790573 : arg0 = NULL_TREE;
3852 : : }
3853 : : /* If not, see if we can reach such a candidate by walking defs
3854 : : until we hit the immediate dominator. maybe_skip_until will
3855 : : do that for us. */
3856 : 88792079 : basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3857 : :
3858 : : /* Then check against the (to be) found candidate. */
3859 : 341263617 : for (i = 0; i < nargs; ++i)
3860 : : {
3861 : 181909204 : arg1 = PHI_ARG_DEF (phi, i);
3862 : 181909204 : if (arg1 == arg0)
3863 : : ;
3864 : 312017965 : else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3865 : : limit, visited,
3866 : : abort_on_visited,
3867 : : translate,
3868 : : /* Do not valueize when walking over
3869 : : backedges. */
3870 : : dominated_by_p
3871 : 116513449 : (CDI_DOMINATORS,
3872 : 116513449 : gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3873 : : phi_bb)
3874 : : ? TR_DISAMBIGUATE
3875 : : : disambiguate_only, data))
3876 : : return NULL_TREE;
3877 : : }
3878 : :
3879 : 70562334 : return arg0;
3880 : : }
3881 : :
3882 : : /* Based on the memory reference REF and its virtual use VUSE call
3883 : : WALKER for each virtual use that is equivalent to VUSE, including VUSE
3884 : : itself. That is, for each virtual use for which its defining statement
3885 : : itself. That is, for each virtual use whose defining statement
3886 : :
3887 : : WALKER is called with REF, the current virtual use and DATA. If
3888 : : WALKER returns non-NULL the walk stops and its result is returned.
3889 : : At the end of a non-successful walk NULL is returned.
3890 : :
3891 : : TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3892 : : use whose definition is a statement that may clobber REF, and DATA.
3893 : : If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3894 : : If TRANSLATE returns non-NULL the walk stops and its result is returned.
3895 : : If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3896 : : to adjust REF and *DATA to make that valid.
3897 : :
3898 : : VALUEIZE if non-NULL is called with the next VUSE that is considered
3899 : : and its return value is substituted for that. This can be used to
3900 : : implement optimistic value-numbering for example. Note that the
3901 : : VUSE argument is assumed to be valueized already.
3902 : :
3903 : : LIMIT specifies the number of alias queries we are allowed to do,
3904 : : the walk stops when it reaches zero and NULL is returned. LIMIT
3905 : : is decremented by the number of alias queries (plus adjustments
3906 : : done by the callbacks) upon return.
3907 : :
3908 : : TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3909 : :
3910 : : void *
3911 : 62801924 : walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3912 : : void *(*walker)(ao_ref *, tree, void *),
3913 : : void *(*translate)(ao_ref *, tree, void *,
3914 : : translate_flags *),
3915 : : tree (*valueize)(tree),
3916 : : unsigned &limit, void *data)
3917 : : {
3918 : 62801924 : bitmap visited = NULL;
3919 : 62801924 : void *res;
3920 : 62801924 : bool translated = false;
3921 : :
3922 : 62801924 : timevar_push (TV_ALIAS_STMT_WALK);
3923 : :
3924 : 948556282 : do
3925 : : {
3926 : 948556282 : gimple *def_stmt;
3927 : :
3928 : : /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3929 : 948556282 : res = (*walker) (ref, vuse, data);
3930 : : /* Abort walk. */
3931 : 948556282 : if (res == (void *)-1)
3932 : : {
3933 : : res = NULL;
3934 : : break;
3935 : : }
3936 : : /* Lookup succeeded. */
3937 : 948556193 : else if (res != NULL)
3938 : : break;
3939 : :
3940 : 941335227 : if (valueize)
3941 : : {
3942 : 924665852 : vuse = valueize (vuse);
3943 : 924665852 : if (!vuse)
3944 : : {
3945 : : res = NULL;
3946 : : break;
3947 : : }
3948 : : }
3949 : 927176014 : def_stmt = SSA_NAME_DEF_STMT (vuse);
3950 : 927176014 : if (gimple_nop_p (def_stmt))
3951 : : break;
3952 : 924756304 : else if (gimple_code (def_stmt) == GIMPLE_PHI)
3953 : 53790140 : vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3954 : : &visited, translated, translate, data);
3955 : : else
3956 : : {
3957 : 870966164 : if ((int)limit <= 0)
3958 : : {
3959 : : res = NULL;
3960 : : break;
3961 : : }
3962 : 870771706 : --limit;
3963 : 870771706 : if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3964 : : {
3965 : 30742259 : if (!translate)
3966 : : break;
3967 : 25931657 : translate_flags disambiguate_only = TR_TRANSLATE;
3968 : 25931657 : res = (*translate) (ref, vuse, data, &disambiguate_only);
3969 : : /* Failed lookup and translation. */
3970 : 25931657 : if (res == (void *)-1)
3971 : : {
3972 : : res = NULL;
3973 : : break;
3974 : : }
3975 : : /* Lookup succeeded. */
3976 : 5193212 : else if (res != NULL)
3977 : : break;
3978 : : /* Translation succeeded, continue walking. */
3979 : 6727979 : translated = translated || disambiguate_only == TR_TRANSLATE;
3980 : : }
3981 : 844204787 : vuse = gimple_vuse (def_stmt);
3982 : : }
3983 : : }
3984 : 897994927 : while (vuse);
3985 : :
3986 : 62801924 : if (visited)
3987 : 19161427 : BITMAP_FREE (visited);
3988 : :
3989 : 62801924 : timevar_pop (TV_ALIAS_STMT_WALK);
3990 : :
3991 : 62801924 : return res;
3992 : : }
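/* Illustrative sketch (editorial addition; all names are hypothetical):
   a minimal client of walk_non_aliased_vuses.  The walker stops the walk
   by returning non-NULL; here it stops at the first VUSE whose defining
   statement stores exactly the reference we are looking up, which is
   roughly how a value-numbering client finds the store feeding a load.  */

static void *
example_vuse_walker (ao_ref *ref, tree vuse, void *)
{
  gimple *def = SSA_NAME_DEF_STMT (vuse);
  if (ref->ref
      && gimple_assign_single_p (def)
      && operand_equal_p (gimple_assign_lhs (def), ref->ref, 0))
    return def;
  return NULL;	/* Keep walking.  */
}

static gimple *
example_find_dominating_store (ao_ref *ref, tree vuse)
{
  unsigned limit = 100;	/* Some alias-query budget.  */
  return static_cast <gimple *> (walk_non_aliased_vuses (ref, vuse, true,
							  example_vuse_walker,
							  NULL, NULL, limit,
							  NULL));
}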
3993 : :
3994 : :
3995 : : /* Based on the memory reference REF call WALKER for each vdef whose
3996 : : defining statement may clobber REF, starting with VDEF. If REF
3997 : : is NULL_TREE, each defining statement is visited.
3998 : :
3999 : : WALKER is called with REF, the current vdef and DATA. If WALKER
4000 : : returns true the walk is stopped, otherwise it continues.
4001 : :
4002 : : If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
4003 : : The pointer may be NULL and then we do not track this information.
4004 : :
4005 : : At PHI nodes walk_aliased_vdefs forks into one walk for each
4006 : : PHI argument (but only one walk continues at merge points), the
4007 : : return value is true if any of the walks was successful.
4008 : :
4009 : : The function returns the number of statements walked or -1 if
4010 : : LIMIT stmts were walked and the walk was aborted at this point.
4011 : : If LIMIT is zero the walk is not aborted. */
4012 : :
4013 : : static int
4014 : 290920974 : walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
4015 : : bool (*walker)(ao_ref *, tree, void *), void *data,
4016 : : bitmap *visited, unsigned int cnt,
4017 : : bool *function_entry_reached, unsigned limit)
4018 : : {
4019 : 907392835 : do
4020 : : {
4021 : 1814785670 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
4022 : :
4023 : 907392835 : if (*visited
4024 : 907392835 : && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
4025 : 160609028 : return cnt;
4026 : :
4027 : 746783807 : if (gimple_nop_p (def_stmt))
4028 : : {
4029 : 23204174 : if (function_entry_reached)
4030 : 3588695 : *function_entry_reached = true;
4031 : 23204174 : return cnt;
4032 : : }
4033 : 723579633 : else if (gimple_code (def_stmt) == GIMPLE_PHI)
4034 : : {
4035 : 92520215 : unsigned i;
4036 : 92520215 : if (!*visited)
4037 : : {
4038 : 7929223 : *visited = BITMAP_ALLOC (NULL);
4039 : 7929223 : bitmap_tree_view (*visited);
4040 : : }
4041 : 285968771 : for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
4042 : : {
4043 : 198889188 : int res = walk_aliased_vdefs_1 (ref,
4044 : : gimple_phi_arg_def (def_stmt, i),
4045 : : walker, data, visited, cnt,
4046 : : function_entry_reached, limit);
4047 : 198889188 : if (res == -1)
4048 : : return -1;
4049 : 193448556 : cnt = res;
4050 : : }
4051 : 87079583 : return cnt;
4052 : : }
4053 : :
4054 : : /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
4055 : 631059418 : cnt++;
4056 : 631059418 : if (cnt == limit)
4057 : : return -1;
4058 : 630935721 : if ((!ref
4059 : 560124425 : || stmt_may_clobber_ref_p_1 (def_stmt, ref))
4060 : 699743649 : && (*walker) (ref, vdef, data))
4061 : 14463860 : return cnt;
4062 : :
4063 : 1523864696 : vdef = gimple_vuse (def_stmt);
4064 : : }
4065 : : while (1);
4066 : : }
4067 : :
4068 : : int
4069 : 92031786 : walk_aliased_vdefs (ao_ref *ref, tree vdef,
4070 : : bool (*walker)(ao_ref *, tree, void *), void *data,
4071 : : bitmap *visited,
4072 : : bool *function_entry_reached, unsigned int limit)
4073 : : {
4074 : 92031786 : bitmap local_visited = NULL;
4075 : 92031786 : int ret;
4076 : :
4077 : 92031786 : timevar_push (TV_ALIAS_STMT_WALK);
4078 : :
4079 : 92031786 : if (function_entry_reached)
4080 : 4496523 : *function_entry_reached = false;
4081 : :
4082 : 158751944 : ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
4083 : : visited ? visited : &local_visited, 0,
4084 : : function_entry_reached, limit);
4085 : 92031786 : if (local_visited)
4086 : 7929223 : BITMAP_FREE (local_visited);
4087 : :
4088 : 92031786 : timevar_pop (TV_ALIAS_STMT_WALK);
4089 : :
4090 : 92031786 : return ret;
4091 : : }
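/* Illustrative sketch (editorial addition; names are hypothetical): a
   minimal client of walk_aliased_vdefs that only wants to know whether
   any statement reachable through the virtual use-def chain above
   USE_STMT may clobber REF.  */

static bool
example_clobber_walker (ao_ref *, tree, void *data)
{
  *static_cast <bool *> (data) = true;
  return true;	/* Stop at the first aliasing vdef.  */
}

static bool
example_any_clobber_above_p (ao_ref *ref, gimple *use_stmt)
{
  bool found = false;
  tree vuse = gimple_vuse (use_stmt);
  if (!vuse)
    return false;
  /* A LIMIT of zero means the walk is not aborted early.  */
  walk_aliased_vdefs (ref, vuse, example_clobber_walker, &found,
		      NULL, NULL, 0);
  return found;
}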
4092 : :
4093 : : /* Verify validity of the fnspec string.
4094 : : See attr-fnspec.h for details. */
4095 : :
4096 : : void
4097 : 401202957 : attr_fnspec::verify ()
4098 : : {
4099 : 401202957 : bool err = false;
4100 : 401202957 : if (!len)
4101 : : return;
4102 : :
4103 : : /* Check return value specifier. */
4104 : 129204797 : if (len < return_desc_size)
4105 : : err = true;
4106 : 129204797 : else if ((len - return_desc_size) % arg_desc_size)
4107 : : err = true;
4108 : 129204797 : else if ((str[0] < '1' || str[0] > '4')
4109 : 129204797 : && str[0] != '.' && str[0] != 'm')
4110 : 0 : err = true;
4111 : :
4112 : 129204797 : switch (str[1])
4113 : : {
4114 : : case ' ':
4115 : : case 'p':
4116 : : case 'P':
4117 : : case 'c':
4118 : : case 'C':
4119 : : break;
4120 : : default:
4121 : : err = true;
4122 : : }
4123 : 129204797 : if (err)
4124 : 0 : internal_error ("invalid fn spec attribute \"%s\"", str);
4125 : :
4126 : : /* Now check all parameters. */
4127 : 384845969 : for (unsigned int i = 0; arg_specified_p (i); i++)
4128 : : {
4129 : 255641172 : unsigned int idx = arg_idx (i);
4130 : 255641172 : switch (str[idx])
4131 : : {
4132 : 238552701 : case 'x':
4133 : 238552701 : case 'X':
4134 : 238552701 : case 'r':
4135 : 238552701 : case 'R':
4136 : 238552701 : case 'o':
4137 : 238552701 : case 'O':
4138 : 238552701 : case 'w':
4139 : 238552701 : case 'W':
4140 : 238552701 : case '.':
4141 : 238552701 : if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
4142 : 238552701 : || str[idx + 1] == 't')
4143 : : {
4144 : 38912001 : if (str[idx] != 'r' && str[idx] != 'R'
4145 : : && str[idx] != 'w' && str[idx] != 'W'
4146 : : && str[idx] != 'o' && str[idx] != 'O')
4147 : 38912001 : err = true;
4148 : 38912001 : if (str[idx + 1] != 't'
4149 : : /* Size specified is scalar, so it should be described
4150 : : by ". " if specified at all. */
4151 : 38912001 : && (arg_specified_p (str[idx + 1] - '1')
4152 : 0 : && str[arg_idx (str[idx + 1] - '1')] != '.'))
4153 : : err = true;
4154 : : }
4155 : 199640700 : else if (str[idx + 1] != ' ')
4156 : : err = true;
4157 : : break;
4158 : 17088471 : default:
4159 : 17088471 : if (str[idx] < '1' || str[idx] > '9')
4160 : : err = true;
4161 : : }
4162 : 255641172 : if (err)
4163 : 0 : internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
4164 : : }
4165 : : }
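/* Editorial note (illustrative, not part of the original source): the
   checks above imply that a well-formed fnspec string consists of a
   two-character return-value descriptor followed by two characters per
   argument, so a string such as ". R W " has the right shape for a
   function with two described pointer arguments.  The authoritative
   meaning of the individual letters is documented in attr-fnspec.h.  */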
4166 : :
4167 : : /* Return true if TYPE1 and TYPE2 will always give the same answer
4168 : : when compared with other types using same_type_for_tbaa_p. */
4169 : :
4170 : : static bool
4171 : 240962 : types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4172 : : bool lto_streaming_safe)
4173 : : {
4174 : : /* We use same_type_for_tbaa_p to match types in the access path.
4175 : : This check is overly conservative. */
4176 : 240962 : type1 = TYPE_MAIN_VARIANT (type1);
4177 : 240962 : type2 = TYPE_MAIN_VARIANT (type2);
4178 : :
4179 : 240962 : if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4180 : 240962 : != TYPE_STRUCTURAL_EQUALITY_P (type2))
4181 : : return false;
4182 : 240962 : if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4183 : : return true;
4184 : :
4185 : 238247 : if (lto_streaming_safe)
4186 : 3220 : return type1 == type2;
4187 : : else
4188 : 235027 : return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
4189 : : }
4190 : :
4191 : : /* Compare REF1 and REF2 and return flags specifying their differences.
4192 : : If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4193 : : types that are going to be recomputed.
4194 : : If TBAA is true also compare TBAA metadata. */
4195 : :
4196 : : int
4197 : 152390 : ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4198 : : bool lto_streaming_safe,
4199 : : bool tbaa)
4200 : : {
4201 : 152390 : if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4202 : : return SEMANTICS;
4203 : 152390 : tree base1 = ao_ref_base (ref1);
4204 : 152390 : tree base2 = ao_ref_base (ref2);
4205 : :
4206 : 152390 : if (!known_eq (ref1->offset, ref2->offset)
4207 : 152390 : || !known_eq (ref1->size, ref2->size)
4208 : 304780 : || !known_eq (ref1->max_size, ref2->max_size))
4209 : : return SEMANTICS;
4210 : :
4211 : : /* For variable accesses we need to compare actual paths
4212 : : to check that both refs access the same address and have the same access size. */
4213 : 152387 : if (!known_eq (ref1->size, ref1->max_size))
4214 : : {
4215 : 3946 : if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4216 : 3946 : TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4217 : : return SEMANTICS;
4218 : 3946 : tree r1 = ref1->ref;
4219 : 3946 : tree r2 = ref2->ref;
4220 : :
4221 : : /* Handle toplevel COMPONENT_REFs of bitfields.
4222 : : Those are special since they are not allowed in
4223 : : ADDR_EXPR. */
4224 : 3946 : if (TREE_CODE (r1) == COMPONENT_REF
4225 : 3946 : && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4226 : : {
4227 : 0 : if (TREE_CODE (r2) != COMPONENT_REF
4228 : 0 : || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4229 : : return SEMANTICS;
4230 : 0 : tree field1 = TREE_OPERAND (r1, 1);
4231 : 0 : tree field2 = TREE_OPERAND (r2, 1);
4232 : 0 : if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4233 : 0 : DECL_FIELD_OFFSET (field2), 0)
4234 : 0 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4235 : 0 : DECL_FIELD_BIT_OFFSET (field2), 0)
4236 : 0 : || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4237 : 0 : || !types_compatible_p (TREE_TYPE (r1),
4238 : 0 : TREE_TYPE (r2)))
4239 : 0 : return SEMANTICS;
4240 : 0 : r1 = TREE_OPERAND (r1, 0);
4241 : 0 : r2 = TREE_OPERAND (r2, 0);
4242 : : }
4243 : 3946 : else if (TREE_CODE (r2) == COMPONENT_REF
4244 : 3946 : && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4245 : : return SEMANTICS;
4246 : :
4247 : : /* Similarly for bit field refs. */
4248 : 3946 : if (TREE_CODE (r1) == BIT_FIELD_REF)
4249 : : {
4250 : 0 : if (TREE_CODE (r2) != BIT_FIELD_REF
4251 : 0 : || !operand_equal_p (TREE_OPERAND (r1, 1),
4252 : 0 : TREE_OPERAND (r2, 1), 0)
4253 : 0 : || !operand_equal_p (TREE_OPERAND (r1, 2),
4254 : 0 : TREE_OPERAND (r2, 2), 0)
4255 : 0 : || !types_compatible_p (TREE_TYPE (r1),
4256 : 0 : TREE_TYPE (r2)))
4257 : 0 : return SEMANTICS;
4258 : 0 : r1 = TREE_OPERAND (r1, 0);
4259 : 0 : r2 = TREE_OPERAND (r2, 0);
4260 : : }
4261 : 3946 : else if (TREE_CODE (r2) == BIT_FIELD_REF)
4262 : : return SEMANTICS;
4263 : :
4264 : : /* Now we can compare the address of actual memory access. */
4265 : 3946 : if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4266 : : return SEMANTICS;
4267 : : }
4268 : : /* For constant accesses we get more matches by comparing offset only. */
4269 : 148441 : else if (!operand_equal_p (base1, base2,
4270 : : OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4271 : : return SEMANTICS;
4272 : :
4273 : : /* We can't simply use get_object_alignment_1 on the full
4274 : : reference as for accesses with variable indexes this reports
4275 : : too conservative alignment. */
4276 : 152314 : unsigned int align1, align2;
4277 : 152314 : unsigned HOST_WIDE_INT bitpos1, bitpos2;
4278 : 152314 : bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4279 : 152314 : bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4280 : : /* ??? For MEMREF get_object_alignment_1 determines alignment from
4281 : : TYPE_ALIGN but still returns false. This seems to contradict
4282 : : its description. So compare even if alignment is unknown. */
4283 : 152314 : if (known1 != known2
4284 : 152314 : || (bitpos1 != bitpos2 || align1 != align2))
4285 : : return SEMANTICS;
4286 : :
4287 : : /* Now we know that accesses are semantically same. */
4288 : 152099 : int flags = 0;
4289 : :
4290 : : /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4291 : 152099 : tree rbase1 = ref1->ref;
4292 : 152099 : if (rbase1)
4293 : 247222 : while (handled_component_p (rbase1))
4294 : 95123 : rbase1 = TREE_OPERAND (rbase1, 0);
4295 : 152099 : tree rbase2 = ref2->ref;
4296 : 247163 : while (handled_component_p (rbase2))
4297 : 95064 : rbase2 = TREE_OPERAND (rbase2, 0);
4298 : :
4299 : : /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4300 : : implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4301 : : Otherwise we need to match bases and cliques. */
4302 : 152099 : if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4303 : 80141 : && MR_DEPENDENCE_CLIQUE (rbase1))
4304 : 126484 : || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4305 : 54525 : && MR_DEPENDENCE_CLIQUE (rbase2)))
4306 : 177714 : && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4307 : 25615 : || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4308 : 25538 : || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4309 : : flags |= DEPENDENCE_CLIQUE;
4310 : :
4311 : 152099 : if (!tbaa)
4312 : : return flags;
4313 : :
4314 : : /* Alias sets are not stable across LTO streaming; be conservative here
4315 : : and compare types the alias sets are ultimately based on. */
4316 : 152070 : if (lto_streaming_safe)
4317 : : {
4318 : 2798 : tree t1 = ao_ref_alias_ptr_type (ref1);
4319 : 2798 : tree t2 = ao_ref_alias_ptr_type (ref2);
4320 : 2798 : if (!alias_ptr_types_compatible_p (t1, t2))
4321 : 34 : flags |= REF_ALIAS_SET;
4322 : :
4323 : 2798 : t1 = ao_ref_base_alias_ptr_type (ref1);
4324 : 2798 : t2 = ao_ref_base_alias_ptr_type (ref2);
4325 : 2798 : if (!alias_ptr_types_compatible_p (t1, t2))
4326 : 41 : flags |= BASE_ALIAS_SET;
4327 : : }
4328 : : else
4329 : : {
4330 : 149272 : if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4331 : 0 : flags |= REF_ALIAS_SET;
4332 : 149272 : if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4333 : 0 : flags |= BASE_ALIAS_SET;
4334 : : }
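/* A minimal illustrative sketch (editorial addition, not part of the GCC
   sources): the "ref" alias set describes the accessed type and the "base"
   alias set the outermost object type; the code above requires both to
   match, or, under LTO streaming, the types they are derived from to be
   compatible.  */

struct sketch_s { int f; double g; };

int
sketch_load_f (struct sketch_s *s)
{
  /* Ref alias set: that of int; base alias set: that of struct sketch_s.  */
  return s->f;
}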
4335 : :
4336 : : /* Access path is used only on non-view-converted references. */
4337 : 152070 : bool view_converted = view_converted_memref_p (rbase1);
4338 : 152070 : if (view_converted_memref_p (rbase2) != view_converted)
4339 : 1 : return flags | ACCESS_PATH;
4340 : 152069 : else if (view_converted)
4341 : : return flags;
4342 : :
4343 : :
4344 : : /* Find start of access paths and look for trailing arrays. */
4345 : 148051 : tree c1 = ref1->ref, c2 = ref2->ref;
4346 : 148051 : tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4347 : 148051 : int nskipped1 = 0, nskipped2 = 0;
4348 : 148051 : int i = 0;
4349 : :
4350 : 243120 : for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4351 : : {
4352 : 95069 : if (component_ref_to_zero_sized_trailing_array_p (p1))
4353 : 133 : end_struct_ref1 = p1;
4354 : 95069 : if (ends_tbaa_access_path_p (p1))
4355 : 4765 : c1 = p1, nskipped1 = i;
4356 : 95069 : i++;
4357 : : }
4358 : 148051 : i = 0;
4359 : 243060 : for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4360 : : {
4361 : 95009 : if (component_ref_to_zero_sized_trailing_array_p (p2))
4362 : 140 : end_struct_ref2 = p2;
4363 : 95009 : if (ends_tbaa_access_path_p (p2))
4364 : 4765 : c2 = p2, nskipped2 = i;
4365 : 95009 : i++;
4366 : : }
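/* A minimal illustrative sketch (editorial addition, not part of the GCC
   sources): an access through a zero-sized trailing array, as below, is
   what the loops above record in end_struct_ref1/end_struct_ref2, while
   constructs that end the TBAA access path (for example a direct union
   member access) move c1/c2 past them.  */

struct sketch_t { int n; int data[0]; };

int
sketch_read_elt (struct sketch_t *t, int i)
{
  /* COMPONENT_REF to a zero-sized trailing array.  */
  return t->data[i];
}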
4367 : :
4368 : : /* For variable accesses we cannot rely on the offset match below.
4369 : : We know that the paths are structurally the same, so only check that
4370 : : the starts of the TBAA paths did not diverge. */
4371 : 148051 : if (!known_eq (ref1->size, ref1->max_size)
4372 : 148051 : && nskipped1 != nskipped2)
4373 : 0 : return flags | ACCESS_PATH;
4374 : :
4375 : : /* Information about trailing refs is used by
4376 : : aliasing_component_refs_p, which is applied only if the paths
4377 : : have handled components. */
4378 : 148051 : if (!handled_component_p (c1) && !handled_component_p (c2))
4379 : : ;
4380 : 57823 : else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4381 : 27 : return flags | ACCESS_PATH;
4382 : 148024 : if (end_struct_ref1
4383 : 148147 : && same_type_for_tbaa (TREE_TYPE (end_struct_ref1),
4384 : 123 : TREE_TYPE (end_struct_ref2)) != 1)
4385 : 3 : return flags | ACCESS_PATH;
4386 : :
4387 : : /* Now compare all handled components of the access path.
4388 : : We have three oracles that care about access paths:
4389 : : - aliasing_component_refs_p
4390 : : - nonoverlapping_refs_since_match_p
4391 : : - nonoverlapping_component_refs_p
4392 : : We need to match the things these oracles compare.
4393 : :
4394 : : It is only necessary to check types for compatibility
4395 : : and offsets. The rest of what the oracles compare are actual
4396 : : addresses. Those are already known to be the same:
4397 : : - for constant accesses we check offsets
4398 : : - for variable accesses we already matched
4399 : : the path lexically with operand_equal_p. */
4400 : 333999 : while (true)
4401 : : {
4402 : 241010 : bool comp1 = handled_component_p (c1);
4403 : 241010 : bool comp2 = handled_component_p (c2);
4404 : :
4405 : 241010 : if (comp1 != comp2)
4406 : 48 : return flags | ACCESS_PATH;
4407 : 240962 : if (!comp1)
4408 : : break;
4409 : :
4410 : 93177 : if (TREE_CODE (c1) != TREE_CODE (c2))
4411 : 0 : return flags | ACCESS_PATH;
4412 : :
4413 : : /* aliasing_component_refs_p attempts to find a type match within
4414 : : the paths. For that reason both types need to be equal
4415 : : with respect to same_type_for_tbaa_p. */
4416 : 93177 : if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4417 : 93177 : TREE_TYPE (c2),
4418 : : lto_streaming_safe))
4419 : 188 : return flags | ACCESS_PATH;
4420 : 185978 : if (component_ref_to_zero_sized_trailing_array_p (c1)
4421 : 92989 : != component_ref_to_zero_sized_trailing_array_p (c2))
4422 : 0 : return flags | ACCESS_PATH;
4423 : :
4424 : : /* aliasing_matching_component_refs_p compares
4425 : : offsets within the path. Other properties are ignored.
4426 : : Do not bother to verify offsets in variable accesses. Here we
4427 : : already compared them by operand_equal_p, so they are
4428 : : structurally the same. */
4429 : 92989 : if (!known_eq (ref1->size, ref1->max_size))
4430 : : {
4431 : 4768 : poly_int64 offadj1, sztmc1, msztmc1;
4432 : 4768 : bool reverse1;
4433 : 4768 : get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4434 : 4768 : poly_int64 offadj2, sztmc2, msztmc2;
4435 : 4768 : bool reverse2;
4436 : 4768 : get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4437 : 4768 : if (!known_eq (offadj1, offadj2))
4438 : 0 : return flags | ACCESS_PATH;
4439 : : }
4440 : 92989 : c1 = TREE_OPERAND (c1, 0);
4441 : 92989 : c2 = TREE_OPERAND (c2, 0);
4442 : 92989 : }
4443 : : /* Finally test the access type. */
4444 : 147785 : if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4445 : 147785 : TREE_TYPE (c2),
4446 : : lto_streaming_safe))
4447 : 1297 : return flags | ACCESS_PATH;
4448 : : return flags;
4449 : : }
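/* A usage sketch (editorial addition; the wrapper name and the exact
   signature of the comparison entry point are assumptions, the flag names
   are the ones tested above): a caller first checks SEMANTICS to see
   whether the two accesses are interchangeable at all, then uses the
   remaining bits to decide which TBAA metadata (alias sets, access path,
   dependence clique) would have to be dropped or unified.  */

static bool
sketch_refs_mergeable_p (ao_compare &cmp, ao_ref *r1, ao_ref *r2,
                         bool lto_streaming_safe)
{
  int diff = cmp.compare_ao_refs (r1, r2, lto_streaming_safe, /*tbaa=*/true);
  if (diff & ao_compare::SEMANTICS)
    return false;  /* The accesses are not semantically equivalent.  */
  /* DIFF now only lists metadata differences a caller may reconcile,
     e.g. by clearing dependence info or using conservative alias sets.  */
  return true;
}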
4450 : :
4451 : : /* Hash REF to HSTATE. If LTO_STREAMING_SAFE, do not use alias sets
4452 : : or canonical types; if !TBAA, do not use alias sets at all. */
4453 : : void
4454 : 7299637 : ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4455 : : inchash::hash &hstate)
4456 : : {
4457 : 7299637 : tree base = ao_ref_base (ref);
4458 : 7299637 : tree tbase = base;
4459 : :
4460 : 7299637 : if (!known_eq (ref->size, ref->max_size))
4461 : : {
4462 : 400400 : tree r = ref->ref;
4463 : 400400 : if (TREE_CODE (r) == COMPONENT_REF
4464 : 400400 : && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4465 : : {
4466 : 1745 : tree field = TREE_OPERAND (r, 1);
4467 : 1745 : hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4468 : 1745 : hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4469 : 1745 : hash_operand (DECL_SIZE (field), hstate, 0);
4470 : 1745 : r = TREE_OPERAND (r, 0);
4471 : : }
4472 : 400400 : if (TREE_CODE (r) == BIT_FIELD_REF)
4473 : : {
4474 : 1704 : hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4475 : 1704 : hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4476 : 1704 : r = TREE_OPERAND (r, 0);
4477 : : }
4478 : 400400 : hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4479 : 400400 : hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4480 : : }
4481 : : else
4482 : : {
4483 : 6899237 : hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4484 : 6899237 : hstate.add_poly_int (ref->offset);
4485 : 6899237 : hstate.add_poly_int (ref->size);
4486 : 6899237 : hstate.add_poly_int (ref->max_size);
4487 : : }
4488 : 7299637 : if (!lto_streaming_safe && tbaa)
4489 : : {
4490 : 6991170 : hstate.add_int (ao_ref_alias_set (ref));
4491 : 6991170 : hstate.add_int (ao_ref_base_alias_set (ref));
4492 : : }
4493 : 7299637 : }
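/* A consistency sketch (editorial addition; the helper name is hypothetical
   and the comparison entry point's signature is an assumption): hashing
   must agree with comparison, i.e. two references that compare as fully
   equivalent (no flags set) must hash identically, which is why hash_ao_ref
   mirrors the lto_streaming_safe/tbaa decisions made above.  */

static void
sketch_check_hash_consistency (ao_compare &cmp, ao_ref *r1, ao_ref *r2,
                               bool lto_streaming_safe, bool tbaa)
{
  inchash::hash h1, h2;
  cmp.hash_ao_ref (r1, lto_streaming_safe, tbaa, h1);
  cmp.hash_ao_ref (r2, lto_streaming_safe, tbaa, h2);
  if (cmp.compare_ao_refs (r1, r2, lto_streaming_safe, tbaa) == 0)
    gcc_checking_assert (h1.end () == h2.end ());
}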
|