Branch data Line data Source code
1 : : /* Alias analysis for trees.
2 : : Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 : : Contributed by Diego Novillo <dnovillo@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "target.h"
26 : : #include "rtl.h"
27 : : #include "tree.h"
28 : : #include "gimple.h"
29 : : #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 : : #include "ssa.h"
31 : : #include "cgraph.h"
32 : : #include "tree-pretty-print.h"
33 : : #include "alias.h"
34 : : #include "fold-const.h"
35 : : #include "langhooks.h"
36 : : #include "dumpfile.h"
37 : : #include "tree-eh.h"
38 : : #include "tree-dfa.h"
39 : : #include "ipa-reference.h"
40 : : #include "varasm.h"
41 : : #include "ipa-modref-tree.h"
42 : : #include "ipa-modref.h"
43 : : #include "attr-fnspec.h"
44 : : #include "errors.h"
45 : : #include "dbgcnt.h"
46 : : #include "gimple-pretty-print.h"
47 : : #include "print-tree.h"
48 : : #include "tree-ssa-alias-compare.h"
49 : : #include "builtins.h"
50 : : #include "internal-fn.h"
51 : :
52 : : /* Broad overview of how alias analysis on gimple works:
53 : :
54 : : Statements clobbering or using memory are linked through the
55 : : virtual operand factored use-def chain. The virtual operand
56 : : is unique per function, its symbol is accessible via gimple_vop (cfun).
57 : : Virtual operands are used for efficiently walking memory statements
58 : : in the gimple IL and are useful for things like value-numbering as
59 : : a generation count for memory references.
60 : :
61 : : SSA_NAME pointers may have associated points-to information
62 : : accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
63 : : points-to information is (re-)computed by the TODO_rebuild_alias
64 : : pass manager todo. Points-to information is also used for more
65 : : precise tracking of call-clobbered and call-used variables and
66 : : related disambiguations.
67 : :
68 : : This file contains functions for disambiguating memory references,
69 : : the so called alias-oracle and tools for walking of the gimple IL.
70 : :
71 : : The main alias-oracle entry-points are
72 : :
73 : : bool stmt_may_clobber_ref_p (gimple *, tree)
74 : :
75 : : This function queries if a statement may invalidate (parts of)
76 : : the memory designated by the reference tree argument.
77 : :
78 : : bool ref_maybe_used_by_stmt_p (gimple *, tree)
79 : :
80 : : This function queries if a statement may need (parts of) the
81 : : memory designated by the reference tree argument.
82 : :
83 : : There are variants of these functions that only handle the call
84 : : part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
85 : : Note that these do not disambiguate against a possible call lhs.
86 : :
87 : : bool refs_may_alias_p (tree, tree)
88 : :
89 : : This function tries to disambiguate two reference trees.
90 : :
91 : : bool ptr_deref_may_alias_global_p (tree, bool)
92 : :
93 : : This function queries if dereferencing a pointer variable may
94 : : alias global memory. If bool argument is true, global memory
95 : : is considered to also include function local memory that escaped.
96 : :
97 : : More low-level disambiguators are available and documented in
98 : : this file. Low-level disambiguators dealing with points-to
99 : : information are in tree-ssa-structalias.cc. */
100 : :
101 : : static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
102 : : static bool nonoverlapping_component_refs_p (const_tree, const_tree);
103 : :
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.
   Each pair counts successful disambiguations (*_no_alias) and
   conservative may-alias answers (*_may_alias); the total number of
   queries for an oracle is the sum of the two.  */

static struct {
  /* refs_may_alias_p outcomes.  */
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  /* ref_maybe_used_by_call_p outcomes.  */
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  /* call_may_clobber_ref_p outcomes.  */
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  /* aliasing_component_refs_p outcomes.  */
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  /* nonoverlapping_component_refs_p outcomes.  */
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  /* nonoverlapping_refs_since_match_p has a three-way answer:
     may-alias, proven must-overlap, or proven no-alias.  */
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  /* stmt_kills_ref_p outcomes (yes = the store kills the ref).  */
  unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
  /* Disambiguations done via ipa-modref call summaries.  */
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_kill_no;
  unsigned HOST_WIDE_INT modref_kill_yes;
  /* Number of low-level TBAA tests resp. base-pointer compares done on
     behalf of modref queries.  */
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;
132 : :
133 : : void
134 : 0 : dump_alias_stats (FILE *s)
135 : : {
136 : 0 : fprintf (s, "\nAlias oracle query stats:\n");
137 : 0 : fprintf (s, " refs_may_alias_p: "
138 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
139 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
140 : : alias_stats.refs_may_alias_p_no_alias,
141 : 0 : alias_stats.refs_may_alias_p_no_alias
142 : 0 : + alias_stats.refs_may_alias_p_may_alias);
143 : 0 : fprintf (s, " ref_maybe_used_by_call_p: "
144 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
145 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
146 : : alias_stats.ref_maybe_used_by_call_p_no_alias,
147 : 0 : alias_stats.refs_may_alias_p_no_alias
148 : 0 : + alias_stats.ref_maybe_used_by_call_p_may_alias);
149 : 0 : fprintf (s, " call_may_clobber_ref_p: "
150 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
151 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
152 : : alias_stats.call_may_clobber_ref_p_no_alias,
153 : 0 : alias_stats.call_may_clobber_ref_p_no_alias
154 : 0 : + alias_stats.call_may_clobber_ref_p_may_alias);
155 : 0 : fprintf (s, " stmt_kills_ref_p: "
156 : : HOST_WIDE_INT_PRINT_DEC" kills, "
157 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
158 : : alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
159 : 0 : alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
160 : 0 : + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
161 : 0 : fprintf (s, " nonoverlapping_component_refs_p: "
162 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
163 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
164 : : alias_stats.nonoverlapping_component_refs_p_no_alias,
165 : 0 : alias_stats.nonoverlapping_component_refs_p_no_alias
166 : 0 : + alias_stats.nonoverlapping_component_refs_p_may_alias);
167 : 0 : fprintf (s, " nonoverlapping_refs_since_match_p: "
168 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
169 : : HOST_WIDE_INT_PRINT_DEC" must overlaps, "
170 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
171 : : alias_stats.nonoverlapping_refs_since_match_p_no_alias,
172 : : alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
173 : 0 : alias_stats.nonoverlapping_refs_since_match_p_no_alias
174 : 0 : + alias_stats.nonoverlapping_refs_since_match_p_may_alias
175 : 0 : + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
176 : 0 : fprintf (s, " aliasing_component_refs_p: "
177 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
178 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
179 : : alias_stats.aliasing_component_refs_p_no_alias,
180 : 0 : alias_stats.aliasing_component_refs_p_no_alias
181 : 0 : + alias_stats.aliasing_component_refs_p_may_alias);
182 : 0 : dump_alias_stats_in_alias_c (s);
183 : 0 : fprintf (s, "\nModref stats:\n");
184 : 0 : fprintf (s, " modref kill: "
185 : : HOST_WIDE_INT_PRINT_DEC" kills, "
186 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
187 : : alias_stats.modref_kill_yes,
188 : 0 : alias_stats.modref_kill_yes
189 : 0 : + alias_stats.modref_kill_no);
190 : 0 : fprintf (s, " modref use: "
191 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
192 : : HOST_WIDE_INT_PRINT_DEC" queries\n",
193 : : alias_stats.modref_use_no_alias,
194 : 0 : alias_stats.modref_use_no_alias
195 : 0 : + alias_stats.modref_use_may_alias);
196 : 0 : fprintf (s, " modref clobber: "
197 : : HOST_WIDE_INT_PRINT_DEC" disambiguations, "
198 : : HOST_WIDE_INT_PRINT_DEC" queries\n"
199 : : " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
200 : : " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
201 : : alias_stats.modref_clobber_no_alias,
202 : : alias_stats.modref_clobber_no_alias
203 : : + alias_stats.modref_clobber_may_alias,
204 : : alias_stats.modref_tests,
205 : 0 : ((double)alias_stats.modref_tests)
206 : : / (alias_stats.modref_clobber_no_alias
207 : : + alias_stats.modref_clobber_may_alias),
208 : : alias_stats.modref_baseptr_tests,
209 : 0 : ((double)alias_stats.modref_baseptr_tests)
210 : 0 : / (alias_stats.modref_clobber_no_alias
211 : 0 : + alias_stats.modref_clobber_may_alias));
212 : 0 : }
213 : :
214 : :
215 : : /* Return true, if dereferencing PTR may alias with a global variable.
216 : : When ESCAPED_LOCAL_P is true escaped local memory is also considered
217 : : global. */
218 : :
219 : : bool
220 : 51605316 : ptr_deref_may_alias_global_p (tree ptr, bool escaped_local_p)
221 : : {
222 : 51605316 : struct ptr_info_def *pi;
223 : :
224 : : /* If we end up with a pointer constant here that may point
225 : : to global memory. */
226 : 51605316 : if (TREE_CODE (ptr) != SSA_NAME)
227 : : return true;
228 : :
229 : 51600076 : pi = SSA_NAME_PTR_INFO (ptr);
230 : :
231 : : /* If we do not have points-to information for this variable,
232 : : we have to punt. */
233 : 51600076 : if (!pi)
234 : : return true;
235 : :
236 : : /* ??? This does not use TBAA to prune globals ptr may not access. */
237 : 41146294 : return pt_solution_includes_global (&pi->pt, escaped_local_p);
238 : : }
239 : :
240 : : /* Return true if dereferencing PTR may alias DECL.
241 : : The caller is responsible for applying TBAA to see if PTR
242 : : may access DECL at all. */
243 : :
244 : : static bool
245 : 183940285 : ptr_deref_may_alias_decl_p (tree ptr, tree decl)
246 : : {
247 : 183940285 : struct ptr_info_def *pi;
248 : :
249 : : /* Conversions are irrelevant for points-to information and
250 : : data-dependence analysis can feed us those. */
251 : 183940285 : STRIP_NOPS (ptr);
252 : :
253 : : /* Anything we do not explicilty handle aliases. */
254 : 183940285 : if ((TREE_CODE (ptr) != SSA_NAME
255 : 1602354 : && TREE_CODE (ptr) != ADDR_EXPR
256 : 611287 : && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
257 : 183328998 : || !POINTER_TYPE_P (TREE_TYPE (ptr))
258 : 367268785 : || (!VAR_P (decl)
259 : : && TREE_CODE (decl) != PARM_DECL
260 : : && TREE_CODE (decl) != RESULT_DECL))
261 : : return true;
262 : :
263 : : /* Disregard pointer offsetting. */
264 : 183327452 : if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
265 : : {
266 : 0 : do
267 : : {
268 : 0 : ptr = TREE_OPERAND (ptr, 0);
269 : : }
270 : 0 : while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
271 : 0 : return ptr_deref_may_alias_decl_p (ptr, decl);
272 : : }
273 : :
274 : : /* ADDR_EXPR pointers either just offset another pointer or directly
275 : : specify the pointed-to set. */
276 : 183327452 : if (TREE_CODE (ptr) == ADDR_EXPR)
277 : : {
278 : 990048 : tree base = get_base_address (TREE_OPERAND (ptr, 0));
279 : 990048 : if (base
280 : 990048 : && (TREE_CODE (base) == MEM_REF
281 : 990048 : || TREE_CODE (base) == TARGET_MEM_REF))
282 : 0 : ptr = TREE_OPERAND (base, 0);
283 : 990048 : else if (base
284 : 990048 : && DECL_P (base))
285 : 989849 : return compare_base_decls (base, decl) != 0;
286 : 199 : else if (base
287 : 199 : && CONSTANT_CLASS_P (base))
288 : : return false;
289 : : else
290 : : return true;
291 : : }
292 : :
293 : : /* Non-aliased variables cannot be pointed to. */
294 : 182337404 : if (!may_be_aliased (decl))
295 : : return false;
296 : :
297 : : /* If we do not have useful points-to information for this pointer
298 : : we cannot disambiguate anything else. */
299 : 64169346 : pi = SSA_NAME_PTR_INFO (ptr);
300 : 64169346 : if (!pi)
301 : : return true;
302 : :
303 : 63622913 : return pt_solution_includes (&pi->pt, decl);
304 : : }
305 : :
306 : : /* Return true if dereferenced PTR1 and PTR2 may alias.
307 : : The caller is responsible for applying TBAA to see if accesses
308 : : through PTR1 and PTR2 may conflict at all. */
309 : :
310 : : bool
311 : 55004002 : ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
312 : : {
313 : 56256084 : struct ptr_info_def *pi1, *pi2;
314 : :
315 : : /* Conversions are irrelevant for points-to information and
316 : : data-dependence analysis can feed us those. */
317 : 56256084 : STRIP_NOPS (ptr1);
318 : 56256084 : STRIP_NOPS (ptr2);
319 : :
320 : : /* Disregard pointer offsetting. */
321 : 56256084 : if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
322 : : {
323 : 580114 : do
324 : : {
325 : 580114 : ptr1 = TREE_OPERAND (ptr1, 0);
326 : : }
327 : 580114 : while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
328 : 580114 : return ptr_derefs_may_alias_p (ptr1, ptr2);
329 : : }
330 : 55675970 : if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
331 : : {
332 : 596897 : do
333 : : {
334 : 596897 : ptr2 = TREE_OPERAND (ptr2, 0);
335 : : }
336 : 596897 : while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
337 : 596897 : return ptr_derefs_may_alias_p (ptr1, ptr2);
338 : : }
339 : :
340 : : /* ADDR_EXPR pointers either just offset another pointer or directly
341 : : specify the pointed-to set. */
342 : 55079073 : if (TREE_CODE (ptr1) == ADDR_EXPR)
343 : : {
344 : 680112 : tree base = get_base_address (TREE_OPERAND (ptr1, 0));
345 : 680112 : if (base
346 : 680112 : && (TREE_CODE (base) == MEM_REF
347 : 680112 : || TREE_CODE (base) == TARGET_MEM_REF))
348 : 34588 : return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
349 : 645524 : else if (base
350 : 645524 : && DECL_P (base))
351 : 643734 : return ptr_deref_may_alias_decl_p (ptr2, base);
352 : : /* Try ptr2 when ptr1 points to a constant. */
353 : : else if (base
354 : 1790 : && !CONSTANT_CLASS_P (base))
355 : : return true;
356 : : }
357 : 54400751 : if (TREE_CODE (ptr2) == ADDR_EXPR)
358 : : {
359 : 237224 : tree base = get_base_address (TREE_OPERAND (ptr2, 0));
360 : 237224 : if (base
361 : 237224 : && (TREE_CODE (base) == MEM_REF
362 : 237224 : || TREE_CODE (base) == TARGET_MEM_REF))
363 : 40483 : return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
364 : 196741 : else if (base
365 : 196741 : && DECL_P (base))
366 : 195552 : return ptr_deref_may_alias_decl_p (ptr1, base);
367 : : else
368 : : return true;
369 : : }
370 : :
371 : : /* From here we require SSA name pointers. Anything else aliases. */
372 : 54163527 : if (TREE_CODE (ptr1) != SSA_NAME
373 : 54090555 : || TREE_CODE (ptr2) != SSA_NAME
374 : 54079657 : || !POINTER_TYPE_P (TREE_TYPE (ptr1))
375 : 108236054 : || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
376 : : return true;
377 : :
378 : : /* We may end up with two empty points-to solutions for two same pointers.
379 : : In this case we still want to say both pointers alias, so shortcut
380 : : that here. */
381 : 54072409 : if (ptr1 == ptr2)
382 : : return true;
383 : :
384 : : /* If we do not have useful points-to information for either pointer
385 : : we cannot disambiguate anything else. */
386 : 50109933 : pi1 = SSA_NAME_PTR_INFO (ptr1);
387 : 50109933 : pi2 = SSA_NAME_PTR_INFO (ptr2);
388 : 50109933 : if (!pi1 || !pi2)
389 : : return true;
390 : :
391 : : /* ??? This does not use TBAA to prune decls from the intersection
392 : : that not both pointers may access. */
393 : 47989958 : return pt_solutions_intersect (&pi1->pt, &pi2->pt);
394 : : }
395 : :
396 : : /* Return true if dereferencing PTR may alias *REF.
397 : : The caller is responsible for applying TBAA to see if PTR
398 : : may access *REF at all. */
399 : :
400 : : static bool
401 : 1665323 : ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
402 : : {
403 : 1665323 : tree base = ao_ref_base (ref);
404 : :
405 : 1665323 : if (TREE_CODE (base) == MEM_REF
406 : 1665323 : || TREE_CODE (base) == TARGET_MEM_REF)
407 : 164445 : return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
408 : 1500878 : else if (DECL_P (base))
409 : 1499824 : return ptr_deref_may_alias_decl_p (ptr, base);
410 : :
411 : : return true;
412 : : }
413 : :
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.
   A false return means "don't know", not "equal".  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which needs points-to adjustments to track them
     in the points-to sets. */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      /* &MEM[p + c] - continue with the underlying pointer P.  */
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object: after the swap, an SSA pointer (if
     any) is in PTR2 and a decl (if any) is in OBJ1.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here. */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1. */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      /* For a static or external OBJ1 make sure its address is known
	 to be nonzero and that it binds to this definition; otherwise
	 &obj1 could compare equal to a NULL or interposed pointer.  */
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below). */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      /* PTR2 provably does not point to OBJ1 -> the pointers differ.  */
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct. */

  return false;
}
488 : :
489 : : /* Returns whether reference REF to BASE may refer to global memory.
490 : : When ESCAPED_LOCAL_P is true escaped local memory is also considered
491 : : global. */
492 : :
493 : : static bool
494 : 44442382 : ref_may_alias_global_p_1 (tree base, bool escaped_local_p)
495 : : {
496 : 44442382 : if (DECL_P (base))
497 : 34634694 : return (is_global_var (base)
498 : 34634694 : || (escaped_local_p
499 : 679296 : && pt_solution_includes (&cfun->gimple_df->escaped_return,
500 : : base)));
501 : 9807688 : else if (TREE_CODE (base) == MEM_REF
502 : 9807688 : || TREE_CODE (base) == TARGET_MEM_REF)
503 : 9799496 : return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0),
504 : 9799496 : escaped_local_p);
505 : : return true;
506 : : }
507 : :
508 : : bool
509 : 4459543 : ref_may_alias_global_p (ao_ref *ref, bool escaped_local_p)
510 : : {
511 : 4459543 : tree base = ao_ref_base (ref);
512 : 4459543 : return ref_may_alias_global_p_1 (base, escaped_local_p);
513 : : }
514 : :
515 : : bool
516 : 39982839 : ref_may_alias_global_p (tree ref, bool escaped_local_p)
517 : : {
518 : 39982839 : tree base = get_base_address (ref);
519 : 39982839 : return ref_may_alias_global_p_1 (base, escaped_local_p);
520 : : }
521 : :
522 : : /* Return true whether STMT may clobber global memory.
523 : : When ESCAPED_LOCAL_P is true escaped local memory is also considered
524 : : global. */
525 : :
526 : : bool
527 : 160436930 : stmt_may_clobber_global_p (gimple *stmt, bool escaped_local_p)
528 : : {
529 : 160436930 : tree lhs;
530 : :
531 : 321736642 : if (!gimple_vdef (stmt))
532 : : return false;
533 : :
534 : : /* ??? We can ask the oracle whether an artificial pointer
535 : : dereference with a pointer with points-to information covering
536 : : all global memory (what about non-address taken memory?) maybe
537 : : clobbered by this call. As there is at the moment no convenient
538 : : way of doing that without generating garbage do some manual
539 : : checking instead.
540 : : ??? We could make a NULL ao_ref argument to the various
541 : : predicates special, meaning any global memory. */
542 : :
543 : 40065650 : switch (gimple_code (stmt))
544 : : {
545 : 39982839 : case GIMPLE_ASSIGN:
546 : 39982839 : lhs = gimple_assign_lhs (stmt);
547 : 39982839 : return (TREE_CODE (lhs) != SSA_NAME
548 : 39982839 : && ref_may_alias_global_p (lhs, escaped_local_p));
549 : : case GIMPLE_CALL:
550 : : return true;
551 : : default:
552 : : return true;
553 : : }
554 : : }
555 : :
556 : :
557 : : /* Dump alias information on FILE. */
558 : :
559 : : void
560 : 302 : dump_alias_info (FILE *file)
561 : : {
562 : 302 : unsigned i;
563 : 302 : tree ptr;
564 : 302 : const char *funcname
565 : 302 : = lang_hooks.decl_printable_name (current_function_decl, 2);
566 : 302 : tree var;
567 : :
568 : 302 : fprintf (file, "\n\nAlias information for %s\n\n", funcname);
569 : :
570 : 302 : fprintf (file, "Aliased symbols\n\n");
571 : :
572 : 1166 : FOR_EACH_LOCAL_DECL (cfun, i, var)
573 : : {
574 : 595 : if (may_be_aliased (var))
575 : 365 : dump_variable (file, var);
576 : : }
577 : :
578 : 302 : fprintf (file, "\nCall clobber information\n");
579 : :
580 : 302 : fprintf (file, "\nESCAPED");
581 : 302 : dump_points_to_solution (file, &cfun->gimple_df->escaped);
582 : :
583 : 302 : fprintf (file, "\nESCAPED_RETURN");
584 : 302 : dump_points_to_solution (file, &cfun->gimple_df->escaped_return);
585 : :
586 : 302 : fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
587 : :
588 : 3172 : FOR_EACH_SSA_NAME (i, ptr, cfun)
589 : : {
590 : 2641 : struct ptr_info_def *pi;
591 : :
592 : 4828 : if (!POINTER_TYPE_P (TREE_TYPE (ptr))
593 : 2698 : || SSA_NAME_IN_FREE_LIST (ptr))
594 : 2130 : continue;
595 : :
596 : 511 : pi = SSA_NAME_PTR_INFO (ptr);
597 : 511 : if (pi)
598 : 505 : dump_points_to_info_for (file, ptr);
599 : : }
600 : :
601 : 302 : fprintf (file, "\n");
602 : 302 : }
603 : :
604 : :
/* Dump alias information on stderr.  Convenience wrapper around
   dump_alias_info for use from the debugger. */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
612 : :
613 : :
614 : : /* Dump the points-to set *PT into FILE. */
615 : :
616 : : void
617 : 1109 : dump_points_to_solution (FILE *file, struct pt_solution *pt)
618 : : {
619 : 1109 : if (pt->anything)
620 : 0 : fprintf (file, ", points-to anything");
621 : :
622 : 1109 : if (pt->nonlocal)
623 : 662 : fprintf (file, ", points-to non-local");
624 : :
625 : 1109 : if (pt->escaped)
626 : 438 : fprintf (file, ", points-to escaped");
627 : :
628 : 1109 : if (pt->ipa_escaped)
629 : 0 : fprintf (file, ", points-to unit escaped");
630 : :
631 : 1109 : if (pt->null)
632 : 635 : fprintf (file, ", points-to NULL");
633 : :
634 : 1109 : if (pt->vars)
635 : : {
636 : 1109 : fprintf (file, ", points-to vars: ");
637 : 1109 : dump_decl_set (file, pt->vars);
638 : 1109 : if (pt->vars_contains_nonlocal
639 : : || pt->vars_contains_escaped
640 : : || pt->vars_contains_escaped_heap
641 : 1109 : || pt->vars_contains_restrict)
642 : : {
643 : 219 : const char *comma = "";
644 : 219 : fprintf (file, " (");
645 : 219 : if (pt->vars_contains_nonlocal)
646 : : {
647 : 154 : fprintf (file, "nonlocal");
648 : 154 : comma = ", ";
649 : : }
650 : 219 : if (pt->vars_contains_escaped)
651 : : {
652 : 135 : fprintf (file, "%sescaped", comma);
653 : 135 : comma = ", ";
654 : : }
655 : 219 : if (pt->vars_contains_escaped_heap)
656 : : {
657 : 0 : fprintf (file, "%sescaped heap", comma);
658 : 0 : comma = ", ";
659 : : }
660 : 219 : if (pt->vars_contains_restrict)
661 : : {
662 : 70 : fprintf (file, "%srestrict", comma);
663 : 70 : comma = ", ";
664 : : }
665 : 219 : if (pt->vars_contains_interposable)
666 : 0 : fprintf (file, "%sinterposable", comma);
667 : 219 : fprintf (file, ")");
668 : : }
669 : : }
670 : 1109 : }
671 : :
672 : :
/* Unified dump function for pt_solution.  Dumps REF to stderr for
   debugger use. */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}
680 : :
/* Pointer variant of the above; prints "<nil>" for a NULL PTR so the
   debugger call never dereferences it. */

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
689 : :
690 : :
691 : : /* Dump points-to information for SSA_NAME PTR into FILE. */
692 : :
693 : : void
694 : 505 : dump_points_to_info_for (FILE *file, tree ptr)
695 : : {
696 : 505 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
697 : :
698 : 505 : print_generic_expr (file, ptr, dump_flags);
699 : :
700 : 505 : if (pi)
701 : 505 : dump_points_to_solution (file, &pi->pt);
702 : : else
703 : 0 : fprintf (file, ", points-to anything");
704 : :
705 : 505 : fprintf (file, "\n");
706 : 505 : }
707 : :
708 : :
/* Dump points-to information for VAR into stderr.  Debugger
   convenience wrapper. */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
716 : :
717 : :
718 : : /* Initializes the alias-oracle reference representation *R from REF. */
719 : :
720 : : void
721 : 2444679059 : ao_ref_init (ao_ref *r, tree ref)
722 : : {
723 : 2444679059 : r->ref = ref;
724 : 2444679059 : r->base = NULL_TREE;
725 : 2444679059 : r->offset = 0;
726 : 2444679059 : r->size = -1;
727 : 2444679059 : r->max_size = -1;
728 : 2444679059 : r->ref_alias_set = -1;
729 : 2444679059 : r->base_alias_set = -1;
730 : 2444679059 : r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
731 : 2444679059 : }
732 : :
733 : : /* Returns the base object of the memory reference *REF. */
734 : :
735 : : tree
736 : 4674096397 : ao_ref_base (ao_ref *ref)
737 : : {
738 : 4674096397 : bool reverse;
739 : :
740 : 4674096397 : if (ref->base)
741 : : return ref->base;
742 : 2205090020 : ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
743 : : &ref->max_size, &reverse);
744 : 2205090020 : return ref->base;
745 : : }
746 : :
747 : : /* Returns the base object alias set of the memory reference *REF. */
748 : :
749 : : alias_set_type
750 : 879859697 : ao_ref_base_alias_set (ao_ref *ref)
751 : : {
752 : 879859697 : tree base_ref;
753 : 879859697 : if (ref->base_alias_set != -1)
754 : : return ref->base_alias_set;
755 : 700787519 : if (!ref->ref)
756 : : return 0;
757 : 667166494 : base_ref = ref->ref;
758 : 667166494 : if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
759 : 4 : base_ref = TREE_OPERAND (base_ref, 0);
760 : 1082289124 : while (handled_component_p (base_ref))
761 : 415122630 : base_ref = TREE_OPERAND (base_ref, 0);
762 : 667166494 : ref->base_alias_set = get_alias_set (base_ref);
763 : 667166494 : return ref->base_alias_set;
764 : : }
765 : :
766 : : /* Returns the reference alias set of the memory reference *REF. */
767 : :
768 : : alias_set_type
769 : 1083764040 : ao_ref_alias_set (ao_ref *ref)
770 : : {
771 : 1083764040 : if (ref->ref_alias_set != -1)
772 : : return ref->ref_alias_set;
773 : 434334137 : if (!ref->ref)
774 : : return 0;
775 : 434334137 : ref->ref_alias_set = get_alias_set (ref->ref);
776 : 434334137 : return ref->ref_alias_set;
777 : : }
778 : :
779 : : /* Returns a type satisfying
780 : : get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
781 : :
782 : : tree
783 : 306080 : ao_ref_base_alias_ptr_type (ao_ref *ref)
784 : : {
785 : 306080 : tree base_ref;
786 : :
787 : 306080 : if (!ref->ref)
788 : : return NULL_TREE;
789 : 306080 : base_ref = ref->ref;
790 : 306080 : if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
791 : 0 : base_ref = TREE_OPERAND (base_ref, 0);
792 : 405609 : while (handled_component_p (base_ref))
793 : 99529 : base_ref = TREE_OPERAND (base_ref, 0);
794 : 306080 : tree ret = reference_alias_ptr_type (base_ref);
795 : 306080 : return ret;
796 : : }
797 : :
798 : : /* Returns a type satisfying
799 : : get_deref_alias_set (type) == ao_ref_alias_set (REF). */
800 : :
801 : : tree
802 : 306080 : ao_ref_alias_ptr_type (ao_ref *ref)
803 : : {
804 : 306080 : if (!ref->ref)
805 : : return NULL_TREE;
806 : 306080 : tree ret = reference_alias_ptr_type (ref->ref);
807 : 306080 : return ret;
808 : : }
809 : :
810 : : /* Return the alignment of the access *REF and store it in the *ALIGN
811 : : and *BITPOS pairs. Returns false if no alignment could be determined.
812 : : See get_object_alignment_2 for details. */
813 : :
814 : : bool
815 : 90888 : ao_ref_alignment (ao_ref *ref, unsigned int *align,
816 : : unsigned HOST_WIDE_INT *bitpos)
817 : : {
818 : 90888 : if (ref->ref)
819 : 89398 : return get_object_alignment_1 (ref->ref, align, bitpos);
820 : :
821 : : /* When we just have ref->base we cannot use get_object_alignment since
822 : : that will eventually use the type of the appearant access while for
823 : : example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
824 : 1490 : *align = BITS_PER_UNIT;
825 : 1490 : HOST_WIDE_INT offset;
826 : 1490 : if (!ref->offset.is_constant (&offset)
827 : 1490 : || !get_object_alignment_2 (ref->base, align, bitpos, true))
828 : : return false;
829 : 1312 : *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
830 : 1312 : *bitpos = *bitpos & (*align - 1);
831 : 1312 : return true;
832 : : }
833 : :
/* Init an alias-oracle reference representation from a gimple pointer
   PTR a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
   that RANGE_KNOWN is set.

   The access is assumed to be only to or after of the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false). */

void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  /* There is no tree-level access expression for this ref.  */
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* Look through a defining PTR = &x or PTR = q + CST to get a
	 better base; the constant addend is accumulated (in bits)
	 into EXTRA_OFFSET.  */
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  /* ptrdiff_tree_p yields bytes; the ao_ref offset is in bits.  */
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  /* The address has a variable component - the base is still
	     usable but the precise range no longer is.  */
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      /* Fall back to a synthetic MEM_REF base wrapping PTR.  */
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    /* -1 encodes an unknown extent.  */
    ref->max_size = ref->size = -1;
  /* Alias set 0 conflicts with everything; we know nothing better
     without a tree access expression.  */
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
897 : :
898 : : /* Init an alias-oracle reference representation from a gimple pointer
899 : : PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
900 : : size is assumed to be unknown. The access is assumed to be only
901 : : to or after the pointer target, not before it. */
902 : :
903 : : void
904 : 9258177 : ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
905 : : {
906 : 9258177 : poly_int64 size_hwi;
907 : 9258177 : if (size
908 : 4597691 : && poly_int_tree_p (size, &size_hwi)
909 : 13308352 : && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
910 : : {
911 : 4049409 : size_hwi = size_hwi * BITS_PER_UNIT;
912 : 4049409 : ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
913 : : }
914 : : else
915 : 5208768 : ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
916 : 9258177 : }
917 : :
918 : : /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
919 : : Return -1 if S1 < S2
920 : : Return 1 if S1 > S2
921 : : Return 0 if equal or incomparable. */
922 : :
923 : : static int
924 : 7819867 : compare_sizes (tree s1, tree s2)
925 : : {
926 : 7819867 : if (!s1 || !s2)
927 : : return 0;
928 : :
929 : 7814845 : poly_uint64 size1;
930 : 7814845 : poly_uint64 size2;
931 : :
932 : 7814845 : if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
933 : 293 : return 0;
934 : 7814552 : if (known_lt (size1, size2))
935 : : return -1;
936 : 5464831 : if (known_lt (size2, size1))
937 : : return 1;
938 : : return 0;
939 : : }
940 : :
941 : : /* Compare TYPE1 and TYPE2 by its size.
942 : : Return -1 if size of TYPE1 < size of TYPE2
943 : : Return 1 if size of TYPE1 > size of TYPE2
944 : : Return 0 if types are of equal sizes or we can not compare them. */
945 : :
946 : : static int
947 : 6806657 : compare_type_sizes (tree type1, tree type2)
948 : : {
949 : : /* Be conservative for arrays and vectors. We want to support partial
950 : : overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
951 : 6806657 : while (TREE_CODE (type1) == ARRAY_TYPE
952 : 7557531 : || VECTOR_TYPE_P (type1))
953 : 750874 : type1 = TREE_TYPE (type1);
954 : 7038822 : while (TREE_CODE (type2) == ARRAY_TYPE
955 : 7038822 : || VECTOR_TYPE_P (type2))
956 : 232165 : type2 = TREE_TYPE (type2);
957 : 6806657 : return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
958 : : }
959 : :
960 : : /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
961 : : purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
962 : : decide. */
963 : :
964 : : static inline int
965 : 22065114 : same_type_for_tbaa (tree type1, tree type2)
966 : : {
967 : 22065114 : type1 = TYPE_MAIN_VARIANT (type1);
968 : 22065114 : type2 = TYPE_MAIN_VARIANT (type2);
969 : :
970 : : /* Handle the most common case first. */
971 : 22065114 : if (type1 == type2)
972 : : return 1;
973 : :
974 : : /* If we would have to do structural comparison bail out. */
975 : 6521724 : if (TYPE_STRUCTURAL_EQUALITY_P (type1)
976 : 6521724 : || TYPE_STRUCTURAL_EQUALITY_P (type2))
977 : : return -1;
978 : :
979 : : /* Compare the canonical types. */
980 : 6472482 : if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
981 : : return 1;
982 : :
983 : : /* ??? Array types are not properly unified in all cases as we have
984 : : spurious changes in the index types for example. Removing this
985 : : causes all sorts of problems with the Fortran frontend. */
986 : 6404854 : if (TREE_CODE (type1) == ARRAY_TYPE
987 : 217229 : && TREE_CODE (type2) == ARRAY_TYPE)
988 : : return -1;
989 : :
990 : : /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
991 : : object of one of its constrained subtypes, e.g. when a function with an
992 : : unconstrained parameter passed by reference is called on an object and
993 : : inlined. But, even in the case of a fixed size, type and subtypes are
994 : : not equivalent enough as to share the same TYPE_CANONICAL, since this
995 : : would mean that conversions between them are useless, whereas they are
996 : : not (e.g. type and subtypes can have different modes). So, in the end,
997 : : they are only guaranteed to have the same alias set. */
998 : 6266322 : alias_set_type set1 = get_alias_set (type1);
999 : 6266322 : alias_set_type set2 = get_alias_set (type2);
1000 : 6266322 : if (set1 == set2)
1001 : : return -1;
1002 : :
1003 : : /* Pointers to void are considered compatible with all other pointers,
1004 : : so for two pointers see what the alias set resolution thinks. */
1005 : 3700663 : if (POINTER_TYPE_P (type1)
1006 : 1049626 : && POINTER_TYPE_P (type2)
1007 : 3876890 : && alias_sets_conflict_p (set1, set2))
1008 : : return -1;
1009 : :
1010 : : /* The types are known to be not equal. */
1011 : : return 0;
1012 : : }
1013 : :
1014 : : /* Return true if TYPE is a composite type (i.e. we may apply one of handled
1015 : : components on it). */
1016 : :
1017 : : static bool
1018 : 1924923 : type_has_components_p (tree type)
1019 : : {
1020 : 1924923 : return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1021 : 1924923 : || TREE_CODE (type) == COMPLEX_TYPE;
1022 : : }
1023 : :
1024 : : /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
1025 : : respectively are either pointing to same address or are completely
1026 : : disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
1027 : : just partly overlap.
1028 : :
1029 : : Try to disambiguate using the access path starting from the match
1030 : : and return false if there is no conflict.
1031 : :
1032 : : Helper for aliasing_component_refs_p. */
1033 : :
1034 : : static bool
1035 : 708500 : aliasing_matching_component_refs_p (tree match1, tree ref1,
1036 : : poly_int64 offset1, poly_int64 max_size1,
1037 : : tree match2, tree ref2,
1038 : : poly_int64 offset2, poly_int64 max_size2,
1039 : : bool partial_overlap)
1040 : : {
1041 : 708500 : poly_int64 offadj, sztmp, msztmp;
1042 : 708500 : bool reverse;
1043 : :
1044 : 708500 : if (!partial_overlap)
1045 : : {
1046 : 708500 : get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1047 : 708500 : offset2 -= offadj;
1048 : 708500 : get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1049 : 708500 : offset1 -= offadj;
1050 : 708500 : if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1051 : : {
1052 : 63854 : ++alias_stats.aliasing_component_refs_p_no_alias;
1053 : 63854 : return false;
1054 : : }
1055 : : }
1056 : :
1057 : 644646 : int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1058 : : partial_overlap);
1059 : 644646 : if (cmp == 1
1060 : 644646 : || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1061 : : {
1062 : 818 : ++alias_stats.aliasing_component_refs_p_no_alias;
1063 : 818 : return false;
1064 : : }
1065 : 643828 : ++alias_stats.aliasing_component_refs_p_may_alias;
1066 : 643828 : return true;
1067 : : }
1068 : :
1069 : : /* Return true if REF is reference to zero sized trailing array. I.e.
1070 : : struct foo {int bar; int array[0];} *fooptr;
1071 : : fooptr->array. */
1072 : :
1073 : : static bool
1074 : 5380760 : component_ref_to_zero_sized_trailing_array_p (tree ref)
1075 : : {
1076 : 5380760 : return (TREE_CODE (ref) == COMPONENT_REF
1077 : 4860431 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1078 : 104966 : && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1079 : 39397 : || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1080 : 5446645 : && array_ref_flexible_size_p (ref));
1081 : : }
1082 : :
1083 : : /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1084 : : aliasing_component_refs_p.
1085 : :
1086 : : Walk access path REF2 and try to find type matching TYPE1
1087 : : (which is a start of possibly aliasing access path REF1).
1088 : : If match is found, try to disambiguate.
1089 : :
1090 : : Return 0 for successful disambiguation.
1091 : : Return 1 if match was found but disambiguation failed
1092 : : Return -1 if there is no match.
1093 : : In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
1094 : : in access path REF2 and -1 if we are not sure. */
1095 : :
1096 : : static int
1097 : 2267412 : aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1098 : : poly_int64 offset1, poly_int64 max_size1,
1099 : : tree end_struct_ref1,
1100 : : tree ref2, tree base2,
1101 : : poly_int64 offset2, poly_int64 max_size2,
1102 : : bool *maybe_match)
1103 : : {
1104 : 2267412 : tree ref = ref2;
1105 : 2267412 : int same_p = 0;
1106 : :
1107 : 6901606 : while (true)
1108 : : {
1109 : : /* We walk from inner type to the outer types. If type we see is
1110 : : already too large to be part of type1, terminate the search. */
1111 : 4584509 : int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1112 : :
1113 : 4584509 : if (cmp < 0
1114 : 4584509 : && (!end_struct_ref1
1115 : 73 : || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1116 : 73 : TREE_TYPE (ref)) < 0))
1117 : : break;
1118 : : /* If types may be of same size, see if we can decide about their
1119 : : equality. */
1120 : 3358745 : if (cmp == 0)
1121 : : {
1122 : 2312557 : same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
1123 : 2312557 : if (same_p == 1)
1124 : : break;
1125 : : /* In case we can't decide whether types are same try to
1126 : : continue looking for the exact match.
1127 : : Remember however that we possibly saw a match
1128 : : to bypass the access path continuations tests we do later. */
1129 : 1604057 : if (same_p == -1)
1130 : 556788 : *maybe_match = true;
1131 : : }
1132 : 2650245 : if (!handled_component_p (ref))
1133 : : break;
1134 : 2317097 : ref = TREE_OPERAND (ref, 0);
1135 : 2317097 : }
1136 : 2267412 : if (same_p == 1)
1137 : : {
1138 : 708500 : bool partial_overlap = false;
1139 : :
1140 : : /* We assume that arrays can overlap by multiple of their elements
1141 : : size as tested in gcc.dg/torture/alias-2.c.
1142 : : This partial overlap happens only when both arrays are bases of
1143 : : the access and not contained within another component ref.
1144 : : To be safe we also assume partial overlap for VLAs. */
1145 : 708500 : if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1146 : 708500 : && (!TYPE_SIZE (TREE_TYPE (base1))
1147 : 4333 : || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1148 : 4333 : || ref == base2))
1149 : : {
1150 : : /* Setting maybe_match to true triggers
1151 : : nonoverlapping_component_refs_p test later that still may do
1152 : : useful disambiguation. */
1153 : 0 : *maybe_match = true;
1154 : 0 : partial_overlap = true;
1155 : : }
1156 : 708500 : return aliasing_matching_component_refs_p (base1, ref1,
1157 : : offset1, max_size1,
1158 : : ref, ref2,
1159 : : offset2, max_size2,
1160 : 708500 : partial_overlap);
1161 : : }
1162 : : return -1;
1163 : : }
1164 : :
1165 : : /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1166 : : Return true if they can be composed to single access path
1167 : : base1...ref1...base2...ref2.
1168 : :
1169 : : REF_TYPE1 if type of REF1. END_STRUCT_PAST_END1 is true if there is
1170 : : a trailing array access after REF1 in the non-TBAA part of the access.
1171 : : REF1_ALIAS_SET is the alias set of REF1.
1172 : :
1173 : : BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1174 : : a trailing array access in the TBAA part of access path2.
1175 : : BASE2_ALIAS_SET is the alias set of base2. */
1176 : :
1177 : : bool
1178 : 1924923 : access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1179 : : alias_set_type ref1_alias_set,
1180 : : tree base_type2, tree end_struct_ref2,
1181 : : alias_set_type base2_alias_set)
1182 : : {
1183 : : /* Access path can not continue past types with no components. */
1184 : 1924923 : if (!type_has_components_p (ref_type1))
1185 : : return false;
1186 : :
1187 : : /* If first access path ends by too small type to hold base of
1188 : : the second access path, typically paths can not continue.
1189 : :
1190 : : Punt if end_struct_past_end1 is true. We want to support arbitrary
1191 : : type punning past first COMPONENT_REF to union because redundant store
1192 : : elimination depends on this, see PR92152. For this reason we can not
1193 : : check size of the reference because types may partially overlap. */
1194 : 164530 : if (!end_struct_past_end1)
1195 : : {
1196 : 164481 : if (compare_type_sizes (ref_type1, base_type2) < 0)
1197 : : return false;
1198 : : /* If the path2 contains trailing array access we can strengthen the check
1199 : : to verify that also the size of element of the trailing array fits.
1200 : : In fact we could check for offset + type_size, but we do not track
1201 : : offsets and this is quite a corner case. */
1202 : 133797 : if (end_struct_ref2
1203 : 133797 : && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1204 : : return false;
1205 : : }
1206 : 133846 : return (base2_alias_set == ref1_alias_set
1207 : 133846 : || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1208 : : }
1209 : :
1210 : : /* Determine if the two component references REF1 and REF2 which are
1211 : : based on access types TYPE1 and TYPE2 and of which at least one is based
1212 : : on an indirect reference may alias.
1213 : : REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1214 : : are the respective alias sets. */
1215 : :
1216 : : static bool
1217 : 2057294 : aliasing_component_refs_p (tree ref1,
1218 : : alias_set_type ref1_alias_set,
1219 : : alias_set_type base1_alias_set,
1220 : : poly_int64 offset1, poly_int64 max_size1,
1221 : : tree ref2,
1222 : : alias_set_type ref2_alias_set,
1223 : : alias_set_type base2_alias_set,
1224 : : poly_int64 offset2, poly_int64 max_size2)
1225 : : {
1226 : : /* If one reference is a component references through pointers try to find a
1227 : : common base and apply offset based disambiguation. This handles
1228 : : for example
1229 : : struct A { int i; int j; } *q;
1230 : : struct B { struct A a; int k; } *p;
1231 : : disambiguating q->i and p->a.j. */
1232 : 2057294 : tree base1, base2;
1233 : 2057294 : tree type1, type2;
1234 : 2057294 : bool maybe_match = false;
1235 : 2057294 : tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1236 : 2057294 : bool end_struct_past_end1 = false;
1237 : 2057294 : bool end_struct_past_end2 = false;
1238 : :
1239 : : /* Choose bases and base types to search for.
1240 : : The access path is as follows:
1241 : : base....end_of_tbaa_ref...actual_ref
1242 : : At one place in the access path may be a reference to zero sized or
1243 : : trailing array.
1244 : :
1245 : : We generally discard the segment after end_of_tbaa_ref however
1246 : : we need to be careful in case it contains zero sized or trailing array.
1247 : : These may happen after reference to union and in this case we need to
1248 : : not disambiguate type punning scenarios.
1249 : :
1250 : : We set:
1251 : : base1 to point to base
1252 : :
1253 : : ref1 to point to end_of_tbaa_ref
1254 : :
1255 : : end_struct_ref1 to point the trailing reference (if it exists
1256 : : in range base....end_of_tbaa_ref
1257 : :
1258 : : end_struct_past_end1 is true if this trailing reference occurs in
1259 : : end_of_tbaa_ref...actual_ref. */
1260 : 2057294 : base1 = ref1;
1261 : 4651890 : while (handled_component_p (base1))
1262 : : {
1263 : : /* Generally access paths are monotone in the size of the object. The
1264 : : exception are trailing arrays of structures. I.e.
1265 : : struct a {int array[0];};
1266 : : or
1267 : : struct a {int array1[0]; int array[];};
1268 : : Such struct has size 0 but accesses to a.array may have non-zero size.
1269 : : In this case the size of TREE_TYPE (base1) is smaller than
1270 : : size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1271 : :
1272 : : Because we compare sizes of arrays just by sizes of their elements,
1273 : : we only need to care about zero sized array fields here. */
1274 : 2594596 : if (component_ref_to_zero_sized_trailing_array_p (base1))
1275 : : {
1276 : 62487 : gcc_checking_assert (!end_struct_ref1);
1277 : : end_struct_ref1 = base1;
1278 : : }
1279 : 2594596 : if (ends_tbaa_access_path_p (base1))
1280 : : {
1281 : 37888 : ref1 = TREE_OPERAND (base1, 0);
1282 : 37888 : if (end_struct_ref1)
1283 : : {
1284 : 1 : end_struct_past_end1 = true;
1285 : 1 : end_struct_ref1 = NULL;
1286 : : }
1287 : : }
1288 : 2594596 : base1 = TREE_OPERAND (base1, 0);
1289 : : }
1290 : 2057294 : type1 = TREE_TYPE (base1);
1291 : 2057294 : base2 = ref2;
1292 : 4772636 : while (handled_component_p (base2))
1293 : : {
1294 : 2715342 : if (component_ref_to_zero_sized_trailing_array_p (base2))
1295 : : {
1296 : 3384 : gcc_checking_assert (!end_struct_ref2);
1297 : : end_struct_ref2 = base2;
1298 : : }
1299 : 2715342 : if (ends_tbaa_access_path_p (base2))
1300 : : {
1301 : 55109 : ref2 = TREE_OPERAND (base2, 0);
1302 : 55109 : if (end_struct_ref2)
1303 : : {
1304 : 48 : end_struct_past_end2 = true;
1305 : 48 : end_struct_ref2 = NULL;
1306 : : }
1307 : : }
1308 : 2715342 : base2 = TREE_OPERAND (base2, 0);
1309 : : }
1310 : 2057294 : type2 = TREE_TYPE (base2);
1311 : :
1312 : : /* Now search for the type1 in the access path of ref2. This
1313 : : would be a common base for doing offset based disambiguation on.
1314 : : This however only makes sense if type2 is big enough to hold type1. */
1315 : 2057294 : int cmp_outer = compare_type_sizes (type2, type1);
1316 : :
1317 : : /* If type2 is big enough to contain type1 walk its access path.
1318 : : We also need to care of arrays at the end of structs that may extend
1319 : : beyond the end of structure. If this occurs in the TBAA part of the
1320 : : access path, we need to consider the increased type as well. */
1321 : 2057294 : if (cmp_outer >= 0
1322 : 2057294 : || (end_struct_ref2
1323 : 227 : && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1324 : : {
1325 : 988364 : int res = aliasing_component_refs_walk (ref1, type1, base1,
1326 : : offset1, max_size1,
1327 : : end_struct_ref1,
1328 : : ref2, base2, offset2, max_size2,
1329 : : &maybe_match);
1330 : 988364 : if (res != -1)
1331 : 433098 : return res;
1332 : : }
1333 : :
1334 : : /* If we didn't find a common base, try the other way around. */
1335 : 1624196 : if (cmp_outer <= 0
1336 : 1624196 : || (end_struct_ref1
1337 : 73 : && compare_type_sizes (TREE_TYPE (end_struct_ref1), type2) <= 0))
1338 : : {
1339 : 1279048 : int res = aliasing_component_refs_walk (ref2, type2, base2,
1340 : : offset2, max_size2,
1341 : : end_struct_ref2,
1342 : : ref1, base1, offset1, max_size1,
1343 : : &maybe_match);
1344 : 1279048 : if (res != -1)
1345 : 275402 : return res;
1346 : : }
1347 : :
1348 : : /* In the following code we make an assumption that the types in access
1349 : : paths do not overlap and thus accesses alias only if one path can be
1350 : : continuation of another. If we were not able to decide about equivalence,
1351 : : we need to give up. */
1352 : 1348794 : if (maybe_match)
1353 : : {
1354 : 369842 : if (!nonoverlapping_component_refs_p (ref1, ref2))
1355 : : {
1356 : 352644 : ++alias_stats.aliasing_component_refs_p_may_alias;
1357 : 352644 : return true;
1358 : : }
1359 : 17198 : ++alias_stats.aliasing_component_refs_p_no_alias;
1360 : 17198 : return false;
1361 : : }
1362 : :
1363 : 978952 : if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1364 : : ref1_alias_set,
1365 : : type2, end_struct_ref2,
1366 : : base2_alias_set)
1367 : 978952 : || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1368 : : ref2_alias_set,
1369 : : type1, end_struct_ref1,
1370 : : base1_alias_set))
1371 : : {
1372 : 133547 : ++alias_stats.aliasing_component_refs_p_may_alias;
1373 : 133547 : return true;
1374 : : }
1375 : 845405 : ++alias_stats.aliasing_component_refs_p_no_alias;
1376 : 845405 : return false;
1377 : : }
1378 : :
1379 : : /* FIELD1 and FIELD2 are two fields of component refs. We assume
1380 : : that bases of both component refs are either equivalent or nonoverlapping.
1381 : : We do not assume that the containers of FIELD1 and FIELD2 are of the
1382 : : same type or size.
1383 : :
1384 : : Return 0 in case the base address of component_refs are same then
1385 : : FIELD1 and FIELD2 have same address. Note that FIELD1 and FIELD2
1386 : : may not be of same type or size.
1387 : :
1388 : : Return 1 if FIELD1 and FIELD2 are non-overlapping.
1389 : :
1390 : : Return -1 otherwise.
1391 : :
1392 : : Main difference between 0 and -1 is to let
1393 : : nonoverlapping_component_refs_since_match_p discover the semantically
1394 : : equivalent part of the access path.
1395 : :
1396 : : Note that this function is used even with -fno-strict-aliasing
1397 : : and makes use of no TBAA assumptions. */
1398 : :
1399 : : static int
1400 : 3974084 : nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1401 : : {
1402 : : /* If both fields are of the same type, we could save hard work of
1403 : : comparing offsets. */
1404 : 3974084 : tree type1 = DECL_CONTEXT (field1);
1405 : 3974084 : tree type2 = DECL_CONTEXT (field2);
1406 : :
1407 : 3974084 : if (TREE_CODE (type1) == RECORD_TYPE
1408 : 7820956 : && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1409 : : field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1410 : 3974084 : if (TREE_CODE (type2) == RECORD_TYPE
1411 : 7820956 : && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1412 : : field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1413 : :
1414 : : /* ??? Bitfields can overlap at RTL level so punt on them.
1415 : : FIXME: RTL expansion should be fixed by adjusting the access path
1416 : : when producing MEM_ATTRs for MEMs which are wider than
1417 : : the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1418 : 3974084 : if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1419 : : return -1;
1420 : :
1421 : : /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1422 : 3974084 : if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1423 : 3816910 : return field1 != field2;
1424 : :
1425 : : /* In common case the offsets and bit offsets will be the same.
1426 : : However if frontends do not agree on the alignment, they may be
1427 : : different even if they actually represent same address.
1428 : : Try the common case first and if that fails calculate the
1429 : : actual bit offset. */
1430 : 157174 : if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1431 : 157174 : DECL_FIELD_OFFSET (field2))
1432 : 277196 : && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1433 : 120022 : DECL_FIELD_BIT_OFFSET (field2)))
1434 : : return 0;
1435 : :
1436 : : /* Note that it may be possible to use component_ref_field_offset
1437 : : which would provide offsets as trees. However constructing and folding
1438 : : trees is expensive and does not seem to be worth the compile time
1439 : : cost. */
1440 : :
1441 : 37205 : poly_uint64 offset1, offset2;
1442 : 37205 : poly_uint64 bit_offset1, bit_offset2;
1443 : :
1444 : 37205 : if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1445 : 37205 : && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1446 : 37205 : && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1447 : 74410 : && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1448 : : {
1449 : 37205 : offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1450 : 37205 : offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1451 : :
1452 : 37205 : if (known_eq (offset1, offset2))
1453 : 34487 : return 0;
1454 : :
1455 : 37205 : poly_uint64 size1, size2;
1456 : :
1457 : 37205 : if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1458 : 37205 : && poly_int_tree_p (DECL_SIZE (field2), &size2)
1459 : 74410 : && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1460 : : return 1;
1461 : : }
1462 : : /* Resort to slower overlap checking by looking for matching types in
1463 : : the middle of access path. */
1464 : : return -1;
1465 : : }
1466 : :
1467 : : /* Return low bound of array. Do not produce new trees
1468 : : and thus do not care about particular type of integer constant
1469 : : and placeholder exprs. */
1470 : :
1471 : : static tree
1472 : 13615751 : cheap_array_ref_low_bound (tree ref)
1473 : : {
1474 : 13615751 : tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1475 : :
1476 : : /* Avoid expensive array_ref_low_bound.
1477 : : low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
1478 : : type or it is zero. */
1479 : 13615751 : if (TREE_OPERAND (ref, 2))
1480 : 63301 : return TREE_OPERAND (ref, 2);
1481 : 13552450 : else if (domain_type && TYPE_MIN_VALUE (domain_type))
1482 : 13544288 : return TYPE_MIN_VALUE (domain_type);
1483 : : else
1484 : 8162 : return integer_zero_node;
1485 : : }
1486 : :
1487 : : /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1488 : : completely disjoint.
1489 : :
1490 : : Return 1 if the refs are non-overlapping.
1491 : : Return 0 if they are possibly overlapping but if so the overlap again
1492 : : starts on the same address.
1493 : : Return -1 otherwise. */
1494 : :
1495 : : int
1496 : 6788736 : nonoverlapping_array_refs_p (tree ref1, tree ref2)
1497 : : {
1498 : 6788736 : tree index1 = TREE_OPERAND (ref1, 1);
1499 : 6788736 : tree index2 = TREE_OPERAND (ref2, 1);
1500 : 6788736 : tree low_bound1 = cheap_array_ref_low_bound (ref1);
1501 : 6788736 : tree low_bound2 = cheap_array_ref_low_bound (ref2);
1502 : :
1503 : : /* Handle zero offsets first: we do not need to match type size in this
1504 : : case. */
1505 : 6788736 : if (operand_equal_p (index1, low_bound1, 0)
1506 : 6788736 : && operand_equal_p (index2, low_bound2, 0))
1507 : : return 0;
1508 : :
1509 : : /* If type sizes are different, give up.
1510 : :
1511 : : Avoid expensive array_ref_element_size.
1512 : : If operand 3 is present it denotes size in the alignment units.
1513 : : Otherwise size is TYPE_SIZE of the element type.
1514 : : Handle only common cases where types are of the same "kind". */
1515 : 6718990 : if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1516 : : return -1;
1517 : :
1518 : 6718990 : tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1519 : 6718990 : tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1520 : :
1521 : 6718990 : if (TREE_OPERAND (ref1, 3))
1522 : : {
1523 : 3088 : if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1524 : 6176 : || !operand_equal_p (TREE_OPERAND (ref1, 3),
1525 : 3088 : TREE_OPERAND (ref2, 3), 0))
1526 : 646 : return -1;
1527 : : }
1528 : : else
1529 : : {
1530 : 6715902 : if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1531 : 6715902 : TYPE_SIZE_UNIT (elmt_type2), 0))
1532 : : return -1;
1533 : : }
1534 : :
1535 : : /* Since we know that type sizes are the same, there is no need to return
1536 : : -1 after this point. Partial overlap can not be introduced. */
1537 : :
1538 : : /* We may need to fold trees in this case.
1539 : : TODO: Handle integer constant case at least. */
1540 : 6709764 : if (!operand_equal_p (low_bound1, low_bound2, 0))
1541 : : return 0;
1542 : :
1543 : 6709764 : if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1544 : : {
1545 : 338999 : if (tree_int_cst_equal (index1, index2))
1546 : : return 0;
1547 : : return 1;
1548 : : }
1549 : : /* TODO: We can use VRP to further disambiguate here. */
1550 : : return 0;
1551 : : }
1552 : :
1553 : : /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1554 : : MATCH2 either point to the same address or are disjoint.
1555 : : MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1556 : : respectively or NULL in the case we established equivalence of bases.
1557 : : If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1558 : : overlap by an exact multiple of their element size.
1559 : :
1560 : : This test works by matching the initial segment of the access path
1561 : : and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1562 : : match was determined without use of TBAA oracle.
1563 : :
1564 : : Return 1 if we can determine that component references REF1 and REF2,
1565 : : that are within a common DECL, cannot overlap.
1566 : :
1567 : : Return 0 if paths are same and thus there is nothing to disambiguate more
1568 : : (i.e. there is must alias assuming there is must alias between MATCH1 and
1569 : : MATCH2)
1570 : :
1571 : : Return -1 if we can not determine 0 or 1 - this happens when
1572 : : non-matching types were met in the path.
1573 : : In this case it may make sense to continue by other disambiguation
1574 : : oracles. */
1575 : :
1576 : : static int
1577 : 7027148 : nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1578 : : tree match2, tree ref2,
1579 : : bool partial_overlap)
1580 : : {
1581 : 7027148 : int ntbaa1 = 0, ntbaa2 = 0;
1582 : : /* Early return if there are no references to match, we do not need
1583 : : to walk the access paths.
1584 : :
1585 : : Do not consider this as may-alias for stats - it is more useful
1586 : : to have information how many disambiguations happened provided that
1587 : : the query was meaningful. */
1588 : :
1589 : 6413861 : if (match1 == ref1 || !handled_component_p (ref1)
1590 : 13439655 : || match2 == ref2 || !handled_component_p (ref2))
1591 : : return -1;
1592 : :
1593 : 6397527 : auto_vec<tree, 16> component_refs1;
1594 : 6397527 : auto_vec<tree, 16> component_refs2;
1595 : :
1596 : : /* Create the stack of handled components for REF1. */
1597 : 18389028 : while (handled_component_p (ref1) && ref1 != match1)
1598 : : {
1599 : : /* We use TBAA only to re-synchronize after mismatched refs. So we
1600 : : do not need to truncate access path after TBAA part ends. */
1601 : 11991501 : if (ends_tbaa_access_path_p (ref1))
1602 : : ntbaa1 = 0;
1603 : : else
1604 : 11782422 : ntbaa1++;
1605 : 11991501 : component_refs1.safe_push (ref1);
1606 : 11991501 : ref1 = TREE_OPERAND (ref1, 0);
1607 : : }
1608 : :
1609 : : /* Create the stack of handled components for REF2. */
1610 : 18893008 : while (handled_component_p (ref2) && ref2 != match2)
1611 : : {
1612 : 12495481 : if (ends_tbaa_access_path_p (ref2))
1613 : : ntbaa2 = 0;
1614 : : else
1615 : 12264328 : ntbaa2++;
1616 : 12495481 : component_refs2.safe_push (ref2);
1617 : 12495481 : ref2 = TREE_OPERAND (ref2, 0);
1618 : : }
1619 : :
1620 : 6397527 : if (!flag_strict_aliasing)
1621 : : {
1622 : 455964 : ntbaa1 = 0;
1623 : 455964 : ntbaa2 = 0;
1624 : : }
1625 : :
1626 : 6397527 : bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1627 : 6397527 : bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1628 : :
1629 : :   /* If only one of the access paths starts with a MEM_REF, check that the
1630 : :      offset is 0 so the address stays the same after stripping it.
1631 : :      TODO: In this case we may walk the other access path until we get the
1632 : :      same offset.
1633 : :
1634 : :      If both start with a MEM_REF, the offsets have to be the same.  */
1635 : 50339 : if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1636 : 6395905 : || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1637 : 12786281 : || (mem_ref1 && mem_ref2
1638 : 1346305 : && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1639 : 1346305 : TREE_OPERAND (ref2, 1))))
1640 : : {
1641 : 43340 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1642 : 43340 : return -1;
1643 : : }
1644 : :
1645 : : /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1646 : : to handle them here at all. */
1647 : 6354187 : gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1648 : : && TREE_CODE (ref2) != TARGET_MEM_REF);
1649 : :
1650 : : /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1651 : : rank. This is sufficient because we start from the same DECL and you
1652 : : cannot reference several fields at a time with COMPONENT_REFs (unlike
1653 : : with ARRAY_RANGE_REFs for arrays) so you always need the same number
1654 : : of them to access a sub-component, unless you're in a union, in which
1655 : : case the return value will precisely be false. */
1656 : 8859694 : while (true)
1657 : : {
1658 : : /* Track if we seen unmatched ref with non-zero offset. In this case
1659 : : we must look for partial overlaps. */
1660 : 8859694 : bool seen_unmatched_ref_p = false;
1661 : :
1662 : : /* First match ARRAY_REFs an try to disambiguate. */
1663 : 17119392 : if (!component_refs1.is_empty ()
1664 : 8553610 : && !component_refs2.is_empty ())
1665 : : {
1666 : 15392929 : unsigned int narray_refs1=0, narray_refs2=0;
1667 : :
1668 : :           /* We generally assume that both access paths start with the same
1669 : :              sequence of refs.  However if the number of array refs is not in
1670 : :              sync, try to recover and pop elts until the numbers match.  This
1671 : :              helps the case where one access path starts with an array and the other with an element.  */
1672 : 15392929 : for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1673 : : narray_refs1++)
1674 : 11265077 : if (TREE_CODE (component_refs1 [component_refs1.length()
1675 : : - 1 - narray_refs1]) != ARRAY_REF)
1676 : : break;
1677 : :
1678 : 15395198 : for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1679 : : narray_refs2++)
1680 : 11280116 : if (TREE_CODE (component_refs2 [component_refs2.length()
1681 : : - 1 - narray_refs2]) != ARRAY_REF)
1682 : : break;
1683 : 8236165 : for (; narray_refs1 > narray_refs2; narray_refs1--)
1684 : : {
1685 : 18005 : ref1 = component_refs1.pop ();
1686 : 18005 : ntbaa1--;
1687 : :
1688 : : /* If index is non-zero we need to check whether the reference
1689 : : does not break the main invariant that bases are either
1690 : : disjoint or equal. Consider the example:
1691 : :
1692 : : unsigned char out[][1];
1693 : : out[1]="a";
1694 : : out[i][0];
1695 : :
1696 : : Here bases out and out are same, but after removing the
1697 : : [i] index, this invariant no longer holds, because
1698 : : out[i] points to the middle of array out.
1699 : :
1700 : : TODO: If size of type of the skipped reference is an integer
1701 : : multiply of the size of type of the other reference this
1702 : : invariant can be verified, but even then it is not completely
1703 : : safe with !flag_strict_aliasing if the other reference contains
1704 : : unbounded array accesses.
1705 : : See */
1706 : :
1707 : 18005 : if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1708 : 18005 : cheap_array_ref_low_bound (ref1), 0))
1709 : : return 0;
1710 : : }
1711 : 8218287 : for (; narray_refs2 > narray_refs1; narray_refs2--)
1712 : : {
1713 : 20274 : ref2 = component_refs2.pop ();
1714 : 20274 : ntbaa2--;
1715 : 20274 : if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1716 : 20274 : cheap_array_ref_low_bound (ref2), 0))
1717 : : return 0;
1718 : : }
1719 : : /* Try to disambiguate matched arrays. */
1720 : 14717391 : for (unsigned int i = 0; i < narray_refs1; i++)
1721 : : {
1722 : 13577472 : int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1723 : 6788736 : component_refs2.pop ());
1724 : 6788736 : ntbaa1--;
1725 : 6788736 : ntbaa2--;
1726 : 6788736 : if (cmp == 1 && !partial_overlap)
1727 : : {
1728 : 260132 : ++alias_stats
1729 : 260132 : .nonoverlapping_refs_since_match_p_no_alias;
1730 : 260132 : return 1;
1731 : : }
1732 : 6528604 : if (cmp == -1)
1733 : : {
1734 : 9226 : seen_unmatched_ref_p = true;
1735 : :               /* We cannot maintain the invariant that bases are either
1736 : :                  the same or completely disjoint.  However we can still recover
1737 : : from type based alias analysis if we reach references to
1738 : : same sizes. We do not attempt to match array sizes, so
1739 : : just finish array walking and look for component refs. */
1740 : 9226 : if (ntbaa1 < 0 || ntbaa2 < 0)
1741 : : {
1742 : 8091 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1743 : 8091 : return -1;
1744 : : }
1745 : 2074 : for (i++; i < narray_refs1; i++)
1746 : : {
1747 : 939 : component_refs1.pop ();
1748 : 939 : component_refs2.pop ();
1749 : 939 : ntbaa1--;
1750 : 939 : ntbaa2--;
1751 : : }
1752 : : break;
1753 : : }
1754 : 6519378 : partial_overlap = false;
1755 : : }
1756 : : }
1757 : :
1758 : : /* Next look for component_refs. */
1759 : 8620892 : do
1760 : : {
1761 : 8620892 : if (component_refs1.is_empty ())
1762 : : {
1763 : 4543752 : ++alias_stats
1764 : 4543752 : .nonoverlapping_refs_since_match_p_must_overlap;
1765 : 4543752 : return 0;
1766 : : }
1767 : 4077140 : ref1 = component_refs1.pop ();
1768 : 4077140 : ntbaa1--;
1769 : 4077140 : if (TREE_CODE (ref1) != COMPONENT_REF)
1770 : : {
1771 : 105617 : seen_unmatched_ref_p = true;
1772 : 105617 : if (ntbaa1 < 0 || ntbaa2 < 0)
1773 : : {
1774 : 38335 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1775 : 38335 : return -1;
1776 : : }
1777 : : }
1778 : : }
1779 : 8010328 : while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1780 : :
1781 : 3971523 : do
1782 : : {
1783 : 3971523 : if (component_refs2.is_empty ())
1784 : : {
1785 : 18191 : ++alias_stats
1786 : 18191 : .nonoverlapping_refs_since_match_p_must_overlap;
1787 : 18191 : return 0;
1788 : : }
1789 : 3953332 : ref2 = component_refs2.pop ();
1790 : 3953332 : ntbaa2--;
1791 : 3953332 : if (TREE_CODE (ref2) != COMPONENT_REF)
1792 : : {
1793 : 0 : if (ntbaa1 < 0 || ntbaa2 < 0)
1794 : : {
1795 : 0 : ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1796 : 0 : return -1;
1797 : : }
1798 : : seen_unmatched_ref_p = true;
1799 : : }
1800 : : }
1801 : 7906664 : while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1802 : :
1803 : : /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1804 : : earlier. */
1805 : 3953332 : gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1806 : : && TREE_CODE (ref2) == COMPONENT_REF);
1807 : :
1808 : 3953332 : tree field1 = TREE_OPERAND (ref1, 1);
1809 : 3953332 : tree field2 = TREE_OPERAND (ref2, 1);
1810 : :
1811 : : /* ??? We cannot simply use the type of operand #0 of the refs here
1812 : : as the Fortran compiler smuggles type punning into COMPONENT_REFs
1813 : : for common blocks instead of using unions like everyone else. */
1814 : 3953332 : tree type1 = DECL_CONTEXT (field1);
1815 : 3953332 : tree type2 = DECL_CONTEXT (field2);
1816 : :
1817 : 3953332 : partial_overlap = false;
1818 : :
1819 : : /* If we skipped array refs on type of different sizes, we can
1820 : : no longer be sure that there are not partial overlaps. */
1821 : 432 : if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1822 : 3953764 : && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1823 : : {
1824 : 0 : ++alias_stats
1825 : 0 : .nonoverlapping_refs_since_match_p_may_alias;
1826 : 0 : return -1;
1827 : : }
1828 : :
1829 : 3953332 : int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1830 : 3953332 : if (cmp == -1)
1831 : : {
1832 : 2718 : ++alias_stats
1833 : 2718 : .nonoverlapping_refs_since_match_p_may_alias;
1834 : 2718 : return -1;
1835 : : }
1836 : 3950614 : else if (cmp == 1)
1837 : : {
1838 : 1445107 : ++alias_stats
1839 : 1445107 : .nonoverlapping_refs_since_match_p_no_alias;
1840 : 1445107 : return 1;
1841 : : }
1842 : : }
1843 : 6397527 : }
1844 : :
1845 : : /* Return TYPE_UID which can be used to match record types we consider
1846 : : same for TBAA purposes. */
1847 : :
1848 : : static inline int
1849 : 93202 : ncr_type_uid (const_tree field)
1850 : : {
1851 : : /* ??? We cannot simply use the type of operand #0 of the refs here
1852 : : as the Fortran compiler smuggles type punning into COMPONENT_REFs
1853 : : for common blocks instead of using unions like everyone else. */
1854 : 93202 : tree type = DECL_FIELD_CONTEXT (field);
1855 : : /* With LTO types considered same_type_for_tbaa_p
1856 : : from different translation unit may not have same
1857 : : main variant. They however have same TYPE_CANONICAL. */
1858 : 93202 : if (TYPE_CANONICAL (type))
1859 : 93202 : return TYPE_UID (TYPE_CANONICAL (type));
1860 : 0 : return TYPE_UID (type);
1861 : : }
1862 : :
1863 : : /* qsort compare function to sort FIELD_DECLs after their
1864 : : DECL_FIELD_CONTEXT TYPE_UID. */
1865 : :
1866 : : static inline int
1867 : 40524 : ncr_compar (const void *field1_, const void *field2_)
1868 : : {
1869 : 40524 : const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1870 : 40524 : const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1871 : 40524 : unsigned int uid1 = ncr_type_uid (field1);
1872 : 40524 : unsigned int uid2 = ncr_type_uid (field2);
1873 : :
1874 : 40524 : if (uid1 < uid2)
1875 : : return -1;
1876 : 16498 : else if (uid1 > uid2)
1877 : 16498 : return 1;
1878 : : return 0;
1879 : : }
1880 : :
1881 : : /* Return true if we can determine that the fields referenced cannot
1882 : : overlap for any pair of objects. This relies on TBAA. */
1883 : :
1884 : : static bool
1885 : 998848 : nonoverlapping_component_refs_p (const_tree x, const_tree y)
1886 : : {
1887 : : /* Early return if we have nothing to do.
1888 : :
1889 : : Do not consider this as may-alias for stats - it is more useful
1890 : : to have information how many disambiguations happened provided that
1891 : : the query was meaningful. */
1892 : 998848 : if (!flag_strict_aliasing
1893 : 998848 : || !x || !y
1894 : 141423 : || !handled_component_p (x)
1895 : 998848 : || !handled_component_p (y))
1896 : : return false;
1897 : :
1898 : 96319 : auto_vec<const_tree, 16> fieldsx;
1899 : 306210 : while (handled_component_p (x))
1900 : : {
1901 : 209891 : if (TREE_CODE (x) == COMPONENT_REF)
1902 : : {
1903 : 116671 : tree field = TREE_OPERAND (x, 1);
1904 : 116671 : tree type = DECL_FIELD_CONTEXT (field);
1905 : 116671 : if (TREE_CODE (type) == RECORD_TYPE)
1906 : 116391 : fieldsx.safe_push (field);
1907 : : }
1908 : 93220 : else if (ends_tbaa_access_path_p (x))
1909 : 2343 : fieldsx.truncate (0);
1910 : 209891 : x = TREE_OPERAND (x, 0);
1911 : : }
1912 : 167706 : if (fieldsx.length () == 0)
1913 : : return false;
1914 : 71387 : auto_vec<const_tree, 16> fieldsy;
1915 : 172998 : while (handled_component_p (y))
1916 : : {
1917 : 101611 : if (TREE_CODE (y) == COMPONENT_REF)
1918 : : {
1919 : 30167 : tree field = TREE_OPERAND (y, 1);
1920 : 30167 : tree type = DECL_FIELD_CONTEXT (field);
1921 : 30167 : if (TREE_CODE (type) == RECORD_TYPE)
1922 : 29887 : fieldsy.safe_push (TREE_OPERAND (y, 1));
1923 : : }
1924 : 71444 : else if (ends_tbaa_access_path_p (y))
1925 : 77 : fieldsy.truncate (0);
1926 : 101611 : y = TREE_OPERAND (y, 0);
1927 : : }
1928 : 71387 : if (fieldsy.length () == 0)
1929 : : {
1930 : 47557 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1931 : 47557 : return false;
1932 : : }
1933 : :
1934 : : /* Most common case first. */
1935 : 23830 : if (fieldsx.length () == 1
1936 : 23830 : && fieldsy.length () == 1)
1937 : : {
1938 : 43246 : if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1939 : 21623 : DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1940 : 42207 : && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1941 : : {
1942 : 17198 : ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1943 : 17198 : return true;
1944 : : }
1945 : : else
1946 : : {
1947 : 4425 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1948 : 4425 : return false;
1949 : : }
1950 : : }
1951 : :
1952 : 2207 : if (fieldsx.length () == 2)
1953 : : {
1954 : 136 : if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1955 : 130 : std::swap (fieldsx[0], fieldsx[1]);
1956 : : }
1957 : : else
1958 : 2071 : fieldsx.qsort (ncr_compar);
1959 : :
1960 : 2207 : if (fieldsy.length () == 2)
1961 : : {
1962 : 148 : if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1963 : 104 : std::swap (fieldsy[0], fieldsy[1]);
1964 : : }
1965 : : else
1966 : 2059 : fieldsy.qsort (ncr_compar);
1967 : :
1968 : : unsigned i = 0, j = 0;
1969 : 6077 : do
1970 : : {
1971 : 6077 : const_tree fieldx = fieldsx[i];
1972 : 6077 : const_tree fieldy = fieldsy[j];
1973 : :
1974 : : /* We're left with accessing different fields of a structure,
1975 : : no possible overlap. */
1976 : 12154 : if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1977 : 6077 : DECL_FIELD_CONTEXT (fieldy)) == 1
1978 : 6077 : && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1979 : : {
1980 : 0 : ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1981 : 0 : return true;
1982 : : }
1983 : :
1984 : 6077 : if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1985 : : {
1986 : 2083 : i++;
1987 : 4166 : if (i == fieldsx.length ())
1988 : : break;
1989 : : }
1990 : : else
1991 : : {
1992 : 3994 : j++;
1993 : 7988 : if (j == fieldsy.length ())
1994 : : break;
1995 : : }
1996 : : }
1997 : : while (1);
1998 : :
1999 : 2207 : ++alias_stats.nonoverlapping_component_refs_p_may_alias;
2000 : 2207 : return false;
2001 : 167706 : }
2002 : :
2003 : :
2004 : : /* Return true if two memory references based on the variables BASE1
2005 : : and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2006 : : [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2007 : : if non-NULL are the complete memory reference trees. */
2008 : :
2009 : : static bool
2010 : 1308021290 : decl_refs_may_alias_p (tree ref1, tree base1,
2011 : : poly_int64 offset1, poly_int64 max_size1,
2012 : : poly_int64 size1,
2013 : : tree ref2, tree base2,
2014 : : poly_int64 offset2, poly_int64 max_size2,
2015 : : poly_int64 size2)
2016 : : {
2017 : 1308021290 : gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2018 : :
2019 : : /* If both references are based on different variables, they cannot alias. */
2020 : 1308021290 : if (compare_base_decls (base1, base2) == 0)
2021 : : return false;
2022 : :
2023 : : /* If both references are based on the same variable, they cannot alias if
2024 : : the accesses do not overlap. */
2025 : 173572534 : if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2026 : : return false;
2027 : :
2028 : : /* If there is must alias, there is no use disambiguating further. */
2029 : 53191872 : if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2030 : : return true;
2031 : :
2032 : : /* For components with variable position, the above test isn't sufficient,
2033 : : so we disambiguate component references manually. */
2034 : 7749230 : if (ref1 && ref2
2035 : 5855162 : && handled_component_p (ref1) && handled_component_p (ref2)
2036 : 12831229 : && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
2037 : : return false;
2038 : :
2039 : : return true;
2040 : : }
2041 : :
2042 : : /* Return true if access with BASE is view converted.
2043 : : Base must not be stripped from inner MEM_REF (&decl)
2044 : : which is done by ao_ref_base and thus one extra walk
2045 : : of handled components is needed. */
2046 : :
2047 : : static bool
2048 : 270876 : view_converted_memref_p (tree base)
2049 : : {
2050 : 270876 : if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2051 : : return false;
2052 : 143831 : return same_type_for_tbaa (TREE_TYPE (base),
2053 : 287662 : TREE_TYPE (TREE_OPERAND (base, 1))) != 1;
2054 : : }
2055 : :
2056 : : /* Return true if an indirect reference based on *PTR1 constrained
2057 : : to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2058 : : constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2059 : : the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2060 : : in which case they are computed on-demand. REF1 and REF2
2061 : : if non-NULL are the complete memory reference trees. */
2062 : :
2063 : : static bool
2064 : 268007558 : indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2065 : : poly_int64 offset1, poly_int64 max_size1,
2066 : : poly_int64 size1,
2067 : : alias_set_type ref1_alias_set,
2068 : : alias_set_type base1_alias_set,
2069 : : tree ref2 ATTRIBUTE_UNUSED, tree base2,
2070 : : poly_int64 offset2, poly_int64 max_size2,
2071 : : poly_int64 size2,
2072 : : alias_set_type ref2_alias_set,
2073 : : alias_set_type base2_alias_set, bool tbaa_p)
2074 : : {
2075 : 268007558 : tree ptr1;
2076 : 268007558 : tree ptrtype1, dbase2;
2077 : :
2078 : 268007558 : gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2079 : : || TREE_CODE (base1) == TARGET_MEM_REF)
2080 : : && DECL_P (base2));
2081 : :
2082 : 268007558 : ptr1 = TREE_OPERAND (base1, 0);
2083 : 268007558 : poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2084 : :
2085 : : /* If only one reference is based on a variable, they cannot alias if
2086 : : the pointer access is beyond the extent of the variable access.
2087 : : (the pointer base cannot validly point to an offset less than zero
2088 : : of the variable).
2089 : : ??? IVOPTs creates bases that do not honor this restriction,
2090 : : so do not apply this optimization for TARGET_MEM_REFs. */
2091 : 268007558 : if (TREE_CODE (base1) != TARGET_MEM_REF
2092 : 268007558 : && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2093 : 67296871 : return false;
2094 : :
2095 : : /* If the pointer based access is bigger than the variable they cannot
2096 : : alias. This is similar to the check below where we use TBAA to
2097 : : increase the size of the pointer based access based on the dynamic
2098 : : type of a containing object we can infer from it. */
2099 : 200710687 : poly_int64 dsize2;
2100 : 200710687 : if (known_size_p (size1)
2101 : 190411103 : && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2102 : 357784699 : && known_lt (dsize2, size1))
2103 : : return false;
2104 : :
2105 : : /* They also cannot alias if the pointer may not point to the decl. */
2106 : 181601175 : if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2107 : : return false;
2108 : :
2109 : : /* Disambiguations that rely on strict aliasing rules follow. */
2110 : 29062128 : if (!flag_strict_aliasing || !tbaa_p)
2111 : : return true;
2112 : :
2113 : : /* If the alias set for a pointer access is zero all bets are off. */
2114 : 5234890 : if (base1_alias_set == 0 || base2_alias_set == 0)
2115 : : return true;
2116 : :
2117 : : /* When we are trying to disambiguate an access with a pointer dereference
2118 : : as base versus one with a decl as base we can use both the size
2119 : : of the decl and its dynamic type for extra disambiguation.
2120 : : ??? We do not know anything about the dynamic type of the decl
2121 : : other than that its alias-set contains base2_alias_set as a subset
2122 : : which does not help us here. */
2123 : : /* As we know nothing useful about the dynamic type of the decl just
2124 : : use the usual conflict check rather than a subset test.
2125 : : ??? We could introduce -fvery-strict-aliasing when the language
2126 : : does not allow decls to have a dynamic type that differs from their
2127 : : static type. Then we can check
2128 : : !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2129 : 1217687 : if (base1_alias_set != base2_alias_set
2130 : 1217687 : && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2131 : : return false;
2132 : :
2133 : 1030920 : ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2134 : :
2135 : : /* If the size of the access relevant for TBAA through the pointer
2136 : : is bigger than the size of the decl we can't possibly access the
2137 : : decl via that pointer. */
2138 : 1030920 : if (/* ??? This in turn may run afoul when a decl of type T which is
2139 : : a member of union type U is accessed through a pointer to
2140 : : type U and sizeof T is smaller than sizeof U. */
2141 : 1030920 : TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2142 : 1013210 : && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2143 : 2044130 : && compare_sizes (DECL_SIZE (base2),
2144 : 1013210 : TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2145 : : return false;
2146 : :
2147 : 1006877 : if (!ref2)
2148 : : return true;
2149 : :
2150 : : /* If the decl is accessed via a MEM_REF, reconstruct the base
2151 : : we can use for TBAA and an appropriately adjusted offset. */
2152 : : dbase2 = ref2;
2153 : 1599098 : while (handled_component_p (dbase2))
2154 : 649538 : dbase2 = TREE_OPERAND (dbase2, 0);
2155 : 949560 : poly_int64 doffset1 = offset1;
2156 : 949560 : poly_offset_int doffset2 = offset2;
2157 : 949560 : if (TREE_CODE (dbase2) == MEM_REF
2158 : 949560 : || TREE_CODE (dbase2) == TARGET_MEM_REF)
2159 : : {
2160 : 646474 : doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2161 : 323237 : tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2162 : : /* If second reference is view-converted, give up now. */
2163 : 323237 : if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2164 : : return true;
2165 : : }
2166 : :
2167 : : /* If first reference is view-converted, give up now. */
2168 : 890352 : if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2169 : : return true;
2170 : :
2171 : : /* If both references are through the same type, they do not alias
2172 : : if the accesses do not overlap. This does extra disambiguation
2173 : : for mixed/pointer accesses but requires strict aliasing.
2174 : : For MEM_REFs we require that the component-ref offset we computed
2175 : : is relative to the start of the type which we ensure by
2176 : : comparing rvalue and access type and disregarding the constant
2177 : : pointer offset.
2178 : :
2179 : : But avoid treating variable length arrays as "objects", instead assume they
2180 : : can overlap by an exact multiple of their element size.
2181 : : See gcc.dg/torture/alias-2.c. */
2182 : 806714 : if (((TREE_CODE (base1) != TARGET_MEM_REF
2183 : 116005 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2184 : 788013 : && (TREE_CODE (dbase2) != TARGET_MEM_REF
2185 : 19592 : || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2186 : 1575151 : && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2187 : : {
2188 : 301432 : bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2189 : 301432 : && (TYPE_SIZE (TREE_TYPE (base1))
2190 : 2343 : && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2191 : 588531 : != INTEGER_CST));
2192 : 301432 : if (!partial_overlap
2193 : 301432 : && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2194 : : return false;
2195 : 287099 : if (!ref1 || !ref2
2196 : : /* If there is must alias, there is no use disambiguating further. */
2197 : 287099 : || (!partial_overlap
2198 : 275641 : && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2199 : : return true;
2200 : 2922 : int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2201 : : partial_overlap);
2202 : 2922 : if (res == -1)
2203 : 2825 : return !nonoverlapping_component_refs_p (ref1, ref2);
2204 : 97 : return !res;
2205 : : }
2206 : :
2207 : : /* Do access-path based disambiguation. */
2208 : 505282 : if (ref1 && ref2
2209 : 822253 : && (handled_component_p (ref1) || handled_component_p (ref2)))
2210 : 372968 : return aliasing_component_refs_p (ref1,
2211 : : ref1_alias_set, base1_alias_set,
2212 : : offset1, max_size1,
2213 : : ref2,
2214 : : ref2_alias_set, base2_alias_set,
2215 : 372968 : offset2, max_size2);
2216 : :
2217 : : return true;
2218 : : }
2219 : :
2220 : : /* Return true if two indirect references based on *PTR1
2221 : : and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2222 : : [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2223 : : the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2224 : : in which case they are computed on-demand. REF1 and REF2
2225 : : if non-NULL are the complete memory reference trees. */
2226 : :
2227 : : static bool
2228 : 76607746 : indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2229 : : poly_int64 offset1, poly_int64 max_size1,
2230 : : poly_int64 size1,
2231 : : alias_set_type ref1_alias_set,
2232 : : alias_set_type base1_alias_set,
2233 : : tree ref2 ATTRIBUTE_UNUSED, tree base2,
2234 : : poly_int64 offset2, poly_int64 max_size2,
2235 : : poly_int64 size2,
2236 : : alias_set_type ref2_alias_set,
2237 : : alias_set_type base2_alias_set, bool tbaa_p)
2238 : : {
2239 : 76607746 : tree ptr1;
2240 : 76607746 : tree ptr2;
2241 : 76607746 : tree ptrtype1, ptrtype2;
2242 : :
2243 : 76607746 : gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2244 : : || TREE_CODE (base1) == TARGET_MEM_REF)
2245 : : && (TREE_CODE (base2) == MEM_REF
2246 : : || TREE_CODE (base2) == TARGET_MEM_REF));
2247 : :
2248 : 76607746 : ptr1 = TREE_OPERAND (base1, 0);
2249 : 76607746 : ptr2 = TREE_OPERAND (base2, 0);
2250 : :
2251 : : /* If both bases are based on pointers they cannot alias if they may not
2252 : : point to the same memory object or if they point to the same object
2253 : : and the accesses do not overlap. */
2254 : 76607746 : if ((!cfun || gimple_in_ssa_p (cfun))
2255 : 47585545 : && operand_equal_p (ptr1, ptr2, 0)
2256 : 103044171 : && (((TREE_CODE (base1) != TARGET_MEM_REF
2257 : 289428 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2258 : 26378399 : && (TREE_CODE (base2) != TARGET_MEM_REF
2259 : 184630 : || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2260 : 68661 : || (TREE_CODE (base1) == TARGET_MEM_REF
2261 : 58580 : && TREE_CODE (base2) == TARGET_MEM_REF
2262 : 54541 : && (TMR_STEP (base1) == TMR_STEP (base2)
2263 : 4423 : || (TMR_STEP (base1) && TMR_STEP (base2)
2264 : 944 : && operand_equal_p (TMR_STEP (base1),
2265 : 944 : TMR_STEP (base2), 0)))
2266 : 50118 : && (TMR_INDEX (base1) == TMR_INDEX (base2)
2267 : 4984 : || (TMR_INDEX (base1) && TMR_INDEX (base2)
2268 : 4360 : && operand_equal_p (TMR_INDEX (base1),
2269 : 4360 : TMR_INDEX (base2), 0)))
2270 : 45134 : && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2271 : 0 : || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2272 : 0 : && operand_equal_p (TMR_INDEX2 (base1),
2273 : 0 : TMR_INDEX2 (base2), 0))))))
2274 : : {
2275 : 26412898 : poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2276 : 26412898 : poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2277 : 26412898 : if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2278 : 26412898 : offset2 + moff2, max_size2))
2279 : 26204813 : return false;
2280 : : /* If there is must alias, there is no use disambiguating further. */
2281 : 4082109 : if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2282 : : return true;
2283 : 1324899 : if (ref1 && ref2)
2284 : : {
2285 : 1124111 : int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2286 : : false);
2287 : 1124111 : if (res != -1)
2288 : 1116814 : return !res;
2289 : : }
2290 : : }
2291 : 50402933 : if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2292 : : return false;
2293 : :
2294 : : /* Disambiguations that rely on strict aliasing rules follow. */
2295 : 32119338 : if (!flag_strict_aliasing || !tbaa_p)
2296 : : return true;
2297 : :
2298 : 11878604 : ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2299 : 11878604 : ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2300 : :
2301 : : /* If the alias set for a pointer access is zero all bets are off. */
2302 : 11878604 : if (base1_alias_set == 0
2303 : 11878604 : || base2_alias_set == 0)
2304 : : return true;
2305 : :
2306 : : /* Do type-based disambiguation. */
2307 : 7989108 : if (base1_alias_set != base2_alias_set
2308 : 7989108 : && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2309 : : return false;
2310 : :
2311 : : /* If either reference is view-converted, give up now. */
2312 : 7269380 : if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2313 : 7269380 : || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2314 : 2238145 : return true;
2315 : :
2316 : : /* If both references are through the same type, they do not alias
2317 : : if the accesses do not overlap. This does extra disambiguation
2318 : : for mixed/pointer accesses but requires strict aliasing. */
2319 : 5031235 : if ((TREE_CODE (base1) != TARGET_MEM_REF
2320 : 768949 : || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2321 : 4798184 : && (TREE_CODE (base2) != TARGET_MEM_REF
2322 : 676095 : || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2323 : 9740841 : && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2324 : 4709606 : TREE_TYPE (ptrtype2)) == 1)
2325 : : {
2326 : : /* But avoid treating arrays as "objects", instead assume they
2327 : : can overlap by an exact multiple of their element size.
2328 : : See gcc.dg/torture/alias-2.c. */
2329 : 2858342 : bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2330 : :
2331 : 2858342 : if (!partial_overlap
2332 : 2858342 : && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2333 : : return false;
2334 : 2566885 : if (!ref1 || !ref2
2335 : 2566885 : || (!partial_overlap
2336 : 2140436 : && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2337 : : return true;
2338 : 173470 : int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2339 : : partial_overlap);
2340 : 173470 : if (res == -1)
2341 : 67737 : return !nonoverlapping_component_refs_p (ref1, ref2);
2342 : 105733 : return !res;
2343 : : }
2344 : :
2345 : : /* Do access-path based disambiguation. */
2346 : 2172893 : if (ref1 && ref2
2347 : 3026947 : && (handled_component_p (ref1) || handled_component_p (ref2)))
2348 : 1684326 : return aliasing_component_refs_p (ref1,
2349 : : ref1_alias_set, base1_alias_set,
2350 : : offset1, max_size1,
2351 : : ref2,
2352 : : ref2_alias_set, base2_alias_set,
2353 : 1684326 : offset2, max_size2);
2354 : :
2355 : : return true;
2356 : : }
2357 : :
/* Return true, if the two memory references REF1 and REF2 may alias.
   If TBAA_P is true, type-based alias analysis may be used for
   disambiguation.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  /* Verify the references are of a shape ao_ref_base can decompose.  */
  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF
			|| TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF
			   || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
	*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  Such bases do not name memory, so
     the accesses trivially do not alias.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* refN->ref may convey size information, do not confuse our workers
     with that but strip it - ao_ref_base took it into account already.  */
  tree ref1ref = ref1->ref;
  if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
    ref1ref = TREE_OPERAND (ref1ref, 0);
  tree ref2ref = ref2->ref;
  if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
    ref2ref = TREE_OPERAND (ref2ref, 0);

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
				  ref1->size,
				  ref2ref, base2, offset2, max_size2,
				  ref2->size);

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
      && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case so that below the decl-based
     reference, if any, is always the first one.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      std::swap (ref1ref, ref2ref);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  After canonicalization base2 is always indirect here.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2ref, base2,
					  offset2, max_size2, ref2->size,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1ref, base1,
					  offset1, max_size1, ref1->size,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1ref, base1,
				      offset1, max_size1, ref1->size,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2ref, base2,
				      offset2, max_size2, ref2->size,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  /* The decl-vs-decl case was handled above, so this is unreachable.  */
  gcc_unreachable ();
}
2531 : :
2532 : : /* Return true, if the two memory references REF1 and REF2 may alias
2533 : : and update statistics. */
2534 : :
2535 : : bool
2536 : 1716494617 : refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2537 : : {
2538 : 1716494617 : bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2539 : 1716494617 : if (res)
2540 : 118805162 : ++alias_stats.refs_may_alias_p_may_alias;
2541 : : else
2542 : 1597689455 : ++alias_stats.refs_may_alias_p_no_alias;
2543 : 1716494617 : return res;
2544 : : }
2545 : :
2546 : : static bool
2547 : 31937496 : refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2548 : : {
2549 : 31937496 : ao_ref r1;
2550 : 31937496 : ao_ref_init (&r1, ref1);
2551 : 31937496 : return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2552 : : }
2553 : :
2554 : : bool
2555 : 993129 : refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2556 : : {
2557 : 993129 : ao_ref r1, r2;
2558 : 993129 : ao_ref_init (&r1, ref1);
2559 : 993129 : ao_ref_init (&r2, ref2);
2560 : 993129 : return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2561 : : }
2562 : :
2563 : : /* Returns true if there is a anti-dependence for the STORE that
2564 : : executes after the LOAD. */
2565 : :
2566 : : bool
2567 : 453117 : refs_anti_dependent_p (tree load, tree store)
2568 : : {
2569 : 453117 : ao_ref r1, r2;
2570 : 453117 : ao_ref_init (&r1, load);
2571 : 453117 : ao_ref_init (&r2, store);
2572 : 453117 : return refs_may_alias_p_1 (&r1, &r2, false);
2573 : : }
2574 : :
2575 : : /* Returns true if there is a output dependence for the stores
2576 : : STORE1 and STORE2. */
2577 : :
2578 : : bool
2579 : 2779966 : refs_output_dependent_p (tree store1, tree store2)
2580 : : {
2581 : 2779966 : ao_ref r1, r2;
2582 : 2779966 : ao_ref_init (&r1, store1);
2583 : 2779966 : ao_ref_init (&r2, store2);
2584 : 2779966 : return refs_may_alias_p_1 (&r1, &r2, false);
2585 : : }
2586 : :
/* Returns true if and only if REF may alias any access stored in TT.
   If TBAA_P is true, use the TBAA oracle.  STMT is the call whose
   modref summary TT describes; it is used to map parameter indices in
   TT back to actual call arguments.  The number of (relatively
   expensive) disambiguation queries is bounded by
   param_modref_max_tests; once the budget is exhausted we
   conservatively return true.  */

static bool
modref_may_conflict (const gcall *stmt,
		     modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
{
  alias_set_type base_set, ref_set;
  bool global_memory_ok = false;

  if (tt->every_base)
    return true;

  if (!dbg_cnt (ipa_mod_ref))
    return true;

  base_set = ao_ref_base_alias_set (ref);

  ref_set = ao_ref_alias_set (ref);

  int num_tests = 0, max_tests = param_modref_max_tests;
  for (auto base_node : tt->bases)
    {
      if (tbaa_p && flag_strict_aliasing)
	{
	  if (num_tests >= max_tests)
	    return true;
	  alias_stats.modref_tests++;
	  if (!alias_sets_conflict_p (base_set, base_node->base))
	    continue;
	  num_tests++;
	}

      if (base_node->every_ref)
	return true;

      for (auto ref_node : base_node->refs)
	{
	  /* Do not repeat same test as before.  */
	  if ((ref_set != base_set || base_node->base != ref_node->ref)
	      && tbaa_p && flag_strict_aliasing)
	    {
	      if (num_tests >= max_tests)
		return true;
	      alias_stats.modref_tests++;
	      if (!alias_sets_conflict_p (ref_set, ref_node->ref))
		continue;
	      num_tests++;
	    }

	  if (ref_node->every_access)
	    return true;

	  /* TBAA checks did not disambiguate, try individual accesses.  */
	  for (auto access_node : ref_node->accesses)
	    {
	      if (num_tests >= max_tests)
		return true;

	      /* A special parm index denotes unknown global memory.  */
	      if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
		{
		  /* The global-memory query is independent of the access
		     node, so only do it once.  */
		  if (global_memory_ok)
		    continue;
		  if (ref_may_alias_global_p (ref, true))
		    return true;
		  global_memory_ok = true;
		  num_tests++;
		  continue;
		}

	      tree arg = access_node.get_call_arg (stmt);
	      if (!arg)
		return true;

	      alias_stats.modref_baseptr_tests++;

	      /* A NULL base pointer cannot be dereferenced, so the access
		 does not happen.  */
	      if (integer_zerop (arg) && flag_delete_null_pointer_checks)
		continue;

	      /* PTA oracle will be unhappy if ARG is not a pointer.  */
	      if (!POINTER_TYPE_P (TREE_TYPE (arg)))
		return true;

	      /* If we don't have base pointer, give up.  */
	      if (!ref->ref && !ref->base)
		continue;

	      ao_ref ref2;
	      if (access_node.get_ao_ref (stmt, &ref2))
		{
		  ref2.ref_alias_set = ref_node->ref;
		  ref2.base_alias_set = base_node->base;
		  if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
		    return true;
		}
	      else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
		return true;

	      num_tests++;
	    }
	}
    }
  /* All recorded accesses were disambiguated from REF.  */
  return false;
}
2691 : :
/* Check if REF conflicts with call using "fn spec" attribute.
   If CLOBBER is true we are checking for writes, otherwise check loads.

   Return 0 if there are no conflicts (except for possible function call
   argument reads), 1 if there are conflicts and -1 if we can not decide by
   fn spec.  */

static int
check_fnspec (gcall *call, ao_ref *ref, bool clobber)
{
  attr_fnspec fnspec = gimple_call_fnspec (call);
  if (fnspec.known_p ())
    {
      /* Only if the fnspec says global memory is not touched can we
	 restrict the check to the pointer arguments.  */
      if (clobber
	  ? !fnspec.global_memory_written_p ()
	  : !fnspec.global_memory_read_p ())
	{
	  for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	    if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
		&& (!fnspec.arg_specified_p (i)
		    || (clobber ? fnspec.arg_maybe_written_p (i)
			: fnspec.arg_maybe_read_p (i))))
	      {
		ao_ref dref;
		tree size = NULL_TREE;
		unsigned int size_arg;

		/* Try to determine the size of the access through this
		   argument, either from another argument or from the
		   pointed-to type.  */
		if (!fnspec.arg_specified_p (i))
		  ;
		else if (fnspec.arg_max_access_size_given_by_arg_p
			   (i, &size_arg))
		  size = gimple_call_arg (call, size_arg);
		else if (fnspec.arg_access_size_given_by_type_p (i))
		  {
		    tree callee = gimple_call_fndecl (call);
		    tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));

		    for (unsigned int p = 0; p < i; p++)
		      t = TREE_CHAIN (t);
		    size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
		  }
		poly_int64 size_hwi;
		/* Only use the size when it is a compile-time constant
		   that does not overflow when converted to bits.  */
		if (size
		    && poly_int_tree_p (size, &size_hwi)
		    && coeffs_in_range_p (size_hwi, 0,
					  HOST_WIDE_INT_MAX / BITS_PER_UNIT))
		  {
		    size_hwi = size_hwi * BITS_PER_UNIT;
		    ao_ref_init_from_ptr_and_range (&dref,
						    gimple_call_arg (call, i),
						    true, 0, -1, size_hwi);
		  }
		else
		  ao_ref_init_from_ptr_and_range (&dref,
						  gimple_call_arg (call, i),
						  false, 0, -1, -1);
		if (refs_may_alias_p_1 (&dref, ref, false))
		  return 1;
	      }
	  /* Math functions may set errno, which is a store to global
	     memory.  */
	  if (clobber
	      && fnspec.errno_maybe_written_p ()
	      && flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return 1;
	  return 0;
	}
    }

  /* FIXME: we should handle barriers more consistently, but for now leave the
     check here.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
      /* __sync_* builtins and some OpenMP builtins act as threading
	 barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_BARRIER_CANCEL:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_TASKGROUP_END:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_LOOP_END_CANCEL:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
	return 1;

      default:
	return -1;
      }
  return -1;
}
2796 : :
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  If TBAA_P is true, type-based alias
   analysis may be used for disambiguation.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const calls cannot use any memory, only possibly their
     by-value arguments.  */
  if (flags & (ECF_CONST|ECF_NOVOPS))
    goto process_args;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_MASK_STORE:
      case IFN_SCATTER_STORE:
      case IFN_MASK_SCATTER_STORE:
      case IFN_LEN_STORE:
      case IFN_MASK_LEN_STORE:
	return false;
      case IFN_MASK_STORE_LANES:
      case IFN_MASK_LEN_STORE_LANES:
	goto process_args;
      case IFN_MASK_LOAD:
      case IFN_LEN_LOAD:
      case IFN_MASK_LEN_LOAD:
      case IFN_MASK_LOAD_LANES:
      case IFN_MASK_LEN_LOAD_LANES:
	{
	  ao_ref rhs_ref;
	  tree lhs = gimple_call_lhs (call);
	  if (lhs)
	    {
	      ao_ref_init_from_ptr_and_size (&rhs_ref,
					     gimple_call_arg (call, 0),
					     TYPE_SIZE_UNIT (TREE_TYPE (lhs)));
	      /* We cannot make this a known-size access since otherwise
		 we disambiguate against refs to decls that are smaller.  */
	      rhs_ref.size = -1;
	      rhs_ref.ref_alias_set = rhs_ref.base_alias_set
		= tbaa_p ? get_deref_alias_set (TREE_TYPE
						(gimple_call_arg (call, 1))) : 0;
	      return refs_may_alias_p_1 (ref, &rhs_ref, tbaa_p);
	    }
	  break;
	}
      default:;
      }

  callee = gimple_call_fndecl (call);
  if (callee != NULL_TREE)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      /* We cannot safely optimize based on the summary of the callee
	 if it does not always bind to the current def: it is possible
	 that a memory load was optimized out earlier and the interposed
	 variant may not be optimized this way.  */
      if (node && node->binds_to_current_def_p ())
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary && !summary->calls_interposable)
	    {
	      if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
		{
		  alias_stats.modref_use_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, call, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s does not use ",
			       node->dump_name ());
		      if (!ref->ref && ref->base)
			{
			  fprintf (dump_file, "base: ");
			  print_generic_expr (dump_file, ref->base);
			}
		      else if (ref->ref)
			{
			  fprintf (dump_file, "ref: ");
			  print_generic_expr (dump_file, ref->ref);
			}
		      fprintf (dump_file, " alias sets: %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  /* Still need to check the by-value arguments below.  */
		  goto process_args;
		}
	      alias_stats.modref_use_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  if (int res = check_fnspec (call, ref, false))
    {
      if (res == 1)
	return true;
    }
  else
    goto process_args;

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap read;
      int id;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (read = ipa_reference_get_read_global (node))
	  && !bitmap_bit_p (read, id))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
	    return true;
	}
    }

  return false;
}
2981 : :
2982 : : static bool
2983 : 24118390 : ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2984 : : {
2985 : 24118390 : bool res;
2986 : 24118390 : res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2987 : 24118390 : if (res)
2988 : 15715803 : ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2989 : : else
2990 : 8402587 : ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2991 : 24118390 : return res;
2992 : : }
2993 : :
2994 : :
2995 : : /* If the statement STMT may use the memory reference REF return
2996 : : true, otherwise return false. */
2997 : :
2998 : : bool
2999 : 220676039 : ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
3000 : : {
3001 : 220676039 : if (is_gimple_assign (stmt))
3002 : : {
3003 : 191785160 : tree rhs;
3004 : :
3005 : : /* All memory assign statements are single. */
3006 : 191785160 : if (!gimple_assign_single_p (stmt))
3007 : : return false;
3008 : :
3009 : 191785160 : rhs = gimple_assign_rhs1 (stmt);
3010 : 191785160 : if (is_gimple_reg (rhs)
3011 : 157590992 : || is_gimple_min_invariant (rhs)
3012 : 233460374 : || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
3013 : 161575922 : return false;
3014 : :
3015 : 30209238 : return refs_may_alias_p (rhs, ref, tbaa_p);
3016 : : }
3017 : 28890879 : else if (is_gimple_call (stmt))
3018 : 24118390 : return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
3019 : 4772489 : else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
3020 : : {
3021 : 4082864 : tree retval = gimple_return_retval (return_stmt);
3022 : 4082864 : if (retval
3023 : 2299920 : && TREE_CODE (retval) != SSA_NAME
3024 : 1864915 : && !is_gimple_min_invariant (retval)
3025 : 5811122 : && refs_may_alias_p (retval, ref, tbaa_p))
3026 : : return true;
3027 : : /* If ref escapes the function then the return acts as a use. */
3028 : 2598131 : tree base = ao_ref_base (ref);
3029 : 2598131 : if (!base)
3030 : : ;
3031 : 2598131 : else if (DECL_P (base))
3032 : 708035 : return is_global_var (base);
3033 : 1890096 : else if (TREE_CODE (base) == MEM_REF
3034 : 1890096 : || TREE_CODE (base) == TARGET_MEM_REF)
3035 : 1890038 : return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0), false);
3036 : : return false;
3037 : : }
3038 : :
3039 : : return true;
3040 : : }
3041 : :
3042 : : bool
3043 : 694213 : ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3044 : : {
3045 : 694213 : ao_ref r;
3046 : 694213 : ao_ref_init (&r, ref);
3047 : 694213 : return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
3048 : : }
3049 : :
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  If TBAA_P is true, type-based
   alias analysis may be used for disambiguation.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (auto fn = gimple_call_internal_fn (call))
      {
      /* Treat these internal calls like ECF_PURE for aliasing,
	 they don't write to any memory the program should care about.
	 They have important other side-effects, and read memory,
	 so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_UBSAN_PTR:
      case IFN_ASAN_CHECK:
	return false;
      /* Masked stores write through their pointer argument; model the
	 store destination explicitly.  */
      case IFN_MASK_STORE:
      case IFN_LEN_STORE:
      case IFN_MASK_LEN_STORE:
      case IFN_MASK_STORE_LANES:
      case IFN_MASK_LEN_STORE_LANES:
	{
	  tree rhs = gimple_call_arg (call,
				      internal_fn_stored_value_index (fn));
	  ao_ref lhs_ref;
	  ao_ref_init_from_ptr_and_size (&lhs_ref, gimple_call_arg (call, 0),
					 TYPE_SIZE_UNIT (TREE_TYPE (rhs)));
	  /* We cannot make this a known-size access since otherwise
	     we disambiguate against refs to decls that are smaller.  */
	  lhs_ref.size = -1;
	  lhs_ref.ref_alias_set = lhs_ref.base_alias_set
	    = tbaa_p ? get_deref_alias_set
			 (TREE_TYPE (gimple_call_arg (call, 1))) : 0;
	  return refs_may_alias_p_1 (ref, &lhs_ref, tbaa_p);
	}
      default:
	break;
      }

  callee = gimple_call_fndecl (call);

  /* Try to disambiguate using the ipa-modref summary of the callee.  */
  if (callee != NULL_TREE && !ref->volatile_p)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      if (node)
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary)
	    {
	      if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
		  && (!summary->writes_errno
		      || !targetm.ref_may_alias_errno (ref)))
		{
		  alias_stats.modref_clobber_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, call, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s does not clobber ",
			       node->dump_name ());
		      if (!ref->ref && ref->base)
			{
			  fprintf (dump_file, "base: ");
			  print_generic_expr (dump_file, ref->base);
			}
		      else if (ref->ref)
			{
			  fprintf (dump_file, "ref: ");
			  print_generic_expr (dump_file, ref->ref);
			}
		      fprintf (dump_file, " alias sets: %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  return false;
		}
	      alias_stats.modref_clobber_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* Registers and constants cannot be stored to.  */
  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the call cannot possibly clobber it.  */
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
    return false;

  if (int res = check_fnspec (call, ref, true))
    {
      if (res == 1)
	return true;
    }
  else
    return false;

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap written;
      int id;

      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (written = ipa_reference_get_written_global (node))
	  && !bitmap_bit_p (written, id))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
3215 : :
3216 : : /* If the call in statement CALL may clobber the memory reference REF
3217 : : return true, otherwise return false. */
3218 : :
3219 : : bool
3220 : 248060 : call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3221 : : {
3222 : 248060 : bool res;
3223 : 248060 : ao_ref r;
3224 : 248060 : ao_ref_init (&r, ref);
3225 : 248060 : res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3226 : 248060 : if (res)
3227 : 12522 : ++alias_stats.call_may_clobber_ref_p_may_alias;
3228 : : else
3229 : 235538 : ++alias_stats.call_may_clobber_ref_p_no_alias;
3230 : 248060 : return res;
3231 : : }
3232 : :
3233 : :
3234 : : /* If the statement STMT may clobber the memory reference REF return true,
3235 : : otherwise return false. */
3236 : :
3237 : : bool
3238 : 1719223885 : stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3239 : : {
3240 : 1719223885 : if (is_gimple_call (stmt))
3241 : : {
3242 : 307149614 : tree lhs = gimple_call_lhs (stmt);
3243 : 307149614 : if (lhs
3244 : 149398765 : && TREE_CODE (lhs) != SSA_NAME)
3245 : : {
3246 : 59310244 : ao_ref r;
3247 : 59310244 : ao_ref_init (&r, lhs);
3248 : 59310244 : if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3249 : 5872314 : return true;
3250 : : }
3251 : :
3252 : 301277300 : return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3253 : : }
3254 : 1412074271 : else if (gimple_assign_single_p (stmt))
3255 : : {
3256 : 1404451812 : tree lhs = gimple_assign_lhs (stmt);
3257 : 1404451812 : if (TREE_CODE (lhs) != SSA_NAME)
3258 : : {
3259 : 1403621586 : ao_ref r;
3260 : 1403621586 : ao_ref_init (&r, lhs);
3261 : 1403621586 : return refs_may_alias_p_1 (ref, &r, tbaa_p);
3262 : : }
3263 : : }
3264 : 7622459 : else if (gimple_code (stmt) == GIMPLE_ASM)
3265 : : return true;
3266 : :
3267 : : return false;
3268 : : }
3269 : :
3270 : : bool
3271 : 4272062 : stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3272 : : {
3273 : 4272062 : ao_ref r;
3274 : 4272062 : ao_ref_init (&r, ref);
3275 : 4272062 : return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3276 : : }
3277 : :
3278 : : /* Return true if store1 and store2 described by corresponding tuples
3279 : : <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3280 : : address. */
3281 : :
3282 : : static bool
3283 : 78825294 : same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3284 : : poly_int64 max_size1,
3285 : : tree base2, poly_int64 offset2, poly_int64 size2,
3286 : : poly_int64 max_size2)
3287 : : {
3288 : : /* Offsets need to be 0. */
3289 : 78825294 : if (maybe_ne (offset1, 0)
3290 : 78825294 : || maybe_ne (offset2, 0))
3291 : : return false;
3292 : :
3293 : 20050930 : bool base1_obj_p = SSA_VAR_P (base1);
3294 : 20050930 : bool base2_obj_p = SSA_VAR_P (base2);
3295 : :
3296 : : /* We need one object. */
3297 : 20050930 : if (base1_obj_p == base2_obj_p)
3298 : : return false;
3299 : 2127959 : tree obj = base1_obj_p ? base1 : base2;
3300 : :
3301 : : /* And we need one MEM_REF. */
3302 : 2127959 : bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3303 : 2127959 : bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3304 : 2127959 : if (base1_memref_p == base2_memref_p)
3305 : : return false;
3306 : 2074271 : tree memref = base1_memref_p ? base1 : base2;
3307 : :
3308 : : /* Sizes need to be valid. */
3309 : 2074271 : if (!known_size_p (max_size1)
3310 : 2064688 : || !known_size_p (max_size2)
3311 : 2064235 : || !known_size_p (size1)
3312 : 4138506 : || !known_size_p (size2))
3313 : : return false;
3314 : :
3315 : : /* Max_size needs to match size. */
3316 : 2064235 : if (maybe_ne (max_size1, size1)
3317 : 2064235 : || maybe_ne (max_size2, size2))
3318 : : return false;
3319 : :
3320 : : /* Sizes need to match. */
3321 : 2042565 : if (maybe_ne (size1, size2))
3322 : : return false;
3323 : :
3324 : :
3325 : : /* Check that memref is a store to pointer with singleton points-to info. */
3326 : 551271 : if (!integer_zerop (TREE_OPERAND (memref, 1)))
3327 : : return false;
3328 : 440418 : tree ptr = TREE_OPERAND (memref, 0);
3329 : 440418 : if (TREE_CODE (ptr) != SSA_NAME)
3330 : : return false;
3331 : 440131 : struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3332 : 440131 : unsigned int pt_uid;
3333 : 440131 : if (pi == NULL
3334 : 440131 : || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3335 : 327322 : return false;
3336 : :
3337 : : /* Be conservative with non-call exceptions when the address might
3338 : : be NULL. */
3339 : 112809 : if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3340 : : return false;
3341 : :
3342 : : /* Check that ptr points relative to obj. */
3343 : 112804 : unsigned int obj_uid = DECL_PT_UID (obj);
3344 : 112804 : if (obj_uid != pt_uid)
3345 : : return false;
3346 : :
3347 : : /* Check that the object size is the same as the store size. That ensures us
3348 : : that ptr points to the start of obj. */
3349 : 36338 : return (DECL_SIZE (obj)
3350 : 36338 : && poly_int_tree_p (DECL_SIZE (obj))
3351 : 108934 : && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3352 : : }
3353 : :
3354 : : /* Return true if REF is killed by an store described by
3355 : : BASE, OFFSET, SIZE and MAX_SIZE. */
3356 : :
3357 : : static bool
3358 : 163089339 : store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3359 : : poly_int64 max_size, ao_ref *ref)
3360 : : {
3361 : 163089339 : poly_int64 ref_offset = ref->offset;
3362 : : /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3363 : : so base == ref->base does not always hold. */
3364 : 163089339 : if (base != ref->base)
3365 : : {
3366 : : /* Try using points-to info. */
3367 : 78825294 : if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3368 : : ref->offset, ref->size, ref->max_size))
3369 : : return true;
3370 : :
3371 : : /* If both base and ref->base are MEM_REFs, only compare the
3372 : : first operand, and if the second operand isn't equal constant,
3373 : : try to add the offsets into offset and ref_offset. */
3374 : 26052225 : if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3375 : 101418641 : && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3376 : : {
3377 : 18722776 : if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3378 : 18722776 : TREE_OPERAND (ref->base, 1)))
3379 : : {
3380 : 6355083 : poly_offset_int off1 = mem_ref_offset (base);
3381 : 6355083 : off1 <<= LOG2_BITS_PER_UNIT;
3382 : 6355083 : off1 += offset;
3383 : 6355083 : poly_offset_int off2 = mem_ref_offset (ref->base);
3384 : 6355083 : off2 <<= LOG2_BITS_PER_UNIT;
3385 : 6355083 : off2 += ref_offset;
3386 : 6355083 : if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3387 : 2 : size = -1;
3388 : : }
3389 : : }
3390 : : else
3391 : 60102438 : size = -1;
3392 : : }
3393 : : /* For a must-alias check we need to be able to constrain
3394 : : the access properly. */
3395 : 163089259 : return (known_eq (size, max_size)
3396 : 163089259 : && known_subrange_p (ref_offset, ref->max_size, offset, size));
3397 : : }
3398 : :
3399 : : /* If STMT kills the memory reference REF return true, otherwise
3400 : : return false. */
3401 : :
3402 : : bool
3403 : 184893954 : stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3404 : : {
3405 : 184893954 : if (!ao_ref_base (ref))
3406 : : return false;
3407 : :
3408 : 184893954 : if (gimple_has_lhs (stmt)
3409 : 170484529 : && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3410 : : /* The assignment is not necessarily carried out if it can throw
3411 : : and we can catch it in the current function where we could inspect
3412 : : the previous value. Similarly if the function can throw externally
3413 : : and the ref does not die on the function return.
3414 : : ??? We only need to care about the RHS throwing. For aggregate
3415 : : assignments or similar calls and non-call exceptions the LHS
3416 : : might throw as well.
3417 : : ??? We also should care about possible longjmp, but since we
3418 : : do not understand that longjmp is not using global memory we will
3419 : : not consider a kill here since the function call will be considered
3420 : : as possibly using REF. */
3421 : 167689775 : && !stmt_can_throw_internal (cfun, stmt)
3422 : 174166800 : && (!stmt_can_throw_external (cfun, stmt)
3423 : 2347855 : || !ref_may_alias_global_p (ref, false)))
3424 : : {
3425 : 165952678 : tree lhs = gimple_get_lhs (stmt);
3426 : : /* If LHS is literally a base of the access we are done. */
3427 : 165952678 : if (ref->ref)
3428 : : {
3429 : 165099347 : tree base = ref->ref;
3430 : 165099347 : tree innermost_dropped_array_ref = NULL_TREE;
3431 : 165099347 : if (handled_component_p (base))
3432 : : {
3433 : 129508515 : tree saved_lhs0 = NULL_TREE;
3434 : 245978256 : if (handled_component_p (lhs))
3435 : : {
3436 : 116469741 : saved_lhs0 = TREE_OPERAND (lhs, 0);
3437 : 116469741 : TREE_OPERAND (lhs, 0) = integer_zero_node;
3438 : : }
3439 : 234819333 : do
3440 : : {
3441 : : /* Just compare the outermost handled component, if
3442 : : they are equal we have found a possible common
3443 : : base. */
3444 : 234819333 : tree saved_base0 = TREE_OPERAND (base, 0);
3445 : 234819333 : TREE_OPERAND (base, 0) = integer_zero_node;
3446 : 234819333 : bool res = operand_equal_p (lhs, base, 0);
3447 : 234819333 : TREE_OPERAND (base, 0) = saved_base0;
3448 : 234819333 : if (res)
3449 : : break;
3450 : : /* Remember if we drop an array-ref that we need to
3451 : : double-check not being at struct end. */
3452 : 217385501 : if (TREE_CODE (base) == ARRAY_REF
3453 : 217385501 : || TREE_CODE (base) == ARRAY_RANGE_REF)
3454 : 63961890 : innermost_dropped_array_ref = base;
3455 : : /* Otherwise drop handled components of the access. */
3456 : 217385501 : base = saved_base0;
3457 : : }
3458 : 329460184 : while (handled_component_p (base));
3459 : 129508515 : if (saved_lhs0)
3460 : 116469741 : TREE_OPERAND (lhs, 0) = saved_lhs0;
3461 : : }
3462 : : /* Finally check if the lhs has the same address and size as the
3463 : : base candidate of the access. Watch out if we have dropped
3464 : : an array-ref that might have flexible size, this means ref->ref
3465 : : may be outside of the TYPE_SIZE of its base. */
3466 : 129508515 : if ((! innermost_dropped_array_ref
3467 : 62875021 : || ! array_ref_flexible_size_p (innermost_dropped_array_ref))
3468 : 283874405 : && (lhs == base
3469 : 152850194 : || (((TYPE_SIZE (TREE_TYPE (lhs))
3470 : 152850194 : == TYPE_SIZE (TREE_TYPE (base)))
3471 : 115255802 : || (TYPE_SIZE (TREE_TYPE (lhs))
3472 : 115255802 : && TYPE_SIZE (TREE_TYPE (base))
3473 : 115255733 : && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3474 : 115255733 : TYPE_SIZE (TREE_TYPE (base)),
3475 : : 0)))
3476 : 37594392 : && operand_equal_p (lhs, base,
3477 : : OEP_ADDRESS_OF
3478 : : | OEP_MATCH_SIDE_EFFECTS))))
3479 : : {
3480 : 1893185 : ++alias_stats.stmt_kills_ref_p_yes;
3481 : 6640658 : return true;
3482 : : }
3483 : : }
3484 : :
3485 : : /* Now look for non-literal equal bases with the restriction of
3486 : : handling constant offset and size. */
3487 : : /* For a must-alias check we need to be able to constrain
3488 : : the access properly. */
3489 : 164059493 : if (!ref->max_size_known_p ())
3490 : : {
3491 : 1685927 : ++alias_stats.stmt_kills_ref_p_no;
3492 : 1685927 : return false;
3493 : : }
3494 : 162373566 : poly_int64 size, offset, max_size;
3495 : 162373566 : bool reverse;
3496 : 162373566 : tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3497 : : &reverse);
3498 : 162373566 : if (store_kills_ref_p (base, offset, size, max_size, ref))
3499 : : {
3500 : 1168361 : ++alias_stats.stmt_kills_ref_p_yes;
3501 : 1168361 : return true;
3502 : : }
3503 : : }
3504 : :
3505 : 180146481 : if (is_gimple_call (stmt))
3506 : : {
3507 : 7781860 : tree callee = gimple_call_fndecl (stmt);
3508 : 7781860 : struct cgraph_node *node;
3509 : 7781860 : modref_summary *summary;
3510 : :
3511 : : /* Try to disambiguate using modref summary. Modref records a vector
3512 : : of stores with known offsets relative to function parameters that must
3513 : : happen every execution of function. Find if we have a matching
3514 : : store and verify that function can not use the value. */
3515 : 15563720 : if (callee != NULL_TREE
3516 : 7408147 : && (node = cgraph_node::get (callee)) != NULL
3517 : 7382402 : && node->binds_to_current_def_p ()
3518 : 830644 : && (summary = get_modref_function_summary (node)) != NULL
3519 : 8291634 : && summary->kills.length ()
3520 : : /* Check that we can not trap while evaulating function
3521 : : parameters. This check is overly conservative. */
3522 : 7827885 : && (!cfun->can_throw_non_call_exceptions
3523 : 0 : || (!stmt_can_throw_internal (cfun, stmt)
3524 : 0 : && (!stmt_can_throw_external (cfun, stmt)
3525 : 0 : || !ref_may_alias_global_p (ref, false)))))
3526 : : {
3527 : 193940 : for (auto kill : summary->kills)
3528 : : {
3529 : 61242 : ao_ref dref;
3530 : :
3531 : : /* We only can do useful compares if we know the access range
3532 : : precisely. */
3533 : 61242 : if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3534 : 24 : continue;
3535 : 61218 : if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3536 : : dref.size, dref.max_size, ref))
3537 : : {
3538 : : /* For store to be killed it needs to not be used
3539 : : earlier. */
3540 : 5377 : if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3541 : : true)
3542 : 5377 : || !dbg_cnt (ipa_mod_ref))
3543 : : break;
3544 : 3395 : if (dump_file && (dump_flags & TDF_DETAILS))
3545 : : {
3546 : 2 : fprintf (dump_file,
3547 : : "ipa-modref: call stmt ");
3548 : 2 : print_gimple_stmt (dump_file, stmt, 0);
3549 : 2 : fprintf (dump_file,
3550 : : "ipa-modref: call to %s kills ",
3551 : : node->dump_name ());
3552 : 2 : print_generic_expr (dump_file, ref->base);
3553 : 2 : fprintf (dump_file, "\n");
3554 : : }
3555 : 3395 : ++alias_stats.modref_kill_yes;
3556 : 3395 : return true;
3557 : : }
3558 : : }
3559 : 42630 : ++alias_stats.modref_kill_no;
3560 : : }
3561 : 7778465 : if (callee != NULL_TREE
3562 : 7778465 : && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3563 : 2105798 : switch (DECL_FUNCTION_CODE (callee))
3564 : : {
3565 : 196828 : case BUILT_IN_FREE:
3566 : 196828 : {
3567 : 196828 : tree ptr = gimple_call_arg (stmt, 0);
3568 : 196828 : tree base = ao_ref_base (ref);
3569 : 196828 : if (base && TREE_CODE (base) == MEM_REF
3570 : 241257 : && TREE_OPERAND (base, 0) == ptr)
3571 : : {
3572 : 7772 : ++alias_stats.stmt_kills_ref_p_yes;
3573 : 7772 : return true;
3574 : : }
3575 : : break;
3576 : : }
3577 : :
3578 : 800811 : case BUILT_IN_MEMCPY:
3579 : 800811 : case BUILT_IN_MEMPCPY:
3580 : 800811 : case BUILT_IN_MEMMOVE:
3581 : 800811 : case BUILT_IN_MEMSET:
3582 : 800811 : case BUILT_IN_MEMCPY_CHK:
3583 : 800811 : case BUILT_IN_MEMPCPY_CHK:
3584 : 800811 : case BUILT_IN_MEMMOVE_CHK:
3585 : 800811 : case BUILT_IN_MEMSET_CHK:
3586 : 800811 : case BUILT_IN_STRNCPY:
3587 : 800811 : case BUILT_IN_STPNCPY:
3588 : 800811 : case BUILT_IN_CALLOC:
3589 : 800811 : {
3590 : : /* For a must-alias check we need to be able to constrain
3591 : : the access properly. */
3592 : 800811 : if (!ref->max_size_known_p ())
3593 : : {
3594 : 36615 : ++alias_stats.stmt_kills_ref_p_no;
3595 : 187675 : return false;
3596 : : }
3597 : 764196 : tree dest;
3598 : 764196 : tree len;
3599 : :
3600 : : /* In execution order a calloc call will never kill
3601 : : anything. However, DSE will (ab)use this interface
3602 : : to ask if a calloc call writes the same memory locations
3603 : : as a later assignment, memset, etc. So handle calloc
3604 : : in the expected way. */
3605 : 764196 : if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3606 : : {
3607 : 901 : tree arg0 = gimple_call_arg (stmt, 0);
3608 : 901 : tree arg1 = gimple_call_arg (stmt, 1);
3609 : 901 : if (TREE_CODE (arg0) != INTEGER_CST
3610 : 776 : || TREE_CODE (arg1) != INTEGER_CST)
3611 : : {
3612 : 125 : ++alias_stats.stmt_kills_ref_p_no;
3613 : 125 : return false;
3614 : : }
3615 : :
3616 : 776 : dest = gimple_call_lhs (stmt);
3617 : 776 : if (!dest)
3618 : : {
3619 : 1 : ++alias_stats.stmt_kills_ref_p_no;
3620 : 1 : return false;
3621 : : }
3622 : 775 : len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3623 : : }
3624 : : else
3625 : : {
3626 : 763295 : dest = gimple_call_arg (stmt, 0);
3627 : 763295 : len = gimple_call_arg (stmt, 2);
3628 : : }
3629 : 764070 : if (!poly_int_tree_p (len))
3630 : : return false;
3631 : 654555 : ao_ref dref;
3632 : 654555 : ao_ref_init_from_ptr_and_size (&dref, dest, len);
3633 : 654555 : if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3634 : : dref.size, dref.max_size, ref))
3635 : : {
3636 : 4804 : ++alias_stats.stmt_kills_ref_p_yes;
3637 : 4804 : return true;
3638 : : }
3639 : 649751 : break;
3640 : : }
3641 : :
3642 : 7510 : case BUILT_IN_VA_END:
3643 : 7510 : {
3644 : 7510 : tree ptr = gimple_call_arg (stmt, 0);
3645 : 7510 : if (TREE_CODE (ptr) == ADDR_EXPR)
3646 : : {
3647 : 7463 : tree base = ao_ref_base (ref);
3648 : 7463 : if (TREE_OPERAND (ptr, 0) == base)
3649 : : {
3650 : 3785 : ++alias_stats.stmt_kills_ref_p_yes;
3651 : 3785 : return true;
3652 : : }
3653 : : }
3654 : : break;
3655 : : }
3656 : :
3657 : : default:;
3658 : : }
3659 : : }
3660 : 179980469 : ++alias_stats.stmt_kills_ref_p_no;
3661 : 179980469 : return false;
3662 : : }
3663 : :
3664 : : bool
3665 : 7950415 : stmt_kills_ref_p (gimple *stmt, tree ref)
3666 : : {
3667 : 7950415 : ao_ref r;
3668 : 7950415 : ao_ref_init (&r, ref);
3669 : 7950415 : return stmt_kills_ref_p (stmt, &r);
3670 : : }
3671 : :
3672 : :
3673 : : /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3674 : : TARGET or a statement clobbering the memory reference REF in which
3675 : : case false is returned. The walk starts with VUSE, one argument of PHI. */
3676 : :
3677 : : static bool
3678 : 110012751 : maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3679 : : ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3680 : : bitmap *visited, bool abort_on_visited,
3681 : : void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3682 : : translate_flags disambiguate_only,
3683 : : void *data)
3684 : : {
3685 : 110012751 : basic_block bb = gimple_bb (phi);
3686 : :
3687 : 110012751 : if (!*visited)
3688 : : {
3689 : 20343983 : *visited = BITMAP_ALLOC (NULL);
3690 : 20343983 : bitmap_tree_view (*visited);
3691 : : }
3692 : :
3693 : 110012751 : bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3694 : :
3695 : : /* Walk until we hit the target. */
3696 : 110012751 : while (vuse != target)
3697 : : {
3698 : 338247879 : gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3699 : : /* If we are searching for the target VUSE by walking up to
3700 : : TARGET_BB dominating the original PHI we are finished once
3701 : : we reach a default def or a definition in a block dominating
3702 : : that block. Update TARGET and return. */
3703 : 338247879 : if (!target
3704 : 338247879 : && (gimple_nop_p (def_stmt)
3705 : 63326243 : || dominated_by_p (CDI_DOMINATORS,
3706 : 63326243 : target_bb, gimple_bb (def_stmt))))
3707 : : {
3708 : 18067736 : target = vuse;
3709 : 18067736 : return true;
3710 : : }
3711 : :
3712 : : /* Recurse for PHI nodes. */
3713 : 320180143 : if (gimple_code (def_stmt) == GIMPLE_PHI)
3714 : : {
3715 : : /* An already visited PHI node ends the walk successfully. */
3716 : 61487226 : if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3717 : 28018763 : return !abort_on_visited;
3718 : 33468463 : vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3719 : : visited, abort_on_visited,
3720 : : translate, data, disambiguate_only);
3721 : 33468463 : if (!vuse)
3722 : : return false;
3723 : 29344379 : continue;
3724 : : }
3725 : 258692917 : else if (gimple_nop_p (def_stmt))
3726 : : return false;
3727 : : else
3728 : : {
3729 : : /* A clobbering statement or the end of the IL ends it failing. */
3730 : 258692917 : if ((int)limit <= 0)
3731 : : return false;
3732 : 258662661 : --limit;
3733 : 258662661 : if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3734 : : {
3735 : 15081189 : translate_flags tf = disambiguate_only;
3736 : 15081189 : if (translate
3737 : 15081189 : && (*translate) (ref, vuse, data, &tf) == NULL)
3738 : : ;
3739 : : else
3740 : 13241689 : return false;
3741 : : }
3742 : : }
3743 : : /* If we reach a new basic-block see if we already skipped it
3744 : : in a previous walk that ended successfully. */
3745 : 245420972 : if (gimple_bb (def_stmt) != bb)
3746 : : {
3747 : 111631360 : if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3748 : 7860832 : return !abort_on_visited;
3749 : 103770528 : bb = gimple_bb (def_stmt);
3750 : : }
3751 : 614477410 : vuse = gimple_vuse (def_stmt);
3752 : : }
3753 : : return true;
3754 : : }
3755 : :
3756 : :
3757 : : /* Starting from a PHI node for the virtual operand of the memory reference
3758 : : REF find a continuation virtual operand that allows to continue walking
3759 : : statements dominating PHI skipping only statements that cannot possibly
3760 : : clobber REF. Decrements LIMIT for each alias disambiguation done
3761 : : and aborts the walk, returning NULL_TREE if it reaches zero.
3762 : : Returns NULL_TREE if no suitable virtual operand can be found. */
3763 : :
3764 : : tree
3765 : 85862161 : get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3766 : : unsigned int &limit, bitmap *visited,
3767 : : bool abort_on_visited,
3768 : : void *(*translate)(ao_ref *, tree, void *,
3769 : : translate_flags *),
3770 : : void *data,
3771 : : translate_flags disambiguate_only)
3772 : : {
3773 : 85862161 : unsigned nargs = gimple_phi_num_args (phi);
3774 : :
3775 : : /* Through a single-argument PHI we can simply look through. */
3776 : 85862161 : if (nargs == 1)
3777 : 2804489 : return PHI_ARG_DEF (phi, 0);
3778 : :
3779 : : /* For two or more arguments try to pairwise skip non-aliasing code
3780 : : until we hit the phi argument definition that dominates the other one. */
3781 : 83057672 : basic_block phi_bb = gimple_bb (phi);
3782 : 83057672 : tree arg0, arg1;
3783 : 83057672 : unsigned i;
3784 : :
3785 : : /* Find a candidate for the virtual operand which definition
3786 : : dominates those of all others. */
3787 : : /* First look if any of the args themselves satisfy this. */
3788 : 153771806 : for (i = 0; i < nargs; ++i)
3789 : : {
3790 : 131705013 : arg0 = PHI_ARG_DEF (phi, i);
3791 : 131705013 : if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3792 : : break;
3793 : 129040086 : basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3794 : 129040086 : if (def_bb != phi_bb
3795 : 129040086 : && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3796 : : break;
3797 : 70714134 : arg0 = NULL_TREE;
3798 : : }
3799 : : /* If not, look if we can reach such candidate by walking defs
3800 : : until we hit the immediate dominator. maybe_skip_until will
3801 : : do that for us. */
3802 : 83057672 : basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3803 : :
3804 : : /* Then check against the (to be) found candidate. */
3805 : 319179602 : for (i = 0; i < nargs; ++i)
3806 : : {
3807 : 170553674 : arg1 = PHI_ARG_DEF (phi, i);
3808 : 170553674 : if (arg1 == arg0)
3809 : : ;
3810 : 296594506 : else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3811 : : limit, visited,
3812 : : abort_on_visited,
3813 : : translate,
3814 : : /* Do not valueize when walking over
3815 : : backedges. */
3816 : : dominated_by_p
3817 : 110012751 : (CDI_DOMINATORS,
3818 : 110012751 : gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3819 : : phi_bb)
3820 : : ? TR_DISAMBIGUATE
3821 : : : disambiguate_only, data))
3822 : : return NULL_TREE;
3823 : : }
3824 : :
3825 : 65568256 : return arg0;
3826 : : }
3827 : :
3828 : : /* Based on the memory reference REF and its virtual use VUSE call
3829 : : WALKER for each virtual use that is equivalent to VUSE, including VUSE
3830 : : itself. That is, for each virtual use for which its defining statement
3831 : : does not clobber REF.
3832 : :
3833 : : WALKER is called with REF, the current virtual use and DATA. If
3834 : : WALKER returns non-NULL the walk stops and its result is returned.
3835 : : At the end of a non-successful walk NULL is returned.
3836 : :
3837 : : TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3838 : : use which definition is a statement that may clobber REF and DATA.
3839 : : If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3840 : : If TRANSLATE returns non-NULL the walk stops and its result is returned.
3841 : : If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3842 : : to adjust REF and *DATA to make that valid.
3843 : :
3844 : : VALUEIZE if non-NULL is called with the next VUSE that is considered
3845 : : and return value is substituted for that. This can be used to
3846 : : implement optimistic value-numbering for example. Note that the
3847 : : VUSE argument is assumed to be valueized already.
3848 : :
3849 : : LIMIT specifies the number of alias queries we are allowed to do,
3850 : : the walk stops when it reaches zero and NULL is returned. LIMIT
3851 : : is decremented by the number of alias queries (plus adjustments
3852 : : done by the callbacks) upon return.
3853 : :
3854 : : TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3855 : :
3856 : : void *
3857 : 60016036 : walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3858 : : void *(*walker)(ao_ref *, tree, void *),
3859 : : void *(*translate)(ao_ref *, tree, void *,
3860 : : translate_flags *),
3861 : : tree (*valueize)(tree),
3862 : : unsigned &limit, void *data)
3863 : : {
3864 : 60016036 : bitmap visited = NULL;
3865 : 60016036 : void *res;
3866 : 60016036 : bool translated = false;
3867 : :
3868 : 60016036 : timevar_push (TV_ALIAS_STMT_WALK);
3869 : :
3870 : 945383550 : do
3871 : : {
3872 : 945383550 : gimple *def_stmt;
3873 : :
3874 : : /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3875 : 945383550 : res = (*walker) (ref, vuse, data);
3876 : : /* Abort walk. */
3877 : 945383550 : if (res == (void *)-1)
3878 : : {
3879 : : res = NULL;
3880 : : break;
3881 : : }
3882 : : /* Lookup succeeded. */
3883 : 945383467 : else if (res != NULL)
3884 : : break;
3885 : :
3886 : 938901504 : if (valueize)
3887 : : {
3888 : 923030196 : vuse = valueize (vuse);
3889 : 923030196 : if (!vuse)
3890 : : {
3891 : : res = NULL;
3892 : : break;
3893 : : }
3894 : : }
3895 : 925697315 : def_stmt = SSA_NAME_DEF_STMT (vuse);
3896 : 925697315 : if (gimple_nop_p (def_stmt))
3897 : : break;
3898 : 923321349 : else if (gimple_code (def_stmt) == GIMPLE_PHI)
3899 : 49624078 : vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3900 : : &visited, translated, translate, data);
3901 : : else
3902 : : {
3903 : 873697271 : if ((int)limit <= 0)
3904 : : {
3905 : : res = NULL;
3906 : : break;
3907 : : }
3908 : 873445903 : --limit;
3909 : 873445903 : if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3910 : : {
3911 : 30073238 : if (!translate)
3912 : : break;
3913 : 25437295 : translate_flags disambiguate_only = TR_TRANSLATE;
3914 : 25437295 : res = (*translate) (ref, vuse, data, &disambiguate_only);
3915 : : /* Failed lookup and translation. */
3916 : 25437295 : if (res == (void *)-1)
3917 : : {
3918 : : res = NULL;
3919 : : break;
3920 : : }
3921 : : /* Lookup succeeded. */
3922 : 4944755 : else if (res != NULL)
3923 : : break;
3924 : : /* Translation succeeded, continue walking. */
3925 : 6386949 : translated = translated || disambiguate_only == TR_TRANSLATE;
3926 : : }
3927 : 847399209 : vuse = gimple_vuse (def_stmt);
3928 : : }
3929 : : }
3930 : 897023287 : while (vuse);
3931 : :
3932 : 60016036 : if (visited)
3933 : 18072212 : BITMAP_FREE (visited);
3934 : :
3935 : 60016036 : timevar_pop (TV_ALIAS_STMT_WALK);
3936 : :
3937 : 60016036 : return res;
3938 : : }
3939 : :
3940 : :
3941 : : /* Based on the memory reference REF call WALKER for each vdef whose
3942 : : defining statement may clobber REF, starting with VDEF. If REF
3943 : : is NULL_TREE, each defining statement is visited.
3944 : :
3945 : : WALKER is called with REF, the current vdef and DATA. If WALKER
3946 : : returns true the walk is stopped, otherwise it continues.
3947 : :
3948 : : If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3949 : : The pointer may be NULL and then we do not track this information.
3950 : :
3951 : : At PHI nodes walk_aliased_vdefs forks into one walk for each
3952 : : PHI argument (but only one walk continues at merge points), the
3953 : : return value is true if any of the walks was successful.
3954 : :
3955 : : The function returns the number of statements walked or -1 if
3956 : : LIMIT stmts were walked and the walk was aborted at this point.
3957 : : If LIMIT is zero the walk is not aborted. */
3958 : :
3959 : : static int
3960 : 283099417 : walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3961 : : bool (*walker)(ao_ref *, tree, void *), void *data,
3962 : : bitmap *visited, unsigned int cnt,
3963 : : bool *function_entry_reached, unsigned limit)
3964 : : {
3965 : 888588428 : do
3966 : : {
3967 : 1777176856 : gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3968 : :
3969 : 888588428 : if (*visited
3970 : 888588428 : && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3971 : 157210232 : return cnt;
3972 : :
3973 : 731378196 : if (gimple_nop_p (def_stmt))
3974 : : {
3975 : 22461633 : if (function_entry_reached)
3976 : 3457682 : *function_entry_reached = true;
3977 : 22461633 : return cnt;
3978 : : }
3979 : 708916563 : else if (gimple_code (def_stmt) == GIMPLE_PHI)
3980 : : {
3981 : 88901577 : unsigned i;
3982 : 88901577 : if (!*visited)
3983 : : {
3984 : 7564596 : *visited = BITMAP_ALLOC (NULL);
3985 : 7564596 : bitmap_tree_view (*visited);
3986 : : }
3987 : 275203340 : for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3988 : : {
3989 : 191646888 : int res = walk_aliased_vdefs_1 (ref,
3990 : : gimple_phi_arg_def (def_stmt, i),
3991 : : walker, data, visited, cnt,
3992 : : function_entry_reached, limit);
3993 : 191646888 : if (res == -1)
3994 : : return -1;
3995 : 186301763 : cnt = res;
3996 : : }
3997 : 83556452 : return cnt;
3998 : : }
3999 : :
4000 : : /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
4001 : 620014986 : cnt++;
4002 : 620014986 : if (cnt == limit)
4003 : : return -1;
4004 : 619881320 : if ((!ref
4005 : 549698070 : || stmt_may_clobber_ref_p_1 (def_stmt, ref))
4006 : 687449979 : && (*walker) (ref, vdef, data))
4007 : 14392309 : return cnt;
4008 : :
4009 : 1494077439 : vdef = gimple_vuse (def_stmt);
4010 : : }
4011 : : while (1);
4012 : : }
4013 : :
4014 : : int
4015 : 91452529 : walk_aliased_vdefs (ao_ref *ref, tree vdef,
4016 : : bool (*walker)(ao_ref *, tree, void *), void *data,
4017 : : bitmap *visited,
4018 : : bool *function_entry_reached, unsigned int limit)
4019 : : {
4020 : 91452529 : bitmap local_visited = NULL;
4021 : 91452529 : int ret;
4022 : :
4023 : 91452529 : timevar_push (TV_ALIAS_STMT_WALK);
4024 : :
4025 : 91452529 : if (function_entry_reached)
4026 : 4372555 : *function_entry_reached = false;
4027 : :
4028 : 158336890 : ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
4029 : : visited ? visited : &local_visited, 0,
4030 : : function_entry_reached, limit);
4031 : 91452529 : if (local_visited)
4032 : 7564596 : BITMAP_FREE (local_visited);
4033 : :
4034 : 91452529 : timevar_pop (TV_ALIAS_STMT_WALK);
4035 : :
4036 : 91452529 : return ret;
4037 : : }
4038 : :
4039 : : /* Verify validity of the fnspec string.
4040 : : See attr-fnspec.h for details. */
4041 : :
4042 : : void
4043 : 339898153 : attr_fnspec::verify ()
4044 : : {
4045 : 339898153 : bool err = false;
4046 : 339898153 : if (!len)
4047 : : return;
4048 : :
4049 : : /* Check return value specifier. */
4050 : 107315525 : if (len < return_desc_size)
4051 : : err = true;
4052 : 107315525 : else if ((len - return_desc_size) % arg_desc_size)
4053 : : err = true;
4054 : 107315525 : else if ((str[0] < '1' || str[0] > '4')
4055 : 107315525 : && str[0] != '.' && str[0] != 'm')
4056 : 0 : err = true;
4057 : :
4058 : 107315525 : switch (str[1])
4059 : : {
4060 : : case ' ':
4061 : : case 'p':
4062 : : case 'P':
4063 : : case 'c':
4064 : : case 'C':
4065 : : break;
4066 : : default:
4067 : : err = true;
4068 : : }
4069 : 107315525 : if (err)
4070 : 0 : internal_error ("invalid fn spec attribute \"%s\"", str);
4071 : :
4072 : : /* Now check all parameters. */
4073 : 325244824 : for (unsigned int i = 0; arg_specified_p (i); i++)
4074 : : {
4075 : 217929299 : unsigned int idx = arg_idx (i);
4076 : 217929299 : switch (str[idx])
4077 : : {
4078 : 205137136 : case 'x':
4079 : 205137136 : case 'X':
4080 : 205137136 : case 'r':
4081 : 205137136 : case 'R':
4082 : 205137136 : case 'o':
4083 : 205137136 : case 'O':
4084 : 205137136 : case 'w':
4085 : 205137136 : case 'W':
4086 : 205137136 : case '.':
4087 : 205137136 : if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
4088 : 205137136 : || str[idx + 1] == 't')
4089 : : {
4090 : 32617664 : if (str[idx] != 'r' && str[idx] != 'R'
4091 : : && str[idx] != 'w' && str[idx] != 'W'
4092 : : && str[idx] != 'o' && str[idx] != 'O')
4093 : 32617664 : err = true;
4094 : 32617664 : if (str[idx + 1] != 't'
4095 : : /* Size specified is scalar, so it should be described
4096 : : by ". " if specified at all. */
4097 : 32617664 : && (arg_specified_p (str[idx + 1] - '1')
4098 : 0 : && str[arg_idx (str[idx + 1] - '1')] != '.'))
4099 : : err = true;
4100 : : }
4101 : 172519472 : else if (str[idx + 1] != ' ')
4102 : : err = true;
4103 : : break;
4104 : 12792163 : default:
4105 : 12792163 : if (str[idx] < '1' || str[idx] > '9')
4106 : : err = true;
4107 : : }
4108 : 217929299 : if (err)
4109 : 0 : internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
4110 : : }
4111 : : }
4112 : :
4113 : : /* Return ture if TYPE1 and TYPE2 will always give the same answer
4114 : : when compared wit hother types using same_type_for_tbaa_p. */
4115 : :
4116 : : static bool
4117 : 80961 : types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4118 : : bool lto_streaming_safe)
4119 : : {
4120 : : /* We use same_type_for_tbaa_p to match types in the access path.
4121 : : This check is overly conservative. */
4122 : 80961 : type1 = TYPE_MAIN_VARIANT (type1);
4123 : 80961 : type2 = TYPE_MAIN_VARIANT (type2);
4124 : :
4125 : 80961 : if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4126 : 80961 : != TYPE_STRUCTURAL_EQUALITY_P (type2))
4127 : : return false;
4128 : 80961 : if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4129 : : return true;
4130 : :
4131 : 78295 : if (lto_streaming_safe)
4132 : 2387 : return type1 == type2;
4133 : : else
4134 : 75908 : return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
4135 : : }
4136 : :
4137 : : /* Compare REF1 and REF2 and return flags specifying their differences.
4138 : : If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4139 : : types that are going to be recomputed.
4140 : : If TBAA is true also compare TBAA metadata. */
4141 : :
4142 : : int
4143 : 135758 : ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4144 : : bool lto_streaming_safe,
4145 : : bool tbaa)
4146 : : {
4147 : 135758 : if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4148 : : return SEMANTICS;
4149 : 135758 : tree base1 = ao_ref_base (ref1);
4150 : 135758 : tree base2 = ao_ref_base (ref2);
4151 : :
4152 : 135758 : if (!known_eq (ref1->offset, ref2->offset)
4153 : 135758 : || !known_eq (ref1->size, ref2->size)
4154 : 271516 : || !known_eq (ref1->max_size, ref2->max_size))
4155 : : return SEMANTICS;
4156 : :
4157 : : /* For variable accesses we need to compare actual paths
4158 : : to check that both refs are accessing same address and the access size. */
4159 : 135755 : if (!known_eq (ref1->size, ref1->max_size))
4160 : : {
4161 : 3489 : if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4162 : 3489 : TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4163 : : return SEMANTICS;
4164 : 3489 : tree r1 = ref1->ref;
4165 : 3489 : tree r2 = ref2->ref;
4166 : :
4167 : : /* Handle toplevel COMPONENT_REFs of bitfields.
4168 : : Those are special since they are not allowed in
4169 : : ADDR_EXPR. */
4170 : 3489 : if (TREE_CODE (r1) == COMPONENT_REF
4171 : 3489 : && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4172 : : {
4173 : 0 : if (TREE_CODE (r2) != COMPONENT_REF
4174 : 0 : || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4175 : : return SEMANTICS;
4176 : 0 : tree field1 = TREE_OPERAND (r1, 1);
4177 : 0 : tree field2 = TREE_OPERAND (r2, 1);
4178 : 0 : if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4179 : 0 : DECL_FIELD_OFFSET (field2), 0)
4180 : 0 : || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4181 : 0 : DECL_FIELD_BIT_OFFSET (field2), 0)
4182 : 0 : || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4183 : 0 : || !types_compatible_p (TREE_TYPE (r1),
4184 : 0 : TREE_TYPE (r2)))
4185 : 0 : return SEMANTICS;
4186 : 0 : r1 = TREE_OPERAND (r1, 0);
4187 : 0 : r2 = TREE_OPERAND (r2, 0);
4188 : : }
4189 : 3489 : else if (TREE_CODE (r2) == COMPONENT_REF
4190 : 3489 : && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4191 : : return SEMANTICS;
4192 : :
4193 : : /* Similarly for bit field refs. */
4194 : 3489 : if (TREE_CODE (r1) == BIT_FIELD_REF)
4195 : : {
4196 : 0 : if (TREE_CODE (r2) != BIT_FIELD_REF
4197 : 0 : || !operand_equal_p (TREE_OPERAND (r1, 1),
4198 : 0 : TREE_OPERAND (r2, 1), 0)
4199 : 0 : || !operand_equal_p (TREE_OPERAND (r1, 2),
4200 : 0 : TREE_OPERAND (r2, 2), 0)
4201 : 0 : || !types_compatible_p (TREE_TYPE (r1),
4202 : 0 : TREE_TYPE (r2)))
4203 : 0 : return SEMANTICS;
4204 : 0 : r1 = TREE_OPERAND (r1, 0);
4205 : 0 : r2 = TREE_OPERAND (r2, 0);
4206 : : }
4207 : 3489 : else if (TREE_CODE (r2) == BIT_FIELD_REF)
4208 : : return SEMANTICS;
4209 : :
4210 : : /* Now we can compare the address of actual memory access. */
4211 : 3489 : if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4212 : : return SEMANTICS;
4213 : : }
4214 : : /* For constant accesses we get more matches by comparing offset only. */
4215 : 132266 : else if (!operand_equal_p (base1, base2,
4216 : : OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4217 : : return SEMANTICS;
4218 : :
4219 : : /* We can't simply use get_object_alignment_1 on the full
4220 : : reference as for accesses with variable indexes this reports
4221 : : too conservative alignment. */
4222 : 135700 : unsigned int align1, align2;
4223 : 135700 : unsigned HOST_WIDE_INT bitpos1, bitpos2;
4224 : 135700 : bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4225 : 135700 : bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4226 : : /* ??? For MEMREF get_object_alignment_1 determines aligned from
4227 : : TYPE_ALIGN but still returns false. This seem to contradict
4228 : : its description. So compare even if alignment is unknown. */
4229 : 135700 : if (known1 != known2
4230 : 135700 : || (bitpos1 != bitpos2 || align1 != align2))
4231 : : return SEMANTICS;
4232 : :
4233 : : /* Now we know that accesses are semantically same. */
4234 : 135485 : int flags = 0;
4235 : :
4236 : : /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4237 : 135485 : tree rbase1 = ref1->ref;
4238 : 135485 : if (rbase1)
4239 : 211910 : while (handled_component_p (rbase1))
4240 : 76425 : rbase1 = TREE_OPERAND (rbase1, 0);
4241 : 135485 : tree rbase2 = ref2->ref;
4242 : 211919 : while (handled_component_p (rbase2))
4243 : 76434 : rbase2 = TREE_OPERAND (rbase2, 0);
4244 : :
4245 : : /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4246 : : implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4247 : : Otherwise we need to match bases and cliques. */
4248 : 135485 : if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4249 : 71920 : && MR_DEPENDENCE_CLIQUE (rbase1))
4250 : 112711 : || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4251 : 49145 : && MR_DEPENDENCE_CLIQUE (rbase2)))
4252 : 158259 : && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4253 : 22774 : || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4254 : 22703 : || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4255 : : flags |= DEPENDENCE_CLIQUE;
4256 : :
4257 : 135485 : if (!tbaa)
4258 : : return flags;
4259 : :
4260 : : /* Alias sets are not stable across LTO sreaming; be conservative here
4261 : : and compare types the alias sets are ultimately based on. */
4262 : 135438 : if (lto_streaming_safe)
4263 : : {
4264 : 2713 : tree t1 = ao_ref_alias_ptr_type (ref1);
4265 : 2713 : tree t2 = ao_ref_alias_ptr_type (ref2);
4266 : 2713 : if (!alias_ptr_types_compatible_p (t1, t2))
4267 : 33 : flags |= REF_ALIAS_SET;
4268 : :
4269 : 2713 : t1 = ao_ref_base_alias_ptr_type (ref1);
4270 : 2713 : t2 = ao_ref_base_alias_ptr_type (ref2);
4271 : 2713 : if (!alias_ptr_types_compatible_p (t1, t2))
4272 : 40 : flags |= BASE_ALIAS_SET;
4273 : : }
4274 : : else
4275 : : {
4276 : 132725 : if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4277 : 0 : flags |= REF_ALIAS_SET;
4278 : 132725 : if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4279 : 0 : flags |= BASE_ALIAS_SET;
4280 : : }
4281 : :
4282 : : /* Access path is used only on non-view-converted references. */
4283 : 135438 : bool view_converted = view_converted_memref_p (rbase1);
4284 : 135438 : if (view_converted_memref_p (rbase2) != view_converted)
4285 : 1 : return flags | ACCESS_PATH;
4286 : 135437 : else if (view_converted)
4287 : : return flags;
4288 : :
4289 : :
4290 : : /* Find start of access paths and look for trailing arrays. */
4291 : 63522 : tree c1 = ref1->ref, c2 = ref2->ref;
4292 : 63522 : tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4293 : 63522 : int nskipped1 = 0, nskipped2 = 0;
4294 : 63522 : int i = 0;
4295 : :
4296 : 81378 : for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4297 : : {
4298 : 17856 : if (component_ref_to_zero_sized_trailing_array_p (p1))
4299 : 0 : end_struct_ref1 = p1;
4300 : 17856 : if (ends_tbaa_access_path_p (p1))
4301 : 1536 : c1 = p1, nskipped1 = i;
4302 : 17856 : i++;
4303 : : }
4304 : 81378 : for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4305 : : {
4306 : 17856 : if (component_ref_to_zero_sized_trailing_array_p (p2))
4307 : 0 : end_struct_ref2 = p2;
4308 : 17856 : if (ends_tbaa_access_path_p (p2))
4309 : 1536 : c2 = p2, nskipped1 = i;
4310 : 17856 : i++;
4311 : : }
4312 : :
4313 : : /* For variable accesses we can not rely on offset match bellow.
4314 : : We know that paths are struturally same, so only check that
4315 : : starts of TBAA paths did not diverge. */
4316 : 63522 : if (!known_eq (ref1->size, ref1->max_size)
4317 : 63522 : && nskipped1 != nskipped2)
4318 : 116 : return flags | ACCESS_PATH;
4319 : :
4320 : : /* Information about trailing refs is used by
4321 : : aliasing_component_refs_p that is applied only if paths
4322 : : has handled components.. */
4323 : 63406 : if (!handled_component_p (c1) && !handled_component_p (c2))
4324 : : ;
4325 : 12780 : else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4326 : 0 : return flags | ACCESS_PATH;
4327 : 63406 : if (end_struct_ref1
4328 : 63406 : && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4329 : 0 : != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4330 : 0 : return flags | ACCESS_PATH;
4331 : :
4332 : : /* Now compare all handled components of the access path.
4333 : : We have three oracles that cares about access paths:
4334 : : - aliasing_component_refs_p
4335 : : - nonoverlapping_refs_since_match_p
4336 : : - nonoverlapping_component_refs_p
4337 : : We need to match things these oracles compare.
4338 : :
4339 : : It is only necessary to check types for compatibility
4340 : : and offsets. Rest of what oracles compares are actual
4341 : : addresses. Those are already known to be same:
4342 : : - for constant accesses we check offsets
4343 : : - for variable accesses we already matched
4344 : : the path lexically with operand_equal_p. */
4345 : 98516 : while (true)
4346 : : {
4347 : 80961 : bool comp1 = handled_component_p (c1);
4348 : 80961 : bool comp2 = handled_component_p (c2);
4349 : :
4350 : 80961 : if (comp1 != comp2)
4351 : 0 : return flags | ACCESS_PATH;
4352 : 80961 : if (!comp1)
4353 : : break;
4354 : :
4355 : 17555 : if (TREE_CODE (c1) != TREE_CODE (c2))
4356 : 0 : return flags | ACCESS_PATH;
4357 : :
4358 : : /* aliasing_component_refs_p attempts to find type match within
4359 : : the paths. For that reason both types needs to be equal
4360 : : with respect to same_type_for_tbaa_p. */
4361 : 17555 : if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4362 : 17555 : TREE_TYPE (c2),
4363 : : lto_streaming_safe))
4364 : 0 : return flags | ACCESS_PATH;
4365 : 35110 : if (component_ref_to_zero_sized_trailing_array_p (c1)
4366 : 17555 : != component_ref_to_zero_sized_trailing_array_p (c2))
4367 : 0 : return flags | ACCESS_PATH;
4368 : :
4369 : : /* aliasing_matching_component_refs_p compares
4370 : : offsets within the path. Other properties are ignored.
4371 : : Do not bother to verify offsets in variable accesses. Here we
4372 : : already compared them by operand_equal_p so they are
4373 : : structurally same. */
4374 : 17555 : if (!known_eq (ref1->size, ref1->max_size))
4375 : : {
4376 : 2816 : poly_int64 offadj1, sztmc1, msztmc1;
4377 : 2816 : bool reverse1;
4378 : 2816 : get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4379 : 2816 : poly_int64 offadj2, sztmc2, msztmc2;
4380 : 2816 : bool reverse2;
4381 : 2816 : get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4382 : 2816 : if (!known_eq (offadj1, offadj2))
4383 : 0 : return flags | ACCESS_PATH;
4384 : : }
4385 : 17555 : c1 = TREE_OPERAND (c1, 0);
4386 : 17555 : c2 = TREE_OPERAND (c2, 0);
4387 : 17555 : }
4388 : : /* Finally test the access type. */
4389 : 63406 : if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4390 : 63406 : TREE_TYPE (c2),
4391 : : lto_streaming_safe))
4392 : 14 : return flags | ACCESS_PATH;
4393 : : return flags;
4394 : : }
4395 : :
4396 : : /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4397 : : and canonical types. */
4398 : : void
4399 : 6868164 : ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4400 : : inchash::hash &hstate)
4401 : : {
4402 : 6868164 : tree base = ao_ref_base (ref);
4403 : 6868164 : tree tbase = base;
4404 : :
4405 : 6868164 : if (!known_eq (ref->size, ref->max_size))
4406 : : {
4407 : 365248 : tree r = ref->ref;
4408 : 365248 : if (TREE_CODE (r) == COMPONENT_REF
4409 : 365248 : && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4410 : : {
4411 : 1745 : tree field = TREE_OPERAND (r, 1);
4412 : 1745 : hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4413 : 1745 : hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4414 : 1745 : hash_operand (DECL_SIZE (field), hstate, 0);
4415 : 1745 : r = TREE_OPERAND (r, 0);
4416 : : }
4417 : 365248 : if (TREE_CODE (r) == BIT_FIELD_REF)
4418 : : {
4419 : 1704 : hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4420 : 1704 : hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4421 : 1704 : r = TREE_OPERAND (r, 0);
4422 : : }
4423 : 365248 : hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4424 : 365248 : hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4425 : : }
4426 : : else
4427 : : {
4428 : 6502916 : hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4429 : 6502916 : hstate.add_poly_int (ref->offset);
4430 : 6502916 : hstate.add_poly_int (ref->size);
4431 : 6502916 : hstate.add_poly_int (ref->max_size);
4432 : : }
4433 : 6868164 : if (!lto_streaming_safe && tbaa)
4434 : : {
4435 : 6567180 : hstate.add_int (ao_ref_alias_set (ref));
4436 : 6567180 : hstate.add_int (ao_ref_base_alias_set (ref));
4437 : : }
4438 : 6868164 : }
|