Line data Source code
1 : /* Definitions of the pointer_query and related classes.
2 :
3 : Copyright (C) 2020-2026 Free Software Foundation, Inc.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "stringpool.h"
28 : #include "tree-vrp.h"
29 : #include "diagnostic-core.h"
30 : #include "fold-const.h"
31 : #include "tree-object-size.h"
32 : #include "tree-ssa-strlen.h"
33 : #include "langhooks.h"
34 : #include "stringpool.h"
35 : #include "attribs.h"
36 : #include "gimple-iterator.h"
37 : #include "gimple-fold.h"
38 : #include "gimple-ssa.h"
39 : #include "intl.h"
40 : #include "attr-fnspec.h"
41 : #include "gimple-range.h"
42 : #include "pointer-query.h"
43 : #include "tree-pretty-print.h"
44 : #include "tree-ssanames.h"
45 : #include "target.h"
46 :
47 : static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *,
48 : ssa_name_limit_t &, pointer_query *);
49 :
50 : /* Wrapper around the wide_int overload of get_range that accepts
51 : offset_int instead. For middle end expressions returns the same
52 : result. For a subset of nonconstamt expressions emitted by the front
53 : end determines a more precise range than would be possible otherwise. */
54 :
55 : static bool
56 4545927 : get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
57 : {
58 4545927 : offset_int add = 0;
59 4545927 : if (TREE_CODE (x) == PLUS_EXPR)
60 : {
61 : /* Handle constant offsets in pointer addition expressions seen
62 : n the front end IL. */
63 39 : tree op = TREE_OPERAND (x, 1);
64 39 : if (TREE_CODE (op) == INTEGER_CST)
65 : {
66 39 : op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
67 39 : add = wi::to_offset (op);
68 39 : x = TREE_OPERAND (x, 0);
69 : }
70 : }
71 :
72 4545927 : if (TREE_CODE (x) == NOP_EXPR)
73 : /* Also handle conversions to sizetype seen in the front end IL. */
74 117 : x = TREE_OPERAND (x, 0);
75 :
76 4545927 : tree type = TREE_TYPE (x);
77 4545927 : if ((!INTEGRAL_TYPE_P (type)
78 : /* ??? We get along without caring about overflow by using
79 : offset_int, but that falls apart when indexes are bigger
80 : than pointer differences. */
81 1622818 : || TYPE_PRECISION (type) > TYPE_PRECISION (ptrdiff_type_node))
82 4546112 : && !POINTER_TYPE_P (type))
83 : return false;
84 :
85 4545733 : if (TREE_CODE (x) != INTEGER_CST
86 698610 : && TREE_CODE (x) != SSA_NAME)
87 : {
88 180 : if (TYPE_UNSIGNED (type)
89 180 : && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
90 48 : type = signed_type_for (type);
91 :
92 180 : r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
93 180 : r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
94 180 : return x;
95 : }
96 :
97 22727765 : wide_int wr[2];
98 4545553 : if (!get_range (x, stmt, wr, rvals))
99 : return false;
100 :
101 4374018 : signop sgn = SIGNED;
102 : /* Only convert signed integers or unsigned sizetype to a signed
103 : offset and avoid converting large positive values in narrower
104 : types to negative offsets. */
105 4374018 : if (TYPE_UNSIGNED (type)
106 4374018 : && wr[0].get_precision () < TYPE_PRECISION (sizetype))
107 : sgn = UNSIGNED;
108 :
109 4374018 : r[0] = offset_int::from (wr[0], sgn);
110 4374018 : r[1] = offset_int::from (wr[1], sgn);
111 4374018 : return true;
112 13636659 : }
113 :
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  Set
   *PAST_END for functions like mempcpy that might return a past the end
   pointer (most functions return a dereferenceable pointer to an existing
   element of an array).  SNLIM and QRY are passed through to recursive
   object-size queries on the source arguments.  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
			  ssa_name_limit_t &snlim, pointer_query *qry)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
	/* Functions return the first argument (not a range).  */
	offrng[0] = offrng[1] = 0;
	return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
	  || !DECL_IS_OPERATOR_NEW_P (fn)
	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
	return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
	 a size_t which could be unsigned int or unsigned long.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")          // ordinary form
	  && !id_equal (fname, "_ZnwmPv")       // ordinary form
	  && !id_equal (fname, "_ZnajPv")       // array form
	  && !id_equal (fname, "_ZnamPv"))      // array form
	return NULL_TREE;

      if (gimple_call_num_args (stmt) != 2)
	return NULL_TREE;

      /* Allocation functions return a pointer to the beginning.
	 Placement new returns its second (pointer) argument.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  /* Handle the normal built-ins known to return (a pointer into)
     one of their arguments.  */
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	/* The returned pointer is in a range constrained by the smaller
	   of the upper bound of the size argument and the source object
	   size.  */
	offrng[0] = 0;
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
	if (!off_valid || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* Mempcpy may return a past-the-end pointer.  */
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	/* The result points at most just before the last of the first
	   N bytes of the first argument, hence the -1 adjustment.  */
	tree off = gimple_call_arg (stmt, 2);
	if (get_offset_range (off, stmt, offrng, qry->rvals))
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      /* The result points somewhere into the first argument but
	 the offset cannot be determined here.  */
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
	/* The result points at the terminating nul, at most just
	   before the end of the source object.  */
	access_ref aref;
	tree src = gimple_call_arg (stmt, 1);
	if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry))
	  offrng[1] = aref.sizrng[1] - 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
	/* The returned pointer is in a range between the first argument
	   and it plus the smaller of the upper bound of the size argument
	   and the source object size.  */
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, qry->rvals)
	    || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* When the source is the empty string the returned pointer is
	   a copy of the argument.  Otherwise stpcpy can also return
	   a past-the-end pointer.  */
	offrng[0] = 0;
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
286 :
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function
   declared with attribute alloc_size (whose argument may be signed), or
   to a string manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.
   QUERY, if nonnull, supplies range info; otherwise the global range
   query for the current function is used.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
		int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (!query)
    query = get_range_query (cfun);

  if (integral)
    {
      int_range_max vr;
      tree tmin, tmax;

      query->range_of_expr (vr, exp, stmt);

      if (vr.undefined_p ())
	vr.set_varying (TREE_TYPE (exp));
      /* Convert the modern multi-range to a legacy kind/min/max
	 triple for the classification below.  */
      range_type = get_legacy_range (vr, tmin, tmax);
      min = wi::to_wide (tmin);
      max = wi::to_wide (tmax);
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      /* An anti-range describes values EXP is known NOT to have;
	 pick a valid ordinary subrange on either side of it.  */
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else
	{
	  wide_int maxsize = wi::to_wide (max_object_size ());
	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
	  if (wi::eq_p (0, min - 1))
	    {
	      /* EXP is unsigned and not in the range [1, MAX].  That means
		 it's either zero or greater than MAX.  Even though 0 would
		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
		 is greater than the limit the whole range is diagnosed.  */
	      wide_int maxsize = wi::to_wide (max_object_size ());
	      if (flags & SR_ALLOW_ZERO)
		{
		  if (wi::leu_p (maxsize, max + 1)
		      || !(flags & SR_USE_LARGEST))
		    min = max = wi::zero (expprec);
		  else
		    {
		      min = max + 1;
		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		    }
		}
	      else
		{
		  min = max + 1;
		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		}
	    }
	  else if ((flags & SR_USE_LARGEST)
		   && wi::ltu_p (max + 1, maxsize))
	    {
	      /* When USE_LARGEST is set and the larger of the two subranges
		 is a valid size, use it...  */
	      min = max + 1;
	      max = maxsize;
	    }
	  else
	    {
	      /* ...otherwise use the smaller subrange.  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
439 :
440 : bool
441 50666 : get_size_range (tree exp, tree range[2], int flags /* = 0 */)
442 : {
443 50666 : return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
444 : }
445 :
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses QRY for range information, otherwise gets global
   range info.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query *qry /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* ARGIDX1/ARGIDX2 are the zero-based indices of the size and
     (optional) count arguments; UINT_MAX means "not set".  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      /* __builtin_alloca_with_align has the size as its first argument.  */
      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Decode the 1-based argument positions stored in the attribute.  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With a single constant size argument (ARGIDX2 still UINT_MAX)
     just return it converted to sizetype.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
549 :
/* For an access to an object referenced by the function parameter PTR
   of pointer type, set RNG[] to the range of byte sizes of the object
   obtained from the attribute access specification for the current
   function.
   Set STATIC_ARRAY if the array parameter has been declared [static].
   Return the function parameter on success and null otherwise.  */

static tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL || !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
605 :
606 : /* Initialize the object. */
607 :
608 19788659 : access_ref::access_ref ()
609 19788659 : : ref (), eval ([](tree x){ return x; }), deref (), ref_nullptr_p (false),
610 19788659 : trail1special (true), base0 (true), parmarray ()
611 : {
612 : /* Set to valid. */
613 19788659 : offrng[0] = offrng[1] = 0;
614 19788659 : offmax[0] = offmax[1] = 0;
615 : /* Invalidate. */
616 19788659 : sizrng[0] = sizrng[1] = -1;
617 19788659 : }
618 :
619 : /* Return the PHI node REF refers to or null if it doesn't. */
620 :
621 : gphi *
622 640707 : access_ref::phi () const
623 : {
624 640707 : if (!ref || TREE_CODE (ref) != SSA_NAME)
625 : return NULL;
626 :
627 637728 : gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
628 637728 : if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
629 : return NULL;
630 :
631 636796 : return as_a <gphi *> (def_stmt);
632 : }
633 :
/* Determine the size and offset for ARG, append it to ALL_REFS, and
   merge the result with *THIS.  Ignore ARG if SKIP_NULL is set and
   ARG refers to the null pointer.  On failure set *THIS to the most
   permissive offset and size ranges.  (Despite the historical comment
   this function returns void, not a success/failure flag.)  */

void
access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt,
		       int ostype, bool skip_null,
		       ssa_name_limit_t &snlim, pointer_query &qry)
{
  access_ref aref;
  if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry)
      || aref.sizrng[0] < 0)
    {
      /* This may be a PHI with all null pointer arguments.  Handle it
	 conservatively by setting all properties to the most permissive
	 values.  */
      base0 = false;
      offrng[0] = offrng[1] = 0;
      add_max_offset ();
      set_max_size_range ();
      return;
    }

  if (all_refs)
    {
      /* Resolve AREF through any PHI it might refer to so that the
	 individual arguments end up in ALL_REFS.  */
      access_ref dummy_ref;
      aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry);
    }

  /* Cache the result for the SSA_NAME so it need not be recomputed.  */
  if (TREE_CODE (arg) == SSA_NAME)
    qry.put_ref (arg, aref, ostype);

  if (all_refs)
    all_refs->safe_push (aref);

  aref.deref += deref;

  bool merged_parmarray = aref.parmarray;

  const bool nullp = skip_null && integer_zerop (arg);
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  offset_int minsize = sizrng[0];

  if (sizrng[0] < 0)
    {
      /* If *THIS doesn't contain a meaningful result yet set it to AREF
	 unless the argument is null and it's okay to ignore it.  */
      if (!nullp)
	*this = aref;

      /* Set if the current argument refers to one or more objects of
	 known size (or range of sizes), as opposed to referring to
	 one or more unknown object(s).  */
      const bool arg_known_size = (aref.sizrng[0] != 0
				   || aref.sizrng[1] != maxobjsize);
      if (arg_known_size)
	sizrng[0] = aref.sizrng[0];

      return;
    }

  /* Disregard null pointers in PHIs with two or more arguments.
     TODO: Handle this better!  */
  if (nullp)
    return;

  const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize);

  if (known_size && aref.sizrng[0] < minsize)
    minsize = aref.sizrng[0];

  /* Extend the size and offset of *THIS to account for AREF.  The result
     can be cached but results in false negatives.  */

  offset_int orng[2];
  if (sizrng[1] < aref.sizrng[1])
    {
      /* AREF describes the larger object; adopt it and merge the
	 previous offset range back in below.  */
      orng[0] = offrng[0];
      orng[1] = offrng[1];
      *this = aref;
    }
  else
    {
      orng[0] = aref.offrng[0];
      orng[1] = aref.offrng[1];
    }

  /* Widen the offset range to cover both.  */
  if (orng[0] < offrng[0])
    offrng[0] = orng[0];
  if (offrng[1] < orng[1])
    offrng[1] = orng[1];

  /* Reset the PHI's BASE0 flag if any of the nonnull arguments
     refers to an object at an unknown offset.  */
  if (!aref.base0)
    base0 = false;

  sizrng[0] = minsize;
  parmarray = merged_parmarray;

  return;
}
737 :
/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
   argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  if (!ref || TREE_CODE (ref) != SSA_NAME)
    return NULL;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref))
    {
      if (is_gimple_assign (def_stmt))
	{
	  /* Only MIN_EXPR/MAX_EXPR assignments are handled; merge
	     both operands the same way as PHI arguments below.  */
	  tree_code code = gimple_assign_rhs_code (def_stmt);
	  if (code != MIN_EXPR && code != MAX_EXPR)
	    return NULL_TREE;

	  access_ref aref;
	  tree arg1 = gimple_assign_rhs1 (def_stmt);
	  aref.merge_ref (all_refs, arg1, def_stmt, ostype, false,
			  *psnlim, *qry);

	  tree arg2 = gimple_assign_rhs2 (def_stmt);
	  aref.merge_ref (all_refs, arg2, def_stmt, ostype, false,
			  *psnlim, *qry);

	  if (pref && pref != this)
	    {
	      /* Preserve PREF->REF (set by the caller) while copying
		 the merged result into *PREF.  */
	      tree ref = pref->ref;
	      *pref = aref;
	      pref->ref = ref;
	    }

	  return aref.ref;
	}
    }
  else
    return NULL_TREE;

  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* Bail out (and leave no mark) if this PHI has already been visited
     or the SSA traversal limit has been reached.  */
  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  access_ref phi_ref;
  if (pref)
    {
      /* The identity of the object has not been determined yet but
	 PREF->REF is set by the caller to the PHI for convenience.
	 The size is negative/invalid and the offset is zero (it's
	 updated only after the identity of the object has been
	 established).  */
      gcc_assert (pref->sizrng[0] < 0);
      gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);

      phi_ref = *pref;
    }

  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      /* NOTE(review): PHI_ARG_REF appears unused in this loop — the
	 merge goes directly into PHI_REF; confirm before removing.  */
      access_ref phi_arg_ref;
      bool skip_null = i || i + 1 < nargs;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      phi_ref.merge_ref (all_refs, arg, phi_stmt, ostype, skip_null,
			 *psnlim, *qry);

      if (!phi_ref.base0
	  && phi_ref.sizrng[0] == 0
	  && phi_ref.sizrng[1] >= maxobjsize)
	/* When an argument results in the most permissive result,
	   the remaining arguments cannot constrain it.  Short-circuit
	   the evaluation.  */
	break;
    }

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    {
      /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
	 can be referred to later if necessary.  This is useful even if
	 they all refer to the same object.  */
      tree ref = pref->ref;
      *pref = phi_ref;
      pref->ref = ref;
    }

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}
860 :
/* Return the maximum amount of space remaining and if PMIN is non-null,
   set *PMIN to the minimum.  As a special case *PMIN is set to -1 when
   the offset is exactly just past the end of the object (see below).  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  if (sizrng[0] < 0)
    {
      /* If the identity of the object hasn't been determined return
	 the maximum size range.  */
      *pmin = 0;
      return wi::to_offset (max_object_size ());
    }

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into referenced object is zero-based (i.e., it's
	 not referenced by a pointer into middle of some unknown object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
925 :
926 : /* Return true if the offset and object size are in range for SIZE. */
927 :
928 : bool
929 607765 : access_ref::offset_in_range (const offset_int &size) const
930 : {
931 607765 : if (size_remaining () < size)
932 : return false;
933 :
934 596871 : if (base0)
935 60956 : return offmax[0] >= 0 && offmax[1] <= sizrng[1];
936 :
937 535933 : offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
938 535933 : return offmax[0] > -maxoff && offmax[1] < maxoff;
939 : }
940 :
941 : /* Add the range [MIN, MAX] to the offset range. For known objects (with
942 : zero-based offsets) at least one of whose offset's bounds is in range,
943 : constrain the other (or both) to the bounds of the object (i.e., zero
944 : and the upper bound of its size). This improves the quality of
945 : diagnostics. */
946 :
void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  /* The inverted range straddles zero: any offset is possible,
	     so the lower bound collapses to zero.  */
	  offrng[0] = 0;
	  if (offmax[0] > 0)
	    offmax[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  /* Set the minimum and maximum computed so far.  OFFMAX tracks the
     most negative/positive offsets ever seen, even if the working
     range is later constrained to the object below.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
1021 :
1022 : /* Issue one inform message describing each target of an access REF.
1023 : WRITE is set for a write access and clear for a read access. */
1024 :
void
access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const
{
  const access_ref &aref = *this;
  /* Nothing to report without an identified object.  */
  if (!aref.ref)
    return;

  if (phi ())
    {
      /* Set MAXREF to refer to the largest object and fill ALL_REFS
	 with data for all objects referenced by the PHI arguments.  */
      access_ref maxref;
      auto_vec<access_ref> all_refs;
      if (!get_ref (&all_refs, &maxref, ostype))
	return;

      if (all_refs.length ())
	{
	  /* Except for MAXREF, the rest of the arguments' offsets need not
	     reflect one added to the PHI itself.  Determine the latter from
	     MAXREF on which the result is based.  */
	  const offset_int orng[] =
	    {
	      offrng[0] - maxref.offrng[0],
	      wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
	    };

	  /* Add the final PHI's offset to that of each of the arguments
	     and recurse to issue an inform message for it.  */
	  for (unsigned i = 0; i != all_refs.length (); ++i)
	    {
	      /* Skip any PHIs; those could lead to infinite recursion.  */
	      if (all_refs[i].phi ())
		continue;

	      all_refs[i].add_offset (orng[0], orng[1]);
	      all_refs[i].inform_access (mode, ostype);
	    }
	  return;
	}
    }

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (aref.offrng[0]))
    minoff = aref.offrng[0].to_shwi ();
  else
    /* Saturate an unrepresentable lower bound at the ptrdiff_t extreme
       of the matching sign.  */
    minoff = aref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (aref.offrng[1]))
    maxoff = aref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful. */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (aref.sizrng[0])
      && wi::fits_shwi_p (aref.sizrng[1]))
    {
      minsize = aref.sizrng[0].to_shwi ();
      maxsize = aref.sizrng[1].to_shwi ();
    }

  /* SIZRNG doesn't necessarily have the same range as the allocation
     size determined by gimple_call_alloc_size ().  */
  char sizestr[80];
  if (minsize == maxsize)
    sprintf (sizestr, "%llu", minsize);
  else
    sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);

  /* Format the offset only when it adds information: an empty string
     stands for a zero offset.  */
  char offstr[80];
  if (minoff == 0
      && (maxoff == 0 || aref.sizrng[1] <= maxoff))
    offstr[0] = '\0';
  else if (minoff == maxoff)
    sprintf (offstr, "%lli", (long long) minoff);
  else
    sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);

  location_t loc = UNKNOWN_LOCATION;

  tree ref = this->ref;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref);
      if (!stmt)
	return;

      if (is_gimple_call (stmt))
	{
	  loc = gimple_location (stmt);
	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	    {
	      /* Strip the SSA_NAME suffix from the variable name and
		 recreate an identifier with the VLA's original name.  */
	      ref = gimple_call_lhs (stmt);
	      if (SSA_NAME_IDENTIFIER (ref))
		{
		  ref = SSA_NAME_IDENTIFIER (ref);
		  const char *id = IDENTIFIER_POINTER (ref);
		  size_t len = strcspn (id, ".$");
		  if (!len)
		    len = strlen (id);
		  ref = get_identifier_with_length (id, len);
		}
	    }
	  else
	    {
	      /* Except for VLAs, retrieve the allocation function.  */
	      allocfn = gimple_call_fndecl (stmt);
	      if (!allocfn)
		allocfn = gimple_call_fn (stmt);
	      if (TREE_CODE (allocfn) == SSA_NAME)
		{
		  /* For an ALLOC_CALL via a function pointer make a small
		     effort to determine the destination of the pointer.  */
		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
		  if (gimple_assign_single_p (def))
		    {
		      tree rhs = gimple_assign_rhs1 (def);
		      if (DECL_P (rhs))
			allocfn = rhs;
		      else if (TREE_CODE (rhs) == COMPONENT_REF)
			allocfn = TREE_OPERAND (rhs, 1);
		    }
		}
	    }
	}
      else if (gimple_nop_p (stmt))
	/* Handle DECL_PARM below.  */
	ref = SSA_NAME_VAR (ref);
      else if (is_gimple_assign (stmt)
	       && (gimple_assign_rhs_code (stmt) == MIN_EXPR
		   || gimple_assign_rhs_code (stmt) == MAX_EXPR))
	{
	  /* MIN or MAX_EXPR here implies a reference to a known object
	     and either an unknown or distinct one (the latter being
	     the result of an invalid relational expression).  Determine
	     the identity of the former and point to it in the note.
	     TODO: Consider merging with PHI handling.  */
	  access_ref arg_ref[2];
	  tree arg = gimple_assign_rhs1 (stmt);
	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
	  arg = gimple_assign_rhs2 (stmt);
	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);

	  /* Use the argument that references a known object with more
	     space remaining.  IDX is a bool used as an array index
	     (0 or 1).  */
	  const bool idx
	    = (!arg_ref[0].ref || !arg_ref[0].base0
	       || (arg_ref[0].base0 && arg_ref[1].base0
		   && (arg_ref[0].size_remaining ()
		       < arg_ref[1].size_remaining ())));

	  arg_ref[idx].offrng[0] = offrng[0];
	  arg_ref[idx].offrng[1] = offrng[1];
	  arg_ref[idx].inform_access (mode);
	  return;
	}
    }

  if (DECL_P (ref))
    loc = DECL_SOURCE_LOCATION (ref);
  else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
    loc = EXPR_LOCATION (ref);
  else if (TREE_CODE (ref) != IDENTIFIER_NODE
	   && TREE_CODE (ref) != SSA_NAME)
    {
      if (TREE_CODE (ref) == INTEGER_CST && ref_nullptr_p)
	{
	  if (mode == access_read_write || mode == access_write_only)
	    inform (loc, "destination object is likely at address zero");
	  else
	    inform (loc, "source object is likely at address zero");
	}
      return;
    }

  /* Issue the note, choosing the phrasing by access mode, whether
     an offset is worth mentioning, and whether the object was
     dynamically allocated (ALLOCFN is nonnull).  */
  if (mode == access_read_write || mode == access_write_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into destination object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "destination object %qE of size %s", ref, sizestr);
	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into destination object of size %s "
		"allocated by %qE", offstr, sizestr, allocfn);
      else
	inform (loc, "destination object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  if (mode == access_read_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into source object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "source object %qE of size %s", ref, sizestr);

	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into source object of size %s allocated by %qE",
		offstr, sizestr, allocfn);
      else
	inform (loc, "source object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  if (allocfn == NULL_TREE)
    {
      if (*offstr)
	inform (loc, "at offset %s into object %qE of size %s",
		offstr, ref, sizestr);
      else
	inform (loc, "object %qE of size %s", ref, sizestr);

      return;
    }

  if (*offstr)
    inform (loc,
	    "at offset %s into object of size %s allocated by %qE",
	    offstr, sizestr, allocfn);
  else
    inform (loc, "object of size %s allocated by %qE",
	    sizestr, allocfn);
}
1277 :
1278 : /* Dump *THIS to FILE. */
1279 :
1280 : void
1281 0 : access_ref::dump (FILE *file) const
1282 : {
1283 0 : for (int i = deref; i < 0; ++i)
1284 0 : fputc ('&', file);
1285 :
1286 0 : for (int i = 0; i < deref; ++i)
1287 0 : fputc ('*', file);
1288 :
1289 0 : if (gphi *phi_stmt = phi ())
1290 : {
1291 0 : fputs ("PHI <", file);
1292 0 : unsigned nargs = gimple_phi_num_args (phi_stmt);
1293 0 : for (unsigned i = 0; i != nargs; ++i)
1294 : {
1295 0 : tree arg = gimple_phi_arg_def (phi_stmt, i);
1296 0 : print_generic_expr (file, arg);
1297 0 : if (i + 1 < nargs)
1298 0 : fputs (", ", file);
1299 : }
1300 0 : fputc ('>', file);
1301 : }
1302 : else
1303 0 : print_generic_expr (file, ref);
1304 :
1305 0 : if (offrng[0] != offrng[1])
1306 0 : fprintf (file, " + [%lli, %lli]",
1307 0 : (long long) offrng[0].to_shwi (),
1308 0 : (long long) offrng[1].to_shwi ());
1309 0 : else if (offrng[0] != 0)
1310 0 : fprintf (file, " %c %lli",
1311 0 : offrng[0] < 0 ? '-' : '+',
1312 0 : (long long) offrng[0].to_shwi ());
1313 :
1314 0 : if (base0)
1315 0 : fputs (" (base0)", file);
1316 :
1317 0 : fputs ("; size: ", file);
1318 0 : if (sizrng[0] != sizrng[1])
1319 : {
1320 0 : offset_int maxsize = wi::to_offset (max_object_size ());
1321 0 : if (sizrng[0] == 0 && sizrng[1] >= maxsize)
1322 0 : fputs ("unknown", file);
1323 : else
1324 0 : fprintf (file, "[%llu, %llu]",
1325 0 : (unsigned long long) sizrng[0].to_uhwi (),
1326 0 : (unsigned long long) sizrng[1].to_uhwi ());
1327 : }
1328 0 : else if (sizrng[0] != 0)
1329 0 : fprintf (file, "%llu",
1330 0 : (unsigned long long) sizrng[0].to_uhwi ());
1331 :
1332 0 : fputc ('\n', file);
1333 0 : }
1334 :
1335 : /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1336 : when MINWRITE or MINREAD, respectively, is set. */
access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (stmt), call (), dst (), src (), mode (mode), ostype ()
{
  /* Compute the ranges of bytes written and read from the MAXWRITE/
     MAXREAD expressions (and the MINWRITE/MINREAD flags), using the
     statement for range-query context.  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
1347 :
1348 : /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1349 : when MINWRITE or MINREAD, respectively, is set. */
access_data::access_data (range_query *query, tree expr, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (), call (expr), dst (), src (), mode (mode), ostype ()
{
  /* The access is identified by a call expression rather than
     a statement, so the STMT member is null here and set_bound
     receives a null statement (no statement context for the
     range query).  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
1360 :
1361 : /* Set BNDRNG to the range of BOUND for the statement STMT. */
1362 :
void
access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
			range_query *query, gimple *stmt)
{
  /* Set the default bounds of the access and adjust below.  The upper
     bound defaults to "unlimited" (all ones).  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      /* Reduce the lower bound to 1 when the bound can be nonzero and
	 a minimum access is required, otherwise to 0 (SR_ALLOW_ZERO
	 above permits a zero lower bound).  */
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
1382 :
1383 : /* Set a bit for the PHI in VISITED and return true if it wasn't
1384 : already set. */
1385 :
bool
ssa_name_limit_t::visit_phi (tree ssa_name)
{
  /* Allocate the bitmap lazily on the first visit.  */
  if (!visited)
    visited = BITMAP_ALLOC (NULL);

  /* bitmap_set_bit returns true only when the bit was previously clear,
     i.e., false if SSA_NAME has already been visited.  */
  return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1395 :
1396 : /* Clear a bit for the PHI in VISITED. */
1397 :
void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Clear the bit so the PHI may be considered again along a different
     path.  NOTE(review): assumes VISITED was already allocated by
     a prior visit_phi call -- confirm at call sites.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1404 :
1405 : /* Return false if the SSA_NAME chain length counter has reached
1406 : the limit, otherwise increment the counter and return true. */
1407 :
1408 : bool
1409 6789015 : ssa_name_limit_t::next ()
1410 : {
1411 : /* Return a negative value to let caller avoid recursing beyond
1412 : the specified limit. */
1413 6789015 : if (ssa_def_max == 0)
1414 : return false;
1415 :
1416 6789006 : --ssa_def_max;
1417 6789006 : return true;
1418 : }
1419 :
1420 : /* If the SSA_NAME has already been "seen" return a positive value.
1421 : Otherwise add it to VISITED. If the SSA_NAME limit has been
1422 : reached, return a negative value. Otherwise return zero. */
1423 :
1424 : int
1425 136981 : ssa_name_limit_t::next_phi (tree ssa_name)
1426 : {
1427 136981 : {
1428 136981 : gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1429 : /* Return a positive value if the PHI has already been visited. */
1430 136981 : if (gimple_code (def_stmt) == GIMPLE_PHI
1431 136981 : && !visit_phi (ssa_name))
1432 : return 1;
1433 : }
1434 :
1435 : /* Return a negative value to let caller avoid recursing beyond
1436 : the specified limit. */
1437 98366 : if (ssa_def_max == 0)
1438 : return -1;
1439 :
1440 98366 : --ssa_def_max;
1441 :
1442 98366 : return 0;
1443 : }
1444 :
ssa_name_limit_t::~ssa_name_limit_t ()
{
  /* Release the bitmap lazily allocated by visit_phi, if any.  */
  if (visited)
    BITMAP_FREE (visited);
}
1450 :
1451 : /* Default ctor. Initialize object with pointers to the range_query
1452 : instance to use or null. */
1453 :
pointer_query::pointer_query (range_query *qry /* = NULL */)
  : rvals (qry), hits (), misses (), failures (), depth (), max_depth (),
    var_cache ()
{
  /* No op.  All counters and the cache are value-initialized above.  */
}
1460 :
1461 : /* Return a pointer to the cached access_ref instance for the SSA_NAME
1462 : PTR if it's there or null otherwise. */
1463 :
1464 : const access_ref *
1465 6789006 : pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1466 : {
1467 6789006 : unsigned version = SSA_NAME_VERSION (ptr);
1468 6789006 : unsigned idx = version << 1 | (ostype & 1);
1469 6789006 : if (var_cache.indices.length () <= idx)
1470 : {
1471 3004253 : ++misses;
1472 3004253 : return NULL;
1473 : }
1474 :
1475 3784753 : unsigned cache_idx = var_cache.indices[idx];
1476 3784753 : if (var_cache.access_refs.length () <= cache_idx)
1477 : {
1478 0 : ++misses;
1479 0 : return NULL;
1480 : }
1481 :
1482 3784753 : const access_ref &cache_ref = var_cache.access_refs[cache_idx];
1483 3784753 : if (cache_ref.ref)
1484 : {
1485 1790359 : ++hits;
1486 1790359 : return &cache_ref;
1487 : }
1488 :
1489 1994394 : ++misses;
1490 1994394 : return NULL;
1491 : }
1492 :
/* Retrieve the access_ref instance for a variable from the cache if it's
   there or compute it and insert it into the cache if it's nonnull.  */
1495 :
1496 : bool
1497 8502098 : pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref,
1498 : int ostype /* = 1 */)
1499 : {
1500 8502098 : const unsigned version
1501 8502098 : = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1502 :
1503 2149264 : if (version)
1504 : {
1505 2149264 : unsigned idx = version << 1 | (ostype & 1);
1506 2149264 : if (idx < var_cache.indices.length ())
1507 : {
1508 1117893 : unsigned cache_idx = var_cache.indices[idx] - 1;
1509 1117893 : if (cache_idx < var_cache.access_refs.length ()
1510 1117893 : && var_cache.access_refs[cache_idx].ref)
1511 : {
1512 0 : ++hits;
1513 0 : *pref = var_cache.access_refs[cache_idx];
1514 0 : return true;
1515 : }
1516 : }
1517 :
1518 2149264 : ++misses;
1519 : }
1520 :
1521 8502098 : if (!compute_objsize (ptr, stmt, ostype, pref, this))
1522 : {
1523 9354 : ++failures;
1524 9354 : return false;
1525 : }
1526 :
1527 : return true;
1528 : }
1529 :
1530 : /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1531 : nonnull. */
1532 :
void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero;
     the entry is stored at ACCESS_REFS[INDICES[IDX]] below (so slot
     zero of ACCESS_REFS is never used).  Not all entries are valid.  */
  if (var_cache.indices.length () <= idx)
    var_cache.indices.safe_grow_cleared (idx + 1);

  if (!var_cache.indices[idx])
    var_cache.indices[idx] = var_cache.access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  Once nonnull, the REF value must stay
     unchanged.  */
  unsigned cache_idx = var_cache.indices[idx];
  if (var_cache.access_refs.length () <= cache_idx)
    var_cache.access_refs.safe_grow_cleared (cache_idx + 1);

  access_ref &cache_ref = var_cache.access_refs[cache_idx];
  if (cache_ref.ref)
    {
      /* The slot is already populated; it must describe the same
	 reference.  */
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}
1569 :
1570 : /* Flush the cache if it's nonnull. */
1571 :
void
pointer_query::flush_cache ()
{
  /* Release (rather than merely truncate) both cache vectors so
     their memory is returned; later put_ref calls reallocate.  */
  var_cache.indices.release ();
  var_cache.access_refs.release ();
}
1578 :
1579 : /* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1580 :
void
pointer_query::dump (FILE *dump_file, bool contents /* = false */)
{
  /* First pass: count the used level-1 slots (NUSED) and the valid
     level-2 entries (NREFS) for the summary.  */
  unsigned nused = 0, nrefs = 0;
  unsigned nidxs = var_cache.indices.length ();
  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache.indices[i];
      if (!ari)
	continue;

      ++nused;

      const access_ref &aref = var_cache.access_refs[ari];
      if (!aref.ref)
	continue;

      ++nrefs;
    }

  fprintf (dump_file, "pointer_query counters:\n"
	   "  index cache size: %u\n"
	   "  index entries: %u\n"
	   "  access cache size: %u\n"
	   "  access entries: %u\n"
	   "  hits: %u\n"
	   "  misses: %u\n"
	   "  failures: %u\n"
	   "  max_depth: %u\n",
	   nidxs, nused,
	   var_cache.access_refs.length (), nrefs,
	   hits, misses, failures, max_depth);

  if (!contents || !nidxs)
    return;

  fputs ("\npointer_query cache contents:\n", dump_file);

  /* Second pass: dump each valid cached access_ref.  */
  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache.indices[i];
      if (!ari)
	continue;

      const access_ref &aref = var_cache.access_refs[ari];
      if (!aref.ref)
	continue;

      /* The level-1 cache index corresponds to the SSA_NAME_VERSION
	 shifted left by one and ORed with the Object Size Type in
	 the lowest bit.  Print the two separately.  */
      unsigned ver = i >> 1;
      unsigned ost = i & 1;

      fprintf (dump_file, "  %u.%u[%u]: ", ver, ost, ari);
      if (tree name = ssa_name (ver))
	{
	  print_generic_expr (dump_file, name);
	  fputs (" = ", dump_file);
	}
      else
	fprintf (dump_file, "  _%u = ", ver);

      aref.dump (dump_file);
    }

  fputc ('\n', dump_file);
}
1649 :
1650 : /* A helper of compute_objsize_r() to determine the size from an assignment
1651 : statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1652 : set PREF->REF to the operand with more or less space remaining,
1653 : respectively, if both refer to the same (sub)object, or to PTR if they
1654 : might not, and return true. Otherwise, if the identity of neither
1655 : operand can be determined, return false. */
1656 :
static bool
handle_min_max_size (tree ptr, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  const tree_code code = gimple_assign_rhs_code (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  tree arg1 = gimple_assign_rhs1 (stmt);
  if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry))
    {
      /* The first operand is unknown: assume an unknown base with
	 the maximum possible offset and size.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  tree arg2 = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry))
    {
      /* Likewise for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  /* I0 is a bool used as an array index below: it selects which of
     the two operands the result is primarily based on.  */
  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remaining, whichever
	     is appropriate for CODE.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];

	  if (aref[i0].ref != aref[i1].ref)
	    /* If the operands don't refer to the same (sub)object set
	       PREF->REF to the SSA_NAME from which STMT was obtained
	       so that both can be identified in a diagnostic.  */
	    pref->ref = ptr;

	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }

  /* Replace PTR->REF with the SSA_NAME to indicate the expression
     might not refer to the same (sub)object.  */
  pref->ref = ptr;
  return true;
}
1742 :
1743 : /* A helper of compute_objsize_r() to determine the size of a DECL.
1744 : Return true on success and (possibly in the future) false on failure. */
1745 :
1746 : static bool
1747 4868003 : handle_decl (tree decl, bool addr, access_ref *pref)
1748 : {
1749 4868003 : tree decl_type = TREE_TYPE (decl);
1750 :
1751 4868003 : pref->ref = decl;
1752 :
1753 : /* Reset the offset in case it was set by a prior call and not
1754 : cleared by the caller. The offset is only adjusted after
1755 : the identity of the object has been determined. */
1756 4868003 : pref->offrng[0] = pref->offrng[1] = 0;
1757 :
1758 4868003 : if (!addr && POINTER_TYPE_P (decl_type))
1759 : {
1760 : /* Set the maximum size if the reference is to the pointer
1761 : itself (as opposed to what it points to), and clear
1762 : BASE0 since the offset isn't necessarily zero-based. */
1763 48441 : pref->set_max_size_range ();
1764 48441 : pref->base0 = false;
1765 48441 : return true;
1766 : }
1767 :
1768 : /* Valid offsets into the object are nonnegative. */
1769 4819562 : pref->base0 = true;
1770 :
1771 4819562 : if (tree size = decl_init_size (decl, false))
1772 4794745 : if (TREE_CODE (size) == INTEGER_CST)
1773 : {
1774 4794617 : pref->sizrng[0] = wi::to_offset (size);
1775 4794617 : pref->sizrng[1] = pref->sizrng[0];
1776 4794617 : return true;
1777 : }
1778 :
1779 24945 : pref->set_max_size_range ();
1780 24945 : return true;
1781 : }
1782 :
1783 : /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1784 : AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1785 : on success and false on failure. */
1786 :
static bool
handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
		  access_ref *pref, ssa_name_limit_t &snlim,
		  pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  /* Determine the base object the array belongs to first.  */
  if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (TREE_CODE (lowbnd) == INTEGER_CST && !integer_zerop (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain (0 in C/C++,
	 1 in Fortran and anything in Ada) by applying the same processing
	 as in get_offset_range.  */
      const wide_int wlb = wi::to_wide (lowbnd);
      signop sgn = SIGNED;
      if (TYPE_UNSIGNED (TREE_TYPE (lowbnd))
	  && wlb.get_precision () < TYPE_PRECISION (sizetype))
	sgn = UNSIGNED;
      const offset_int lb = offset_int::from (wlb, sgn);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* Without a constant element size the byte offset is unknown.  */
      pref->add_max_offset ();
      return true;
    }

  /* Scale the index range by the element size in bytes.  */
  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	  pref->offrng[0] + orng[0] + sz,
	  pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      /* Only tighten the size range, never widen it.  */
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1866 :
/* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced
   member.  The size is determined from the member's declared type, with
   special handling for trailing/interior arrays used as poor man's
   flexible array members.  */

static void
set_component_ref_size (tree cref, access_ref *pref)
{
  const tree base = TREE_OPERAND (cref, 0);
  const tree base_type = TREE_TYPE (base);

  /* SAM is set for array members that might need special treatment.  */
  special_array_member sam;
  tree size = component_ref_size (cref, &sam);
  if (sam == special_array_member::int_0)
    /* A zero-length interior array member: size is exactly zero.  */
    pref->sizrng[0] = pref->sizrng[1] = 0;
  else if (!pref->trail1special && sam == special_array_member::trail_1)
    /* A trailing one-element array treated as an ordinary member
       (PREF->TRAIL1SPECIAL clear): size is exactly one.  */
    pref->sizrng[0] = pref->sizrng[1] = 1;
  else if (size && TREE_CODE (size) == INTEGER_CST)
    /* The common case: a member with a constant size.  */
    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
  else
    {
      /* When the size of the member is unknown it's either a flexible
	 array member or a trailing special array member (either zero
	 length or one-element).  Set the size to the maximum minus
	 the constant size of the base object's type.  */
      pref->sizrng[0] = 0;
      pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      if (tree base_size = TYPE_SIZE_UNIT (base_type))
	if (TREE_CODE (base_size) == INTEGER_CST)
	  pref->sizrng[1] -= wi::to_offset (base_size);
    }
}
1898 :
/* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
   CREF.  Return true on success and false on failure.  The size of the
   enclosing object is always computed first (with OSTYPE 0) so that
   a member stored in a too-small buffer can be detected.  */

static bool
handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype,
		      access_ref *pref, ssa_name_limit_t &snlim,
		      pointer_query *qry)
{
  gcc_assert (TREE_CODE (cref) == COMPONENT_REF);

  const tree base = TREE_OPERAND (cref, 0);
  const tree field = TREE_OPERAND (cref, 1);
  /* Work on a copy so *PREF is only updated on success.  */
  access_ref base_ref = *pref;

  /* Unconditionally determine the size of the base object (it could
     be smaller than the referenced member when the object is stored
     in a buffer with an insufficient size).  */
  if (!compute_objsize_r (base, stmt, addr, 0, &base_ref, snlim, qry))
    return false;

  /* Add the offset of the member to the offset into the object computed
     so far.  */
  tree offset = byte_position (field);
  if (TREE_CODE (offset) == INTEGER_CST)
    base_ref.add_offset (wi::to_offset (offset));
  else
    /* A non-constant byte position (e.g., in a variably modified type):
       the offset is unknown.  */
    base_ref.add_max_offset ();

  if (!base_ref.ref)
    /* PREF->REF may have been already set to an SSA_NAME earlier
       to provide better context for diagnostics.  In that case,
       leave it unchanged.  */
    base_ref.ref = base;

  const tree base_type = TREE_TYPE (base);
  if (TREE_CODE (base_type) == UNION_TYPE)
    /* In accesses through union types consider the entire unions
       rather than just their members.  */
    ostype = 0;

  if (ostype == 0)
    {
      /* In OSTYPE zero (for raw memory functions like memcpy), use
	 the maximum size instead if the identity of the enclosing
	 object cannot be determined.  */
      *pref = base_ref;
      return true;
    }

  /* For the stricter OSTYPEs refer to the member itself...  */
  pref->ref = field;

  if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
    {
      /* Set maximum size if the reference is to the pointer member
	 itself (as opposed to what it points to).  */
      pref->set_max_size_range ();
      return true;
    }

  /* ...and to the member's size.  */
  set_component_ref_size (cref, pref);

  if (base_ref.size_remaining () < pref->size_remaining ())
    /* Use the base object if it's smaller than the member.  */
    *pref = base_ref;

  return true;
}
1966 :
/* A helper of compute_objsize_r() to determine the size from MEM_REF
   MREF.  Return true on success and false on failure.  */

static bool
handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
		ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (mref) == MEM_REF);

  tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref));
  if (VECTOR_TYPE_P (mreftype))
    {
      /* Hack: Handle MEM_REFs of vector types as those to complete
	 objects; those may be synthesized from multiple assignments
	 to consecutive data members (see PR 93200 and 96963).
	 FIXME: Vectorized assignments should only be present after
	 vectorization so this hack is only necessary after it has
	 run and could be avoided in calls from prior passes (e.g.,
	 tree-ssa-strlen.cc).
	 FIXME: Deal with this more generally, e.g., by marking up
	 such MEM_REFs at the time they're created.  */
      ostype = 0;
    }

  /* Determine the object the MEM_REF's pointer operand refers to.  */
  tree mrefop = TREE_OPERAND (mref, 0);
  if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry))
    return false;

  /* Account for the dereference implied by the MEM_REF.  */
  ++pref->deref;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (mref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
2010 :
/* A helper of compute_objsize_r() to determine the size from SSA_NAME
   PTR.  Return true on success and false on failure.
   Successful results for most defining statements are cached in QRY.
   NOTE(review): the put_ref calls below dereference QRY unconditionally
   while QRY->RVALS accesses are null-guarded — confirm all callers
   reaching those paths pass a nonnull query.  */

static bool
handle_ssa_name (tree ptr, bool addr, int ostype,
		 access_ref *pref, ssa_name_limit_t &snlim,
		 pointer_query *qry)
{
  /* Only process an SSA_NAME if the recursion limit has not yet
     been reached.  */
  if (!snlim.next ())
    return false;

  if (qry)
    {
      /* Track the maximum recursion depth for statistics.  */
      if (++qry->depth > qry->max_depth)
	qry->max_depth = qry->depth;
      /* Return a previously cached result if one exists.  */
      if (const access_ref *cache_ref = qry->get_ref (ptr, ostype))
	{
	  /* Add the number of DEREFerences accumulated so far.  */
	  const int deref = pref->deref;
	  *pref = *cache_ref;
	  pref->deref += deref;
	  return true;
	}
    }

  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  if (is_gimple_call (stmt))
    {
      /* If STMT is a call to an allocation function get the size
	 from its argument(s).  If successful, also set *PREF->REF
	 to PTR for the caller to include in diagnostics.  */
      wide_int wr[2];
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (gimple_call_alloc_size (stmt, wr, rvals))
	{
	  pref->ref = ptr;
	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	  /* Constrain both bounds to a valid size.  */
	  offset_int maxsize = wi::to_offset (max_object_size ());
	  if (pref->sizrng[0] > maxsize)
	    pref->sizrng[0] = maxsize;
	  if (pref->sizrng[1] > maxsize)
	    pref->sizrng[1] = maxsize;
	}
      else
	{
	  /* For functions known to return one of their pointer arguments
	     try to determine what the returned pointer points to, and on
	     success add OFFRNG which was set to the offset added by
	     the function (e.g., memchr) to the overall offset.  */
	  bool past_end;
	  offset_int offrng[2];
	  if (tree ret = gimple_call_return_array (stmt, offrng, &past_end,
						   snlim, qry))
	    {
	      if (!compute_objsize_r (ret, stmt, addr, ostype, pref, snlim, qry))
		return false;

	      /* Cap OFFRNG[1] to at most the remaining size of
		 the object.  */
	      offset_int remrng[2];
	      remrng[1] = pref->size_remaining (remrng);
	      if (remrng[1] != 0 && !past_end)
		/* Decrement the size for functions that never return
		   a past-the-end pointer.  */
		remrng[1] -= 1;

	      if (remrng[1] < offrng[1])
		offrng[1] = remrng[1];
	      pref->add_offset (offrng[0], offrng[1]);
	    }
	  else
	    {
	      /* For other calls that might return arbitrary pointers
		 including into the middle of objects set the size
		 range to maximum, clear PREF->BASE0, and also set
		 PREF->REF to include in diagnostics.  */
	      pref->set_max_size_range ();
	      pref->base0 = false;
	      pref->ref = ptr;
	    }
	}
      /* Cache the result for subsequent queries of the same name.  */
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (gimple_nop_p (stmt))
    {
      /* For a function argument try to determine the byte size
	 of the array from the current function declaration
	 (e.g., attribute access or related).  */
      wide_int wr[2];
      bool static_array = false;
      if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	{
	  pref->parmarray = !static_array;
	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	  pref->ref = ref;
	  qry->put_ref (ptr, *pref, ostype);
	  return true;
	}

      /* Otherwise the parameter points to an unknown object.  */
      pref->set_max_size_range ();
      pref->base0 = false;
      pref->ref = ptr;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      /* Pass PTR to get_ref() via PREF.  If all PHI arguments refer
	 to the same object the function will replace it with it.  */
      pref->ref = ptr;
      access_ref phi_ref = *pref;
      if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	return false;
      *pref = phi_ref;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (!is_gimple_assign (stmt))
    {
      /* Clear BASE0 since the assigned pointer might point into
	 the middle of the object, set the maximum size range and,
	 if the SSA_NAME refers to a function argument, set
	 PREF->REF to it.  */
      pref->base0 = false;
      pref->set_max_size_range ();
      pref->ref = ptr;
      return true;
    }

  tree_code code = gimple_assign_rhs_code (stmt);

  if (code == MAX_EXPR || code == MIN_EXPR)
    {
      /* PTR is the result of a MIN/MAX of two pointers; let the helper
	 merge the results for both operands.  */
      if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
	return false;

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  tree rhs = gimple_assign_rhs1 (stmt);

  if (code == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
    {
      /* Compute the size of the object first. */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      /* Then add the offset (the second operand) to it.  */
      offset_int orng[2];
      tree off = gimple_assign_rhs2 (stmt);
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (get_offset_range (off, stmt, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (code == ADDR_EXPR || code == SSA_NAME)
    {
      /* A plain copy of an address or another SSA_NAME: recurse into
	 the right-hand side.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs)))
    {
      /* When determining the qualifiers follow the pointer but
	 avoid caching the result.  As the pointer is added to
	 and/or dereferenced the computed size and offset need
	 not be meaningful for other queries involving the same
	 pointer.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      rhs = pref->ref;
    }

  /* (This could also be an assignment from a nonlocal pointer.)  Save
     PTR to mention in diagnostics but otherwise treat it as a pointer
     to an unknown object.  */
  pref->ref = rhs;
  pref->base0 = false;
  pref->set_max_size_range ();
  return true;
}
2210 :
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   ADDR is true for an enclosing ADDR_EXPR.
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype,
		   access_ref *pref, ssa_name_limit_t &snlim,
		   pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* A declaration (variable, parameter, etc.) is handled directly.  */
  if (DECL_P (ptr))
    return handle_decl (ptr, addr, pref);

  /* Dispatch on the form of the expression.  */
  switch (TREE_CODE (ptr))
    {
    case ADDR_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, true, ostype, pref, snlim, qry))
	  return false;

	/* Taking the address undoes one level of dereferencing.  */
	--pref->deref;
	return true;
      }

    case BIT_FIELD_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* Operand 2 is the bit position; convert it to bytes.  */
	offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
	pref->add_offset (off / BITS_PER_UNIT);
	return true;
      }

    case ARRAY_REF:
      return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case COMPONENT_REF:
      return handle_component_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case MEM_REF:
      return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);

    case TARGET_MEM_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* TODO: Handle remaining operands.  Until then, add maximum offset. */
	pref->ref = ptr;
	pref->add_max_offset ();
	return true;
      }

    case INTEGER_CST:
      /* Pointer constants other than null smaller than param_min_pagesize
	 might be the result of erroneous null pointer addition/subtraction.
	 Unless zero is a valid address set size to zero.  For null pointers,
	 set size to the maximum for now since those may be the result of
	 jump threading.  Similarly, for values >= param_min_pagesize in
	 order to support (type *) 0x7cdeab00.  */
      if (integer_zerop (ptr)
	  || wi::to_widest (ptr) >= param_min_pagesize)
	pref->set_max_size_range ();
      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
	  if (targetm.addr_space.zero_address_valid (as))
	    pref->set_max_size_range ();
	  else
	    {
	      /* A small nonzero constant pointer: treat it as invalid
		 (zero size) and remember it looked like null + offset.  */
	      pref->sizrng[0] = pref->sizrng[1] = 0;
	      pref->ref_nullptr_p = true;
	    }
	}
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;

      pref->ref = ptr;
      return true;

    case STRING_CST:
      /* The size of a string literal including its terminating nul.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;

    case POINTER_PLUS_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* The below only makes sense if the offset is being applied to the
	   address of the object.  */
	if (pref->deref != -1)
	  return false;

	offset_int orng[2];
	tree off = pref->eval (TREE_OPERAND (ptr, 1));
	/* NOTE(review): unlike other uses of QRY->RVALS in this file this
	   one is not guarded by a null check of QRY — confirm QRY is
	   always nonnull on this path.  */
	if (get_offset_range (off, stmt, orng, qry->rvals))
	  pref->add_offset (orng[0], orng[1]);
	else
	  pref->add_max_offset ();
	return true;
      }

    case VIEW_CONVERT_EXPR:
      /* Look through the conversion to the underlying operand.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry);

    case SSA_NAME:
      return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry);

    default:
      break;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    /* NOTE(review): this looks unreachable — SSA_NAME returns from
       the switch above — confirm and consider removing.  */
    qry->put_ref (ptr, *pref);
  return true;
}
2354 :
2355 : /* A "public" wrapper around the above. Clients should use this overload
2356 : instead. */
2357 :
2358 : tree
2359 10578271 : compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2360 : pointer_query *ptr_qry)
2361 : {
2362 10578271 : pointer_query qry;
2363 10578271 : if (ptr_qry)
2364 10577157 : ptr_qry->depth = 0;
2365 : else
2366 : ptr_qry = &qry;
2367 :
2368 : /* Clear and invalidate in case *PREF is being reused. */
2369 10578271 : pref->offrng[0] = pref->offrng[1] = 0;
2370 10578271 : pref->sizrng[0] = pref->sizrng[1] = -1;
2371 :
2372 10578271 : ssa_name_limit_t snlim;
2373 10578271 : if (!compute_objsize_r (ptr, stmt, false, ostype, pref, snlim, ptr_qry))
2374 : return NULL_TREE;
2375 :
2376 10568836 : offset_int maxsize = pref->size_remaining ();
2377 10568836 : if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2378 103 : pref->offrng[0] = 0;
2379 10568836 : return wide_int_to_tree (sizetype, maxsize);
2380 10578271 : }
2381 :
2382 : /* Transitional wrapper. The function should be removed once callers
2383 : transition to the pointer_query API. */
2384 :
2385 : tree
2386 615960 : compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2387 : range_query *rvals /* = NULL */)
2388 : {
2389 615960 : pointer_query qry;
2390 615960 : qry.rvals = rvals;
2391 615960 : return compute_objsize (ptr, stmt, ostype, pref, &qry);
2392 615960 : }
2393 :
2394 : /* Legacy wrapper around the above. The function should be removed
2395 : once callers transition to one of the two above. */
2396 :
2397 : tree
2398 0 : compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */,
2399 : tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2400 : {
2401 : /* Set the initial offsets to zero and size to negative to indicate
2402 : none has been computed yet. */
2403 0 : access_ref ref;
2404 0 : tree size = compute_objsize (ptr, stmt, ostype, &ref, rvals);
2405 0 : if (!size || !ref.base0)
2406 : return NULL_TREE;
2407 :
2408 0 : if (pdecl)
2409 0 : *pdecl = ref.ref;
2410 :
2411 0 : if (poff)
2412 0 : *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2413 :
2414 : return size;
2415 : }
2416 :
/* Determine the offset *FLDOFF of the first byte of a struct member
   of TYPE (possibly recursively) into which the byte offset OFF points,
   starting after the field START_AFTER if it's non-null.  On success,
   if nonnull, set *FLDOFF to the offset of the first byte, and return
   the field decl.  If nonnull, set *NEXTOFF to the offset of the next
   field (which reflects any padding between the returned field and
   the next).  Otherwise, if no such member can be found, return null.  */

tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  /* Substitute local buffers for null output arguments so the rest
     of the function can store through the pointers unconditionally.  */
  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  *nextoff = 0;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset (negative until first computed).  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      next_fld = fld;
      do
	/* Advance to the next relevant data member.  */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      /* Skip artificial and non-FIELD_DECL entries on the member
	 chain.  */
      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      tree typesize = TYPE_SIZE_UNIT (fldtype);
      if (typesize && TREE_CODE (typesize) != INTEGER_CST)
	/* Bail if FLD is a variable length member.  */
	return NULL_TREE;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success.  */
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  tree pos = byte_position (next_fld);
	  if (!tree_fits_shwi_p (pos))
	    /* Bail if NEXT_FLD is a variable length member.  */
	    return NULL_TREE;
	  next_pos = tree_to_shwi (pos);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	/* No next field: mark *NEXTOFF as unset for the fixup below.  */
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLDPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the
	 next field is greater (i.e., there is padding between the two)
	 or if there is no next field.  */
      if (off < fldend)
	break;
    }

  /* If the loop ended without computing the next field's offset use
     NEXT_POS cached above.  */
  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
2541 :
/* Determine the offset *ELTOFF of the first byte of the array element
   of array ARTYPE into which the byte offset OFF points.  On success
   set *ELTOFF to the offset of the first byte and return type.
   Otherwise, if no such element can be found, return null.
   If nonnull, *SUBAR_SIZE is set to the size in bytes of the innermost
   array type of ARTYPE (possibly ARTYPE itself).  */

tree
array_elt_at_offset (tree artype, HOST_WIDE_INT off,
		     HOST_WIDE_INT *eltoff /* = nullptr */,
		     HOST_WIDE_INT *subar_size /* = nullptr */)
{
  gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);

  /* Substitute a local dummy for null output arguments.  */
  HOST_WIDE_INT dummy;
  if (!eltoff)
    eltoff = &dummy;
  if (!subar_size)
    subar_size = &dummy;

  /* Descend to the innermost array type of a multidimensional array.  */
  tree eltype = artype;
  while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
    eltype = TREE_TYPE (eltype);

  tree subartype = eltype;
  /* For elements that are aggregates or whose mode differs from a plain
     byte, descend one more level to the element type itself.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
      || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
    eltype = TREE_TYPE (eltype);

  *subar_size = int_size_in_bytes (subartype);

  if (eltype == artype)
    {
      /* No descent happened: OFF points into the first (and only
	 relevant) element at offset zero.  */
      *eltoff = 0;
      return artype;
    }

  HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
  HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);

  if (off < artype_size)
    {
      /* Round OFF down to the start of the element containing it.  */
      *eltoff = (off / eltype_size) * eltype_size;
      return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
    }

  return NULL_TREE;
}
2588 :
/* Wrapper around build_array_type_nelts that makes sure the array
   can be created at all and handles zero sized arrays specially.  */

tree
build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
{
  /* Cannot build an array type of functions or methods without
     an error diagnostic.  Hand-craft a zero-sized ARRAY_TYPE node
     instead.  */
  if (FUNC_OR_METHOD_TYPE_P (eltype))
    {
      tree arrtype = make_node (ARRAY_TYPE);
      TREE_TYPE (arrtype) = eltype;
      TYPE_SIZE (arrtype) = bitsize_zero_node;
      TYPE_SIZE_UNIT (arrtype) = size_zero_node;
      return arrtype;
    }

  /* When the element's size isn't a multiple of its alignment (checked
     via the low bits selected by ffs of the alignment), substitute the
     type's main variant — presumably to avoid creating an array of an
     over-aligned variant that can't be laid out; confirm.  */
  if (TYPE_SIZE_UNIT (eltype)
      && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
      && !integer_zerop (TYPE_SIZE_UNIT (eltype))
      && TYPE_ALIGN_UNIT (eltype) > 1
      && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
    eltype = TYPE_MAIN_VARIANT (eltype);

  /* Consider excessive NELTS an array of unknown bound.  */
  tree idxtype = NULL_TREE;
  if (nelts < HOST_WIDE_INT_MAX)
    {
      if (nelts)
	return build_array_type_nelts (eltype, nelts);
      /* NELTS == 0: build an index type for the zero-length case.  */
      idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
    }

  /* Zero-length or unknown-bound array: build a distinct copy and
     force its size to zero.  */
  tree arrtype = build_array_type (eltype, idxtype);
  arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
  TYPE_SIZE (arrtype) = bitsize_zero_node;
  TYPE_SIZE_UNIT (arrtype) = size_zero_node;
  return arrtype;
}
|