Branch data Line data Source code
1 : : /* __builtin_object_size (ptr, object_size_type) computation
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : : Contributed by Jakub Jelinek <jakub@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "ssa.h"
29 : : #include "gimple-pretty-print.h"
30 : : #include "fold-const.h"
31 : : #include "tree-object-size.h"
32 : : #include "gimple-iterator.h"
33 : : #include "gimple-fold.h"
34 : : #include "tree-cfg.h"
35 : : #include "tree-dfa.h"
36 : : #include "stringpool.h"
37 : : #include "attribs.h"
38 : : #include "builtins.h"
39 : : #include "gimplify-me.h"
40 : : #include "gimplify.h"
41 : : #include "tree-ssa-dce.h"
42 : :
43 : : struct object_size_info
44 : : {
45 : : int object_size_type;
46 : : unsigned char pass;
47 : : bool changed;
48 : : bitmap visited, reexamine;
49 : : unsigned int *depths;
50 : : unsigned int *stack, *tos;
51 : : };
52 : :
53 : : struct GTY(()) object_size
54 : : {
55 : : /* Estimate of bytes till the end of the object. */
56 : : tree size;
57 : : /* Estimate of the size of the whole object. */
58 : : tree wholesize;
59 : : };
60 : :
61 : : static tree compute_object_offset (tree, const_tree);
62 : : static bool addr_object_size (struct object_size_info *,
63 : : const_tree, int, tree *, tree *t = NULL);
64 : : static tree alloc_object_size (const gcall *, int);
65 : : static tree access_with_size_object_size (const gcall *, int);
66 : : static tree pass_through_call (const gcall *);
67 : : static void collect_object_sizes_for (struct object_size_info *, tree);
68 : : static void expr_object_size (struct object_size_info *, tree, tree);
69 : : static bool merge_object_sizes (struct object_size_info *, tree, tree);
70 : : static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
71 : : static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
72 : : static void init_offset_limit (void);
73 : : static void check_for_plus_in_loops (struct object_size_info *, tree);
74 : : static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
75 : : unsigned int);
76 : :
77 : : /* object_sizes[0] is upper bound for the object size and number of bytes till
78 : : the end of the object.
79 : : object_sizes[1] is upper bound for the object size and number of bytes till
80 : : the end of the subobject (innermost array or field with address taken).
81 : : object_sizes[2] is lower bound for the object size and number of bytes till
82 : : the end of the object and object_sizes[3] lower bound for subobject.
83 : :
84 : : For static object sizes, the object size and the bytes till the end of the
85 : : object are both INTEGER_CST. In the dynamic case, they end up as either a
86 : : gimple variable or an INTEGER_CST. */
87 : : static vec<object_size> object_sizes[OST_END];
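/* A hypothetical example (illustration only, not part of this file) of how
   the four object size types above differ for a pointer into a subobject;
   the struct, variable and folded values below are assumptions for a target
   with 4-byte int alignment.  */
#if 0
#include <stddef.h>

struct A { char buf[10]; int i; } a;	/* sizeof (a) == 16 assumed.  */

size_t
example (void)
{
  size_t s0 = __builtin_object_size (&a.buf[2], 0);  /* 14: max, whole object.  */
  size_t s1 = __builtin_object_size (&a.buf[2], 1);  /* 8: max, subobject buf.  */
  size_t s2 = __builtin_object_size (&a.buf[2], 2);  /* 14: min, whole object.  */
  size_t s3 = __builtin_object_size (&a.buf[2], 3);  /* 8: min, subobject buf.  */
  return s0 + s1 + s2 + s3;
}
#endif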
88 : :
89 : : /* Bitmaps recording which object sizes have already been computed. */
90 : : static bitmap computed[OST_END];
91 : :
92 : : /* Maximum value of an offset that we still consider to be a valid addition. */
93 : : static unsigned HOST_WIDE_INT offset_limit;
94 : :
95 : : /* Tell the generic SSA updater what kind of update is needed after the pass
96 : : executes. */
97 : : static unsigned todo;
98 : :
99 : : /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
100 : :
101 : : static inline bool
102 : 5434 : size_initval_p (tree val, int object_size_type)
103 : : {
104 : 5434 : return ((object_size_type & OST_MINIMUM)
105 : 5434 : ? integer_all_onesp (val) : integer_zerop (val));
106 : : }
107 : :
108 : : /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
109 : :
110 : : static inline bool
111 : 99374 : size_unknown_p (tree val, int object_size_type)
112 : : {
113 : 99374 : return ((object_size_type & OST_MINIMUM)
114 : 98278 : ? integer_zerop (val) : integer_all_onesp (val));
115 : : }
116 : :
117 : : /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
118 : :
119 : : static inline bool
120 : 53534 : size_valid_p (tree val, int object_size_type)
121 : : {
122 : 51483 : return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
123 : : }
124 : :
125 : : /* Return true if VAL is usable as an object size in the object_sizes
126 : : vectors. */
127 : :
128 : : static inline bool
129 : 10093 : size_usable_p (tree val)
130 : : {
131 : 8967 : return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
132 : : }
133 : :
134 : : /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
135 : :
136 : : static inline tree
137 : 11568 : size_initval (int object_size_type)
138 : : {
139 : 11568 : return ((object_size_type & OST_MINIMUM)
140 : 11568 : ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
141 : : }
142 : :
143 : : /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
144 : :
145 : : static inline tree
146 : 165015 : size_unknown (int object_size_type)
147 : : {
148 : 165015 : return ((object_size_type & OST_MINIMUM)
149 : 165015 : ? size_zero_node : TYPE_MAX_VALUE (sizetype));
150 : : }
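/* Taken together, the helpers above encode a sentinel convention (noted here
   for illustration): maximum-size computations (OST_MINIMUM clear) start from
   size_zero_node and use TYPE_MAX_VALUE (sizetype) as the "unknown" marker,
   while minimum-size computations start from TYPE_MAX_VALUE (sizetype) and
   use size_zero_node as "unknown".  E.g. size_unknown (0) is
   TYPE_MAX_VALUE (sizetype) whereas size_unknown (OST_MINIMUM) is 0.  */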
151 : :
152 : : /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
153 : :
154 : : static inline void
155 : 31233 : object_sizes_grow (int object_size_type)
156 : : {
157 : 71939 : if (num_ssa_names > object_sizes[object_size_type].length ())
158 : 23890 : object_sizes[object_size_type].safe_grow (num_ssa_names, true);
159 : 31233 : }
160 : :
161 : : /* Release object_sizes[OBJECT_SIZE_TYPE]. */
162 : :
163 : : static inline void
164 : 27639536 : object_sizes_release (int object_size_type)
165 : : {
166 : 27639536 : object_sizes[object_size_type].release ();
167 : : }
168 : :
169 : : /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
170 : :
171 : : static inline bool
172 : 20134 : object_sizes_unknown_p (int object_size_type, unsigned varno)
173 : : {
174 : 20134 : return size_unknown_p (object_sizes[object_size_type][varno].size,
175 : 20134 : object_size_type);
176 : : }
177 : :
178 : : /* Return the raw size expression for VARNO corresponding to OSI. This returns
179 : : the TREE_VEC as is and should only be used during gimplification. */
180 : :
181 : : static inline object_size
182 : 822 : object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
183 : : {
184 : 822 : gcc_assert (osi->pass != 0);
185 : 822 : return object_sizes[osi->object_size_type][varno];
186 : : }
187 : :
188 : : /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
189 : : the whole object size. Use this for building size expressions based on size
190 : : of VARNO. */
191 : :
192 : : static inline tree
193 : 25144 : object_sizes_get (struct object_size_info *osi, unsigned varno,
194 : : bool whole = false)
195 : : {
196 : 25144 : tree ret;
197 : 25144 : int object_size_type = osi->object_size_type;
198 : :
199 : 25144 : if (whole)
200 : 5121 : ret = object_sizes[object_size_type][varno].wholesize;
201 : : else
202 : 20023 : ret = object_sizes[object_size_type][varno].size;
203 : :
204 : 25144 : if (object_size_type & OST_DYNAMIC)
205 : : {
206 : 7025 : if (TREE_CODE (ret) == MODIFY_EXPR)
207 : 491 : return TREE_OPERAND (ret, 0);
208 : 6534 : else if (TREE_CODE (ret) == TREE_VEC)
209 : 410 : return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
210 : : else
211 : 6124 : gcc_checking_assert (size_usable_p (ret));
212 : : }
213 : :
214 : : return ret;
215 : : }
216 : :
217 : : /* Set size for VARNO corresponding to OSI to VAL. */
218 : :
219 : : static inline void
220 : 11979 : object_sizes_initialize (struct object_size_info *osi, unsigned varno,
221 : : tree val, tree wholeval)
222 : : {
223 : 11979 : int object_size_type = osi->object_size_type;
224 : :
225 : 11979 : object_sizes[object_size_type][varno].size = val;
226 : 11979 : object_sizes[object_size_type][varno].wholesize = wholeval;
227 : 11979 : }
228 : :
229 : : /* Return a MODIFY_EXPR binding NAME (the SSA name that will hold the size) to
230 : : the size expression EXPR. A TREE_VEC is returned instead only for PHI nodes. */
231 : :
232 : : static tree
233 : 506 : bundle_sizes (tree name, tree expr)
234 : : {
235 : 506 : gcc_checking_assert (TREE_TYPE (name) == sizetype);
236 : :
237 : 506 : if (TREE_CODE (expr) == TREE_VEC)
238 : : {
239 : 263 : TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
240 : 263 : return expr;
241 : : }
242 : :
243 : 243 : gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
244 : 243 : return build2 (MODIFY_EXPR, sizetype, name, expr);
245 : : }
246 : :
247 : : /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
248 : : maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
249 : : throughout the computation. For dynamic sizes, each element may either be a
250 : : gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
251 : : expressions that need to be gimplified. TREE_VECs are special, they're
252 : : emitted only for GIMPLE_PHI and the PHI result variable is the last element
253 : : of the vector. */
254 : :
255 : : static bool
256 : 14401 : object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
257 : : tree wholeval)
258 : : {
259 : 14401 : int object_size_type = osi->object_size_type;
260 : 14401 : object_size osize = object_sizes[object_size_type][varno];
261 : 14401 : bool changed = true;
262 : :
263 : 14401 : tree oldval = osize.size;
264 : 14401 : tree old_wholeval = osize.wholesize;
265 : :
266 : 14401 : if (object_size_type & OST_DYNAMIC)
267 : : {
268 : 2744 : if (bitmap_bit_p (osi->reexamine, varno))
269 : : {
270 : 68 : val = bundle_sizes (oldval, val);
271 : 68 : wholeval = bundle_sizes (old_wholeval, wholeval);
272 : : }
273 : : else
274 : : {
275 : 2676 : gcc_checking_assert (size_initval_p (oldval, object_size_type));
276 : 2676 : gcc_checking_assert (size_initval_p (old_wholeval,
277 : : object_size_type));
278 : : /* For dynamic object sizes, all object sizes that are not gimple
279 : : variables will need to be gimplified. */
280 : 2676 : if (wholeval != val && !size_usable_p (wholeval))
281 : : {
282 : 55 : bitmap_set_bit (osi->reexamine, varno);
283 : 55 : wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
284 : : }
285 : 2676 : if (!size_usable_p (val))
286 : : {
287 : 315 : bitmap_set_bit (osi->reexamine, varno);
288 : 315 : tree newval = bundle_sizes (make_ssa_name (sizetype), val);
289 : 315 : if (val == wholeval)
290 : 119 : wholeval = newval;
291 : : val = newval;
292 : : }
293 : : /* If the new value is a temporary variable, mark it for
294 : : reexamination. */
295 : 2361 : else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
296 : 90 : bitmap_set_bit (osi->reexamine, varno);
297 : : }
298 : : }
299 : : else
300 : : {
301 : 9084 : enum tree_code code = (object_size_type & OST_MINIMUM
302 : 11657 : ? MIN_EXPR : MAX_EXPR);
303 : :
304 : 11657 : val = size_binop (code, val, oldval);
305 : 11657 : wholeval = size_binop (code, wholeval, old_wholeval);
306 : 11657 : changed = (tree_int_cst_compare (val, oldval) != 0
307 : 11657 : || tree_int_cst_compare (old_wholeval, wholeval) != 0);
308 : : }
309 : :
310 : 14401 : object_sizes[object_size_type][varno].size = val;
311 : 14401 : object_sizes[object_size_type][varno].wholesize = wholeval;
312 : :
313 : 14401 : return changed;
314 : : }
315 : :
316 : : /* Set temporary SSA names for object size and whole size to resolve dependency
317 : : loops in dynamic size computation. */
318 : :
319 : : static inline void
320 : 82 : object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
321 : : {
322 : 82 : tree val = object_sizes_get (osi, varno);
323 : :
324 : 82 : if (size_initval_p (val, osi->object_size_type))
325 : 68 : object_sizes_set (osi, varno,
326 : : make_ssa_name (sizetype),
327 : : make_ssa_name (sizetype));
328 : 82 : }
329 : :
330 : : /* Initialize OFFSET_LIMIT variable. */
331 : : static void
332 : 3791 : init_offset_limit (void)
333 : : {
334 : 3791 : if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
335 : 3791 : offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
336 : : else
337 : 0 : offset_limit = -1;
338 : 3791 : offset_limit /= 2;
339 : 3791 : }
340 : :
341 : : /* Return the bytes remaining in an object of size SZ from offset OFFSET. If
342 : : WHOLESIZE is not NULL_TREE, use it to compute the net offset of the pointer,
343 : : which should always be positive and hence within OFFSET_LIMIT for valid offsets. */
344 : :
345 : : static tree
346 : 12653 : size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE,
347 : : bool strict = true)
348 : : {
349 : 12653 : gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
350 : :
351 : : /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
352 : : offset from the whole object. */
353 : 12653 : if (wholesize && wholesize != sz
354 : 12653 : && (TREE_CODE (sz) != INTEGER_CST
355 : 345 : || TREE_CODE (wholesize) != INTEGER_CST
356 : 345 : || tree_int_cst_compare (sz, wholesize)))
357 : : {
358 : 439 : gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
359 : : sizetype));
360 : :
361 : : /* Restructure SZ - OFFSET as
362 : : WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
363 : : WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
364 : 439 : tree tmp = size_binop (MAX_EXPR, wholesize, sz);
365 : 439 : offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
366 : 439 : offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
367 : 439 : sz = tmp;
368 : : }
369 : :
370 : : /* Safe to convert now, since a valid net offset should be non-negative. */
371 : 12653 : if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
372 : 3095 : offset = fold_convert (sizetype, offset);
373 : :
374 : 12653 : if (TREE_CODE (offset) == INTEGER_CST)
375 : : {
376 : 12540 : if (integer_zerop (offset))
377 : : return sz;
378 : :
379 : : /* A negative or too large offset, even after adjustment, cannot be within the
380 : : bounds of an object. The exception here is when the base object size
381 : : has been overestimated (e.g. through PHI nodes or a COND_EXPR) and the
382 : : adjusted offset remains negative. If the caller wants to be
383 : : permissive, return the base size. */
384 : 6398 : if (compare_tree_int (offset, offset_limit) > 0)
385 : : {
386 : 41 : if (strict)
387 : 28 : return size_zero_node;
388 : : else
389 : : return sz;
390 : : }
391 : : }
392 : :
393 : 6470 : return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
394 : : }
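/* A worked example of the restructuring above, with illustrative values:
   for SZ = 4, WHOLESIZE = 10 and OFFSET = (sizetype) -2 (a pointer stepping
   back within the same whole object), the direct MAX (SZ, OFFSET) - OFFSET
   would clamp to 0 because the negative offset looks huge in sizetype.
   Rewritten, TMP = MAX (10, 4) = 10 and the net offset is 10 + (-2) - 4 = 4,
   which is non-negative, so the result is 10 - 4 = 6 bytes to the end.  */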
395 : :
396 : : /* Compute offset of EXPR within VAR. Return error_mark_node
397 : : if unknown. */
398 : :
399 : : static tree
400 : 19910 : compute_object_offset (tree expr, const_tree var)
401 : : {
402 : 19918 : enum tree_code code = PLUS_EXPR;
403 : 19918 : tree base, off, t;
404 : :
405 : 19918 : if (expr == var)
406 : 8688 : return size_zero_node;
407 : :
408 : 11230 : switch (TREE_CODE (expr))
409 : : {
410 : 6955 : case COMPONENT_REF:
411 : 6955 : base = compute_object_offset (TREE_OPERAND (expr, 0), var);
412 : 6955 : if (base == error_mark_node)
413 : : return base;
414 : :
415 : 6955 : t = TREE_OPERAND (expr, 1);
416 : 6955 : off = size_binop (PLUS_EXPR,
417 : : component_ref_field_offset (expr),
418 : : size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
419 : : / BITS_PER_UNIT));
420 : 6955 : break;
421 : :
422 : 8 : case REALPART_EXPR:
423 : 8 : CASE_CONVERT:
424 : 8 : case VIEW_CONVERT_EXPR:
425 : 8 : case NON_LVALUE_EXPR:
426 : 8 : return compute_object_offset (TREE_OPERAND (expr, 0), var);
427 : :
428 : 8 : case IMAGPART_EXPR:
429 : 8 : base = compute_object_offset (TREE_OPERAND (expr, 0), var);
430 : 8 : if (base == error_mark_node)
431 : : return base;
432 : :
433 : 8 : off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
434 : 8 : break;
435 : :
436 : 4258 : case ARRAY_REF:
437 : 4258 : base = compute_object_offset (TREE_OPERAND (expr, 0), var);
438 : 4258 : if (base == error_mark_node)
439 : : return base;
440 : :
441 : 4258 : t = TREE_OPERAND (expr, 1);
442 : 4258 : tree low_bound, unit_size;
443 : 4258 : low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
444 : 4258 : unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
445 : 4258 : if (! integer_zerop (low_bound))
446 : 3 : t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
447 : 4258 : if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
448 : : {
449 : 24 : code = MINUS_EXPR;
450 : 24 : t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
451 : : }
452 : 4258 : t = fold_convert (sizetype, t);
453 : 4258 : off = size_binop (MULT_EXPR, unit_size, t);
454 : 4258 : break;
455 : :
456 : 0 : case MEM_REF:
457 : 0 : gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
458 : 0 : return wide_int_to_tree (sizetype, mem_ref_offset (expr));
459 : :
460 : 1 : default:
461 : 1 : return error_mark_node;
462 : : }
463 : :
464 : 11221 : return size_binop (code, base, off);
465 : : }
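/* An illustrative walk-through with an assumed layout: for EXPR = a.in.buf[3]
   and VAR = a, the recursion above sums the byte position of 'in' within 'a',
   the byte position of 'buf' within 'in' and 3 * the element size,
   e.g. 8 + 4 + 3 * 1 = 15 if 'in' starts at byte 8 and 'buf' at byte 4.  */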
466 : :
467 : : /* Return true if CONTAINER has a field of type INNER at OFFSET. */
468 : :
469 : : static bool
470 : 18 : inner_at_offset (tree container, tree inner, tree offset)
471 : : {
472 : 18 : gcc_assert (RECORD_OR_UNION_TYPE_P (container));
473 : :
474 : 39 : for (tree t = TYPE_FIELDS (container); t; t = DECL_CHAIN (t))
475 : : {
476 : 39 : if (TREE_CODE (t) != FIELD_DECL)
477 : 0 : continue;
478 : :
479 : : /* Skip over fields at bit offsets that are not BITS_PER_UNIT aligned
480 : : to avoid an accidental truncated match with byte_position below, since
481 : : the address of such fields cannot be taken. */
482 : 39 : if (wi::bit_and (wi::to_offset (DECL_FIELD_BIT_OFFSET (t)),
483 : 78 : BITS_PER_UNIT - 1) != 0)
484 : 0 : continue;
485 : :
486 : 39 : tree byte_offset = byte_position (t);
487 : 39 : if (TREE_CODE (byte_offset) != INTEGER_CST
488 : 39 : || tree_int_cst_lt (offset, byte_offset))
489 : 0 : return false;
490 : :
491 : : /* For an array, check the element type, otherwise the actual type. This
492 : : deliberately does not support the case of jumping from a pointer to
493 : : the middle of an array to its containing struct. */
494 : 39 : tree t_type = TREE_TYPE (t);
495 : 13 : if (((TREE_CODE (t_type) == ARRAY_TYPE && TREE_TYPE (t_type) == inner)
496 : 37 : || t_type == inner)
497 : 50 : && tree_int_cst_equal (byte_offset, offset))
498 : : return true;
499 : :
500 : : /* Nested structure or union, adjust the expected offset and dive in. */
501 : 52 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (t))
502 : 31 : && inner_at_offset (TREE_TYPE (t), inner,
503 : : fold_build2 (MINUS_EXPR, sizetype, offset,
504 : : byte_offset)))
505 : : return true;
506 : : }
507 : :
508 : : return false;
509 : : }
510 : :
511 : : /* For the input MEMREF of type MEMREF_TYPE, look for the presence of a field
512 : : of BASE_TYPE at OFFSET and return an adjusted WHOLESIZE if found. */
513 : :
514 : : static tree
515 : 5699 : get_wholesize_for_memref (tree memref, tree wholesize)
516 : : {
517 : 5699 : tree base = TREE_OPERAND (memref, 0);
518 : 5699 : tree offset = fold_convert (sizetype, TREE_OPERAND (memref, 1));
519 : 5699 : tree memref_type = TREE_TYPE (memref);
520 : 5699 : tree base_type = TREE_TYPE (base);
521 : :
522 : 5699 : if (POINTER_TYPE_P (base_type))
523 : 5699 : base_type = TREE_TYPE ((base_type));
524 : :
525 : 5699 : if (dump_file && (dump_flags & TDF_DETAILS))
526 : : {
527 : 4 : fprintf (dump_file, "wholesize_for_memref: ");
528 : 4 : print_generic_expr (dump_file, wholesize, dump_flags);
529 : 4 : fprintf (dump_file, ", offset: ");
530 : 4 : print_generic_expr (dump_file, offset, dump_flags);
531 : 4 : fprintf (dump_file, "\n");
532 : : }
533 : :
534 : 5699 : if (TREE_CODE (offset) != INTEGER_CST
535 : 5699 : || compare_tree_int (offset, offset_limit) < 0
536 : 5712 : || !RECORD_OR_UNION_TYPE_P (memref_type))
537 : 5688 : return wholesize;
538 : :
539 : 11 : offset = fold_build1 (NEGATE_EXPR, sizetype, offset);
540 : :
541 : 11 : if (inner_at_offset (memref_type, base_type, offset))
542 : 11 : wholesize = size_binop (PLUS_EXPR, wholesize, offset);
543 : :
544 : 11 : if (dump_file && (dump_flags & TDF_DETAILS))
545 : : {
546 : 0 : fprintf (dump_file, " new wholesize: ");
547 : 0 : print_generic_expr (dump_file, wholesize, dump_flags);
548 : 0 : fprintf (dump_file, "\n");
549 : : }
550 : :
551 : : return wholesize;
552 : : }
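/* A sketch of the case handled above, with assumed types: given
   struct inner { char buf[4]; }; struct outer { long pad; struct inner in; };
   and a pointer P of type struct inner * that points to &o.in (byte 8 of the
   outer object), an access such as *(struct outer *) ((char *) P - 8) shows
   up as a MEM_REF with offset -8.  inner_at_offset finds a struct inner field
   at byte 8 of struct outer, so WHOLESIZE is bumped by 8 to cover the
   containing object.  */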
553 : :
554 : : /* Returns the size of the object designated by DECL, taking its initializer
555 : : into account if it has one or if the initializer would not affect the size;
556 : : otherwise returns the size of the object without the initializer when MIN
557 : : is true, else null. An object's initializer affects the object's
558 : : size if the object is a struct type with a flexible array member. */
559 : :
560 : : tree
561 : 5020062 : decl_init_size (tree decl, bool min)
562 : : {
563 : 5020062 : tree size = DECL_SIZE_UNIT (decl);
564 : 5020062 : tree type = TREE_TYPE (decl);
565 : 5020062 : if (TREE_CODE (type) != RECORD_TYPE)
566 : : return size;
567 : :
568 : 2690804 : tree last = last_field (type);
569 : 2690804 : if (!last)
570 : : return size;
571 : :
572 : 2688996 : tree last_type = TREE_TYPE (last);
573 : 2688996 : if (TREE_CODE (last_type) != ARRAY_TYPE
574 : 2688996 : || TYPE_SIZE (last_type))
575 : : return size;
576 : :
577 : : /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
578 : : of the initializer and sometimes doesn't. */
579 : 2218 : size = TYPE_SIZE_UNIT (type);
580 : 2218 : tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
581 : 2218 : tree compsize = component_ref_size (ref);
582 : 2218 : if (!compsize)
583 : 1008 : return min ? size : NULL_TREE;
584 : :
585 : : /* The size includes tail padding and initializer elements. */
586 : 1708 : tree pos = byte_position (last);
587 : 1708 : size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
588 : 1708 : return size;
589 : : }
590 : :
591 : : /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
592 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size. If the
593 : : size is unknown, set *PSIZE to size_unknown (object_size_type) and return false. */
594 : :
595 : : static bool
596 : 25458 : addr_object_size (struct object_size_info *osi, const_tree ptr,
597 : : int object_size_type, tree *psize, tree *pwholesize)
598 : : {
599 : 25458 : tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
600 : 25458 : tree var_size, bytes, wholebytes;
601 : :
602 : 25458 : gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
603 : :
604 : : /* Set to unknown and overwrite just before returning if the size
605 : : could be determined. */
606 : 25458 : *psize = size_unknown (object_size_type);
607 : 25458 : if (pwholesize)
608 : 7018 : *pwholesize = size_unknown (object_size_type);
609 : :
610 : 25458 : pt_var = TREE_OPERAND (ptr, 0);
611 : 42018 : while (handled_component_p (pt_var))
612 : 16560 : pt_var = TREE_OPERAND (pt_var, 0);
613 : :
614 : 25458 : if (!pt_var)
615 : : return false;
616 : :
617 : 25458 : if (TREE_CODE (pt_var) == MEM_REF)
618 : : {
619 : 5699 : tree sz, wholesize;
620 : :
621 : 4766 : if (!osi || (object_size_type & OST_SUBOBJECT) != 0
622 : 7013 : || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
623 : : {
624 : 4386 : compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
625 : : object_size_type & ~OST_SUBOBJECT, &sz);
626 : 4386 : wholesize = get_wholesize_for_memref (pt_var, sz);
627 : : }
628 : : else
629 : : {
630 : 1313 : tree var = TREE_OPERAND (pt_var, 0);
631 : 1313 : if (osi->pass == 0)
632 : 1313 : collect_object_sizes_for (osi, var);
633 : 2626 : if (bitmap_bit_p (computed[object_size_type],
634 : 1313 : SSA_NAME_VERSION (var)))
635 : : {
636 : 1313 : sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
637 : 1313 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
638 : 1313 : wholesize = get_wholesize_for_memref (pt_var, wholesize);
639 : : }
640 : : else
641 : 0 : sz = wholesize = size_unknown (object_size_type);
642 : : }
643 : 5699 : if (!size_unknown_p (sz, object_size_type))
644 : 3106 : sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
645 : :
646 : 5699 : if (!size_unknown_p (sz, object_size_type)
647 : 5699 : && (TREE_CODE (sz) != INTEGER_CST
648 : 3052 : || compare_tree_int (sz, offset_limit) < 0))
649 : : {
650 : 3096 : pt_var_size = sz;
651 : 3096 : pt_var_wholesize = wholesize;
652 : : }
653 : : }
654 : 19759 : else if (DECL_P (pt_var))
655 : : {
656 : 39316 : pt_var_size = pt_var_wholesize
657 : 19658 : = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
658 : 19658 : if (!pt_var_size)
659 : : return false;
660 : : }
661 : 101 : else if (TREE_CODE (pt_var) == STRING_CST)
662 : 101 : pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
663 : : else
664 : : return false;
665 : :
666 : 5800 : if (pt_var_size)
667 : : {
668 : : /* Validate the size determined above if it is a constant. */
669 : 22742 : if (TREE_CODE (pt_var_size) == INTEGER_CST
670 : 22742 : && compare_tree_int (pt_var_size, offset_limit) >= 0)
671 : : return false;
672 : : }
673 : :
674 : 25309 : if (pt_var != TREE_OPERAND (ptr, 0))
675 : : {
676 : 9187 : tree var;
677 : :
678 : 9187 : if (object_size_type & OST_SUBOBJECT)
679 : : {
680 : 4009 : var = TREE_OPERAND (ptr, 0);
681 : :
682 : 4009 : while (var != pt_var
683 : 4009 : && TREE_CODE (var) != BIT_FIELD_REF
684 : : && TREE_CODE (var) != COMPONENT_REF
685 : : && TREE_CODE (var) != ARRAY_REF
686 : : && TREE_CODE (var) != ARRAY_RANGE_REF
687 : : && TREE_CODE (var) != REALPART_EXPR
688 : 4009 : && TREE_CODE (var) != IMAGPART_EXPR)
689 : 0 : var = TREE_OPERAND (var, 0);
690 : 4009 : if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
691 : 1763 : var = TREE_OPERAND (var, 0);
692 : 4009 : if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
693 : 3796 : || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
694 : 7785 : || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
695 : 1749 : && tree_int_cst_lt (pt_var_size,
696 : 1749 : TYPE_SIZE_UNIT (TREE_TYPE (var)))))
697 : : var = pt_var;
698 : 3660 : else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
699 : : {
700 : : tree v = var;
701 : : /* For &X->fld, compute object size if fld isn't a flexible array
702 : : member. */
703 : 6302 : bool is_flexible_array_mem_ref = false;
704 : 6302 : while (v && v != pt_var)
705 : 3153 : switch (TREE_CODE (v))
706 : : {
707 : 0 : case ARRAY_REF:
708 : 0 : if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
709 : : {
710 : 0 : tree domain
711 : 0 : = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
712 : 0 : if (domain && TYPE_MAX_VALUE (domain))
713 : : {
714 : : v = NULL_TREE;
715 : : break;
716 : : }
717 : : }
718 : 0 : v = TREE_OPERAND (v, 0);
719 : 0 : break;
720 : : case REALPART_EXPR:
721 : : case IMAGPART_EXPR:
722 : : v = NULL_TREE;
723 : : break;
724 : 3153 : case COMPONENT_REF:
725 : : /* When the ref is not to an aggregate type, i.e, an array,
726 : : a record or a union, it will not have flexible size,
727 : : compute the object size directly. */
728 : 3153 : if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
729 : : {
730 : : v = NULL_TREE;
731 : : break;
732 : : }
733 : : /* if the ref is to a record or union type, but the type
734 : : does not include a flexible array recursively, compute
735 : : the object size directly. */
736 : 2255 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
737 : : {
738 : 58 : if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
739 : : {
740 : : v = NULL_TREE;
741 : : break;
742 : : }
743 : : else
744 : : {
745 : 16 : v = TREE_OPERAND (v, 0);
746 : 16 : break;
747 : : }
748 : : }
749 : : /* Now the ref is to an array type. */
750 : 2197 : gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
751 : 2197 : is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
752 : 5439 : while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
753 : 2997 : if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
754 : : != UNION_TYPE
755 : 2997 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
756 : : != QUAL_UNION_TYPE)
757 : : break;
758 : : else
759 : 1045 : v = TREE_OPERAND (v, 0);
760 : 2197 : if (TREE_CODE (v) == COMPONENT_REF
761 : 2197 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
762 : : == RECORD_TYPE)
763 : : {
764 : : /* compute object size only if v is not a
765 : : flexible array member. */
766 : 1952 : if (!is_flexible_array_mem_ref)
767 : : {
768 : : v = NULL_TREE;
769 : : break;
770 : : }
771 : 949 : v = TREE_OPERAND (v, 0);
772 : : }
773 : 1504 : while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
774 : 446 : if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
775 : : != UNION_TYPE
776 : 446 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
777 : : != QUAL_UNION_TYPE)
778 : : break;
779 : : else
780 : 310 : v = TREE_OPERAND (v, 0);
781 : 1194 : if (v != pt_var)
782 : : v = NULL_TREE;
783 : : else
784 : 3153 : v = pt_var;
785 : : break;
786 : : default:
787 : 6302 : v = pt_var;
788 : : break;
789 : : }
790 : 3149 : if (v == pt_var)
791 : 6597 : var = pt_var;
792 : : }
793 : : }
794 : : else
795 : : var = pt_var;
796 : :
797 : 9187 : if (var != pt_var)
798 : : {
799 : 2481 : var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
800 : 2481 : if (!TREE_CONSTANT (var_size))
801 : 0 : var_size = get_or_create_ssa_default_def (cfun, var_size);
802 : 2481 : if (!var_size)
803 : : return false;
804 : : }
805 : 6706 : else if (!pt_var_size)
806 : : return false;
807 : : else
808 : : var_size = pt_var_size;
809 : 7910 : bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
810 : 7910 : if (bytes != error_mark_node)
811 : : {
812 : 7909 : bytes = size_for_offset (var_size, bytes);
813 : 7909 : if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
814 : : {
815 : 779 : tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
816 : : pt_var);
817 : 779 : if (bytes2 != error_mark_node)
818 : : {
819 : 779 : bytes2 = size_for_offset (pt_var_size, bytes2);
820 : 779 : bytes = size_binop (MIN_EXPR, bytes, bytes2);
821 : : }
822 : : }
823 : : }
824 : : else
825 : 1 : bytes = size_unknown (object_size_type);
826 : :
827 : 7910 : wholebytes
828 : 7910 : = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
829 : : }
830 : 16122 : else if (!pt_var_size)
831 : : return false;
832 : : else
833 : : {
834 : : bytes = pt_var_size;
835 : : wholebytes = pt_var_wholesize;
836 : : }
837 : :
838 : 24006 : if (!size_unknown_p (bytes, object_size_type)
839 : 49351 : && size_valid_p (bytes, object_size_type)
840 : 23893 : && !size_unknown_p (bytes, object_size_type)
841 : 24006 : && size_valid_p (wholebytes, object_size_type))
842 : : {
843 : 23893 : *psize = bytes;
844 : 23893 : if (pwholesize)
845 : 5703 : *pwholesize = wholebytes;
846 : 23893 : return true;
847 : : }
848 : :
849 : : return false;
850 : : }
851 : :
852 : : /* Compute __builtin_object_size for a CALL to .ACCESS_WITH_SIZE.
853 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
854 : : The 2nd, 3rd, 4th and 6th arguments of the call determine the size of
855 : : the object:
856 : :
857 : : 2nd argument REF_TO_SIZE: The reference to the size of the object.
858 : : 3rd argument CLASS_OF_SIZE: What the size referenced by REF_TO_SIZE represents:
859 : : 0: the number of bytes;
860 : : 1: the number of elements of the object type.
861 : : 4th argument TYPE_OF_SIZE: A constant 0 whose TYPE is the same as the TYPE
862 : : of the object referenced by REF_TO_SIZE.
863 : : 6th argument: A constant 0 with the pointer TYPE to the original flexible
864 : : array type or pointer field type.
865 : :
866 : : The size of the element can be retrieved from the TYPE of the 6th argument
867 : : of the call, which is the pointer to the original flexible array type or
868 : : the type of the original pointer field. */
869 : :
870 : : static tree
871 : 114 : access_with_size_object_size (const gcall *call, int object_size_type)
872 : : {
873 : : /* If not for dynamic object size, return. */
874 : 114 : if ((object_size_type & OST_DYNAMIC) == 0)
875 : 78 : return size_unknown (object_size_type);
876 : :
877 : 36 : gcc_assert (gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE));
878 : : /* The type of the 6th argument type is the pointer TYPE to the original
879 : : flexible array type or to the original pointer type. */
880 : 36 : tree pointer_to_array_type = TREE_TYPE (gimple_call_arg (call, 5));
881 : 36 : gcc_assert (POINTER_TYPE_P (pointer_to_array_type));
882 : 36 : tree element_type = TREE_TYPE (TREE_TYPE (pointer_to_array_type));
883 : 36 : tree element_size = TYPE_SIZE_UNIT (element_type);
884 : 36 : tree ref_to_size = gimple_call_arg (call, 1);
885 : 36 : unsigned int class_of_size = TREE_INT_CST_LOW (gimple_call_arg (call, 2));
886 : 36 : tree type = TREE_TYPE (gimple_call_arg (call, 3));
887 : :
888 : 36 : tree size = fold_build2 (MEM_REF, type, ref_to_size,
889 : : build_int_cst (ptr_type_node, 0));
890 : :
891 : : /* If the size is a negative value, treat it as zero. */
892 : 36 : if (!TYPE_UNSIGNED (type))
893 : : {
894 : 18 : tree cond_expr = fold_build2 (LT_EXPR, boolean_type_node,
895 : : unshare_expr (size), build_zero_cst (type));
896 : 18 : size = fold_build3 (COND_EXPR, integer_type_node, cond_expr,
897 : : build_zero_cst (type), size);
898 : : }
899 : :
900 : 36 : if (class_of_size == 1)
901 : 36 : size = size_binop (MULT_EXPR,
902 : : fold_convert (sizetype, size),
903 : : fold_convert (sizetype, element_size));
904 : : else
905 : 0 : size = fold_convert (sizetype, size);
906 : :
907 : 36 : if (!todo)
908 : 14 : todo = TODO_update_ssa_only_virtuals;
909 : :
910 : : return size;
911 : : }
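/* A hypothetical source-level example of what the lowering above computes;
   the struct and the counted_by attribute spelling are assumptions for
   illustration (counted_by is what wraps accesses in .ACCESS_WITH_SIZE),
   not part of this file.  */
#if 0
#include <stddef.h>

struct pkt
{
  size_t count;
  int data[] __attribute__ ((counted_by (count)));
};

size_t
pkt_bytes (struct pkt *p)
{
  /* Evaluates to p->count * sizeof (int) at run time; the non-dynamic
     variant would remain unknown.  */
  return __builtin_dynamic_object_size (p->data, 0);
}
#endif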
912 : :
913 : : /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
914 : : Handles calls to functions declared with attribute alloc_size.
915 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
916 : : If unknown, return size_unknown (object_size_type). */
917 : :
918 : : static tree
919 : 1858 : alloc_object_size (const gcall *call, int object_size_type)
920 : : {
921 : 1858 : gcc_assert (is_gimple_call (call));
922 : :
923 : 1858 : tree calltype;
924 : 1858 : tree callfn = gimple_call_fndecl (call);
925 : 1858 : if (callfn)
926 : 1714 : calltype = TREE_TYPE (callfn);
927 : : else
928 : 144 : calltype = gimple_call_fntype (call);
929 : :
930 : 1858 : if (!calltype)
931 : 0 : return size_unknown (object_size_type);
932 : :
933 : : /* Set to positions of alloc_size arguments. */
934 : 1858 : int arg1 = -1, arg2 = -1;
935 : 1858 : tree alloc_size = lookup_attribute ("alloc_size",
936 : 1858 : TYPE_ATTRIBUTES (calltype));
937 : 3541 : if (alloc_size && TREE_VALUE (alloc_size))
938 : : {
939 : 1683 : tree p = TREE_VALUE (alloc_size);
940 : :
941 : 1683 : arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
942 : 1683 : if (TREE_CHAIN (p))
943 : 187 : arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
944 : : }
945 : 175 : else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
946 : 114 : && callfn
947 : 289 : && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
948 : : arg1 = 0;
949 : :
950 : : /* Non-const arguments are OK here, let the caller handle constness. */
951 : 1683 : if (arg1 < 0
952 : 1765 : || (unsigned) arg1 >= gimple_call_num_args (call)
953 : 3448 : || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
954 : 93 : return size_unknown (object_size_type);
955 : :
956 : 1765 : tree targ1 = gimple_call_arg (call, arg1);
957 : 3530 : if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
958 : 3528 : || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
959 : 2 : return size_unknown (object_size_type);
960 : 1763 : targ1 = fold_convert (sizetype, targ1);
961 : 1763 : tree bytes = NULL_TREE;
962 : 1763 : if (arg2 >= 0)
963 : : {
964 : 187 : tree targ2 = gimple_call_arg (call, arg2);
965 : 374 : if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
966 : 374 : || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
967 : 0 : return size_unknown (object_size_type);
968 : 187 : targ2 = fold_convert (sizetype, targ2);
969 : 187 : bytes = size_binop (MULT_EXPR, targ1, targ2);
970 : : }
971 : : else
972 : : bytes = targ1;
973 : :
974 : 1763 : return bytes ? bytes : size_unknown (object_size_type);
975 : : }
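/* A hypothetical example of the alloc_size handling above; my_alloc and
   my_calloc are made-up declarations for illustration.  The attribute
   positions are 1-based, so the sizes come from argument 1 and from the
   product of arguments 1 and 2 respectively (with optimization enabled).  */
#if 0
#include <stddef.h>

void *my_alloc (size_t n) __attribute__ ((alloc_size (1)));
void *my_calloc (size_t n, size_t m) __attribute__ ((alloc_size (1, 2)));

size_t
example (void)
{
  size_t a = __builtin_object_size (my_alloc (32), 0);	     /* 32 */
  size_t b = __builtin_object_size (my_calloc (4, 10), 0);   /* 40 */
  return a + b;
}
#endif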
976 : :
977 : : /* Compute __builtin_object_size for CALL, which is a call to either
978 : : BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
979 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
980 : : If unknown, return size_unknown (object_size_type). */
981 : :
982 : : static tree
983 : 160 : strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
984 : : {
985 : 160 : tree src = gimple_call_arg (call, 0);
986 : 160 : tree sz = size_unknown (object_size_type);
987 : 160 : tree n = NULL_TREE;
988 : :
989 : 160 : if (is_strndup)
990 : 110 : n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
991 : : gimple_call_arg (call, 1));
992 : : /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
993 : : way it likes. */
994 : : else
995 : : {
996 : 50 : tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
997 : 50 : if (strlen_fn)
998 : : {
999 : 50 : sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
1000 : : build_call_expr (strlen_fn, 1, src));
1001 : 50 : todo = TODO_update_ssa_only_virtuals;
1002 : : }
1003 : : }
1004 : :
1005 : : /* In all other cases, return the size of SRC since the object size cannot
1006 : : exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
1007 : : string constant since otherwise the object size could go all the way down
1008 : : to zero. */
1009 : 160 : if (!size_valid_p (sz, object_size_type)
1010 : 152 : || size_unknown_p (sz, object_size_type))
1011 : : {
1012 : 118 : tree wholesrc = NULL_TREE;
1013 : 118 : if (TREE_CODE (src) == ADDR_EXPR)
1014 : 48 : wholesrc = get_base_address (TREE_OPERAND (src, 0));
1015 : :
1016 : : /* If the source points within a string constant, we try to get its
1017 : : length. */
1018 : 48 : if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
1019 : : {
1020 : 48 : tree len = c_strlen (src, 0);
1021 : 48 : if (len)
1022 : 48 : sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
1023 : : }
1024 : :
1025 : : /* For maximum estimate, our next best guess is the object size of the
1026 : : source. */
1027 : 118 : if (size_unknown_p (sz, object_size_type)
1028 : 118 : && !(object_size_type & OST_MINIMUM))
1029 : 31 : compute_builtin_object_size (src, object_size_type, &sz);
1030 : : }
1031 : :
1032 : : /* String duplication allocates at least one byte, so we should never fail
1033 : : for OST_MINIMUM. */
1034 : 160 : if ((!size_valid_p (sz, object_size_type)
1035 : 152 : || size_unknown_p (sz, object_size_type))
1036 : 40 : && (object_size_type & OST_MINIMUM))
1037 : 35 : sz = size_one_node;
1038 : :
1039 : : /* Factor in the N. */
1040 : 160 : return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
1041 : : }
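/* A hypothetical example of the strdup/strndup handling above (illustration
   only): a string constant source gives an exact length via c_strlen, and
   for strndup the result is additionally capped at N + 1.  */
#if 0
#include <string.h>

size_t
example (void)
{
  char *p = strdup ("abc");	  /* __builtin_object_size (p, 0) == 4.  */
  char *q = strndup ("abc", 10);  /* MIN (10 + 1, 4) == 4 as well.  */
  return __builtin_object_size (p, 0) + __builtin_object_size (q, 0);
}
#endif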
1042 : :
1043 : : /* If object size is propagated from one of function's arguments directly
1044 : : to its return value, return that argument for GIMPLE_CALL statement CALL.
1045 : : Otherwise return NULL. */
1046 : :
1047 : : static tree
1048 : 2189 : pass_through_call (const gcall *call)
1049 : : {
1050 : 2189 : unsigned rf = gimple_call_return_flags (call);
1051 : 2189 : if (rf & ERF_RETURNS_ARG)
1052 : : {
1053 : 57 : unsigned argnum = rf & ERF_RETURN_ARG_MASK;
1054 : 57 : if (argnum < gimple_call_num_args (call))
1055 : 57 : return gimple_call_arg (call, argnum);
1056 : : }
1057 : :
1058 : : /* __builtin_assume_aligned is intentionally not marked RET1. */
1059 : 2132 : if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
1060 : 0 : return gimple_call_arg (call, 0);
1061 : :
1062 : : return NULL_TREE;
1063 : : }
1064 : :
1065 : : /* Emit PHI nodes for the SIZE and WHOLESIZE expressions of the PHI node STMT. */
1066 : :
1067 : : static void
1068 : 142 : emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
1069 : : {
1070 : 142 : tree phires;
1071 : 142 : gphi *wholephi = NULL;
1072 : :
1073 : 142 : if (wholesize != size)
1074 : : {
1075 : 121 : phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
1076 : 121 : wholephi = create_phi_node (phires, gimple_bb (stmt));
1077 : : }
1078 : :
1079 : 142 : phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
1080 : 142 : gphi *phi = create_phi_node (phires, gimple_bb (stmt));
1081 : 142 : gphi *obj_phi = as_a <gphi *> (stmt);
1082 : :
1083 : 142 : gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
1084 : 142 : gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
1085 : :
1086 : 456 : for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
1087 : : {
1088 : 314 : gimple_seq seq = NULL;
1089 : 314 : tree wsz = TREE_VEC_ELT (wholesize, i);
1090 : 314 : tree sz = TREE_VEC_ELT (size, i);
1091 : :
1092 : : /* If we built an expression, we will need to build statements
1093 : : and insert them on the edge right away. */
1094 : 314 : if (TREE_CODE (wsz) != SSA_NAME)
1095 : 149 : wsz = force_gimple_operand (wsz, &seq, true, NULL);
1096 : 314 : if (TREE_CODE (sz) != SSA_NAME)
1097 : : {
1098 : 161 : gimple_seq s;
1099 : 161 : sz = force_gimple_operand (sz, &s, true, NULL);
1100 : 161 : gimple_seq_add_seq (&seq, s);
1101 : : }
1102 : :
1103 : 314 : if (seq)
1104 : 0 : gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
1105 : :
1106 : 314 : if (wholephi)
1107 : 272 : add_phi_arg (wholephi, wsz,
1108 : : gimple_phi_arg_edge (obj_phi, i),
1109 : : gimple_phi_arg_location (obj_phi, i));
1110 : :
1111 : 314 : add_phi_arg (phi, sz,
1112 : : gimple_phi_arg_edge (obj_phi, i),
1113 : : gimple_phi_arg_location (obj_phi, i));
1114 : : }
1115 : 142 : }
1116 : :
1117 : : /* Descend through EXPR and return size_unknown if it uses any SSA variable
1118 : : generated by object_size_set or object_size_set_temp that turned out to be
1119 : : size_unknown, as noted in UNKNOWNS. */
1120 : :
1121 : : static tree
1122 : 3284 : propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
1123 : : {
1124 : 3284 : int object_size_type = osi->object_size_type;
1125 : :
1126 : 3284 : switch (TREE_CODE (expr))
1127 : : {
1128 : 1208 : case SSA_NAME:
1129 : 1208 : if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
1130 : 0 : return size_unknown (object_size_type);
1131 : : return expr;
1132 : :
1133 : 363 : case MIN_EXPR:
1134 : 363 : case MAX_EXPR:
1135 : 363 : {
1136 : 363 : tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
1137 : : unknowns);
1138 : 363 : if (size_unknown_p (res, object_size_type))
1139 : : return res;
1140 : :
1141 : 363 : res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
1142 : 363 : if (size_unknown_p (res, object_size_type))
1143 : : return res;
1144 : :
1145 : : return expr;
1146 : : }
1147 : 341 : case MODIFY_EXPR:
1148 : 341 : {
1149 : 341 : tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
1150 : : unknowns);
1151 : 341 : if (size_unknown_p (res, object_size_type))
1152 : : return res;
1153 : : return expr;
1154 : : }
1155 : : case TREE_VEC:
1156 : 1196 : for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1157 : : {
1158 : 912 : tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
1159 : : unknowns);
1160 : 912 : if (size_unknown_p (res, object_size_type))
1161 : : return res;
1162 : : }
1163 : : return expr;
1164 : 483 : case PLUS_EXPR:
1165 : 483 : case MINUS_EXPR:
1166 : 483 : {
1167 : 483 : tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
1168 : : unknowns);
1169 : 483 : if (size_unknown_p (res, object_size_type))
1170 : : return res;
1171 : :
1172 : : return expr;
1173 : : }
1174 : : default:
1175 : : return expr;
1176 : : }
1177 : : }
1178 : :
1179 : : /* Walk through size expressions that need reexamination and generate
1180 : : statements for them. */
1181 : :
1182 : : static void
1183 : 2094 : gimplify_size_expressions (object_size_info *osi)
1184 : : {
1185 : 2094 : int object_size_type = osi->object_size_type;
1186 : 2094 : bitmap_iterator bi;
1187 : 2094 : unsigned int i;
1188 : 2094 : bool changed;
1189 : :
1190 : : /* Step 1: Propagate unknowns into expressions. */
1191 : 2094 : bitmap reexamine = BITMAP_ALLOC (NULL);
1192 : 2094 : bitmap_copy (reexamine, osi->reexamine);
1193 : 2094 : bitmap unknowns = BITMAP_ALLOC (NULL);
1194 : 2094 : do
1195 : : {
1196 : 2094 : changed = false;
1197 : 2505 : EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1198 : : {
1199 : 411 : object_size cur = object_sizes_get_raw (osi, i);
1200 : :
1201 : 411 : if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
1202 : : object_size_type)
1203 : 411 : || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
1204 : : unknowns),
1205 : : object_size_type))
1206 : : {
1207 : : /* Record the SSAs we're overwriting to propagate the
1208 : : unknowns. */
1209 : 0 : tree oldval = object_sizes_get (osi, i);
1210 : 0 : tree old_wholeval = object_sizes_get (osi, i, true);
1211 : :
1212 : 0 : bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
1213 : 0 : bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
1214 : 0 : object_sizes_initialize (osi, i,
1215 : : size_unknown (object_size_type),
1216 : : size_unknown (object_size_type));
1217 : 0 : bitmap_clear_bit (osi->reexamine, i);
1218 : 0 : changed = true;
1219 : : }
1220 : : }
1221 : 2094 : bitmap_copy (reexamine, osi->reexamine);
1222 : : }
1223 : : while (changed);
1224 : :
1225 : : /* Release all unknowns. */
1226 : 2094 : EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
1227 : 0 : release_ssa_name (ssa_name (i));
1228 : :
1229 : 2094 : BITMAP_FREE (unknowns);
1230 : 2094 : BITMAP_FREE (reexamine);
1231 : :
1232 : : /* Expand all size expressions to put their definitions close to the objects
1233 : : for which size is being computed. */
1234 : 2505 : EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
1235 : : {
1236 : 411 : gimple_seq seq = NULL;
1237 : 411 : object_size osize = object_sizes_get_raw (osi, i);
1238 : :
1239 : 411 : gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
1240 : 411 : enum gimple_code code = gimple_code (stmt);
1241 : :
1242 : : /* PHI nodes need special attention. */
1243 : 411 : if (code == GIMPLE_PHI)
1244 : 142 : emit_phi_nodes (stmt, osize.size, osize.wholesize);
1245 : : else
1246 : : {
1247 : 269 : tree size_expr = NULL_TREE;
1248 : :
1249 : : /* Bundle wholesize in with the size to gimplify if needed. */
1250 : 269 : if (osize.wholesize != osize.size
1251 : 269 : && !size_usable_p (osize.wholesize))
1252 : 2 : size_expr = size_binop (COMPOUND_EXPR,
1253 : : osize.wholesize,
1254 : : osize.size);
1255 : 267 : else if (!size_usable_p (osize.size))
1256 : : size_expr = osize.size;
1257 : :
1258 : 241 : if (size_expr)
1259 : : {
1260 : 241 : gimple_stmt_iterator gsi;
1261 : 241 : if (code == GIMPLE_NOP)
1262 : 22 : gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1263 : : else
1264 : 230 : gsi = gsi_for_stmt (stmt);
1265 : :
1266 : 241 : force_gimple_operand (size_expr, &seq, true, NULL);
1267 : 241 : gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
1268 : : }
1269 : : }
1270 : :
1271 : : /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1272 : 411 : object_sizes_initialize (osi, i,
1273 : : object_sizes_get (osi, i),
1274 : : object_sizes_get (osi, i, true));
1275 : : }
1276 : 2094 : }
1277 : :
1278 : : /* Compute the __builtin_object_size value for PTR and set *PSIZE to
1279 : : the resulting value. OBJECT_SIZE_TYPE is the second argument
1280 : : to __builtin_object_size.
1281 : : Returns true on success and false when the object size could not
1282 : : be determined. */
1284 : :
1285 : : bool
1286 : 129848 : compute_builtin_object_size (tree ptr, int object_size_type,
1287 : : tree *psize)
1288 : : {
1289 : 129848 : gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
1290 : :
1291 : : /* Set to unknown and overwrite just before returning if the size
1292 : : could be determined. */
1293 : 129848 : *psize = size_unknown (object_size_type);
1294 : :
1295 : 129848 : if (! offset_limit)
1296 : 1071 : init_offset_limit ();
1297 : :
1298 : 129848 : if (TREE_CODE (ptr) == ADDR_EXPR)
1299 : 18440 : return addr_object_size (NULL, ptr, object_size_type, psize);
1300 : :
1301 : 111408 : if (TREE_CODE (ptr) != SSA_NAME
1302 : 111408 : || !POINTER_TYPE_P (TREE_TYPE (ptr)))
1303 : : return false;
1304 : :
1305 : 111103 : if (computed[object_size_type] == NULL)
1306 : : {
1307 : 96317 : if (optimize || object_size_type & OST_SUBOBJECT)
1308 : : return false;
1309 : :
1310 : : /* When not optimizing, rather than failing, make a small effort
1311 : : to determine the object size without the full benefit of
1312 : : the (costly) computation below. */
1313 : 2089 : gimple *def = SSA_NAME_DEF_STMT (ptr);
1314 : 2089 : if (gimple_code (def) == GIMPLE_ASSIGN)
1315 : : {
1316 : 1496 : tree_code code = gimple_assign_rhs_code (def);
1317 : 1496 : if (code == POINTER_PLUS_EXPR)
1318 : : {
1319 : 945 : tree offset = gimple_assign_rhs2 (def);
1320 : 945 : ptr = gimple_assign_rhs1 (def);
1321 : :
1322 : 945 : if (((object_size_type & OST_DYNAMIC)
1323 : 823 : || (tree_fits_shwi_p (offset)
1324 : 505 : && compare_tree_int (offset, offset_limit) <= 0))
1325 : 1450 : && compute_builtin_object_size (ptr, object_size_type,
1326 : : psize))
1327 : : {
1328 : 243 : *psize = size_for_offset (*psize, offset);
1329 : 243 : return true;
1330 : : }
1331 : : }
1332 : : }
1333 : 1846 : return false;
1334 : : }
1335 : :
1336 : 14786 : struct object_size_info osi;
1337 : 14786 : osi.object_size_type = object_size_type;
1338 : 14786 : if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
1339 : : {
1340 : 9473 : bitmap_iterator bi;
1341 : 9473 : unsigned int i;
1342 : :
1343 : 9473 : object_sizes_grow (object_size_type);
1344 : 9473 : if (dump_file)
1345 : : {
1346 : 12 : fprintf (dump_file, "Computing %s %s%sobject size for ",
1347 : 12 : (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
1348 : 12 : (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1349 : 12 : (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1350 : 12 : print_generic_expr (dump_file, ptr, dump_flags);
1351 : 12 : fprintf (dump_file, ":\n");
1352 : : }
1353 : :
1354 : 9473 : osi.visited = BITMAP_ALLOC (NULL);
1355 : 9473 : osi.reexamine = BITMAP_ALLOC (NULL);
1356 : :
1357 : 9473 : if (!(object_size_type & OST_DYNAMIC))
1358 : : {
1359 : 7379 : osi.depths = NULL;
1360 : 7379 : osi.stack = NULL;
1361 : 7379 : osi.tos = NULL;
1362 : : }
1363 : :
1364 : : /* First pass: walk UD chains, compute object sizes that can be computed.
1365 : : osi.reexamine bitmap at the end will contain versions of SSA_NAMES
1366 : : that need to be reexamined. For both static and dynamic size
1367 : : computation, reexamination is for propagation across dependency loops.
1368 : : The dynamic case has the additional use case where the computed
1369 : : expression needs to be gimplified. */
1370 : 9473 : osi.pass = 0;
1371 : 9473 : osi.changed = false;
1372 : 9473 : collect_object_sizes_for (&osi, ptr);
1373 : :
1374 : 9473 : if (object_size_type & OST_DYNAMIC)
1375 : : {
1376 : 2094 : osi.pass = 1;
1377 : 2094 : gimplify_size_expressions (&osi);
1378 : 2094 : bitmap_clear (osi.reexamine);
1379 : : }
1380 : :
1381 : : /* Second pass: keep recomputing object sizes of variables
1382 : : that need reexamination, until no object sizes are
1383 : : increased or all object sizes are computed. */
1384 : 9473 : if (! bitmap_empty_p (osi.reexamine))
1385 : : {
1386 : 291 : bitmap reexamine = BITMAP_ALLOC (NULL);
1387 : :
1388 : : /* If looking for minimum instead of maximum object size,
1389 : : detect cases where a pointer is increased in a loop.
1390 : : Although even without this detection pass 2 would eventually
1391 : : terminate, it could take a long time. If a pointer is
1392 : : increasing this way, we need to assume 0 object size.
1393 : : E.g. p = &buf[0]; while (cond) p = p + 4; */
1394 : 291 : if (object_size_type & OST_MINIMUM)
1395 : : {
1396 : 54 : osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
1397 : 54 : osi.stack = XNEWVEC (unsigned int, num_ssa_names);
1398 : 27 : osi.tos = osi.stack;
1399 : 27 : osi.pass = 1;
1400 : : /* collect_object_sizes_for is changing
1401 : : osi.reexamine bitmap, so iterate over a copy. */
1402 : 27 : bitmap_copy (reexamine, osi.reexamine);
1403 : 82 : EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1404 : 55 : if (bitmap_bit_p (osi.reexamine, i))
1405 : 55 : check_for_plus_in_loops (&osi, ssa_name (i));
1406 : :
1407 : 27 : free (osi.depths);
1408 : 27 : osi.depths = NULL;
1409 : 27 : free (osi.stack);
1410 : 27 : osi.stack = NULL;
1411 : 27 : osi.tos = NULL;
1412 : : }
1413 : :
1414 : 381 : do
1415 : : {
1416 : 381 : osi.pass = 2;
1417 : 381 : osi.changed = false;
1418 : : /* collect_object_sizes_for is changing
1419 : : osi.reexamine bitmap, so iterate over a copy. */
1420 : 381 : bitmap_copy (reexamine, osi.reexamine);
1421 : 1238 : EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1422 : 857 : if (bitmap_bit_p (osi.reexamine, i))
1423 : : {
1424 : 857 : collect_object_sizes_for (&osi, ssa_name (i));
1425 : 857 : if (dump_file && (dump_flags & TDF_DETAILS))
1426 : : {
1427 : 0 : fprintf (dump_file, "Reexamining ");
1428 : 0 : print_generic_expr (dump_file, ssa_name (i),
1429 : : dump_flags);
1430 : 0 : fprintf (dump_file, "\n");
1431 : : }
1432 : : }
1433 : : }
1434 : 381 : while (osi.changed);
1435 : :
1436 : 291 : BITMAP_FREE (reexamine);
1437 : : }
1438 : 10096 : EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
1439 : 623 : bitmap_set_bit (computed[object_size_type], i);
1440 : :
1441 : : /* Debugging dumps. */
1442 : 9473 : if (dump_file)
1443 : : {
1444 : 66 : EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
1445 : 54 : if (!object_sizes_unknown_p (object_size_type, i))
1446 : : {
1447 : 54 : print_generic_expr (dump_file, ssa_name (i),
1448 : : dump_flags);
1449 : 108 : fprintf (dump_file,
1450 : : ": %s %s%sobject size ",
1451 : 54 : ((object_size_type & OST_MINIMUM) ? "minimum"
1452 : : : "maximum"),
1453 : : (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1454 : 54 : (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1455 : 54 : print_generic_expr (dump_file, object_sizes_get (&osi, i),
1456 : : dump_flags);
1457 : 54 : fprintf (dump_file, "\n");
1458 : : }
1459 : : }
1460 : :
1461 : 9473 : BITMAP_FREE (osi.reexamine);
1462 : 9473 : BITMAP_FREE (osi.visited);
1463 : : }
1464 : :
1465 : 14786 : *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
1466 : 14786 : return !size_unknown_p (*psize, object_size_type);
1467 : : }
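/* A hypothetical example contrasting the static and dynamic entry points
   handled above (illustration only): a non-constant allocation has no
   compile-time size, so the static builtin folds to the "unknown" maximum
   sentinel while the dynamic variant keeps the allocation expression.  */
#if 0
#include <stdlib.h>

size_t
example (size_t n)
{
  char *p = malloc (n);
  size_t stat = __builtin_object_size (p, 0);	       /* (size_t) -1 */
  size_t dyn = __builtin_dynamic_object_size (p, 0);   /* n */
  free (p);
  return stat + dyn;
}
#endif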
1468 : :
1469 : : /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1470 : :
1471 : : static void
1472 : 8186 : expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1473 : : {
1474 : 8186 : int object_size_type = osi->object_size_type;
1475 : 8186 : unsigned int varno = SSA_NAME_VERSION (ptr);
1476 : 8186 : tree bytes, wholesize;
1477 : :
1478 : 8186 : gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1479 : 8186 : gcc_assert (osi->pass == 0);
1480 : :
1481 : 8186 : if (TREE_CODE (value) == WITH_SIZE_EXPR)
1482 : 0 : value = TREE_OPERAND (value, 0);
1483 : :
1484 : : /* Pointer variables should have been handled by merge_object_sizes. */
1485 : 8186 : gcc_assert (TREE_CODE (value) != SSA_NAME
1486 : : || !POINTER_TYPE_P (TREE_TYPE (value)));
1487 : :
1488 : 8186 : if (TREE_CODE (value) == ADDR_EXPR)
1489 : 6677 : addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1490 : : else
1491 : 1509 : bytes = wholesize = size_unknown (object_size_type);
1492 : :
1493 : 8186 : object_sizes_set (osi, varno, bytes, wholesize);
1494 : 8186 : }
1495 : :
1496 : :
1497 : : /* Compute object_sizes for PTR, defined to the result of a call. */
1498 : :
1499 : : static void
1500 : 2132 : call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1501 : : {
1502 : 2132 : int object_size_type = osi->object_size_type;
1503 : 2132 : unsigned int varno = SSA_NAME_VERSION (ptr);
1504 : 2132 : tree bytes = NULL_TREE;
1505 : :
1506 : 2132 : gcc_assert (is_gimple_call (call));
1507 : :
1508 : 2132 : gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1509 : 2132 : gcc_assert (osi->pass == 0);
1510 : :
1511 : 2132 : bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1512 : 2132 : bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1513 : 2132 : bool is_access_with_size
1514 : 2132 : = gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE);
1515 : 2132 : if (is_strdup || is_strndup)
1516 : 160 : bytes = strdup_object_size (call, object_size_type, is_strndup);
1517 : 1972 : else if (is_access_with_size)
1518 : 114 : bytes = access_with_size_object_size (call, object_size_type);
1519 : : else
1520 : 1858 : bytes = alloc_object_size (call, object_size_type);
1521 : :
1522 : 2132 : if (!size_valid_p (bytes, object_size_type))
1523 : 367 : bytes = size_unknown (object_size_type);
1524 : :
1525 : 2132 : object_sizes_set (osi, varno, bytes, bytes);
1526 : 2132 : }
1527 : :
1528 : :
1529 : : /* Compute object_sizes for PTR, defined to an unknown value. */
1530 : :
1531 : : static void
1532 : 0 : unknown_object_size (struct object_size_info *osi, tree ptr)
1533 : : {
1534 : 0 : int object_size_type = osi->object_size_type;
1535 : 0 : unsigned int varno = SSA_NAME_VERSION (ptr);
1536 : :
1537 : 0 : gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1538 : 0 : gcc_checking_assert (osi->pass == 0);
1539 : 0 : tree bytes = size_unknown (object_size_type);
1540 : :
1541 : 0 : object_sizes_set (osi, varno, bytes, bytes);
1542 : 0 : }
1543 : :
1544 : :
1545 : : /* Merge object sizes of ORIG into DEST.  Return true if
1546 : : the object size might need reexamination later. */
1547 : :
1548 : : static bool
1549 : 2268 : merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1550 : : {
1551 : 2268 : int object_size_type = osi->object_size_type;
1552 : 2268 : unsigned int varno = SSA_NAME_VERSION (dest);
1553 : 2268 : tree orig_bytes, wholesize;
1554 : :
1555 : 2268 : if (object_sizes_unknown_p (object_size_type, varno))
1556 : : return false;
1557 : :
1558 : 2268 : if (osi->pass == 0)
1559 : 1414 : collect_object_sizes_for (osi, orig);
1560 : :
1561 : 2268 : orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1562 : 2268 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1563 : :
1564 : 2268 : if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1565 : 1117 : osi->changed = true;
1566 : :
1567 : 2268 : return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1568 : : }
1569 : :
1570 : :
1571 : : /* Compute object_sizes for VAR, defined to the result of an assignment
1572 : :    with operator POINTER_PLUS_EXPR (or an equivalent ADDR_EXPR of a MEM_REF).
1573 : :    Return true if the object size might need reexamination later. */
1574 : :
1575 : : static bool
1576 : 1101 : plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1577 : : {
1578 : 1101 : int object_size_type = osi->object_size_type;
1579 : 1101 : unsigned int varno = SSA_NAME_VERSION (var);
1580 : 1101 : tree bytes, wholesize;
1581 : 1101 : tree op0, op1;
1582 : 1101 : bool reexamine = false;
1583 : :
1584 : 1101 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1585 : : {
1586 : 1101 : op0 = gimple_assign_rhs1 (stmt);
1587 : 1101 : op1 = gimple_assign_rhs2 (stmt);
1588 : : }
1589 : 0 : else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
1590 : : {
1591 : 0 : tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
1592 : 0 : gcc_assert (TREE_CODE (rhs) == MEM_REF);
1593 : 0 : op0 = TREE_OPERAND (rhs, 0);
1594 : 0 : op1 = TREE_OPERAND (rhs, 1);
1595 : : }
1596 : : else
1597 : 0 : gcc_unreachable ();
1598 : :
1599 : 1101 : if (object_sizes_unknown_p (object_size_type, varno))
1600 : : return false;
1601 : :
1602 : : /* Handle PTR + OFFSET here. */
1603 : 1101 : if ((TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
1604 : : {
1605 : 1096 : if (TREE_CODE (op0) == SSA_NAME)
1606 : : {
1607 : 878 : if (osi->pass == 0)
1608 : 707 : collect_object_sizes_for (osi, op0);
1609 : :
1610 : 878 : bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
1611 : 878 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
1612 : 878 : reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
1613 : : }
1614 : : else
1615 : : {
1616 : : /* op0 will be ADDR_EXPR here. We should never come here during
1617 : : reexamination. */
1618 : 218 : gcc_checking_assert (osi->pass == 0);
1619 : 218 : addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
1620 : : }
1621 : :
1622 : 1096 : bool pos_offset = (size_valid_p (op1, 0)
1623 : 886 : && compare_tree_int (op1, offset_limit) <= 0);
1624 : :
1625 : : /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1626 : : since the wholesize could be non-zero and a negative offset could give
1627 : : a non-zero size. */
1628 : 1096 : if (size_unknown_p (bytes, 0))
1629 : : ;
1630 : :   /* In the static case, we want SIZE_FOR_OFFSET to be lenient if it sees
1631 : :      a negative offset, since BYTES could have been
1632 : :      overestimated.  */
1633 : 829 : else if ((object_size_type & OST_DYNAMIC)
1634 : 699 : || bytes != wholesize
1635 : 370 : || pos_offset)
1636 : 616 : bytes = size_for_offset (bytes, op1, wholesize,
1637 : : ((object_size_type & OST_DYNAMIC)
1638 : : || pos_offset));
1639 : : /* In the static case, with a negative offset, the best estimate for
1640 : : minimum size is size_unknown but for maximum size, the wholesize is a
1641 : : better estimate than size_unknown. */
1642 : 213 : else if (object_size_type & OST_MINIMUM)
1643 : 0 : bytes = size_unknown (object_size_type);
1644 : : else
1645 : 213 : bytes = wholesize;
1646 : : }
1647 : : else
1648 : 5 : bytes = wholesize = size_unknown (object_size_type);
1649 : :
1650 : 1101 : if (!size_valid_p (bytes, object_size_type)
1651 : 1099 : || !size_valid_p (wholesize, object_size_type))
1652 : 2 : bytes = wholesize = size_unknown (object_size_type);
1653 : :
1654 : 1101 : if (object_sizes_set (osi, varno, bytes, wholesize))
1655 : 987 : osi->changed = true;
1656 : : return reexamine;
1657 : : }
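 : :
 : : /* A hypothetical sketch (not from the GCC sources) of the BYTES vs.
 : :    WHOLESIZE distinction handled above:
 : :
 : :      struct S { char a[4]; char b[4]; } s;
 : :      char *p = &s.a[1];   // OST 0: 7 bytes remain, wholesize 8
 : :                           // OST 1: 3 bytes remain in the subobject A
 : :      char *q = p + 2;     // __builtin_object_size (q, 0) == 5
 : :                           // __builtin_object_size (q, 1) == 1
 : :
 : :    For the static maximum with a negative offset and BYTES == WHOLESIZE,
 : :    the WHOLESIZE estimate is used instead of size_unknown, as noted in
 : :    the comments above.  */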
1658 : :
1659 : : /* Compute the dynamic object size for VAR. Return the result in SIZE and
1660 : : WHOLESIZE. */
1661 : :
1662 : : static void
1663 : 355 : dynamic_object_size (struct object_size_info *osi, tree var,
1664 : : tree *size, tree *wholesize)
1665 : : {
1666 : 355 : int object_size_type = osi->object_size_type;
1667 : :
1668 : 355 : if (TREE_CODE (var) == SSA_NAME)
1669 : : {
1670 : 231 : unsigned varno = SSA_NAME_VERSION (var);
1671 : :
1672 : 231 : collect_object_sizes_for (osi, var);
1673 : 231 : *size = object_sizes_get (osi, varno);
1674 : 231 : *wholesize = object_sizes_get (osi, varno, true);
1675 : : }
1676 : 124 : else if (TREE_CODE (var) == ADDR_EXPR)
1677 : 123 : addr_object_size (osi, var, object_size_type, size, wholesize);
1678 : : else
1679 : 1 : *size = *wholesize = size_unknown (object_size_type);
1680 : 355 : }
1681 : :
1682 : : /* Compute object_sizes for VAR, defined at STMT, which is
1683 : : a COND_EXPR. Return true if the object size might need reexamination
1684 : : later. */
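 : :
 : : /* A hypothetical sketch (not from the GCC sources): for a statement like
 : :
 : :      p_1 = c_2 ? &buf8 : &buf16;
 : :
 : :    where buf8 and buf16 are 8- and 16-byte arrays, the static maximum
 : :    object size of p_1 merges to 16, while in the dynamic case the size is
 : :    built as the COND_EXPR c_2 ? 8 : 16.  */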
1685 : :
1686 : : static bool
1687 : 0 : cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1688 : : {
1689 : 0 : tree then_, else_;
1690 : 0 : int object_size_type = osi->object_size_type;
1691 : 0 : unsigned int varno = SSA_NAME_VERSION (var);
1692 : 0 : bool reexamine = false;
1693 : :
1694 : 0 : gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1695 : :
1696 : 0 : if (object_sizes_unknown_p (object_size_type, varno))
1697 : : return false;
1698 : :
1699 : 0 : then_ = gimple_assign_rhs2 (stmt);
1700 : 0 : else_ = gimple_assign_rhs3 (stmt);
1701 : :
1702 : 0 : if (object_size_type & OST_DYNAMIC)
1703 : : {
1704 : 0 : tree then_size, then_wholesize, else_size, else_wholesize;
1705 : :
1706 : 0 : dynamic_object_size (osi, then_, &then_size, &then_wholesize);
1707 : 0 : if (!size_unknown_p (then_size, object_size_type))
1708 : 0 : dynamic_object_size (osi, else_, &else_size, &else_wholesize);
1709 : :
1710 : 0 : tree cond_size, cond_wholesize;
1711 : 0 : if (size_unknown_p (then_size, object_size_type)
1712 : 0 : || size_unknown_p (else_size, object_size_type))
1713 : 0 : cond_size = cond_wholesize = size_unknown (object_size_type);
1714 : : else
1715 : : {
1716 : 0 : cond_size = fold_build3 (COND_EXPR, sizetype,
1717 : : gimple_assign_rhs1 (stmt),
1718 : : then_size, else_size);
1719 : 0 : cond_wholesize = fold_build3 (COND_EXPR, sizetype,
1720 : : gimple_assign_rhs1 (stmt),
1721 : : then_wholesize, else_wholesize);
1722 : : }
1723 : :
1724 : 0 : object_sizes_set (osi, varno, cond_size, cond_wholesize);
1725 : :
1726 : 0 : return false;
1727 : : }
1728 : :
1729 : 0 : if (TREE_CODE (then_) == SSA_NAME)
1730 : 0 : reexamine |= merge_object_sizes (osi, var, then_);
1731 : : else
1732 : 0 : expr_object_size (osi, var, then_);
1733 : :
1734 : 0 : if (object_sizes_unknown_p (object_size_type, varno))
1735 : : return reexamine;
1736 : :
1737 : 0 : if (TREE_CODE (else_) == SSA_NAME)
1738 : 0 : reexamine |= merge_object_sizes (osi, var, else_);
1739 : : else
1740 : 0 : expr_object_size (osi, var, else_);
1741 : :
1742 : : return reexamine;
1743 : : }
1744 : :
1745 : : /* Find the size of an object passed as a parameter to the function. */
1746 : :
1747 : : static void
1748 : 1600 : parm_object_size (struct object_size_info *osi, tree var)
1749 : : {
1750 : 1600 : int object_size_type = osi->object_size_type;
1751 : 1600 : tree parm = SSA_NAME_VAR (var);
1752 : :
1753 : 1600 : if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1754 : : {
1755 : 1155 : expr_object_size (osi, var, parm);
1756 : 1155 : return;
1757 : : }
1758 : :
1759 : : /* Look for access attribute. */
1760 : 445 : rdwr_map rdwr_idx;
1761 : :
1762 : 445 : tree fndecl = cfun->decl;
1763 : 445 : const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1764 : 445 : tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1765 : 445 : tree sz = NULL_TREE;
1766 : :
1767 : : /* If we have an access attribute with a usable size argument... */
1768 : 18 : if (access && access->sizarg != UINT_MAX
1769 : : /* ... and either PARM is void * or has a type that is complete and has a
1770 : : constant size... */
1771 : 461 : && ((typesize && poly_int_tree_p (typesize))
1772 : 4 : || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1773 : : {
1774 : 11 : tree fnargs = DECL_ARGUMENTS (fndecl);
1775 : 11 : tree arg = NULL_TREE;
1776 : 11 : unsigned argpos = 0;
1777 : :
1778 : : /* ... then walk through the parameters to pick the size parameter and
1779 : : safely scale it by the type size if needed.
1780 : :
1781 : : TODO: we could also compute the size of VLAs where the size is
1782 : : given by a function parameter. */
1783 : 18 : for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1784 : 18 : if (argpos == access->sizarg)
1785 : : {
1786 : 11 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1787 : 11 : sz = get_or_create_ssa_default_def (cfun, arg);
1788 : 11 : if (sz != NULL_TREE)
1789 : : {
1790 : 11 : sz = fold_convert (sizetype, sz);
1791 : 11 : if (typesize)
1792 : 8 : sz = size_binop (MULT_EXPR, sz, typesize);
1793 : : }
1794 : : break;
1795 : : }
1796 : : }
1797 : 11 : if (!sz)
1798 : 434 : sz = size_unknown (object_size_type);
1799 : :
1800 : 445 : object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
1801 : 445 : }
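 : :
 : : /* A hypothetical example (not from the GCC sources) of the access
 : :    attribute handling above; in the dynamic case the size of BUF is
 : :    derived from the size argument scaled by the pointee type size:
 : :
 : :      __attribute__ ((access (write_only, 1, 2)))
 : :      void fill (int *buf, size_t nelts)
 : :      {
 : :        // expected to evaluate to nelts * sizeof (int)
 : :        size_t sz = __builtin_dynamic_object_size (buf, 0);
 : :      }  */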
1802 : :
1803 : : /* Compute an object size expression for VAR, which is the result of a PHI
1804 : : node. */
1805 : :
1806 : : static void
1807 : 181 : phi_dynamic_object_size (struct object_size_info *osi, tree var)
1808 : : {
1809 : 181 : int object_size_type = osi->object_size_type;
1810 : 181 : unsigned int varno = SSA_NAME_VERSION (var);
1811 : 181 : gimple *stmt = SSA_NAME_DEF_STMT (var);
1812 : 181 : unsigned i, num_args = gimple_phi_num_args (stmt);
1813 : 181 : bool wholesize_needed = false;
1814 : :
1815 : : /* The extra space is for the PHI result at the end, which object_sizes_set
1816 : : sets for us. */
1817 : 181 : tree sizes = make_tree_vec (num_args + 1);
1818 : 181 : tree wholesizes = make_tree_vec (num_args + 1);
1819 : :
1820 : : /* Bail out if the size of any of the PHI arguments cannot be
1821 : : determined. */
1822 : 502 : for (i = 0; i < num_args; i++)
1823 : : {
1824 : 360 : edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
1825 : 360 : if (e->flags & EDGE_COMPLEX)
1826 : : break;
1827 : :
1828 : 355 : tree rhs = gimple_phi_arg_def (stmt, i);
1829 : 355 : tree size, wholesize;
1830 : :
1831 : 355 : dynamic_object_size (osi, rhs, &size, &wholesize);
1832 : :
1833 : 355 : if (size_unknown_p (size, object_size_type))
1834 : : break;
1835 : :
1836 : 321 : if (size != wholesize)
1837 : 245 : wholesize_needed = true;
1838 : :
1839 : 321 : TREE_VEC_ELT (sizes, i) = size;
1840 : 321 : TREE_VEC_ELT (wholesizes, i) = wholesize;
1841 : : }
1842 : :
1843 : 181 : if (i < num_args)
1844 : : {
1845 : 39 : ggc_free (sizes);
1846 : 39 : ggc_free (wholesizes);
1847 : 39 : sizes = wholesizes = size_unknown (object_size_type);
1848 : : }
1849 : :
1850 : : /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1851 : : nodes. */
1852 : 142 : else if (!wholesize_needed)
1853 : : {
1854 : 21 : ggc_free (wholesizes);
1855 : 21 : wholesizes = sizes;
1856 : : }
1857 : :
1858 : 181 : object_sizes_set (osi, varno, sizes, wholesizes);
1859 : 181 : }
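 : :
 : : /* A sketch (hypothetical SSA names, not from the GCC sources) of what the
 : :    function above records for a PHI:
 : :
 : :      # p_1 = PHI <&buf8(2), &buf16(3)>
 : :
 : :    produces a TREE_VEC of per-argument sizes (8 and 16 here), from which a
 : :    corresponding size PHI can later be emitted when the dynamic size
 : :    expression is materialized.  */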
1860 : :
1861 : : /* Compute object sizes for VAR.
1862 : : For ADDR_EXPR an object size is the number of remaining bytes
1863 : : to the end of the object (where what is considered an object depends on
1864 : : OSI->object_size_type).
1865 : :    For an allocation GIMPLE_CALL like malloc or calloc, the object size is the size
1866 : : of the allocation.
1867 : :    For POINTER_PLUS_EXPR where the second operand is a constant integer,
1868 : :    the object size is the object size of the first operand minus the constant.
1869 : : If the constant is bigger than the number of remaining bytes until the
1870 : :    end of the object, the object size is 0, but if it instead encodes a
1871 : :    pointer subtraction, the object size is size_unknown (object_size_type).
1872 : : To differentiate addition from subtraction, ADDR_EXPR returns
1873 : : size_unknown (object_size_type) for all objects bigger than half of the
1874 : : address space, and constants less than half of the address space are
1875 : :    considered addition, while bigger constants are treated as subtraction.
1876 : : For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1877 : : object size is object size of that argument.
1878 : : Otherwise, object size is the maximum of object sizes of variables
1879 : : that it might be set to. */
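 : :
 : : /* A hypothetical illustration (not from the GCC sources) of the rules
 : :    above; the values are what __builtin_object_size (ptr, 0) is expected
 : :    to evaluate to:
 : :
 : :      char buf[10];
 : :      char *p = &buf[3];      // ADDR_EXPR: 10 - 3 == 7
 : :      char *q = p + 2;        // POINTER_PLUS_EXPR: 7 - 2 == 5
 : :      char *r = p + 100;      // offset past the end of the object: 0
 : :      void *s = malloc (n);   // allocation call: unknown statically,
 : :                              // N for __builtin_dynamic_object_size  */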
1880 : :
1881 : : static void
1882 : 13995 : collect_object_sizes_for (struct object_size_info *osi, tree var)
1883 : : {
1884 : 13995 : int object_size_type = osi->object_size_type;
1885 : 13995 : unsigned int varno = SSA_NAME_VERSION (var);
1886 : 13995 : gimple *stmt;
1887 : 13995 : bool reexamine;
1888 : :
1889 : 13995 : if (bitmap_bit_p (computed[object_size_type], varno))
1890 : : return;
1891 : :
1892 : 12837 : if (osi->pass == 0)
1893 : : {
1894 : 11980 : if (bitmap_set_bit (osi->visited, varno))
1895 : : {
1896 : : /* Initialize to 0 for maximum size and M1U for minimum size so that
1897 : : it gets immediately overridden. */
1898 : 11568 : object_sizes_initialize (osi, varno,
1899 : : size_initval (object_size_type),
1900 : : size_initval (object_size_type));
1901 : : }
1902 : : else
1903 : : {
1904 : : /* Found a dependency loop. Mark the variable for later
1905 : : re-examination. */
1906 : 412 : if (object_size_type & OST_DYNAMIC)
1907 : 82 : object_sizes_set_temp (osi, varno);
1908 : :
1909 : 412 : bitmap_set_bit (osi->reexamine, varno);
1910 : 412 : if (dump_file && (dump_flags & TDF_DETAILS))
1911 : : {
1912 : 0 : fprintf (dump_file, "Found a dependency loop at ");
1913 : 0 : print_generic_expr (dump_file, var, dump_flags);
1914 : 0 : fprintf (dump_file, "\n");
1915 : : }
1916 : 412 : return;
1917 : : }
1918 : : }
1919 : :
1920 : 12425 : if (dump_file && (dump_flags & TDF_DETAILS))
1921 : : {
1922 : 6 : fprintf (dump_file, "Visiting use-def links for ");
1923 : 6 : print_generic_expr (dump_file, var, dump_flags);
1924 : 6 : fprintf (dump_file, "\n");
1925 : : }
1926 : :
1927 : 12425 : stmt = SSA_NAME_DEF_STMT (var);
1928 : 12425 : reexamine = false;
1929 : :
1930 : 12425 : switch (gimple_code (stmt))
1931 : : {
1932 : 6722 : case GIMPLE_ASSIGN:
1933 : 6722 : {
1934 : 6722 : tree rhs = gimple_assign_rhs1 (stmt);
1935 : 6722 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1936 : 6722 : || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1937 : 4952 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1938 : 1101 : reexamine = plus_stmt_object_size (osi, var, stmt);
1939 : 5621 : else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1940 : 0 : reexamine = cond_expr_object_size (osi, var, stmt);
1941 : 5621 : else if (gimple_assign_single_p (stmt)
1942 : 5621 : || gimple_assign_unary_nop_p (stmt))
1943 : : {
1944 : 5621 : if (TREE_CODE (rhs) == SSA_NAME
1945 : 5621 : && POINTER_TYPE_P (TREE_TYPE (rhs)))
1946 : 249 : reexamine = merge_object_sizes (osi, var, rhs);
1947 : : /* Handle the following stmt #2 to propagate the size from the
1948 : : stmt #1 to #3:
1949 : : 1 _1 = .ACCESS_WITH_SIZE (_3, _4, 1, 0, -1, 0B);
1950 : : 2 _5 = *_1;
1951 : : 3 _6 = __builtin_dynamic_object_size (_5, 1);
1952 : : */
1953 : 5372 : else if (TREE_CODE (rhs) == MEM_REF
1954 : 68 : && POINTER_TYPE_P (TREE_TYPE (rhs))
1955 : 68 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
1956 : 5440 : && integer_zerop (TREE_OPERAND (rhs, 1)))
1957 : 68 : reexamine = merge_object_sizes (osi, var, TREE_OPERAND (rhs, 0));
1958 : : else
1959 : 5304 : expr_object_size (osi, var, rhs);
1960 : : }
1961 : : else
1962 : 0 : unknown_object_size (osi, var);
1963 : : break;
1964 : : }
1965 : :
1966 : 2189 : case GIMPLE_CALL:
1967 : 2189 : {
1968 : 2189 : gcall *call_stmt = as_a <gcall *> (stmt);
1969 : 2189 : tree arg = pass_through_call (call_stmt);
1970 : 2189 : if (arg)
1971 : : {
1972 : 57 : if (TREE_CODE (arg) == SSA_NAME
1973 : 57 : && POINTER_TYPE_P (TREE_TYPE (arg)))
1974 : 46 : reexamine = merge_object_sizes (osi, var, arg);
1975 : : else
1976 : 11 : expr_object_size (osi, var, arg);
1977 : : }
1978 : : else
1979 : 2132 : call_object_size (osi, var, call_stmt);
1980 : : break;
1981 : : }
1982 : :
1983 : 0 : case GIMPLE_ASM:
1984 : : /* Pointers defined by __asm__ statements can point anywhere. */
1985 : 0 : unknown_object_size (osi, var);
1986 : 0 : break;
1987 : :
1988 : 1600 : case GIMPLE_NOP:
1989 : 1600 : if (SSA_NAME_VAR (var)
1990 : 1600 : && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1991 : 1600 : parm_object_size (osi, var);
1992 : : else
1993 : : /* Uninitialized SSA names point nowhere. */
1994 : 0 : unknown_object_size (osi, var);
1995 : : break;
1996 : :
1997 : 1914 : case GIMPLE_PHI:
1998 : 1914 : {
1999 : 1914 : unsigned i;
2000 : :
2001 : 1914 : if (object_size_type & OST_DYNAMIC)
2002 : : {
2003 : 181 : phi_dynamic_object_size (osi, var);
2004 : 181 : break;
2005 : : }
2006 : :
2007 : 6558 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
2008 : : {
2009 : 4886 : tree rhs = gimple_phi_arg (stmt, i)->def;
2010 : :
2011 : 4886 : if (object_sizes_unknown_p (object_size_type, varno))
2012 : : break;
2013 : :
2014 : 4825 : if (TREE_CODE (rhs) == SSA_NAME)
2015 : 1905 : reexamine |= merge_object_sizes (osi, var, rhs);
2016 : 2920 : else if (osi->pass == 0)
2017 : 1716 : expr_object_size (osi, var, rhs);
2018 : : }
2019 : : break;
2020 : : }
2021 : :
2022 : 0 : default:
2023 : 0 : gcc_unreachable ();
2024 : : }
2025 : :
2026 : : /* Dynamic sizes use placeholder temps to return an answer, so it is always
2027 : : safe to set COMPUTED for them. */
2028 : 12425 : if ((object_size_type & OST_DYNAMIC)
2029 : 12425 : || !reexamine || object_sizes_unknown_p (object_size_type, varno))
2030 : : {
2031 : 10925 : bitmap_set_bit (computed[object_size_type], varno);
2032 : 10925 : if (!(object_size_type & OST_DYNAMIC))
2033 : 8249 : bitmap_clear_bit (osi->reexamine, varno);
2034 : 2676 : else if (reexamine)
2035 : 103 : bitmap_set_bit (osi->reexamine, varno);
2036 : : }
2037 : : else
2038 : : {
2039 : 1500 : bitmap_set_bit (osi->reexamine, varno);
2040 : 1500 : if (dump_file && (dump_flags & TDF_DETAILS))
2041 : : {
2042 : 0 : fprintf (dump_file, "Need to reexamine ");
2043 : 0 : print_generic_expr (dump_file, var, dump_flags);
2044 : 0 : fprintf (dump_file, "\n");
2045 : : }
2046 : : }
2047 : : }
2048 : :
2049 : :
2050 : : /* Helper function for check_for_plus_in_loops. Called recursively
2051 : : to detect loops. */
2052 : :
2053 : : static void
2054 : 20 : check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
2055 : : unsigned int depth)
2056 : : {
2057 : 20 : gimple *stmt = SSA_NAME_DEF_STMT (var);
2058 : 20 : unsigned int varno = SSA_NAME_VERSION (var);
2059 : :
2060 : 20 : if (osi->depths[varno])
2061 : : {
2062 : 10 : if (osi->depths[varno] != depth)
2063 : : {
2064 : 10 : unsigned int *sp;
2065 : :
2066 : : /* Found a loop involving pointer addition. */
2067 : 20 : for (sp = osi->tos; sp > osi->stack; )
2068 : : {
2069 : 20 : --sp;
2070 : 20 : bitmap_clear_bit (osi->reexamine, *sp);
2071 : 20 : bitmap_set_bit (computed[osi->object_size_type], *sp);
2072 : 20 : object_sizes_set (osi, *sp, size_zero_node,
2073 : : object_sizes_get (osi, *sp, true));
2074 : 20 : if (*sp == varno)
2075 : : break;
2076 : : }
2077 : : }
2078 : 10 : return;
2079 : : }
2080 : 10 : else if (! bitmap_bit_p (osi->reexamine, varno))
2081 : : return;
2082 : :
2083 : 10 : osi->depths[varno] = depth;
2084 : 10 : *osi->tos++ = varno;
2085 : :
2086 : 10 : switch (gimple_code (stmt))
2087 : : {
2088 : :
2089 : 10 : case GIMPLE_ASSIGN:
2090 : 10 : {
2091 : 10 : if ((gimple_assign_single_p (stmt)
2092 : 10 : || gimple_assign_unary_nop_p (stmt))
2093 : 10 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2094 : : {
2095 : 0 : tree rhs = gimple_assign_rhs1 (stmt);
2096 : :
2097 : 0 : check_for_plus_in_loops_1 (osi, rhs, depth);
2098 : : }
2099 : 10 : else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2100 : : {
2101 : 10 : tree basevar = gimple_assign_rhs1 (stmt);
2102 : 10 : tree cst = gimple_assign_rhs2 (stmt);
2103 : :
2104 : 10 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2105 : :
2106 : 20 : check_for_plus_in_loops_1 (osi, basevar,
2107 : 10 : depth + !integer_zerop (cst));
2108 : : }
2109 : : else
2110 : 0 : gcc_unreachable ();
2111 : : break;
2112 : : }
2113 : :
2114 : 0 : case GIMPLE_CALL:
2115 : 0 : {
2116 : 0 : gcall *call_stmt = as_a <gcall *> (stmt);
2117 : 0 : tree arg = pass_through_call (call_stmt);
2118 : 0 : if (arg)
2119 : : {
2120 : 0 : if (TREE_CODE (arg) == SSA_NAME)
2121 : 0 : check_for_plus_in_loops_1 (osi, arg, depth);
2122 : : else
2123 : 0 : gcc_unreachable ();
2124 : : }
2125 : : break;
2126 : : }
2127 : :
2128 : : case GIMPLE_PHI:
2129 : : {
2130 : : unsigned i;
2131 : :
2132 : 0 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
2133 : : {
2134 : 0 : tree rhs = gimple_phi_arg (stmt, i)->def;
2135 : :
2136 : 0 : if (TREE_CODE (rhs) == SSA_NAME)
2137 : 0 : check_for_plus_in_loops_1 (osi, rhs, depth);
2138 : : }
2139 : : break;
2140 : : }
2141 : :
2142 : 0 : default:
2143 : 0 : gcc_unreachable ();
2144 : : }
2145 : :
2146 : 10 : osi->depths[varno] = 0;
2147 : 10 : osi->tos--;
2148 : : }
2149 : :
2150 : :
2151 : : /* Check if some pointer whose object size we are computing is being increased
2152 : : within a loop. If yes, assume all the SSA variables participating in
2153 : : that loop have minimum object sizes 0. */
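 : :
 : : /* For example (hypothetical, not from the GCC sources), in
 : :
 : :      char *p = buf;
 : :      while (*p)
 : :        p++;
 : :
 : :    the SSA names for P form a cycle through a POINTER_PLUS_EXPR with a
 : :    non-zero constant, so their minimum object size is assumed to be 0.  */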
2154 : :
2155 : : static void
2156 : 55 : check_for_plus_in_loops (struct object_size_info *osi, tree var)
2157 : : {
2158 : 55 : gimple *stmt = SSA_NAME_DEF_STMT (var);
2159 : :
2160 : : /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
2161 : : and looked for a POINTER_PLUS_EXPR in the pass-through
2162 : : argument, if any. In GIMPLE, however, such an expression
2163 : : is not a valid call operand. */
2164 : :
2165 : 55 : if (is_gimple_assign (stmt)
2166 : 55 : && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2167 : : {
2168 : 19 : tree basevar = gimple_assign_rhs1 (stmt);
2169 : 19 : tree cst = gimple_assign_rhs2 (stmt);
2170 : :
2171 : 19 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2172 : :
2173 : : /* Skip non-positive offsets. */
2174 : 19 : if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
2175 : 9 : return;
2176 : :
2177 : 10 : osi->depths[SSA_NAME_VERSION (basevar)] = 1;
2178 : 10 : *osi->tos++ = SSA_NAME_VERSION (basevar);
2179 : 10 : check_for_plus_in_loops_1 (osi, var, 2);
2180 : 10 : osi->depths[SSA_NAME_VERSION (basevar)] = 0;
2181 : 10 : osi->tos--;
2182 : : }
2183 : : }
2184 : :
2185 : :
2186 : : /* Initialize data structures for the object size computation. */
2187 : :
2188 : : void
2189 : 18198 : init_object_sizes (void)
2190 : : {
2191 : 18198 : int object_size_type;
2192 : :
2193 : 18198 : if (computed[0])
2194 : : return;
2195 : :
2196 : 24480 : for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2197 : : {
2198 : 21760 : object_sizes_grow (object_size_type);
2199 : 21760 : computed[object_size_type] = BITMAP_ALLOC (NULL);
2200 : : }
2201 : :
2202 : 2720 : init_offset_limit ();
2203 : : }
2204 : :
2205 : :
2206 : : /* Destroy data structures after the object size computation. */
2207 : :
2208 : : void
2209 : 3454942 : fini_object_sizes (void)
2210 : : {
2211 : 3454942 : int object_size_type;
2212 : :
2213 : 31094478 : for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2214 : : {
2215 : 27639536 : object_sizes_release (object_size_type);
2216 : 27639536 : BITMAP_FREE (computed[object_size_type]);
2217 : : }
2218 : 3454942 : }
2219 : :
2220 : : /* Dummy valueize function. */
2221 : :
2222 : : static tree
2223 : 17593 : do_valueize (tree t)
2224 : : {
2225 : 17593 : return t;
2226 : : }
2227 : :
2228 : : /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
2229 : : CALL early for subobjects before any object information is lost due to
2230 : : optimization. Insert a MIN or MAX expression of the result and
2231 : : __builtin_object_size at I so that it may be processed in the second pass.
2232 : : __builtin_dynamic_object_size is treated like __builtin_object_size here
2233 : : since we're only looking for constant bounds. */
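 : :
 : : /* A sketch with hypothetical SSA names and a hypothetical bound of 16:
 : :
 : :      before:  sz_1 = __builtin_object_size (&var.buf, 1);
 : :      after:   tmp_2 = __builtin_object_size (&var.buf, 1);
 : :               sz_1 = MIN_EXPR <tmp_2, 16>;
 : :
 : :    The remaining __builtin_object_size call is folded in the second
 : :    pass.  */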
2234 : :
2235 : : static void
2236 : 11577 : early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2237 : : {
2238 : 11577 : tree ost = gimple_call_arg (call, 1);
2239 : 11577 : tree lhs = gimple_call_lhs (call);
2240 : 11577 : gcc_assert (lhs != NULL_TREE);
2241 : :
2242 : 11577 : if (!tree_fits_uhwi_p (ost))
2243 : 9674 : return;
2244 : :
2245 : 11577 : unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2246 : 11577 : tree ptr = gimple_call_arg (call, 0);
2247 : :
2248 : 11577 : if (object_size_type != 1 && object_size_type != 3)
2249 : : return;
2250 : :
2251 : 3595 : if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
2252 : : return;
2253 : :
2254 : 3595 : tree type = TREE_TYPE (lhs);
2255 : 3595 : tree bytes;
2256 : 3595 : if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
2257 : 3595 : || !int_fits_type_p (bytes, type))
2258 : : return;
2259 : :
2260 : 1903 : tree tem = make_ssa_name (type);
2261 : 1903 : gimple_call_set_lhs (call, tem);
2262 : 1903 : enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
2263 : 1903 : tree cst = fold_convert (type, bytes);
2264 : 1903 : gimple *g = gimple_build_assign (lhs, code, tem, cst);
2265 : 1903 : gsi_insert_after (i, g, GSI_NEW_STMT);
2266 : 1903 : update_stmt (call);
2267 : : }
2268 : :
2269 : : /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
2270 : : expression and insert it at I. Return true if it succeeds. */
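 : :
 : : /* A hypothetical example of a successful fold: for
 : :
 : :      p_2 = malloc (n_1);
 : :      sz_3 = __builtin_dynamic_object_size (p_2, 0);
 : :
 : :    the call may be replaced by an expression based on n_1 instead of a
 : :    constant.  */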
2271 : :
2272 : : static bool
2273 : 3333 : dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2274 : : {
2275 : 3333 : gcc_assert (gimple_call_num_args (call) == 2);
2276 : :
2277 : 3333 : tree args[2];
2278 : 3333 : args[0] = gimple_call_arg (call, 0);
2279 : 3333 : args[1] = gimple_call_arg (call, 1);
2280 : :
2281 : 3333 : location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
2282 : 3333 : tree result_type = gimple_call_return_type (as_a <gcall *> (call));
2283 : 3333 : tree result = fold_builtin_call_array (loc, result_type,
2284 : : gimple_call_fn (call), 2, args);
2285 : :
2286 : 3333 : if (!result)
2287 : : return false;
2288 : :
2289 : : /* fold_builtin_call_array may wrap the result inside a
2290 : : NOP_EXPR. */
2291 : 2218 : STRIP_NOPS (result);
2292 : 2218 : gimplify_and_update_call_from_tree (i, result);
2293 : :
2294 : 2218 : if (dump_file && (dump_flags & TDF_DETAILS))
2295 : : {
2296 : 0 : fprintf (dump_file, "Simplified (dynamic)\n ");
2297 : 0 : print_gimple_stmt (dump_file, call, 0, dump_flags);
2298 : 0 : fprintf (dump_file, " to ");
2299 : 0 : print_generic_expr (dump_file, result);
2300 : 0 : fprintf (dump_file, "\n");
2301 : : }
2302 : : return true;
2303 : : }
2304 : :
2305 : : static unsigned int
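 : : /* Fold __builtin_object_size and __builtin_dynamic_object_size calls in
 : :    FUN.  If EARLY, only add MIN/MAX bounds for subobject queries instead
 : :    of folding the calls outright.  Return any TODO flags set while doing
 : :    so.  */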
2306 : 3454942 : object_sizes_execute (function *fun, bool early)
2307 : : {
2308 : 3454942 : todo = 0;
2309 : 3454942 : auto_bitmap sdce_worklist;
2310 : :
2311 : 3454942 : basic_block bb;
2312 : 29526421 : FOR_EACH_BB_FN (bb, fun)
2313 : : {
2314 : 26071479 : gimple_stmt_iterator i;
2315 : 227444852 : for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
2316 : : {
2317 : 175301894 : tree result;
2318 : 175301894 : bool dynamic = false;
2319 : :
2320 : 175301894 : gimple *call = gsi_stmt (i);
2321 : 175301894 : if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2322 : : dynamic = true;
2323 : 175294139 : else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
2324 : 175283696 : continue;
2325 : :
2326 : 18198 : tree lhs = gimple_call_lhs (call);
2327 : 18198 : if (!lhs)
2328 : 0 : continue;
2329 : :
2330 : 18198 : init_object_sizes ();
2331 : :
2332 : : /* If early, only attempt to fold
2333 : : __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2334 : : and rather than folding the builtin to the constant if any,
2335 : : create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2336 : : call result and the computed constant. Do the same for
2337 : : __builtin_dynamic_object_size too. */
2338 : 18198 : if (early)
2339 : : {
2340 : 11577 : early_object_sizes_execute_one (&i, call);
2341 : 11577 : continue;
2342 : : }
2343 : :
2344 : 6621 : if (dynamic)
2345 : : {
2346 : 3333 : if (dynamic_object_sizes_execute_one (&i, call))
2347 : 2218 : continue;
2348 : : else
2349 : : {
2350 : : /* If we could not find a suitable size expression, lower to
2351 : : __builtin_object_size so that we may at least get a
2352 : : constant lower or higher estimate. */
2353 : 1115 : tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
2354 : 1115 : gimple_call_set_fndecl (call, bosfn);
2355 : 1115 : update_stmt (call);
2356 : :
2357 : 1115 : if (dump_file && (dump_flags & TDF_DETAILS))
2358 : : {
2359 : 0 : print_generic_expr (dump_file, gimple_call_arg (call, 0),
2360 : : dump_flags);
2361 : 0 : fprintf (dump_file,
2362 : : ": Retrying as __builtin_object_size\n");
2363 : : }
2364 : : }
2365 : : }
2366 : :
2367 : 4403 : result = gimple_fold_stmt_to_constant (call, do_valueize);
2368 : 4403 : if (!result)
2369 : : {
2370 : 2028 : tree ost = gimple_call_arg (call, 1);
2371 : :
2372 : 2028 : if (tree_fits_uhwi_p (ost))
2373 : : {
2374 : 2028 : unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2375 : :
2376 : 2028 : if (object_size_type & OST_MINIMUM)
2377 : 309 : result = build_zero_cst (size_type_node);
2378 : 1719 : else if (object_size_type < OST_END)
2379 : 1719 : result = fold_convert (size_type_node,
2380 : : integer_minus_one_node);
2381 : : }
2382 : :
2383 : 2028 : if (!result)
2384 : 0 : continue;
2385 : : }
2386 : :
2387 : 4403 : gcc_assert (TREE_CODE (result) == INTEGER_CST);
2388 : :
2389 : 4403 : if (dump_file && (dump_flags & TDF_DETAILS))
2390 : : {
2391 : 0 : fprintf (dump_file, "Simplified\n ");
2392 : 0 : print_gimple_stmt (dump_file, call, 0, dump_flags);
2393 : 0 : fprintf (dump_file, " to ");
2394 : 0 : print_generic_expr (dump_file, result);
2395 : 0 : fprintf (dump_file, "\n");
2396 : : }
2397 : :
2398 : : /* Propagate into all uses and fold those stmts. */
2399 : 4403 : if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2400 : : {
2401 : 4403 : replace_uses_by (lhs, result);
2402 : : /* Mark lhs as being possiblely DCEd. */
2403 : 4403 : bitmap_set_bit (sdce_worklist, SSA_NAME_VERSION (lhs));
2404 : : }
2405 : : else
2406 : 0 : replace_call_with_value (&i, result);
2407 : : }
2408 : : }
2409 : :
2410 : 3454942 : fini_object_sizes ();
2411 : 3454942 : simple_dce_from_worklist (sdce_worklist);
2412 : 3454942 : return todo;
2413 : 3454942 : }
2414 : :
2415 : : /* Simple pass to optimize all __builtin_object_size () builtins. */
2416 : :
2417 : : namespace {
2418 : :
2419 : : const pass_data pass_data_object_sizes =
2420 : : {
2421 : : GIMPLE_PASS, /* type */
2422 : : "objsz", /* name */
2423 : : OPTGROUP_NONE, /* optinfo_flags */
2424 : : TV_NONE, /* tv_id */
2425 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2426 : : PROP_objsz, /* properties_provided */
2427 : : 0, /* properties_destroyed */
2428 : : 0, /* todo_flags_start */
2429 : : 0, /* todo_flags_finish */
2430 : : };
2431 : :
2432 : : class pass_object_sizes : public gimple_opt_pass
2433 : : {
2434 : : public:
2435 : 573364 : pass_object_sizes (gcc::context *ctxt)
2436 : 1146728 : : gimple_opt_pass (pass_data_object_sizes, ctxt)
2437 : : {}
2438 : :
2439 : : /* opt_pass methods: */
2440 : 286682 : opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
2441 : 1023964 : unsigned int execute (function *fun) final override
2442 : : {
2443 : 1023964 : return object_sizes_execute (fun, false);
2444 : : }
2445 : : }; // class pass_object_sizes
2446 : :
2447 : : } // anon namespace
2448 : :
2449 : : gimple_opt_pass *
2450 : 286682 : make_pass_object_sizes (gcc::context *ctxt)
2451 : : {
2452 : 286682 : return new pass_object_sizes (ctxt);
2453 : : }
2454 : :
2455 : : /* Early version of the pass to optimize all __builtin_object_size () builtins. */
2456 : :
2457 : : namespace {
2458 : :
2459 : : const pass_data pass_data_early_object_sizes =
2460 : : {
2461 : : GIMPLE_PASS, /* type */
2462 : : "early_objsz", /* name */
2463 : : OPTGROUP_NONE, /* optinfo_flags */
2464 : : TV_NONE, /* tv_id */
2465 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2466 : : 0, /* properties_provided */
2467 : : 0, /* properties_destroyed */
2468 : : 0, /* todo_flags_start */
2469 : : 0, /* todo_flags_finish */
2470 : : };
2471 : :
2472 : : class pass_early_object_sizes : public gimple_opt_pass
2473 : : {
2474 : : public:
2475 : 286682 : pass_early_object_sizes (gcc::context *ctxt)
2476 : 573364 : : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
2477 : : {}
2478 : :
2479 : : /* opt_pass methods: */
2480 : 2430978 : unsigned int execute (function *fun) final override
2481 : : {
2482 : 2430978 : return object_sizes_execute (fun, true);
2483 : : }
2484 : : }; // class pass_object_sizes
2485 : : }; // class pass_early_object_sizes
2486 : : } // anon namespace
2487 : :
2488 : : gimple_opt_pass *
2489 : 286682 : make_pass_early_object_sizes (gcc::context *ctxt)
2490 : : {
2491 : 286682 : return new pass_early_object_sizes (ctxt);
2492 : : }
2493 : : }