Branch data Line data Source code
1 : : /* __builtin_object_size (ptr, object_size_type) computation
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : : Contributed by Jakub Jelinek <jakub@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "ssa.h"
29 : : #include "gimple-pretty-print.h"
30 : : #include "fold-const.h"
31 : : #include "tree-object-size.h"
32 : : #include "gimple-iterator.h"
33 : : #include "gimple-fold.h"
34 : : #include "tree-cfg.h"
35 : : #include "tree-dfa.h"
36 : : #include "stringpool.h"
37 : : #include "attribs.h"
38 : : #include "builtins.h"
39 : : #include "gimplify-me.h"
40 : : #include "gimplify.h"
41 : : #include "tree-ssa-dce.h"
42 : :
43 : : struct object_size_info
44 : : {
45 : : int object_size_type;
46 : : unsigned char pass;
47 : : bool changed;
48 : : bitmap visited, reexamine;
49 : : unsigned int *depths;
50 : : unsigned int *stack, *tos;
51 : : };
52 : :
53 : : struct GTY(()) object_size
54 : : {
55 : : /* Estimate of bytes till the end of the object. */
56 : : tree size;
57 : : /* Estimate of the size of the whole object. */
58 : : tree wholesize;
59 : : };
60 : :
61 : : static tree compute_object_offset (tree, const_tree);
62 : : static bool addr_object_size (struct object_size_info *,
63 : : const_tree, int, tree *, tree *t = NULL);
64 : : static tree alloc_object_size (const gcall *, int);
65 : : static tree access_with_size_object_size (const gcall *, int);
66 : : static tree pass_through_call (const gcall *);
67 : : static void collect_object_sizes_for (struct object_size_info *, tree);
68 : : static void expr_object_size (struct object_size_info *, tree, tree);
69 : : static bool merge_object_sizes (struct object_size_info *, tree, tree);
70 : : static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
71 : : static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
72 : : static void init_offset_limit (void);
73 : : static void check_for_plus_in_loops (struct object_size_info *, tree);
74 : : static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
75 : : unsigned int);
76 : :
77 : : /* object_sizes[0] is upper bound for the object size and number of bytes till
78 : : the end of the object.
79 : : object_sizes[1] is upper bound for the object size and number of bytes till
80 : : the end of the subobject (innermost array or field with address taken).
81 : : object_sizes[2] is lower bound for the object size and number of bytes till
82 : : the end of the object and object_sizes[3] lower bound for subobject.
83 : :
84 : : For static object sizes, the object size and the bytes till the end of the
85 : : object are both INTEGER_CST. In the dynamic case, they end up being either a
86 : : gimple variable or an INTEGER_CST. */
87 : : static vec<object_size> object_sizes[OST_END];
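: :
: : /* As an illustrative sketch (hypothetical user code, not from this file),
: :    given
: :
: :      struct A { char buf[8]; char pad[4]; } a;
: :      char *p = &a.buf[2];
: :
: :    __builtin_object_size (p, 0) is 10 (bytes to the end of the whole
: :    object), (p, 1) is 6 (bytes to the end of the subobject buf), and the
: :    lower-bound variants (p, 2) and (p, 3) are likewise 10 and 6 here,
: :    since the object is fully known.  */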
88 : :
89 : : /* Bitmaps recording which object sizes have been computed already. */
90 : : static bitmap computed[OST_END];
91 : :
92 : : /* Maximum value of offset we consider to be addition. */
93 : : static unsigned HOST_WIDE_INT offset_limit;
94 : :
95 : : /* Tell the generic SSA updater what kind of update is needed after the pass
96 : : executes. */
97 : : static unsigned todo;
98 : :
99 : : /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
100 : :
101 : : static inline bool
102 : 5406 : size_initval_p (tree val, int object_size_type)
103 : : {
104 : 5406 : return ((object_size_type & OST_MINIMUM)
105 : 5406 : ? integer_all_onesp (val) : integer_zerop (val));
106 : : }
107 : :
108 : : /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
109 : :
110 : : static inline bool
111 : 99484 : size_unknown_p (tree val, int object_size_type)
112 : : {
113 : 99484 : return ((object_size_type & OST_MINIMUM)
114 : 98394 : ? integer_zerop (val) : integer_all_onesp (val));
115 : : }
116 : :
117 : : /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
118 : :
119 : : static inline bool
120 : 53674 : size_valid_p (tree val, int object_size_type)
121 : : {
122 : 51632 : return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
123 : : }
124 : :
125 : : /* Return true if VAL is usable as an object size in the object_sizes
126 : : vectors. */
127 : :
128 : : static inline bool
129 : 10041 : size_usable_p (tree val)
130 : : {
131 : 8974 : return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
132 : : }
133 : :
134 : : /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
135 : :
136 : : static inline tree
137 : 11521 : size_initval (int object_size_type)
138 : : {
139 : 11521 : return ((object_size_type & OST_MINIMUM)
140 : 11521 : ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
141 : : }
142 : :
143 : : /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
144 : :
145 : : static inline tree
146 : 165993 : size_unknown (int object_size_type)
147 : : {
148 : 165993 : return ((object_size_type & OST_MINIMUM)
149 : 165993 : ? size_zero_node : TYPE_MAX_VALUE (sizetype));
150 : : }
151 : :
152 : : /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
153 : :
154 : : static inline void
155 : 31336 : object_sizes_grow (int object_size_type)
156 : : {
157 : 72160 : if (num_ssa_names > object_sizes[object_size_type].length ())
158 : 23978 : object_sizes[object_size_type].safe_grow (num_ssa_names, true);
159 : 31336 : }
160 : :
161 : : /* Release object_sizes[OBJECT_SIZE_TYPE]. */
162 : :
163 : : static inline void
164 : 27926184 : object_sizes_release (int object_size_type)
165 : : {
166 : 27926184 : object_sizes[object_size_type].release ();
167 : : }
168 : :
169 : : /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
170 : :
171 : : static inline bool
172 : 20087 : object_sizes_unknown_p (int object_size_type, unsigned varno)
173 : : {
174 : 20087 : return size_unknown_p (object_sizes[object_size_type][varno].size,
175 : 20087 : object_size_type);
176 : : }
177 : :
178 : : /* Return the raw size expression for VARNO corresponding to OSI. This returns
179 : : the TREE_VEC as is and should only be used during gimplification. */
180 : :
181 : : static inline object_size
182 : 798 : object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
183 : : {
184 : 798 : gcc_assert (osi->pass != 0);
185 : 798 : return object_sizes[osi->object_size_type][varno];
186 : : }
187 : :
188 : : /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
189 : : the whole object size. Use this for building size expressions based on size
190 : : of VARNO. */
191 : :
192 : : static inline tree
193 : 25007 : object_sizes_get (struct object_size_info *osi, unsigned varno,
194 : : bool whole = false)
195 : : {
196 : 25007 : tree ret;
197 : 25007 : int object_size_type = osi->object_size_type;
198 : :
199 : 25007 : if (whole)
200 : 5035 : ret = object_sizes[object_size_type][varno].wholesize;
201 : : else
202 : 19972 : ret = object_sizes[object_size_type][varno].size;
203 : :
204 : 25007 : if (object_size_type & OST_DYNAMIC)
205 : : {
206 : 6977 : if (TREE_CODE (ret) == MODIFY_EXPR)
207 : 469 : return TREE_OPERAND (ret, 0);
208 : 6508 : else if (TREE_CODE (ret) == TREE_VEC)
209 : 410 : return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
210 : : else
211 : 6098 : gcc_checking_assert (size_usable_p (ret));
212 : : }
213 : :
214 : : return ret;
215 : : }
216 : :
217 : : /* Set size for VARNO corresponding to OSI to VAL. */
218 : :
219 : : static inline void
220 : 11920 : object_sizes_initialize (struct object_size_info *osi, unsigned varno,
221 : : tree val, tree wholeval)
222 : : {
223 : 11920 : int object_size_type = osi->object_size_type;
224 : :
225 : 11920 : object_sizes[object_size_type][varno].size = val;
226 : 11920 : object_sizes[object_size_type][varno].wholesize = wholeval;
227 : 11920 : }
228 : :
229 : : /* Bundle NAME with EXPR: return a MODIFY_EXPR assigning EXPR (in sizetype) to NAME.
230 : : If EXPR is a TREE_VEC (emitted only for PHI nodes), store NAME as its last element and return the TREE_VEC. */
231 : :
232 : : static tree
233 : 509 : bundle_sizes (tree name, tree expr)
234 : : {
235 : 509 : gcc_checking_assert (TREE_TYPE (name) == sizetype);
236 : :
237 : 509 : if (TREE_CODE (expr) == TREE_VEC)
238 : : {
239 : 263 : TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
240 : 263 : return expr;
241 : : }
242 : :
243 : 246 : gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
244 : 246 : return build2 (MODIFY_EXPR, sizetype, name, expr);
245 : : }
246 : :
247 : : /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
248 : : maximum. For static sizes, the recorded size and wholesize are always INTEGER_CST
249 : : throughout the computation. For dynamic sizes, each of them may either be a
250 : : gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
251 : : expressions that need to be gimplified. TREE_VECs are special, they're
252 : : emitted only for GIMPLE_PHI and the PHI result variable is the last element
253 : : of the vector. */
254 : :
255 : : static bool
256 : 14354 : object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
257 : : tree wholeval)
258 : : {
259 : 14354 : int object_size_type = osi->object_size_type;
260 : 14354 : object_size osize = object_sizes[object_size_type][varno];
261 : 14354 : bool changed = true;
262 : :
263 : 14354 : tree oldval = osize.size;
264 : 14354 : tree old_wholeval = osize.wholesize;
265 : :
266 : 14354 : if (object_size_type & OST_DYNAMIC)
267 : : {
268 : 2730 : if (bitmap_bit_p (osi->reexamine, varno))
269 : : {
270 : 68 : val = bundle_sizes (oldval, val);
271 : 68 : wholeval = bundle_sizes (old_wholeval, wholeval);
272 : : }
273 : : else
274 : : {
275 : 2662 : gcc_checking_assert (size_initval_p (oldval, object_size_type));
276 : 2662 : gcc_checking_assert (size_initval_p (old_wholeval,
277 : : object_size_type));
278 : : /* For dynamic object sizes, all object sizes that are not gimple
279 : : variables will need to be gimplified. */
280 : 2662 : if (wholeval != val && !size_usable_p (wholeval))
281 : : {
282 : 55 : bitmap_set_bit (osi->reexamine, varno);
283 : 55 : wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
284 : : }
285 : 2662 : if (!size_usable_p (val))
286 : : {
287 : 318 : bitmap_set_bit (osi->reexamine, varno);
288 : 318 : tree newval = bundle_sizes (make_ssa_name (sizetype), val);
289 : 318 : if (val == wholeval)
290 : 122 : wholeval = newval;
291 : : val = newval;
292 : : }
293 : : /* If the new value is a temporary variable, mark it for
294 : : reexamination. */
295 : 2344 : else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
296 : 75 : bitmap_set_bit (osi->reexamine, varno);
297 : : }
298 : : }
299 : : else
300 : : {
301 : 9051 : enum tree_code code = (object_size_type & OST_MINIMUM
302 : 11624 : ? MIN_EXPR : MAX_EXPR);
303 : :
304 : 11624 : val = size_binop (code, val, oldval);
305 : 11624 : wholeval = size_binop (code, wholeval, old_wholeval);
306 : 11624 : changed = (tree_int_cst_compare (val, oldval) != 0
307 : 11624 : || tree_int_cst_compare (old_wholeval, wholeval) != 0);
308 : : }
309 : :
310 : 14354 : object_sizes[object_size_type][varno].size = val;
311 : 14354 : object_sizes[object_size_type][varno].wholesize = wholeval;
312 : :
313 : 14354 : return changed;
314 : : }
315 : :
316 : : /* Set temporary SSA names for object size and whole size to resolve dependency
317 : : loops in dynamic size computation. */
318 : :
319 : : static inline void
320 : 82 : object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
321 : : {
322 : 82 : tree val = object_sizes_get (osi, varno);
323 : :
324 : 82 : if (size_initval_p (val, osi->object_size_type))
325 : 68 : object_sizes_set (osi, varno,
326 : : make_ssa_name (sizetype),
327 : : make_ssa_name (sizetype));
328 : 82 : }
329 : :
330 : : /* Initialize OFFSET_LIMIT variable. */
331 : : static void
332 : 3804 : init_offset_limit (void)
333 : : {
334 : 3804 : if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
335 : 3804 : offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
336 : : else
337 : 0 : offset_limit = -1;
338 : 3804 : offset_limit /= 2;
339 : 3804 : }
340 : :
341 : : /* Return the number of bytes left in an object of size SZ when starting from
342 : : offset OFFSET. If WHOLESIZE is not NULL_TREE, use it to get the net offset
343 : : of the pointer, which should always be positive and hence within OFFSET_LIMIT for valid offsets. */
344 : :
345 : : static tree
346 : 12654 : size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE,
347 : : bool strict = true)
348 : : {
349 : 12654 : gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
350 : :
351 : : /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
352 : : offset from the whole object. */
353 : 12654 : if (wholesize && wholesize != sz
354 : 12654 : && (TREE_CODE (sz) != INTEGER_CST
355 : 345 : || TREE_CODE (wholesize) != INTEGER_CST
356 : 345 : || tree_int_cst_compare (sz, wholesize)))
357 : : {
358 : 439 : gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
359 : : sizetype));
360 : :
361 : : /* Restructure SZ - OFFSET as
362 : : WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
363 : : WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
364 : 439 : tree tmp = size_binop (MAX_EXPR, wholesize, sz);
365 : 439 : offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
366 : 439 : offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
367 : 439 : sz = tmp;
368 : : }
369 : :
370 : : /* Safe to convert now, since a valid net offset should be non-negative. */
371 : 12654 : if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
372 : 3095 : offset = fold_convert (sizetype, offset);
373 : :
374 : 12654 : if (TREE_CODE (offset) == INTEGER_CST)
375 : : {
376 : 12541 : if (integer_zerop (offset))
377 : : return sz;
378 : :
379 : : /* An offset that is negative or too large even after adjustment cannot be
380 : : within the bounds of an object. The exception here is when the base object size
381 : : has been overestimated (e.g. through PHI nodes or a COND_EXPR) and the
382 : : adjusted offset remains negative. If the caller wants to be
383 : : permissive, return the base size. */
384 : 6399 : if (compare_tree_int (offset, offset_limit) > 0)
385 : : {
386 : 41 : if (strict)
387 : 28 : return size_zero_node;
388 : : else
389 : : return sz;
390 : : }
391 : : }
392 : :
393 : 6471 : return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
394 : : }
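: :
: : /* A small worked example of the restructuring above (illustrative numbers
: :    only): with SZ == 4, WHOLESIZE == 16 and a conceptually negative OFFSET
: :    of -8, TMP becomes 16, the net offset becomes 16 + (-8) - 4 == 4, and
: :    the result is 16 - 4 == 12, i.e. the bytes remaining from the adjusted
: :    pointer to the end of the whole object.  */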
395 : :
396 : : /* Compute offset of EXPR within VAR. Return error_mark_node
397 : : if unknown. */
398 : :
399 : : static tree
400 : 19912 : compute_object_offset (tree expr, const_tree var)
401 : : {
402 : 19920 : enum tree_code code = PLUS_EXPR;
403 : 19920 : tree base, off, t;
404 : :
405 : 19920 : if (expr == var)
406 : 8689 : return size_zero_node;
407 : :
408 : 11231 : switch (TREE_CODE (expr))
409 : : {
410 : 6956 : case COMPONENT_REF:
411 : 6956 : base = compute_object_offset (TREE_OPERAND (expr, 0), var);
412 : 6956 : if (base == error_mark_node)
413 : : return base;
414 : :
415 : 6956 : t = TREE_OPERAND (expr, 1);
416 : 6956 : off = size_binop (PLUS_EXPR,
417 : : component_ref_field_offset (expr),
418 : : size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
419 : : / BITS_PER_UNIT));
420 : 6956 : break;
421 : :
422 : 8 : case REALPART_EXPR:
423 : 8 : CASE_CONVERT:
424 : 8 : case VIEW_CONVERT_EXPR:
425 : 8 : case NON_LVALUE_EXPR:
426 : 8 : return compute_object_offset (TREE_OPERAND (expr, 0), var);
427 : :
428 : 8 : case IMAGPART_EXPR:
429 : 8 : base = compute_object_offset (TREE_OPERAND (expr, 0), var);
430 : 8 : if (base == error_mark_node)
431 : : return base;
432 : :
433 : 8 : off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
434 : 8 : break;
435 : :
436 : 4258 : case ARRAY_REF:
437 : 4258 : base = compute_object_offset (TREE_OPERAND (expr, 0), var);
438 : 4258 : if (base == error_mark_node)
439 : : return base;
440 : :
441 : 4258 : t = TREE_OPERAND (expr, 1);
442 : 4258 : tree low_bound, unit_size;
443 : 4258 : low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
444 : 4258 : unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
445 : 4258 : if (! integer_zerop (low_bound))
446 : 3 : t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
447 : 4258 : if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
448 : : {
449 : 24 : code = MINUS_EXPR;
450 : 24 : t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
451 : : }
452 : 4258 : t = fold_convert (sizetype, t);
453 : 4258 : off = size_binop (MULT_EXPR, unit_size, t);
454 : 4258 : break;
455 : :
456 : 0 : case MEM_REF:
457 : 0 : gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
458 : 0 : return wide_int_to_tree (sizetype, mem_ref_offset (expr));
459 : :
460 : 1 : default:
461 : 1 : return error_mark_node;
462 : : }
463 : :
464 : 11222 : return size_binop (code, base, off);
465 : : }
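: :
: : /* Illustrative example (assumed layout): for
: :
: :      struct S { int i; char buf[8]; } s;
: :
: :    compute_object_offset on the expression s.buf[3] with VAR == s descends
: :    through the ARRAY_REF and COMPONENT_REF and returns
: :    byte_position (buf) + 3 * 1, i.e. 7 on a target with 4-byte int.  */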
466 : :
467 : : /* Return true if CONTAINER has a field of type INNER at OFFSET. */
468 : :
469 : : static bool
470 : 18 : inner_at_offset (tree container, tree inner, tree offset)
471 : : {
472 : 18 : gcc_assert (RECORD_OR_UNION_TYPE_P (container));
473 : :
474 : 39 : for (tree t = TYPE_FIELDS (container); t; t = DECL_CHAIN (t))
475 : : {
476 : 39 : if (TREE_CODE (t) != FIELD_DECL)
477 : 0 : continue;
478 : :
479 : : /* Skip over fields at bit offsets that are not BITS_PER_UNIT aligned
480 : : to avoid an accidental truncated match with BYTE_POSITION below since
481 : : the address of such fields cannot be taken. */
482 : 39 : if (wi::bit_and (wi::to_offset (DECL_FIELD_BIT_OFFSET (t)),
483 : 78 : BITS_PER_UNIT - 1) != 0)
484 : 0 : continue;
485 : :
486 : 39 : tree byte_offset = byte_position (t);
487 : 39 : if (TREE_CODE (byte_offset) != INTEGER_CST
488 : 39 : || tree_int_cst_lt (offset, byte_offset))
489 : 0 : return false;
490 : :
491 : : /* For an array, check the element type, otherwise the actual type. This
492 : : deliberately does not support the case of jumping from a pointer to
493 : : the middle of an array to its containing struct. */
494 : 39 : tree t_type = TREE_TYPE (t);
495 : 13 : if (((TREE_CODE (t_type) == ARRAY_TYPE && TREE_TYPE (t_type) == inner)
496 : 37 : || t_type == inner)
497 : 50 : && tree_int_cst_equal (byte_offset, offset))
498 : : return true;
499 : :
500 : : /* Nested structure or union, adjust the expected offset and dive in. */
501 : 52 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (t))
502 : 31 : && inner_at_offset (TREE_TYPE (t), inner,
503 : : fold_build2 (MINUS_EXPR, sizetype, offset,
504 : : byte_offset)))
505 : : return true;
506 : : }
507 : :
508 : : return false;
509 : : }
510 : :
511 : : /* For the input MEMREF of type MEMREF_TYPE, look for the presence of a field
512 : : of BASE_TYPE at OFFSET and return an adjusted WHOLESIZE if found. */
513 : :
514 : : static tree
515 : 5699 : get_wholesize_for_memref (tree memref, tree wholesize)
516 : : {
517 : 5699 : tree base = TREE_OPERAND (memref, 0);
518 : 5699 : tree offset = fold_convert (sizetype, TREE_OPERAND (memref, 1));
519 : 5699 : tree memref_type = TREE_TYPE (memref);
520 : 5699 : tree base_type = TREE_TYPE (base);
521 : :
522 : 5699 : if (POINTER_TYPE_P (base_type))
523 : 5699 : base_type = TREE_TYPE ((base_type));
524 : :
525 : 5699 : if (dump_file && (dump_flags & TDF_DETAILS))
526 : : {
527 : 4 : fprintf (dump_file, "wholesize_for_memref: ");
528 : 4 : print_generic_expr (dump_file, wholesize, dump_flags);
529 : 4 : fprintf (dump_file, ", offset: ");
530 : 4 : print_generic_expr (dump_file, offset, dump_flags);
531 : 4 : fprintf (dump_file, "\n");
532 : : }
533 : :
534 : 5699 : if (TREE_CODE (offset) != INTEGER_CST
535 : 5699 : || compare_tree_int (offset, offset_limit) < 0
536 : 5712 : || !RECORD_OR_UNION_TYPE_P (memref_type))
537 : 5688 : return wholesize;
538 : :
539 : 11 : offset = fold_build1 (NEGATE_EXPR, sizetype, offset);
540 : :
541 : 11 : if (inner_at_offset (memref_type, base_type, offset))
542 : 11 : wholesize = size_binop (PLUS_EXPR, wholesize, offset);
543 : :
544 : 11 : if (dump_file && (dump_flags & TDF_DETAILS))
545 : : {
546 : 0 : fprintf (dump_file, " new wholesize: ");
547 : 0 : print_generic_expr (dump_file, wholesize, dump_flags);
548 : 0 : fprintf (dump_file, "\n");
549 : : }
550 : :
551 : : return wholesize;
552 : : }
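: :
: : /* This caters to the container_of-style pattern (hypothetical user code):
: :
: :      struct outer { int a; struct inner in; };
: :      struct inner *ip = ...;
: :      struct outer *op
: :        = (struct outer *) ((char *) ip - offsetof (struct outer, in));
: :
: :    The MEM_REF based on IP has a negative constant offset; if struct outer
: :    really has a struct inner field at that offset, WHOLESIZE is bumped up
: :    by that offset so the bytes preceding IP are accounted for.  */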
553 : :
554 : : /* Return the size of the object designated by DECL, taking its initializer
555 : : into account if DECL either has one or if the initializer would not affect
556 : : the size; otherwise return the size of the object without the initializer
557 : : when MIN is true, else null. An object's initializer affects the object's
558 : : size if it is a struct type with a flexible array member. */
559 : :
560 : : tree
561 : 5063913 : decl_init_size (tree decl, bool min)
562 : : {
563 : 5063913 : tree size = DECL_SIZE_UNIT (decl);
564 : 5063913 : tree type = TREE_TYPE (decl);
565 : 5063913 : if (TREE_CODE (type) != RECORD_TYPE)
566 : : return size;
567 : :
568 : 2742001 : tree last = last_field (type);
569 : 2742001 : if (!last)
570 : : return size;
571 : :
572 : 2740180 : tree last_type = TREE_TYPE (last);
573 : 2740180 : if (TREE_CODE (last_type) != ARRAY_TYPE
574 : 2740180 : || TYPE_SIZE (last_type))
575 : : return size;
576 : :
577 : : /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
578 : : of the initializer and sometimes doesn't. */
579 : 2235 : size = TYPE_SIZE_UNIT (type);
580 : 2235 : tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
581 : 2235 : tree compsize = component_ref_size (ref);
582 : 2235 : if (!compsize)
583 : 1008 : return min ? size : NULL_TREE;
584 : :
585 : : /* The size includes tail padding and initializer elements. */
586 : 1725 : tree pos = byte_position (last);
587 : 1725 : size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
588 : 1725 : return size;
589 : : }
590 : :
591 : : /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
592 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
593 : : If unknown, set *PSIZE to size_unknown (object_size_type) and return false. */
594 : :
595 : : static bool
596 : 25531 : addr_object_size (struct object_size_info *osi, const_tree ptr,
597 : : int object_size_type, tree *psize, tree *pwholesize)
598 : : {
599 : 25531 : tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
600 : 25531 : tree var_size, bytes, wholebytes;
601 : :
602 : 25531 : gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
603 : :
604 : : /* Set to unknown and overwrite just before returning if the size
605 : : could be determined. */
606 : 25531 : *psize = size_unknown (object_size_type);
607 : 25531 : if (pwholesize)
608 : 7018 : *pwholesize = size_unknown (object_size_type);
609 : :
610 : 25531 : pt_var = TREE_OPERAND (ptr, 0);
611 : 42092 : while (handled_component_p (pt_var))
612 : 16561 : pt_var = TREE_OPERAND (pt_var, 0);
613 : :
614 : 25531 : if (!pt_var)
615 : : return false;
616 : :
617 : 25531 : if (TREE_CODE (pt_var) == MEM_REF)
618 : : {
619 : 5699 : tree sz, wholesize;
620 : :
621 : 4766 : if (!osi || (object_size_type & OST_SUBOBJECT) != 0
622 : 7013 : || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
623 : : {
624 : 4386 : compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
625 : : object_size_type & ~OST_SUBOBJECT, &sz);
626 : 4386 : wholesize = get_wholesize_for_memref (pt_var, sz);
627 : : }
628 : : else
629 : : {
630 : 1313 : tree var = TREE_OPERAND (pt_var, 0);
631 : 1313 : if (osi->pass == 0)
632 : 1313 : collect_object_sizes_for (osi, var);
633 : 2626 : if (bitmap_bit_p (computed[object_size_type],
634 : 1313 : SSA_NAME_VERSION (var)))
635 : : {
636 : 1313 : sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
637 : 1313 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
638 : 1313 : wholesize = get_wholesize_for_memref (pt_var, wholesize);
639 : : }
640 : : else
641 : 0 : sz = wholesize = size_unknown (object_size_type);
642 : : }
643 : 5699 : if (!size_unknown_p (sz, object_size_type))
644 : 3106 : sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
645 : :
646 : 5699 : if (!size_unknown_p (sz, object_size_type)
647 : 5699 : && (TREE_CODE (sz) != INTEGER_CST
648 : 3052 : || compare_tree_int (sz, offset_limit) < 0))
649 : : {
650 : 3096 : pt_var_size = sz;
651 : 3096 : pt_var_wholesize = wholesize;
652 : : }
653 : : }
654 : 19832 : else if (DECL_P (pt_var))
655 : : {
656 : 39462 : pt_var_size = pt_var_wholesize
657 : 19731 : = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
658 : 19731 : if (!pt_var_size)
659 : : return false;
660 : : }
661 : 101 : else if (TREE_CODE (pt_var) == STRING_CST)
662 : 101 : pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
663 : : else
664 : : return false;
665 : :
666 : 5800 : if (pt_var_size)
667 : : {
668 : : /* Validate the size determined above if it is a constant. */
669 : 22815 : if (TREE_CODE (pt_var_size) == INTEGER_CST
670 : 22815 : && compare_tree_int (pt_var_size, offset_limit) >= 0)
671 : : return false;
672 : : }
673 : :
674 : 25382 : if (pt_var != TREE_OPERAND (ptr, 0))
675 : : {
676 : 9188 : tree var;
677 : :
678 : 9188 : if (object_size_type & OST_SUBOBJECT)
679 : : {
680 : 4009 : var = TREE_OPERAND (ptr, 0);
681 : :
682 : 4009 : while (var != pt_var
683 : 4009 : && TREE_CODE (var) != BIT_FIELD_REF
684 : : && TREE_CODE (var) != COMPONENT_REF
685 : : && TREE_CODE (var) != ARRAY_REF
686 : : && TREE_CODE (var) != ARRAY_RANGE_REF
687 : : && TREE_CODE (var) != REALPART_EXPR
688 : 4009 : && TREE_CODE (var) != IMAGPART_EXPR)
689 : 0 : var = TREE_OPERAND (var, 0);
690 : 4009 : if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
691 : 1763 : var = TREE_OPERAND (var, 0);
692 : 4009 : if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
693 : 3796 : || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
694 : 7785 : || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
695 : 1749 : && tree_int_cst_lt (pt_var_size,
696 : 1749 : TYPE_SIZE_UNIT (TREE_TYPE (var)))))
697 : : var = pt_var;
698 : 3660 : else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
699 : : {
700 : : tree v = var;
701 : : /* For &X->fld, compute object size if fld isn't a flexible array
702 : : member. */
703 : 6302 : bool is_flexible_array_mem_ref = false;
704 : 6302 : while (v && v != pt_var)
705 : 3153 : switch (TREE_CODE (v))
706 : : {
707 : 0 : case ARRAY_REF:
708 : 0 : if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
709 : : {
710 : 0 : tree domain
711 : 0 : = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
712 : 0 : if (domain && TYPE_MAX_VALUE (domain))
713 : : {
714 : : v = NULL_TREE;
715 : : break;
716 : : }
717 : : }
718 : 0 : v = TREE_OPERAND (v, 0);
719 : 0 : break;
720 : : case REALPART_EXPR:
721 : : case IMAGPART_EXPR:
722 : : v = NULL_TREE;
723 : : break;
724 : 3153 : case COMPONENT_REF:
725 : : /* When the ref is not to an aggregate type, i.e. an array,
726 : : a record or a union, it cannot have a flexible size,
727 : : so compute the object size directly. */
728 : 3153 : if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
729 : : {
730 : : v = NULL_TREE;
731 : : break;
732 : : }
733 : : /* If the ref is to a record or union type, but the type
734 : : does not recursively include a flexible array member, compute
735 : : the object size directly. */
736 : 2255 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
737 : : {
738 : 58 : if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
739 : : {
740 : : v = NULL_TREE;
741 : : break;
742 : : }
743 : : else
744 : : {
745 : 16 : v = TREE_OPERAND (v, 0);
746 : 16 : break;
747 : : }
748 : : }
749 : : /* Now the ref is to an array type. */
750 : 2197 : gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
751 : 2197 : is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
752 : 5439 : while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
753 : 2997 : if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
754 : : != UNION_TYPE
755 : 2997 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
756 : : != QUAL_UNION_TYPE)
757 : : break;
758 : : else
759 : 1045 : v = TREE_OPERAND (v, 0);
760 : 2197 : if (TREE_CODE (v) == COMPONENT_REF
761 : 2197 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
762 : : == RECORD_TYPE)
763 : : {
764 : : /* Compute the object size only if V is not a
765 : : flexible array member. */
766 : 1952 : if (!is_flexible_array_mem_ref)
767 : : {
768 : : v = NULL_TREE;
769 : : break;
770 : : }
771 : 949 : v = TREE_OPERAND (v, 0);
772 : : }
773 : 1504 : while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
774 : 446 : if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
775 : : != UNION_TYPE
776 : 446 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
777 : : != QUAL_UNION_TYPE)
778 : : break;
779 : : else
780 : 310 : v = TREE_OPERAND (v, 0);
781 : 1194 : if (v != pt_var)
782 : : v = NULL_TREE;
783 : : else
784 : 3153 : v = pt_var;
785 : : break;
786 : : default:
787 : 6302 : v = pt_var;
788 : : break;
789 : : }
790 : 3149 : if (v == pt_var)
791 : 6598 : var = pt_var;
792 : : }
793 : : }
794 : : else
795 : : var = pt_var;
796 : :
797 : 9188 : if (var != pt_var)
798 : : {
799 : 2481 : var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
800 : 2481 : if (!TREE_CONSTANT (var_size))
801 : 0 : var_size = get_or_create_ssa_default_def (cfun, var_size);
802 : 2481 : if (!var_size)
803 : : return false;
804 : : }
805 : 6707 : else if (!pt_var_size)
806 : : return false;
807 : : else
808 : : var_size = pt_var_size;
809 : 7911 : bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
810 : 7911 : if (bytes != error_mark_node)
811 : : {
812 : 7910 : bytes = size_for_offset (var_size, bytes);
813 : 7910 : if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
814 : : {
815 : 779 : tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
816 : : pt_var);
817 : 779 : if (bytes2 != error_mark_node)
818 : : {
819 : 779 : bytes2 = size_for_offset (pt_var_size, bytes2);
820 : 779 : bytes = size_binop (MIN_EXPR, bytes, bytes2);
821 : : }
822 : : }
823 : : }
824 : : else
825 : 1 : bytes = size_unknown (object_size_type);
826 : :
827 : 7911 : wholebytes
828 : 7911 : = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
829 : : }
830 : 16194 : else if (!pt_var_size)
831 : : return false;
832 : : else
833 : : {
834 : : bytes = pt_var_size;
835 : : wholebytes = pt_var_wholesize;
836 : : }
837 : :
838 : 24079 : if (!size_unknown_p (bytes, object_size_type)
839 : 49497 : && size_valid_p (bytes, object_size_type)
840 : 23966 : && !size_unknown_p (bytes, object_size_type)
841 : 24079 : && size_valid_p (wholebytes, object_size_type))
842 : : {
843 : 23966 : *psize = bytes;
844 : 23966 : if (pwholesize)
845 : 5703 : *pwholesize = wholebytes;
846 : 23966 : return true;
847 : : }
848 : :
849 : : return false;
850 : : }
851 : :
852 : : /* Compute __builtin_object_size for a CALL to .ACCESS_WITH_SIZE.
853 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
854 : :
855 : : The 2nd, 3rd, and 4th arguments of the call determine the size of
856 : : the object referenced by the CALL:
857 : :
858 : : 2nd argument REF_TO_SIZE: The reference to the size of the object,
859 : : 3rd argument TYPE_OF_SIZE + ACCESS_MODE: An integer constant with a pointer
860 : : TYPE.
861 : : The pointee TYPE of this pointer TYPE is the TYPE of the object referenced
862 : : by REF_TO_SIZE.
863 : :
864 : : 4th argument: The TYPE_SIZE_UNIT of the element TYPE of the array. */
865 : :
866 : : static tree
867 : 126 : access_with_size_object_size (const gcall *call, int object_size_type)
868 : : {
869 : : /* If not for dynamic object size, return. */
870 : 126 : if ((object_size_type & OST_DYNAMIC) == 0)
871 : 87 : return size_unknown (object_size_type);
872 : 39 : gcc_assert (gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE));
873 : :
874 : 39 : tree ref_to_size = gimple_call_arg (call, 1);
875 : 39 : tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (call, 2)));
876 : :
877 : : /* The 4th argument is the TYPE_SIZE_UNIT for the element of the original
878 : : flexible array. */
879 : 39 : tree element_size = gimple_call_arg (call, 3);
880 : :
881 : 39 : tree size = fold_build2 (MEM_REF, type, ref_to_size,
882 : : build_int_cst (ptr_type_node, 0));
883 : :
884 : : /* If size is negative value, treat it as zero. */
885 : 39 : if (!TYPE_UNSIGNED (type))
886 : : {
887 : 21 : tree cond_expr = fold_build2 (LT_EXPR, boolean_type_node,
888 : : unshare_expr (size), build_zero_cst (type));
889 : 21 : size = fold_build3 (COND_EXPR, integer_type_node, cond_expr,
890 : : build_zero_cst (type), size);
891 : : }
892 : :
893 : 39 : size = size_binop (MULT_EXPR,
894 : : fold_convert (sizetype, size),
895 : : fold_convert (sizetype, element_size));
896 : :
897 : 39 : if (!todo)
898 : 17 : todo = TODO_update_ssa_only_virtuals;
899 : :
900 : : return size;
901 : : }
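: :
: : /* Illustrative sketch of the construct this serves (assumes the counted_by
: :    attribute; hypothetical user code):
: :
: :      struct S { int count; char buf[] __attribute__ ((counted_by (count))); };
: :      struct S *p = ...;
: :      size_t sz = __builtin_dynamic_object_size (p->buf, 0);
: :
: :    The access to p->buf is wrapped in .ACCESS_WITH_SIZE, so SZ evaluates to
: :    p->count * sizeof (char), with a negative count treated as zero; the
: :    static variants simply return unknown.  */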
902 : :
903 : : /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
904 : : Handles calls to functions declared with attribute alloc_size.
905 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
906 : : If unknown, return size_unknown (object_size_type). */
907 : :
908 : : static tree
909 : 1858 : alloc_object_size (const gcall *call, int object_size_type)
910 : : {
911 : 1858 : gcc_assert (is_gimple_call (call));
912 : :
913 : 1858 : tree calltype;
914 : 1858 : tree callfn = gimple_call_fndecl (call);
915 : 1858 : if (callfn)
916 : 1714 : calltype = TREE_TYPE (callfn);
917 : : else
918 : 144 : calltype = gimple_call_fntype (call);
919 : :
920 : 1858 : if (!calltype)
921 : 0 : return size_unknown (object_size_type);
922 : :
923 : : /* Set to positions of alloc_size arguments. */
924 : 1858 : int arg1 = -1, arg2 = -1;
925 : 1858 : tree alloc_size = lookup_attribute ("alloc_size",
926 : 1858 : TYPE_ATTRIBUTES (calltype));
927 : 3541 : if (alloc_size && TREE_VALUE (alloc_size))
928 : : {
929 : 1683 : tree p = TREE_VALUE (alloc_size);
930 : :
931 : 1683 : arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
932 : 1683 : if (TREE_CHAIN (p))
933 : 187 : arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
934 : : }
935 : 175 : else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
936 : 114 : && callfn
937 : 289 : && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
938 : : arg1 = 0;
939 : :
940 : : /* Non-const arguments are OK here, let the caller handle constness. */
941 : 1683 : if (arg1 < 0
942 : 1765 : || (unsigned) arg1 >= gimple_call_num_args (call)
943 : 3448 : || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
944 : 93 : return size_unknown (object_size_type);
945 : :
946 : 1765 : tree targ1 = gimple_call_arg (call, arg1);
947 : 3530 : if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
948 : 3528 : || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
949 : 2 : return size_unknown (object_size_type);
950 : 1763 : targ1 = fold_convert (sizetype, targ1);
951 : 1763 : tree bytes = NULL_TREE;
952 : 1763 : if (arg2 >= 0)
953 : : {
954 : 187 : tree targ2 = gimple_call_arg (call, arg2);
955 : 374 : if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
956 : 374 : || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
957 : 0 : return size_unknown (object_size_type);
958 : 187 : targ2 = fold_convert (sizetype, targ2);
959 : 187 : bytes = size_binop (MULT_EXPR, targ1, targ2);
960 : : }
961 : : else
962 : : bytes = targ1;
963 : :
964 : 1763 : return bytes ? bytes : size_unknown (object_size_type);
965 : : }
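: :
: : /* Illustrative example (hypothetical declarations):
: :
: :      void *my_malloc (size_t) __attribute__ ((alloc_size (1)));
: :      void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));
: :
: :    For p = my_malloc (32) the size is 32.  For q = my_calloc (n, 8) with a
: :    non-constant n, the static object size is unknown, while
: :    __builtin_dynamic_object_size (q, 0) evaluates to n * 8.  */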
966 : :
967 : : /* Compute __builtin_object_size for CALL, which is a call to either
968 : : BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
969 : : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
970 : : If unknown, return size_unknown (object_size_type). */
971 : :
972 : : static tree
973 : 160 : strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
974 : : {
975 : 160 : tree src = gimple_call_arg (call, 0);
976 : 160 : tree sz = size_unknown (object_size_type);
977 : 160 : tree n = NULL_TREE;
978 : :
979 : 160 : if (is_strndup)
980 : 110 : n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
981 : : gimple_call_arg (call, 1));
982 : : /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
983 : : way it likes. */
984 : : else
985 : : {
986 : 50 : tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
987 : 50 : if (strlen_fn)
988 : : {
989 : 50 : sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
990 : : build_call_expr (strlen_fn, 1, src));
991 : 50 : todo = TODO_update_ssa_only_virtuals;
992 : : }
993 : : }
994 : :
995 : : /* In all other cases, return the size of SRC since the object size cannot
996 : : exceed that. We cannot do this for OST_MINIMUM unless SRC points into a
997 : : string constant since otherwise the object size could go all the way down
998 : : to zero. */
999 : 160 : if (!size_valid_p (sz, object_size_type)
1000 : 152 : || size_unknown_p (sz, object_size_type))
1001 : : {
1002 : 118 : tree wholesrc = NULL_TREE;
1003 : 118 : if (TREE_CODE (src) == ADDR_EXPR)
1004 : 48 : wholesrc = get_base_address (TREE_OPERAND (src, 0));
1005 : :
1006 : : /* If the source points within a string constant, we try to get its
1007 : : length. */
1008 : 48 : if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
1009 : : {
1010 : 48 : tree len = c_strlen (src, 0);
1011 : 48 : if (len)
1012 : 48 : sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
1013 : : }
1014 : :
1015 : : /* For maximum estimate, our next best guess is the object size of the
1016 : : source. */
1017 : 118 : if (size_unknown_p (sz, object_size_type)
1018 : 118 : && !(object_size_type & OST_MINIMUM))
1019 : 31 : compute_builtin_object_size (src, object_size_type, &sz);
1020 : : }
1021 : :
1022 : : /* String duplication allocates at least one byte, so we should never fail
1023 : : for OST_MINIMUM. */
1024 : 160 : if ((!size_valid_p (sz, object_size_type)
1025 : 152 : || size_unknown_p (sz, object_size_type))
1026 : 40 : && (object_size_type & OST_MINIMUM))
1027 : 35 : sz = size_one_node;
1028 : :
1029 : : /* Factor in the N. */
1030 : 160 : return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
1031 : : }
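: :
: : /* Illustrative example: for
: :
: :      char *p = strdup ("hello");
: :      char *q = strndup (src, 10);
: :
: :    the dynamic size of p is strlen ("hello") + 1, which folds to 6, and the
: :    size of q is bounded by MIN (10 + 1, size of src); for OST_MINIMUM the
: :    result never drops below 1 since at least the terminating NUL is
: :    allocated.  */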
1032 : :
1033 : : /* If the object size is propagated from one of the function's arguments directly
1034 : : to its return value, return that argument for GIMPLE_CALL statement CALL.
1035 : : Otherwise return NULL. */
1036 : :
1037 : : static tree
1038 : 2201 : pass_through_call (const gcall *call)
1039 : : {
1040 : 2201 : unsigned rf = gimple_call_return_flags (call);
1041 : 2201 : if (rf & ERF_RETURNS_ARG)
1042 : : {
1043 : 57 : unsigned argnum = rf & ERF_RETURN_ARG_MASK;
1044 : 57 : if (argnum < gimple_call_num_args (call))
1045 : 57 : return gimple_call_arg (call, argnum);
1046 : : }
1047 : :
1048 : : /* __builtin_assume_aligned is intentionally not marked RET1. */
1049 : 2144 : if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
1050 : 0 : return gimple_call_arg (call, 0);
1051 : :
1052 : : return NULL_TREE;
1053 : : }
1054 : :
1055 : : /* Emit PHI nodes for the size expressions SIZE and WHOLESIZE of the PHI statement STMT. */
1056 : :
1057 : : static void
1058 : 142 : emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
1059 : : {
1060 : 142 : tree phires;
1061 : 142 : gphi *wholephi = NULL;
1062 : :
1063 : 142 : if (wholesize != size)
1064 : : {
1065 : 121 : phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
1066 : 121 : wholephi = create_phi_node (phires, gimple_bb (stmt));
1067 : : }
1068 : :
1069 : 142 : phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
1070 : 142 : gphi *phi = create_phi_node (phires, gimple_bb (stmt));
1071 : 142 : gphi *obj_phi = as_a <gphi *> (stmt);
1072 : :
1073 : 142 : gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
1074 : 142 : gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
1075 : :
1076 : 456 : for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
1077 : : {
1078 : 314 : gimple_seq seq = NULL;
1079 : 314 : tree wsz = TREE_VEC_ELT (wholesize, i);
1080 : 314 : tree sz = TREE_VEC_ELT (size, i);
1081 : :
1082 : : /* If we built an expression, we will need to build statements
1083 : : and insert them on the edge right away. */
1084 : 314 : if (TREE_CODE (wsz) != SSA_NAME)
1085 : 149 : wsz = force_gimple_operand (wsz, &seq, true, NULL);
1086 : 314 : if (TREE_CODE (sz) != SSA_NAME)
1087 : : {
1088 : 161 : gimple_seq s;
1089 : 161 : sz = force_gimple_operand (sz, &s, true, NULL);
1090 : 161 : gimple_seq_add_seq (&seq, s);
1091 : : }
1092 : :
1093 : 314 : if (seq)
1094 : 0 : gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
1095 : :
1096 : 314 : if (wholephi)
1097 : 272 : add_phi_arg (wholephi, wsz,
1098 : : gimple_phi_arg_edge (obj_phi, i),
1099 : : gimple_phi_arg_location (obj_phi, i));
1100 : :
1101 : 314 : add_phi_arg (phi, sz,
1102 : : gimple_phi_arg_edge (obj_phi, i),
1103 : : gimple_phi_arg_location (obj_phi, i));
1104 : : }
1105 : 142 : }
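: :
: : /* Illustrative example (hypothetical user code): for
: :
: :      char a[10], b[20];
: :      char *p = cond ? &a[4] : &b[2];
: :
: :    the dynamic size of p is itself a PHI: the size PHI gets the arguments
: :    6 and 18 and the whole-size PHI the arguments 10 and 20, added on the
: :    corresponding incoming edges of the PHI for p.  */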
1106 : :
1107 : : /* Descend through EXPR and return size_unknown if it uses any SSA variable
1108 : : that object_size_set or object_size_set_temp generated and that turned out
1109 : : to be size_unknown, as noted in UNKNOWNS. */
1110 : :
1111 : : static tree
1112 : 3266 : propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
1113 : : {
1114 : 3266 : int object_size_type = osi->object_size_type;
1115 : :
1116 : 3266 : switch (TREE_CODE (expr))
1117 : : {
1118 : 1178 : case SSA_NAME:
1119 : 1178 : if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
1120 : 0 : return size_unknown (object_size_type);
1121 : : return expr;
1122 : :
1123 : 363 : case MIN_EXPR:
1124 : 363 : case MAX_EXPR:
1125 : 363 : {
1126 : 363 : tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
1127 : : unknowns);
1128 : 363 : if (size_unknown_p (res, object_size_type))
1129 : : return res;
1130 : :
1131 : 363 : res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
1132 : 363 : if (size_unknown_p (res, object_size_type))
1133 : : return res;
1134 : :
1135 : : return expr;
1136 : : }
1137 : 347 : case MODIFY_EXPR:
1138 : 347 : {
1139 : 347 : tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
1140 : : unknowns);
1141 : 347 : if (size_unknown_p (res, object_size_type))
1142 : : return res;
1143 : : return expr;
1144 : : }
1145 : : case TREE_VEC:
1146 : 1196 : for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1147 : : {
1148 : 912 : tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
1149 : : unknowns);
1150 : 912 : if (size_unknown_p (res, object_size_type))
1151 : : return res;
1152 : : }
1153 : : return expr;
1154 : 483 : case PLUS_EXPR:
1155 : 483 : case MINUS_EXPR:
1156 : 483 : {
1157 : 483 : tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
1158 : : unknowns);
1159 : 483 : if (size_unknown_p (res, object_size_type))
1160 : : return res;
1161 : :
1162 : : return expr;
1163 : : }
1164 : : default:
1165 : : return expr;
1166 : : }
1167 : : }
1168 : :
1169 : : /* Walk through size expressions that need reexamination and generate
1170 : : statements for them. */
1171 : :
1172 : : static void
1173 : 2098 : gimplify_size_expressions (object_size_info *osi)
1174 : : {
1175 : 2098 : int object_size_type = osi->object_size_type;
1176 : 2098 : bitmap_iterator bi;
1177 : 2098 : unsigned int i;
1178 : 2098 : bool changed;
1179 : :
1180 : : /* Step 1: Propagate unknowns into expressions. */
1181 : 2098 : bitmap reexamine = BITMAP_ALLOC (NULL);
1182 : 2098 : bitmap_copy (reexamine, osi->reexamine);
1183 : 2098 : bitmap unknowns = BITMAP_ALLOC (NULL);
1184 : 2098 : do
1185 : : {
1186 : 2098 : changed = false;
1187 : 2497 : EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1188 : : {
1189 : 399 : object_size cur = object_sizes_get_raw (osi, i);
1190 : :
1191 : 399 : if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
1192 : : object_size_type)
1193 : 399 : || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
1194 : : unknowns),
1195 : : object_size_type))
1196 : : {
1197 : : /* Record the SSAs we're overwriting to propagate the
1198 : : unknowns. */
1199 : 0 : tree oldval = object_sizes_get (osi, i);
1200 : 0 : tree old_wholeval = object_sizes_get (osi, i, true);
1201 : :
1202 : 0 : bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
1203 : 0 : bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
1204 : 0 : object_sizes_initialize (osi, i,
1205 : : size_unknown (object_size_type),
1206 : : size_unknown (object_size_type));
1207 : 0 : bitmap_clear_bit (osi->reexamine, i);
1208 : 0 : changed = true;
1209 : : }
1210 : : }
1211 : 2098 : bitmap_copy (reexamine, osi->reexamine);
1212 : : }
1213 : : while (changed);
1214 : :
1215 : : /* Release all unknowns. */
1216 : 2098 : EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
1217 : 0 : release_ssa_name (ssa_name (i));
1218 : :
1219 : 2098 : BITMAP_FREE (unknowns);
1220 : 2098 : BITMAP_FREE (reexamine);
1221 : :
1222 : : /* Expand all size expressions to put their definitions close to the objects
1223 : : for which size is being computed. */
1224 : 2497 : EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
1225 : : {
1226 : 399 : gimple_seq seq = NULL;
1227 : 399 : object_size osize = object_sizes_get_raw (osi, i);
1228 : :
1229 : 399 : gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
1230 : 399 : enum gimple_code code = gimple_code (stmt);
1231 : :
1232 : : /* PHI nodes need special attention. */
1233 : 399 : if (code == GIMPLE_PHI)
1234 : 142 : emit_phi_nodes (stmt, osize.size, osize.wholesize);
1235 : : else
1236 : : {
1237 : 257 : tree size_expr = NULL_TREE;
1238 : :
1239 : : /* Bundle wholesize in with the size to gimplify if needed. */
1240 : 257 : if (osize.wholesize != osize.size
1241 : 257 : && !size_usable_p (osize.wholesize))
1242 : 2 : size_expr = size_binop (COMPOUND_EXPR,
1243 : : osize.wholesize,
1244 : : osize.size);
1245 : 255 : else if (!size_usable_p (osize.size))
1246 : : size_expr = osize.size;
1247 : :
1248 : 244 : if (size_expr)
1249 : : {
1250 : 244 : gimple_stmt_iterator gsi;
1251 : 244 : if (code == GIMPLE_NOP)
1252 : 22 : gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1253 : : else
1254 : 233 : gsi = gsi_for_stmt (stmt);
1255 : :
1256 : 244 : force_gimple_operand (size_expr, &seq, true, NULL);
1257 : 244 : gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
1258 : : }
1259 : : }
1260 : :
1261 : : /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1262 : 399 : object_sizes_initialize (osi, i,
1263 : : object_sizes_get (osi, i),
1264 : : object_sizes_get (osi, i, true));
1265 : : }
1266 : 2098 : }
1267 : :
1268 : : /* Compute the __builtin_object_size value for PTR and set *PSIZE to
1269 : : the resulting value. OBJECT_SIZE_TYPE is the second argument
1270 : : to __builtin_object_size.
1271 : :
1272 : : Returns true on success and false when the object size could not
1273 : : be determined. */
1274 : :
1275 : : bool
1276 : 130729 : compute_builtin_object_size (tree ptr, int object_size_type,
1277 : : tree *psize)
1278 : : {
1279 : 130729 : gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
1280 : :
1281 : : /* Set to unknown and overwrite just before returning if the size
1282 : : could be determined. */
1283 : 130729 : *psize = size_unknown (object_size_type);
1284 : :
1285 : 130729 : if (! offset_limit)
1286 : 1073 : init_offset_limit ();
1287 : :
1288 : 130729 : if (TREE_CODE (ptr) == ADDR_EXPR)
1289 : 18513 : return addr_object_size (NULL, ptr, object_size_type, psize);
1290 : :
1291 : 112216 : if (TREE_CODE (ptr) != SSA_NAME
1292 : 112216 : || !POINTER_TYPE_P (TREE_TYPE (ptr)))
1293 : : return false;
1294 : :
1295 : 111911 : if (computed[object_size_type] == NULL)
1296 : : {
1297 : 97090 : if (optimize || object_size_type & OST_SUBOBJECT)
1298 : : return false;
1299 : :
1300 : : /* When not optimizing, rather than failing, make a small effort
1301 : : to determine the object size without the full benefit of
1302 : : the (costly) computation below. */
1303 : 2101 : gimple *def = SSA_NAME_DEF_STMT (ptr);
1304 : 2101 : if (gimple_code (def) == GIMPLE_ASSIGN)
1305 : : {
1306 : 1496 : tree_code code = gimple_assign_rhs_code (def);
1307 : 1496 : if (code == POINTER_PLUS_EXPR)
1308 : : {
1309 : 945 : tree offset = gimple_assign_rhs2 (def);
1310 : 945 : ptr = gimple_assign_rhs1 (def);
1311 : :
1312 : 945 : if (((object_size_type & OST_DYNAMIC)
1313 : 823 : || (tree_fits_shwi_p (offset)
1314 : 505 : && compare_tree_int (offset, offset_limit) <= 0))
1315 : 1450 : && compute_builtin_object_size (ptr, object_size_type,
1316 : : psize))
1317 : : {
1318 : 243 : *psize = size_for_offset (*psize, offset);
1319 : 243 : return true;
1320 : : }
1321 : : }
1322 : : }
1323 : 1858 : return false;
1324 : : }
1325 : :
1326 : 14821 : struct object_size_info osi;
1327 : 14821 : osi.object_size_type = object_size_type;
1328 : 14821 : if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
1329 : : {
1330 : 9488 : bitmap_iterator bi;
1331 : 9488 : unsigned int i;
1332 : :
1333 : 9488 : object_sizes_grow (object_size_type);
1334 : 9488 : if (dump_file)
1335 : : {
1336 : 12 : fprintf (dump_file, "Computing %s %s%sobject size for ",
1337 : 12 : (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
1338 : 12 : (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1339 : 12 : (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1340 : 12 : print_generic_expr (dump_file, ptr, dump_flags);
1341 : 12 : fprintf (dump_file, ":\n");
1342 : : }
1343 : :
1344 : 9488 : osi.visited = BITMAP_ALLOC (NULL);
1345 : 9488 : osi.reexamine = BITMAP_ALLOC (NULL);
1346 : :
1347 : 9488 : if (!(object_size_type & OST_DYNAMIC))
1348 : : {
1349 : 7390 : osi.depths = NULL;
1350 : 7390 : osi.stack = NULL;
1351 : 7390 : osi.tos = NULL;
1352 : : }
1353 : :
1354 : : /* First pass: walk UD chains, compute object sizes that can be computed.
1355 : : osi.reexamine bitmap at the end will contain versions of SSA_NAMES
1356 : : that need to be reexamined. For both static and dynamic size
1357 : : computation, reexamination is for propagation across dependency loops.
1358 : : The dynamic case has the additional use case where the computed
1359 : : expression needs to be gimplified. */
1360 : 9488 : osi.pass = 0;
1361 : 9488 : osi.changed = false;
1362 : 9488 : collect_object_sizes_for (&osi, ptr);
1363 : :
1364 : 9488 : if (object_size_type & OST_DYNAMIC)
1365 : : {
1366 : 2098 : osi.pass = 1;
1367 : 2098 : gimplify_size_expressions (&osi);
1368 : 2098 : bitmap_clear (osi.reexamine);
1369 : : }
1370 : :
1371 : : /* Second pass: keep recomputing object sizes of variables
1372 : : that need reexamination, until no object sizes are
1373 : : increased or all object sizes are computed. */
1374 : 9488 : if (! bitmap_empty_p (osi.reexamine))
1375 : : {
1376 : 291 : bitmap reexamine = BITMAP_ALLOC (NULL);
1377 : :
1378 : : /* If looking for minimum instead of maximum object size,
1379 : : detect cases where a pointer is increased in a loop.
1380 : : Although even without this detection pass 2 would eventually
1381 : : terminate, it could take a long time. If a pointer is
1382 : : increasing this way, we need to assume 0 object size.
1383 : : E.g. p = &buf[0]; while (cond) p = p + 4; */
1384 : 291 : if (object_size_type & OST_MINIMUM)
1385 : : {
1386 : 54 : osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
1387 : 54 : osi.stack = XNEWVEC (unsigned int, num_ssa_names);
1388 : 27 : osi.tos = osi.stack;
1389 : 27 : osi.pass = 1;
1390 : : /* collect_object_sizes_for is changing
1391 : : osi.reexamine bitmap, so iterate over a copy. */
1392 : 27 : bitmap_copy (reexamine, osi.reexamine);
1393 : 82 : EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1394 : 55 : if (bitmap_bit_p (osi.reexamine, i))
1395 : 55 : check_for_plus_in_loops (&osi, ssa_name (i));
1396 : :
1397 : 27 : free (osi.depths);
1398 : 27 : osi.depths = NULL;
1399 : 27 : free (osi.stack);
1400 : 27 : osi.stack = NULL;
1401 : 27 : osi.tos = NULL;
1402 : : }
1403 : :
1404 : 381 : do
1405 : : {
1406 : 381 : osi.pass = 2;
1407 : 381 : osi.changed = false;
1408 : : /* collect_object_sizes_for is changing
1409 : : osi.reexamine bitmap, so iterate over a copy. */
1410 : 381 : bitmap_copy (reexamine, osi.reexamine);
1411 : 1238 : EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1412 : 857 : if (bitmap_bit_p (osi.reexamine, i))
1413 : : {
1414 : 857 : collect_object_sizes_for (&osi, ssa_name (i));
1415 : 857 : if (dump_file && (dump_flags & TDF_DETAILS))
1416 : : {
1417 : 0 : fprintf (dump_file, "Reexamining ");
1418 : 0 : print_generic_expr (dump_file, ssa_name (i),
1419 : : dump_flags);
1420 : 0 : fprintf (dump_file, "\n");
1421 : : }
1422 : : }
1423 : : }
1424 : 381 : while (osi.changed);
1425 : :
1426 : 291 : BITMAP_FREE (reexamine);
1427 : : }
1428 : 10111 : EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
1429 : 623 : bitmap_set_bit (computed[object_size_type], i);
1430 : :
1431 : : /* Debugging dumps. */
1432 : 9488 : if (dump_file)
1433 : : {
1434 : 66 : EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
1435 : 54 : if (!object_sizes_unknown_p (object_size_type, i))
1436 : : {
1437 : 54 : print_generic_expr (dump_file, ssa_name (i),
1438 : : dump_flags);
1439 : 108 : fprintf (dump_file,
1440 : : ": %s %s%sobject size ",
1441 : 54 : ((object_size_type & OST_MINIMUM) ? "minimum"
1442 : : : "maximum"),
1443 : : (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1444 : 54 : (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1445 : 54 : print_generic_expr (dump_file, object_sizes_get (&osi, i),
1446 : : dump_flags);
1447 : 54 : fprintf (dump_file, "\n");
1448 : : }
1449 : : }
1450 : :
1451 : 9488 : BITMAP_FREE (osi.reexamine);
1452 : 9488 : BITMAP_FREE (osi.visited);
1453 : : }
1454 : :
1455 : 14821 : *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
1456 : 14821 : return !size_unknown_p (*psize, object_size_type);
1457 : : }
1458 : :
1459 : : /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1460 : :
1461 : : static void
1462 : 8201 : expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1463 : : {
1464 : 8201 : int object_size_type = osi->object_size_type;
1465 : 8201 : unsigned int varno = SSA_NAME_VERSION (ptr);
1466 : 8201 : tree bytes, wholesize;
1467 : :
1468 : 8201 : gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1469 : 8201 : gcc_assert (osi->pass == 0);
1470 : :
1471 : 8201 : if (TREE_CODE (value) == WITH_SIZE_EXPR)
1472 : 0 : value = TREE_OPERAND (value, 0);
1473 : :
1474 : : /* Pointer variables should have been handled by merge_object_sizes. */
1475 : 8201 : gcc_assert (TREE_CODE (value) != SSA_NAME
1476 : : || !POINTER_TYPE_P (TREE_TYPE (value)));
1477 : :
1478 : 8201 : if (TREE_CODE (value) == ADDR_EXPR)
1479 : 6677 : addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1480 : : else
1481 : 1524 : bytes = wholesize = size_unknown (object_size_type);
1482 : :
1483 : 8201 : object_sizes_set (osi, varno, bytes, wholesize);
1484 : 8201 : }
1485 : :
1486 : :
1487 : : /* Compute object_sizes for PTR, defined to the result of a call. */
1488 : :
1489 : : static void
1490 : 2144 : call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1491 : : {
1492 : 2144 : int object_size_type = osi->object_size_type;
1493 : 2144 : unsigned int varno = SSA_NAME_VERSION (ptr);
1494 : 2144 : tree bytes = NULL_TREE;
1495 : :
1496 : 2144 : gcc_assert (is_gimple_call (call));
1497 : :
1498 : 2144 : gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1499 : 2144 : gcc_assert (osi->pass == 0);
1500 : :
1501 : 2144 : bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1502 : 2144 : bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1503 : 2144 : bool is_access_with_size
1504 : 2144 : = gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE);
1505 : 2144 : if (is_strdup || is_strndup)
1506 : 160 : bytes = strdup_object_size (call, object_size_type, is_strndup);
1507 : 1984 : else if (is_access_with_size)
1508 : 126 : bytes = access_with_size_object_size (call, object_size_type);
1509 : : else
1510 : 1858 : bytes = alloc_object_size (call, object_size_type);
1511 : :
1512 : 2144 : if (!size_valid_p (bytes, object_size_type))
1513 : 367 : bytes = size_unknown (object_size_type);
1514 : :
1515 : 2144 : object_sizes_set (osi, varno, bytes, bytes);
1516 : 2144 : }
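
/* Illustrative sketch, not part of this file: the three branches above are
   visible from user code as follows.  Identifiers are invented and the
   expected constants in the comments assume the objsz pass runs at -O2; the
   strdup case additionally assumes the source string length is known.  */

#include <stdlib.h>
#include <string.h>
#include <stddef.h>

size_t
example_alloc (void)
{
  char *p = malloc (32);                 /* alloc_object_size: 32 from alloc_size */
  char *q = strdup ("abc");              /* strdup_object_size: strlen + 1 == 4 */
  size_t n = __builtin_object_size (p, 0)
	     + __builtin_object_size (q, 0);
  free (p);
  free (q);
  return n;                              /* expected 32 + 4 == 36 */
}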
1517 : :
1518 : :
1519 : : /* Compute object_sizes for PTR, defined to an unknown value. */
1520 : :
1521 : : static void
1522 : 0 : unknown_object_size (struct object_size_info *osi, tree ptr)
1523 : : {
1524 : 0 : int object_size_type = osi->object_size_type;
1525 : 0 : unsigned int varno = SSA_NAME_VERSION (ptr);
1526 : :
1527 : 0 : gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1528 : 0 : gcc_checking_assert (osi->pass == 0);
1529 : 0 : tree bytes = size_unknown (object_size_type);
1530 : :
1531 : 0 : object_sizes_set (osi, varno, bytes, bytes);
1532 : 0 : }
1533 : :
1534 : :
1535                 :             : /* Merge the object sizes of ORIG into DEST.  Return true if
1536                 :             :    the object size might need reexamination later.  */
1537 : :
1538 : : static bool
1539 : 2200 : merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1540 : : {
1541 : 2200 : int object_size_type = osi->object_size_type;
1542 : 2200 : unsigned int varno = SSA_NAME_VERSION (dest);
1543 : 2200 : tree orig_bytes, wholesize;
1544 : :
1545 : 2200 : if (object_sizes_unknown_p (object_size_type, varno))
1546 : : return false;
1547 : :
1548 : 2200 : if (osi->pass == 0)
1549 : 1346 : collect_object_sizes_for (osi, orig);
1550 : :
1551 : 2200 : orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1552 : 2200 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1553 : :
1554 : 2200 : if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1555 : 1049 : osi->changed = true;
1556 : :
1557 : 2200 : return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1558 : : }
1559 : :
1560 : :
1561 : : /* Compute object_sizes for VAR, defined to the result of an assignment
1562 : : with operator POINTER_PLUS_EXPR. Return true if the object size might
1563 : : need reexamination later. */
1564 : :
1565 : : static bool
1566 : 1095 : plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1567 : : {
1568 : 1095 : int object_size_type = osi->object_size_type;
1569 : 1095 : unsigned int varno = SSA_NAME_VERSION (var);
1570 : 1095 : tree bytes, wholesize;
1571 : 1095 : tree op0, op1;
1572 : 1095 : bool reexamine = false;
1573 : :
1574 : 1095 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1575 : : {
1576 : 1095 : op0 = gimple_assign_rhs1 (stmt);
1577 : 1095 : op1 = gimple_assign_rhs2 (stmt);
1578 : : }
1579 : 0 : else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
1580 : : {
1581 : 0 : tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
1582 : 0 : gcc_assert (TREE_CODE (rhs) == MEM_REF);
1583 : 0 : op0 = TREE_OPERAND (rhs, 0);
1584 : 0 : op1 = TREE_OPERAND (rhs, 1);
1585 : : }
1586 : : else
1587 : 0 : gcc_unreachable ();
1588 : :
1589 : 1095 : if (object_sizes_unknown_p (object_size_type, varno))
1590 : : return false;
1591 : :
1592 : : /* Handle PTR + OFFSET here. */
1593 : 1095 : if ((TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
1594 : : {
1595 : 1090 : if (TREE_CODE (op0) == SSA_NAME)
1596 : : {
1597 : 872 : if (osi->pass == 0)
1598 : 701 : collect_object_sizes_for (osi, op0);
1599 : :
1600 : 872 : bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
1601 : 872 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
1602 : 872 : reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
1603 : : }
1604 : : else
1605 : : {
1606 : : /* op0 will be ADDR_EXPR here. We should never come here during
1607 : : reexamination. */
1608 : 218 : gcc_checking_assert (osi->pass == 0);
1609 : 218 : addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
1610 : : }
1611 : :
1612 : 1090 : bool pos_offset = (size_valid_p (op1, 0)
1613 : 886 : && compare_tree_int (op1, offset_limit) <= 0);
1614 : :
1615 : : /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1616 : : since the wholesize could be non-zero and a negative offset could give
1617 : : a non-zero size. */
1618 : 1090 : if (size_unknown_p (bytes, 0))
1619 : : ;
1620                 :             :       /* In the static case, we want SIZE_FOR_OFFSET to go a bit easy on us if
1621 : : it sees a negative offset since BYTES could have been
1622 : : overestimated. */
1623 : 829 : else if ((object_size_type & OST_DYNAMIC)
1624 : 699 : || bytes != wholesize
1625 : 370 : || pos_offset)
1626 : 616 : bytes = size_for_offset (bytes, op1, wholesize,
1627 : : ((object_size_type & OST_DYNAMIC)
1628 : : || pos_offset));
1629 : : /* In the static case, with a negative offset, the best estimate for
1630 : : minimum size is size_unknown but for maximum size, the wholesize is a
1631 : : better estimate than size_unknown. */
1632 : 213 : else if (object_size_type & OST_MINIMUM)
1633 : 0 : bytes = size_unknown (object_size_type);
1634 : : else
1635 : 213 : bytes = wholesize;
1636 : : }
1637 : : else
1638 : 5 : bytes = wholesize = size_unknown (object_size_type);
1639 : :
1640 : 1095 : if (!size_valid_p (bytes, object_size_type)
1641 : 1093 : || !size_valid_p (wholesize, object_size_type))
1642 : 2 : bytes = wholesize = size_unknown (object_size_type);
1643 : :
1644 : 1095 : if (object_sizes_set (osi, varno, bytes, wholesize))
1645 : 981 : osi->changed = true;
1646 : : return reexamine;
1647 : : }
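
/* Illustrative sketch, not part of this file: the POINTER_PLUS_EXPR handling
   above as seen from user code.  Identifiers are invented; the expected
   values assume the objsz pass runs at -O2.  */

#include <stddef.h>

static char example_arr[16];

size_t
example_plus (void)
{
  char *q = example_arr + 4;             /* POINTER_PLUS_EXPR with offset 4 */
  size_t max = __builtin_object_size (q, 0);   /* expected 16 - 4 == 12 */
  size_t min = __builtin_object_size (q, 2);   /* expected 12 as well */
  return max + min;
}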
1648 : :
1649 : : /* Compute the dynamic object size for VAR. Return the result in SIZE and
1650 : : WHOLESIZE. */
1651 : :
1652 : : static void
1653 : 355 : dynamic_object_size (struct object_size_info *osi, tree var,
1654 : : tree *size, tree *wholesize)
1655 : : {
1656 : 355 : int object_size_type = osi->object_size_type;
1657 : :
1658 : 355 : if (TREE_CODE (var) == SSA_NAME)
1659 : : {
1660 : 231 : unsigned varno = SSA_NAME_VERSION (var);
1661 : :
1662 : 231 : collect_object_sizes_for (osi, var);
1663 : 231 : *size = object_sizes_get (osi, varno);
1664 : 231 : *wholesize = object_sizes_get (osi, varno, true);
1665 : : }
1666 : 124 : else if (TREE_CODE (var) == ADDR_EXPR)
1667 : 123 : addr_object_size (osi, var, object_size_type, size, wholesize);
1668 : : else
1669 : 1 : *size = *wholesize = size_unknown (object_size_type);
1670 : 355 : }
1671 : :
1672 : : /* Compute object_sizes for VAR, defined at STMT, which is
1673 : : a COND_EXPR. Return true if the object size might need reexamination
1674 : : later. */
1675 : :
1676 : : static bool
1677 : 0 : cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1678 : : {
1679 : 0 : tree then_, else_;
1680 : 0 : int object_size_type = osi->object_size_type;
1681 : 0 : unsigned int varno = SSA_NAME_VERSION (var);
1682 : 0 : bool reexamine = false;
1683 : :
1684 : 0 : gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1685 : :
1686 : 0 : if (object_sizes_unknown_p (object_size_type, varno))
1687 : : return false;
1688 : :
1689 : 0 : then_ = gimple_assign_rhs2 (stmt);
1690 : 0 : else_ = gimple_assign_rhs3 (stmt);
1691 : :
1692 : 0 : if (object_size_type & OST_DYNAMIC)
1693 : : {
1694 : 0 : tree then_size, then_wholesize, else_size, else_wholesize;
1695 : :
1696 : 0 : dynamic_object_size (osi, then_, &then_size, &then_wholesize);
1697 : 0 : if (!size_unknown_p (then_size, object_size_type))
1698 : 0 : dynamic_object_size (osi, else_, &else_size, &else_wholesize);
1699 : :
1700 : 0 : tree cond_size, cond_wholesize;
1701 : 0 : if (size_unknown_p (then_size, object_size_type)
1702 : 0 : || size_unknown_p (else_size, object_size_type))
1703 : 0 : cond_size = cond_wholesize = size_unknown (object_size_type);
1704 : : else
1705 : : {
1706 : 0 : cond_size = fold_build3 (COND_EXPR, sizetype,
1707 : : gimple_assign_rhs1 (stmt),
1708 : : then_size, else_size);
1709 : 0 : cond_wholesize = fold_build3 (COND_EXPR, sizetype,
1710 : : gimple_assign_rhs1 (stmt),
1711 : : then_wholesize, else_wholesize);
1712 : : }
1713 : :
1714 : 0 : object_sizes_set (osi, varno, cond_size, cond_wholesize);
1715 : :
1716 : 0 : return false;
1717 : : }
1718 : :
1719 : 0 : if (TREE_CODE (then_) == SSA_NAME)
1720 : 0 : reexamine |= merge_object_sizes (osi, var, then_);
1721 : : else
1722 : 0 : expr_object_size (osi, var, then_);
1723 : :
1724 : 0 : if (object_sizes_unknown_p (object_size_type, varno))
1725 : : return reexamine;
1726 : :
1727 : 0 : if (TREE_CODE (else_) == SSA_NAME)
1728 : 0 : reexamine |= merge_object_sizes (osi, var, else_);
1729 : : else
1730 : 0 : expr_object_size (osi, var, else_);
1731 : :
1732 : : return reexamine;
1733 : : }
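
/* Illustrative sketch, not part of this file: a conditional pointer selection
   (a COND_EXPR or, more commonly, a PHI in GIMPLE) takes the larger arm for
   the maximum modes and the smaller arm for the minimum modes; OST_DYNAMIC
   instead builds a COND_EXPR over the two sizes.  Identifiers are invented;
   expected values assume -O2.  */

#include <stddef.h>

static char example_small[8], example_big[24];

size_t
example_cond (int c)
{
  char *p = c ? example_small : example_big;
  size_t max = __builtin_object_size (p, 0);   /* expected 24 (larger arm) */
  size_t min = __builtin_object_size (p, 2);   /* expected 8 (smaller arm) */
  return max + min;
}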
1734 : :
1735 : : /* Find size of an object passed as a parameter to the function. */
1736 : :
1737 : : static void
1738 : 1601 : parm_object_size (struct object_size_info *osi, tree var)
1739 : : {
1740 : 1601 : int object_size_type = osi->object_size_type;
1741 : 1601 : tree parm = SSA_NAME_VAR (var);
1742 : :
1743 : 1601 : if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1744 : : {
1745 : 1156 : expr_object_size (osi, var, parm);
1746 : 1156 : return;
1747 : : }
1748 : :
1749 : : /* Look for access attribute. */
1750 : 445 : rdwr_map rdwr_idx;
1751 : :
1752 : 445 : tree fndecl = cfun->decl;
1753 : 445 : const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1754 : 445 : tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1755 : 445 : tree sz = NULL_TREE;
1756 : :
1757 : : /* If we have an access attribute with a usable size argument... */
1758 : 18 : if (access && access->sizarg != UINT_MAX
1759 : : /* ... and either PARM is void * or has a type that is complete and has a
1760 : : constant size... */
1761 : 461 : && ((typesize && poly_int_tree_p (typesize))
1762 : 4 : || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1763 : : {
1764 : 11 : tree fnargs = DECL_ARGUMENTS (fndecl);
1765 : 11 : tree arg = NULL_TREE;
1766 : 11 : unsigned argpos = 0;
1767 : :
1768 : : /* ... then walk through the parameters to pick the size parameter and
1769 : : safely scale it by the type size if needed.
1770 : :
1771 : : TODO: we could also compute the size of VLAs where the size is
1772 : : given by a function parameter. */
1773 : 18 : for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1774 : 18 : if (argpos == access->sizarg)
1775 : : {
1776 : 11 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1777 : 11 : sz = get_or_create_ssa_default_def (cfun, arg);
1778 : 11 : if (sz != NULL_TREE)
1779 : : {
1780 : 11 : sz = fold_convert (sizetype, sz);
1781 : 11 : if (typesize)
1782 : 8 : sz = size_binop (MULT_EXPR, sz, typesize);
1783 : : }
1784 : : break;
1785 : : }
1786 : : }
1787 : 11 : if (!sz)
1788 : 434 : sz = size_unknown (object_size_type);
1789 : :
1790 : 445 : object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
1791 : 445 : }
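
/* Illustrative sketch, not part of this file: the access-attribute branch
   above only applies to OST_DYNAMIC, i.e. __builtin_dynamic_object_size.
   The function name is invented; the expected result assumes the dynamic
   objsz pass runs.  */

#include <stddef.h>

__attribute__ ((access (write_only, 1, 2)))
size_t
example_parm (char *buf, size_t n)
{
  /* Parameter 2 (N) is picked as the size of BUF and scaled by
     sizeof (char), so this is expected to evaluate to N at run time.  */
  return __builtin_dynamic_object_size (buf, 0);
}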
1792 : :
1793 : : /* Compute an object size expression for VAR, which is the result of a PHI
1794 : : node. */
1795 : :
1796 : : static void
1797 : 181 : phi_dynamic_object_size (struct object_size_info *osi, tree var)
1798 : : {
1799 : 181 : int object_size_type = osi->object_size_type;
1800 : 181 : unsigned int varno = SSA_NAME_VERSION (var);
1801 : 181 : gimple *stmt = SSA_NAME_DEF_STMT (var);
1802 : 181 : unsigned i, num_args = gimple_phi_num_args (stmt);
1803 : 181 : bool wholesize_needed = false;
1804 : :
1805 : : /* The extra space is for the PHI result at the end, which object_sizes_set
1806 : : sets for us. */
1807 : 181 : tree sizes = make_tree_vec (num_args + 1);
1808 : 181 : tree wholesizes = make_tree_vec (num_args + 1);
1809 : :
1810 : : /* Bail out if the size of any of the PHI arguments cannot be
1811 : : determined. */
1812 : 502 : for (i = 0; i < num_args; i++)
1813 : : {
1814 : 360 : edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
1815 : 360 : if (e->flags & EDGE_COMPLEX)
1816 : : break;
1817 : :
1818 : 355 : tree rhs = gimple_phi_arg_def (stmt, i);
1819 : 355 : tree size, wholesize;
1820 : :
1821 : 355 : dynamic_object_size (osi, rhs, &size, &wholesize);
1822 : :
1823 : 355 : if (size_unknown_p (size, object_size_type))
1824 : : break;
1825 : :
1826 : 321 : if (size != wholesize)
1827 : 245 : wholesize_needed = true;
1828 : :
1829 : 321 : TREE_VEC_ELT (sizes, i) = size;
1830 : 321 : TREE_VEC_ELT (wholesizes, i) = wholesize;
1831 : : }
1832 : :
1833 : 181 : if (i < num_args)
1834 : : {
1835 : 39 : ggc_free (sizes);
1836 : 39 : ggc_free (wholesizes);
1837 : 39 : sizes = wholesizes = size_unknown (object_size_type);
1838 : : }
1839 : :
1840 : : /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1841 : : nodes. */
1842 : 142 : else if (!wholesize_needed)
1843 : : {
1844 : 21 : ggc_free (wholesizes);
1845 : 21 : wholesizes = sizes;
1846 : : }
1847 : :
1848 : 181 : object_sizes_set (osi, varno, sizes, wholesizes);
1849 : 181 : }
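
/* Illustrative sketch, not part of this file: for a PHI node the dynamic size
   becomes a PHI (or COND_EXPR) over the per-argument size expressions, so the
   result stays exact even when none of the sizes is a compile-time constant.
   Identifiers are invented; the expected result assumes the dynamic objsz
   pass runs.  */

#include <stdlib.h>
#include <stddef.h>

size_t
example_phi (int c, size_t a, size_t b)
{
  char *p = c ? malloc (a) : malloc (b); /* PHI over two allocations */
  size_t sz = __builtin_dynamic_object_size (p, 0);   /* expected c ? a : b */
  free (p);
  return sz;
}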
1850 : :
1851                 :             : /* Compute object sizes for VAR.
1852                 :             :    For an ADDR_EXPR, the object size is the number of remaining bytes
1853                 :             :    to the end of the object (where what is considered an object depends on
1854                 :             :    OSI->object_size_type).
1855                 :             :    For an allocation GIMPLE_CALL like malloc or calloc, the object size is
1856                 :             :    the size of the allocation.
1857                 :             :    For a POINTER_PLUS_EXPR whose second operand is a constant integer, the
1858                 :             :    object size is the object size of the first operand minus the constant.
1859                 :             :    If the constant is bigger than the number of remaining bytes until the
1860                 :             :    end of the object, the object size is 0; if it instead represents a
1861                 :             :    pointer subtraction, the object size is size_unknown (object_size_type).
1862                 :             :    To differentiate addition from subtraction, ADDR_EXPR returns
1863                 :             :    size_unknown (object_size_type) for all objects bigger than half of the
1864                 :             :    address space; constants less than half of the address space are
1865                 :             :    considered addition, while bigger constants are treated as subtraction.
1866                 :             :    For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
1867                 :             :    the object size is the object size of that argument.
1868                 :             :    Otherwise, the object size is the maximum of the object sizes of the
1869                 :             :    variables that it might be set to.  */
1870 : :
1871 : : static void
1872 : 13936 : collect_object_sizes_for (struct object_size_info *osi, tree var)
1873 : : {
1874 : 13936 : int object_size_type = osi->object_size_type;
1875 : 13936 : unsigned int varno = SSA_NAME_VERSION (var);
1876 : 13936 : gimple *stmt;
1877 : 13936 : bool reexamine;
1878 : :
1879 : 13936 : if (bitmap_bit_p (computed[object_size_type], varno))
1880 : : return;
1881 : :
1882 : 12790 : if (osi->pass == 0)
1883 : : {
1884 : 11933 : if (bitmap_set_bit (osi->visited, varno))
1885 : : {
1886 : : /* Initialize to 0 for maximum size and M1U for minimum size so that
1887 : : it gets immediately overridden. */
1888 : 11521 : object_sizes_initialize (osi, varno,
1889 : : size_initval (object_size_type),
1890 : : size_initval (object_size_type));
1891 : : }
1892 : : else
1893 : : {
1894 : : /* Found a dependency loop. Mark the variable for later
1895 : : re-examination. */
1896 : 412 : if (object_size_type & OST_DYNAMIC)
1897 : 82 : object_sizes_set_temp (osi, varno);
1898 : :
1899 : 412 : bitmap_set_bit (osi->reexamine, varno);
1900 : 412 : if (dump_file && (dump_flags & TDF_DETAILS))
1901 : : {
1902 : 0 : fprintf (dump_file, "Found a dependency loop at ");
1903 : 0 : print_generic_expr (dump_file, var, dump_flags);
1904 : 0 : fprintf (dump_file, "\n");
1905 : : }
1906 : 412 : return;
1907 : : }
1908 : : }
1909 : :
1910 : 12378 : if (dump_file && (dump_flags & TDF_DETAILS))
1911 : : {
1912 : 6 : fprintf (dump_file, "Visiting use-def links for ");
1913 : 6 : print_generic_expr (dump_file, var, dump_flags);
1914 : 6 : fprintf (dump_file, "\n");
1915 : : }
1916 : :
1917 : 12378 : stmt = SSA_NAME_DEF_STMT (var);
1918 : 12378 : reexamine = false;
1919 : :
1920 : 12378 : switch (gimple_code (stmt))
1921 : : {
1922 : 6662 : case GIMPLE_ASSIGN:
1923 : 6662 : {
1924 : 6662 : tree rhs = gimple_assign_rhs1 (stmt);
1925 : 6662 : if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1926 : 6662 : || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1927 : 4952 : && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1928 : 1095 : reexamine = plus_stmt_object_size (osi, var, stmt);
1929 : 5567 : else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1930 : 0 : reexamine = cond_expr_object_size (osi, var, stmt);
1931 : 5567 : else if (gimple_assign_single_p (stmt)
1932 : 5567 : || gimple_assign_unary_nop_p (stmt))
1933 : : {
1934 : 5567 : if (TREE_CODE (rhs) == SSA_NAME
1935 : 5567 : && POINTER_TYPE_P (TREE_TYPE (rhs)))
1936 : 249 : reexamine = merge_object_sizes (osi, var, rhs);
1937 : : else
1938 : 5318 : expr_object_size (osi, var, rhs);
1939 : : }
1940 : : else
1941 : 0 : unknown_object_size (osi, var);
1942 : : break;
1943 : : }
1944 : :
1945 : 2201 : case GIMPLE_CALL:
1946 : 2201 : {
1947 : 2201 : gcall *call_stmt = as_a <gcall *> (stmt);
1948 : 2201 : tree arg = pass_through_call (call_stmt);
1949 : 2201 : if (arg)
1950 : : {
1951 : 57 : if (TREE_CODE (arg) == SSA_NAME
1952 : 57 : && POINTER_TYPE_P (TREE_TYPE (arg)))
1953 : 46 : reexamine = merge_object_sizes (osi, var, arg);
1954 : : else
1955 : 11 : expr_object_size (osi, var, arg);
1956 : : }
1957 : : else
1958 : 2144 : call_object_size (osi, var, call_stmt);
1959 : : break;
1960 : : }
1961 : :
1962 : 0 : case GIMPLE_ASM:
1963 : : /* Pointers defined by __asm__ statements can point anywhere. */
1964 : 0 : unknown_object_size (osi, var);
1965 : 0 : break;
1966 : :
1967 : 1601 : case GIMPLE_NOP:
1968 : 1601 : if (SSA_NAME_VAR (var)
1969 : 1601 : && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1970 : 1601 : parm_object_size (osi, var);
1971 : : else
1972 : : /* Uninitialized SSA names point nowhere. */
1973 : 0 : unknown_object_size (osi, var);
1974 : : break;
1975 : :
1976 : 1914 : case GIMPLE_PHI:
1977 : 1914 : {
1978 : 1914 : unsigned i;
1979 : :
1980 : 1914 : if (object_size_type & OST_DYNAMIC)
1981 : : {
1982 : 181 : phi_dynamic_object_size (osi, var);
1983 : 181 : break;
1984 : : }
1985 : :
1986 : 6558 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
1987 : : {
1988 : 4886 : tree rhs = gimple_phi_arg (stmt, i)->def;
1989 : :
1990 : 4886 : if (object_sizes_unknown_p (object_size_type, varno))
1991 : : break;
1992 : :
1993 : 4825 : if (TREE_CODE (rhs) == SSA_NAME)
1994 : 1905 : reexamine |= merge_object_sizes (osi, var, rhs);
1995 : 2920 : else if (osi->pass == 0)
1996 : 1716 : expr_object_size (osi, var, rhs);
1997 : : }
1998 : : break;
1999 : : }
2000 : :
2001 : 0 : default:
2002 : 0 : gcc_unreachable ();
2003 : : }
2004 : :
2005 : : /* Dynamic sizes use placeholder temps to return an answer, so it is always
2006 : : safe to set COMPUTED for them. */
2007 : 12378 : if ((object_size_type & OST_DYNAMIC)
2008 : 12378 : || !reexamine || object_sizes_unknown_p (object_size_type, varno))
2009 : : {
2010 : 10878 : bitmap_set_bit (computed[object_size_type], varno);
2011 : 10878 : if (!(object_size_type & OST_DYNAMIC))
2012 : 8216 : bitmap_clear_bit (osi->reexamine, varno);
2013 : 2662 : else if (reexamine)
2014 : 88 : bitmap_set_bit (osi->reexamine, varno);
2015 : : }
2016 : : else
2017 : : {
2018 : 1500 : bitmap_set_bit (osi->reexamine, varno);
2019 : 1500 : if (dump_file && (dump_flags & TDF_DETAILS))
2020 : : {
2021 : 0 : fprintf (dump_file, "Need to reexamine ");
2022 : 0 : print_generic_expr (dump_file, var, dump_flags);
2023 : 0 : fprintf (dump_file, "\n");
2024 : : }
2025 : : }
2026 : : }
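
/* Illustrative sketch, not part of this file: the "maximum of the object
   sizes of the variables that it might be set to" rule above means a single
   arm with an unknown size poisons the static result.  Identifiers are
   invented; the expected value assumes -O2.  */

#include <stddef.h>

static char example_known[16];

size_t
example_merge (int c, char *unknown)
{
  char *p = c ? example_known : unknown; /* one arm has no known size */
  return __builtin_object_size (p, 0);   /* expected (size_t) -1, i.e. unknown */
}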
2027 : :
2028 : :
2029 : : /* Helper function for check_for_plus_in_loops. Called recursively
2030 : : to detect loops. */
2031 : :
2032 : : static void
2033 : 20 : check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
2034 : : unsigned int depth)
2035 : : {
2036 : 20 : gimple *stmt = SSA_NAME_DEF_STMT (var);
2037 : 20 : unsigned int varno = SSA_NAME_VERSION (var);
2038 : :
2039 : 20 : if (osi->depths[varno])
2040 : : {
2041 : 10 : if (osi->depths[varno] != depth)
2042 : : {
2043 : 10 : unsigned int *sp;
2044 : :
2045 : : /* Found a loop involving pointer addition. */
2046 : 20 : for (sp = osi->tos; sp > osi->stack; )
2047 : : {
2048 : 20 : --sp;
2049 : 20 : bitmap_clear_bit (osi->reexamine, *sp);
2050 : 20 : bitmap_set_bit (computed[osi->object_size_type], *sp);
2051 : 20 : object_sizes_set (osi, *sp, size_zero_node,
2052 : : object_sizes_get (osi, *sp, true));
2053 : 20 : if (*sp == varno)
2054 : : break;
2055 : : }
2056 : : }
2057 : 10 : return;
2058 : : }
2059 : 10 : else if (! bitmap_bit_p (osi->reexamine, varno))
2060 : : return;
2061 : :
2062 : 10 : osi->depths[varno] = depth;
2063 : 10 : *osi->tos++ = varno;
2064 : :
2065 : 10 : switch (gimple_code (stmt))
2066 : : {
2067 : :
2068 : 10 : case GIMPLE_ASSIGN:
2069 : 10 : {
2070 : 10 : if ((gimple_assign_single_p (stmt)
2071 : 10 : || gimple_assign_unary_nop_p (stmt))
2072 : 10 : && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2073 : : {
2074 : 0 : tree rhs = gimple_assign_rhs1 (stmt);
2075 : :
2076 : 0 : check_for_plus_in_loops_1 (osi, rhs, depth);
2077 : : }
2078 : 10 : else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2079 : : {
2080 : 10 : tree basevar = gimple_assign_rhs1 (stmt);
2081 : 10 : tree cst = gimple_assign_rhs2 (stmt);
2082 : :
2083 : 10 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2084 : :
2085 : 20 : check_for_plus_in_loops_1 (osi, basevar,
2086 : 10 : depth + !integer_zerop (cst));
2087 : : }
2088 : : else
2089 : 0 : gcc_unreachable ();
2090 : : break;
2091 : : }
2092 : :
2093 : 0 : case GIMPLE_CALL:
2094 : 0 : {
2095 : 0 : gcall *call_stmt = as_a <gcall *> (stmt);
2096 : 0 : tree arg = pass_through_call (call_stmt);
2097 : 0 : if (arg)
2098 : : {
2099 : 0 : if (TREE_CODE (arg) == SSA_NAME)
2100 : 0 : check_for_plus_in_loops_1 (osi, arg, depth);
2101 : : else
2102 : 0 : gcc_unreachable ();
2103 : : }
2104 : : break;
2105 : : }
2106 : :
2107 : : case GIMPLE_PHI:
2108 : : {
2109 : : unsigned i;
2110 : :
2111 : 0 : for (i = 0; i < gimple_phi_num_args (stmt); i++)
2112 : : {
2113 : 0 : tree rhs = gimple_phi_arg (stmt, i)->def;
2114 : :
2115 : 0 : if (TREE_CODE (rhs) == SSA_NAME)
2116 : 0 : check_for_plus_in_loops_1 (osi, rhs, depth);
2117 : : }
2118 : : break;
2119 : : }
2120 : :
2121 : 0 : default:
2122 : 0 : gcc_unreachable ();
2123 : : }
2124 : :
2125 : 10 : osi->depths[varno] = 0;
2126 : 10 : osi->tos--;
2127 : : }
2128 : :
2129 : :
2130                 :             : /* Check whether a pointer whose object size we are computing is being
2131                 :             :    increased within a loop.  If so, assume all the SSA variables
2132                 :             :    participating in that loop have a minimum object size of 0.  */
2133 : :
2134 : : static void
2135 : 55 : check_for_plus_in_loops (struct object_size_info *osi, tree var)
2136 : : {
2137 : 55 : gimple *stmt = SSA_NAME_DEF_STMT (var);
2138 : :
2139 : : /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
2140 : : and looked for a POINTER_PLUS_EXPR in the pass-through
2141 : : argument, if any. In GIMPLE, however, such an expression
2142 : : is not a valid call operand. */
2143 : :
2144 : 55 : if (is_gimple_assign (stmt)
2145 : 55 : && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2146 : : {
2147 : 19 : tree basevar = gimple_assign_rhs1 (stmt);
2148 : 19 : tree cst = gimple_assign_rhs2 (stmt);
2149 : :
2150 : 19 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2151 : :
2152 : : /* Skip non-positive offsets. */
2153 : 19 : if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
2154 : 9 : return;
2155 : :
2156 : 10 : osi->depths[SSA_NAME_VERSION (basevar)] = 1;
2157 : 10 : *osi->tos++ = SSA_NAME_VERSION (basevar);
2158 : 10 : check_for_plus_in_loops_1 (osi, var, 2);
2159 : 10 : osi->depths[SSA_NAME_VERSION (basevar)] = 0;
2160 : 10 : osi->tos--;
2161 : : }
2162 : : }
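
/* Illustrative sketch, not part of this file: a pointer advanced inside a
   loop forms exactly the dependency cycle this function detects, so the
   minimum-size query conservatively becomes 0 while the maximum stays the
   whole object.  Identifiers are invented; expected values assume -O2.  */

#include <stddef.h>

static char example_ring[16];

size_t
example_loop (int n)
{
  char *p = example_ring;
  for (int i = 0; i < n; i++)
    p += 2;                              /* POINTER_PLUS_EXPR inside a loop */
  return __builtin_object_size (p, 2);   /* minimum size: expected 0 */
}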
2163 : :
2164 : :
2165 : : /* Initialize data structures for the object size computation. */
2166 : :
2167 : : void
2168 : 18295 : init_object_sizes (void)
2169 : : {
2170 : 18295 : int object_size_type;
2171 : :
2172 : 18295 : if (computed[0])
2173 : : return;
2174 : :
2175 : 24579 : for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2176 : : {
2177 : 21848 : object_sizes_grow (object_size_type);
2178 : 21848 : computed[object_size_type] = BITMAP_ALLOC (NULL);
2179 : : }
2180 : :
2181 : 2731 : init_offset_limit ();
2182 : : }
2183 : :
2184 : :
2185 : : /* Destroy data structures after the object size computation. */
2186 : :
2187 : : void
2188 : 3490773 : fini_object_sizes (void)
2189 : : {
2190 : 3490773 : int object_size_type;
2191 : :
2192 : 31416957 : for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2193 : : {
2194 : 27926184 : object_sizes_release (object_size_type);
2195 : 27926184 : BITMAP_FREE (computed[object_size_type]);
2196 : : }
2197 : 3490773 : }
2198 : :
2199 : : /* Dummy valueize function. */
2200 : :
2201 : : static tree
2202 : 17601 : do_valueize (tree t)
2203 : : {
2204 : 17601 : return t;
2205 : : }
2206 : :
2207 : : /* Process a __builtin_object_size or __builtin_dynamic_object_size call in
2208 : : CALL early for subobjects before any object information is lost due to
2209 : : optimization. Insert a MIN or MAX expression of the result and
2210 : : __builtin_object_size at I so that it may be processed in the second pass.
2211 : : __builtin_dynamic_object_size is treated like __builtin_object_size here
2212 : : since we're only looking for constant bounds. */
2213 : :
2214 : : static void
2215 : 11649 : early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2216 : : {
2217 : 11649 : tree ost = gimple_call_arg (call, 1);
2218 : 11649 : tree lhs = gimple_call_lhs (call);
2219 : 11649 : gcc_assert (lhs != NULL_TREE);
2220 : :
2221 : 11649 : if (!tree_fits_uhwi_p (ost))
2222 : 9746 : return;
2223 : :
2224 : 11649 : unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2225 : 11649 : tree ptr = gimple_call_arg (call, 0);
2226 : :
2227 : 11649 : if (object_size_type != 1 && object_size_type != 3)
2228 : : return;
2229 : :
2230 : 3604 : if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
2231 : : return;
2232 : :
2233 : 3604 : tree type = TREE_TYPE (lhs);
2234 : 3604 : tree bytes;
2235 : 3604 : if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
2236 : 3604 : || !int_fits_type_p (bytes, type))
2237 : : return;
2238 : :
2239 : 1903 : tree tem = make_ssa_name (type);
2240 : 1903 : gimple_call_set_lhs (call, tem);
2241 : 1903 : enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
2242 : 1903 : tree cst = fold_convert (type, bytes);
2243 : 1903 : gimple *g = gimple_build_assign (lhs, code, tem, cst);
2244 : 1903 : gsi_insert_after (i, g, GSI_NEW_STMT);
2245 : 1903 : update_stmt (call);
2246 : : }
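
/* Illustrative sketch, not part of this file: only the subobject modes (ost 1
   and 3) are handled early, because field boundaries can be lost by later
   optimization; instead of folding, the computed constant is recorded as a
   MIN_EXPR (or MAX_EXPR) bound on the call result.  Identifiers are invented;
   the expected value assumes -O2.  */

#include <stddef.h>

struct example_pair { char a[4]; char b[12]; };
static struct example_pair example_p;

size_t
example_subobject (void)
{
  /* Whole object is 16 bytes; the subobject bound for the first field is 4,
     and the early MIN_EXPR keeps the later pass from returning more.  */
  return __builtin_object_size (example_p.a, 1);   /* expected 4 */
}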
2247 : :
2248 : : /* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
2249 : : expression and insert it at I. Return true if it succeeds. */
2250 : :
2251 : : static bool
2252 : 3357 : dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
2253 : : {
2254 : 3357 : gcc_assert (gimple_call_num_args (call) == 2);
2255 : :
2256 : 3357 : tree args[2];
2257 : 3357 : args[0] = gimple_call_arg (call, 0);
2258 : 3357 : args[1] = gimple_call_arg (call, 1);
2259 : :
2260 : 3357 : location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
2261 : 3357 : tree result_type = gimple_call_return_type (as_a <gcall *> (call));
2262 : 3357 : tree result = fold_builtin_call_array (loc, result_type,
2263 : : gimple_call_fn (call), 2, args);
2264 : :
2265 : 3357 : if (!result)
2266 : : return false;
2267 : :
2268 : : /* fold_builtin_call_array may wrap the result inside a
2269 : : NOP_EXPR. */
2270 : 2241 : STRIP_NOPS (result);
2271 : 2241 : gimplify_and_update_call_from_tree (i, result);
2272 : :
2273 : 2241 : if (dump_file && (dump_flags & TDF_DETAILS))
2274 : : {
2275 : 0 : fprintf (dump_file, "Simplified (dynamic)\n ");
2276 : 0 : print_gimple_stmt (dump_file, call, 0, dump_flags);
2277 : 0 : fprintf (dump_file, " to ");
2278 : 0 : print_generic_expr (dump_file, result);
2279 : 0 : fprintf (dump_file, "\n");
2280 : : }
2281 : : return true;
2282 : : }
2283 : :
2284 : : static unsigned int
2285 : 3490773 : object_sizes_execute (function *fun, bool early)
2286 : : {
2287 : 3490773 : todo = 0;
2288 : 3490773 : auto_bitmap sdce_worklist;
2289 : :
2290 : 3490773 : basic_block bb;
2291 : 29888617 : FOR_EACH_BB_FN (bb, fun)
2292 : : {
2293 : 26397844 : gimple_stmt_iterator i;
2294 : 230605413 : for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
2295 : : {
2296 : 177809725 : tree result;
2297 : 177809725 : bool dynamic = false;
2298 : :
2299 : 177809725 : gimple *call = gsi_stmt (i);
2300 : 177809725 : if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2301 : : dynamic = true;
2302 : 177801876 : else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
2303 : 177791430 : continue;
2304 : :
2305 : 18295 : tree lhs = gimple_call_lhs (call);
2306 : 18295 : if (!lhs)
2307 : 0 : continue;
2308 : :
2309 : 18295 : init_object_sizes ();
2310 : :
2311 : : /* If early, only attempt to fold
2312 : : __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
2313 : : and rather than folding the builtin to the constant if any,
2314 : : create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
2315 : : call result and the computed constant. Do the same for
2316 : : __builtin_dynamic_object_size too. */
2317 : 18295 : if (early)
2318 : : {
2319 : 11649 : early_object_sizes_execute_one (&i, call);
2320 : 11649 : continue;
2321 : : }
2322 : :
2323 : 6646 : if (dynamic)
2324 : : {
2325 : 3357 : if (dynamic_object_sizes_execute_one (&i, call))
2326 : 2241 : continue;
2327 : : else
2328 : : {
2329 : : /* If we could not find a suitable size expression, lower to
2330 : : __builtin_object_size so that we may at least get a
2331 : : constant lower or higher estimate. */
2332 : 1116 : tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
2333 : 1116 : gimple_call_set_fndecl (call, bosfn);
2334 : 1116 : update_stmt (call);
2335 : :
2336 : 1116 : if (dump_file && (dump_flags & TDF_DETAILS))
2337 : : {
2338 : 0 : print_generic_expr (dump_file, gimple_call_arg (call, 0),
2339 : : dump_flags);
2340 : 0 : fprintf (dump_file,
2341 : : ": Retrying as __builtin_object_size\n");
2342 : : }
2343 : : }
2344 : : }
2345 : :
2346 : 4405 : result = gimple_fold_stmt_to_constant (call, do_valueize);
2347 : 4405 : if (!result)
2348 : : {
2349 : 2030 : tree ost = gimple_call_arg (call, 1);
2350 : :
2351 : 2030 : if (tree_fits_uhwi_p (ost))
2352 : : {
2353 : 2030 : unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
2354 : :
2355 : 2030 : if (object_size_type & OST_MINIMUM)
2356 : 309 : result = build_zero_cst (size_type_node);
2357 : 1721 : else if (object_size_type < OST_END)
2358 : 1721 : result = fold_convert (size_type_node,
2359 : : integer_minus_one_node);
2360 : : }
2361 : :
2362 : 2030 : if (!result)
2363 : 0 : continue;
2364 : : }
2365 : :
2366 : 4405 : gcc_assert (TREE_CODE (result) == INTEGER_CST);
2367 : :
2368 : 4405 : if (dump_file && (dump_flags & TDF_DETAILS))
2369 : : {
2370 : 0 : fprintf (dump_file, "Simplified\n ");
2371 : 0 : print_gimple_stmt (dump_file, call, 0, dump_flags);
2372 : 0 : fprintf (dump_file, " to ");
2373 : 0 : print_generic_expr (dump_file, result);
2374 : 0 : fprintf (dump_file, "\n");
2375 : : }
2376 : :
2377 : : /* Propagate into all uses and fold those stmts. */
2378 : 4405 : if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2379 : : {
2380 : 4405 : replace_uses_by (lhs, result);
2381 : : /* Mark lhs as being possiblely DCEd. */
2382 : 4405 : bitmap_set_bit (sdce_worklist, SSA_NAME_VERSION (lhs));
2383 : : }
2384 : : else
2385 : 0 : replace_call_with_value (&i, result);
2386 : : }
2387 : : }
2388 : :
2389 : 3490773 : fini_object_sizes ();
2390 : 3490773 : simple_dce_from_worklist (sdce_worklist);
2391 : 3490773 : return todo;
2392 : 3490773 : }
2393 : :
2394 : : /* Simple pass to optimize all __builtin_object_size () builtins. */
2395 : :
2396 : : namespace {
2397 : :
2398 : : const pass_data pass_data_object_sizes =
2399 : : {
2400 : : GIMPLE_PASS, /* type */
2401 : : "objsz", /* name */
2402 : : OPTGROUP_NONE, /* optinfo_flags */
2403 : : TV_NONE, /* tv_id */
2404 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2405 : : PROP_objsz, /* properties_provided */
2406 : : 0, /* properties_destroyed */
2407 : : 0, /* todo_flags_start */
2408 : : 0, /* todo_flags_finish */
2409 : : };
2410 : :
2411 : : class pass_object_sizes : public gimple_opt_pass
2412 : : {
2413 : : public:
2414 : 574698 : pass_object_sizes (gcc::context *ctxt)
2415 : 1149396 : : gimple_opt_pass (pass_data_object_sizes, ctxt)
2416 : : {}
2417 : :
2418 : : /* opt_pass methods: */
2419 : 287349 : opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
2420 : 1030401 : unsigned int execute (function *fun) final override
2421 : : {
2422 : 1030401 : return object_sizes_execute (fun, false);
2423 : : }
2424 : : }; // class pass_object_sizes
2425 : :
2426 : : } // anon namespace
2427 : :
2428 : : gimple_opt_pass *
2429 : 287349 : make_pass_object_sizes (gcc::context *ctxt)
2430 : : {
2431 : 287349 : return new pass_object_sizes (ctxt);
2432 : : }
2433 : :
2434 : : /* Early version of pass to optimize all __builtin_object_size () builtins. */
2435 : :
2436 : : namespace {
2437 : :
2438 : : const pass_data pass_data_early_object_sizes =
2439 : : {
2440 : : GIMPLE_PASS, /* type */
2441 : : "early_objsz", /* name */
2442 : : OPTGROUP_NONE, /* optinfo_flags */
2443 : : TV_NONE, /* tv_id */
2444 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
2445 : : 0, /* properties_provided */
2446 : : 0, /* properties_destroyed */
2447 : : 0, /* todo_flags_start */
2448 : : 0, /* todo_flags_finish */
2449 : : };
2450 : :
2451 : : class pass_early_object_sizes : public gimple_opt_pass
2452 : : {
2453 : : public:
2454 : 287349 : pass_early_object_sizes (gcc::context *ctxt)
2455 : 574698 : : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
2456 : : {}
2457 : :
2458 : : /* opt_pass methods: */
2459 : 2460372 : unsigned int execute (function *fun) final override
2460 : : {
2461 : 2460372 : return object_sizes_execute (fun, true);
2462 : : }
2463                 :             : }; // class pass_early_object_sizes
2464 : :
2465 : : } // anon namespace
2466 : :
2467 : : gimple_opt_pass *
2468 : 287349 : make_pass_early_object_sizes (gcc::context *ctxt)
2469 : : {
2470 : 287349 : return new pass_early_object_sizes (ctxt);
2471 : : }
|