Line data Source code
1 : /* __builtin_object_size (ptr, object_size_type) computation
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Jakub Jelinek <jakub@redhat.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify
8 : it under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful,
13 : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : GNU General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "tree-pass.h"
28 : #include "ssa.h"
29 : #include "gimple-pretty-print.h"
30 : #include "fold-const.h"
31 : #include "tree-object-size.h"
32 : #include "gimple-iterator.h"
33 : #include "gimple-fold.h"
34 : #include "tree-cfg.h"
35 : #include "tree-dfa.h"
36 : #include "stringpool.h"
37 : #include "attribs.h"
38 : #include "builtins.h"
39 : #include "gimplify-me.h"
40 : #include "gimplify.h"
41 : #include "tree-ssa-dce.h"
42 :
/* Per-invocation state for the object-size computation.  */

struct object_size_info
{
  /* Combination of OST_* flags selecting which size variant is computed.  */
  int object_size_type;
  /* Computation pass: 0 during the initial collection, nonzero during the
     later reexamination pass (see object_sizes_get_raw / addr_object_size).  */
  unsigned char pass;
  /* NOTE(review): presumably records whether the last propagation pass
     changed any size; the users of this field are outside this chunk.  */
  bool changed;
  /* SSA name versions already visited, and those whose sizes must be
     reexamined (e.g. members of dependency loops, see object_sizes_set).  */
  bitmap visited, reexamine;
  /* NOTE(review): per-SSA-name depths and a worklist stack with its
     top-of-stack pointer, apparently for check_for_plus_in_loops;
     their users are not visible in this chunk — confirm.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
52 :
/* A pair of size estimates recorded per SSA name.  Marked with GTY so the
   contained trees are scanned by the garbage collector.  */

struct GTY(()) object_size
{
  /* Estimate of bytes till the end of the object.  */
  tree size;
  /* Estimate of the size of the whole object.  */
  tree wholesize;
};
60 :
/* Forward declarations for the local helpers defined below.  */

static tree compute_object_offset (tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, tree *, tree *t = NULL);
static tree alloc_object_size (const gcall *, int);
static tree access_with_size_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);
76 :
/* object_sizes[0] is upper bound for the object size and number of bytes till
   the end of the object.
   object_sizes[1] is upper bound for the object size and number of bytes till
   the end of the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for the object size and number of bytes till
   the end of the object and object_sizes[3] lower bound for subobject.

   For static object sizes, the object size and the bytes till the end of the
   object are both INTEGER_CST.  In the dynamic case, they are finally either a
   gimple variable or an INTEGER_CST.  */
static vec<object_size> object_sizes[OST_END];

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Tell the generic SSA updater what kind of update is needed after the pass
   executes.  */
static unsigned todo;
98 :
99 : /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
100 :
101 : static inline bool
102 5435 : size_initval_p (tree val, int object_size_type)
103 : {
104 5435 : return ((object_size_type & OST_MINIMUM)
105 5435 : ? integer_all_onesp (val) : integer_zerop (val));
106 : }
107 :
108 : /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
109 :
110 : static inline bool
111 99711 : size_unknown_p (tree val, int object_size_type)
112 : {
113 99711 : return ((object_size_type & OST_MINIMUM)
114 98585 : ? integer_zerop (val) : integer_all_onesp (val));
115 : }
116 :
117 : /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
118 :
119 : static inline bool
120 53858 : size_valid_p (tree val, int object_size_type)
121 : {
122 51758 : return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
123 : }
124 :
125 : /* Return true if VAL is usable as an object size in the object_sizes
126 : vectors. */
127 :
128 : static inline bool
129 10060 : size_usable_p (tree val)
130 : {
131 8975 : return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
132 : }
133 :
134 : /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
135 :
136 : static inline tree
137 11591 : size_initval (int object_size_type)
138 : {
139 11591 : return ((object_size_type & OST_MINIMUM)
140 11591 : ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
141 : }
142 :
143 : /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
144 :
145 : static inline tree
146 166498 : size_unknown (int object_size_type)
147 : {
148 166498 : return ((object_size_type & OST_MINIMUM)
149 166498 : ? size_zero_node : TYPE_MAX_VALUE (sizetype));
150 : }
151 :
152 : /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
153 :
154 : static inline void
155 31402 : object_sizes_grow (int object_size_type)
156 : {
157 72350 : if (num_ssa_names > object_sizes[object_size_type].length ())
158 23992 : object_sizes[object_size_type].safe_grow (num_ssa_names, true);
159 31402 : }
160 :
/* Release the storage held by object_sizes[OBJECT_SIZE_TYPE].  */

static inline void
object_sizes_release (int object_size_type)
{
  object_sizes[object_size_type].release ();
}
168 :
169 : /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
170 :
171 : static inline bool
172 20150 : object_sizes_unknown_p (int object_size_type, unsigned varno)
173 : {
174 20150 : return size_unknown_p (object_sizes[object_size_type][varno].size,
175 20150 : object_size_type);
176 : }
177 :
178 : /* Return the raw size expression for VARNO corresponding to OSI. This returns
179 : the TREE_VEC as is and should only be used during gimplification. */
180 :
181 : static inline object_size
182 822 : object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
183 : {
184 822 : gcc_assert (osi->pass != 0);
185 822 : return object_sizes[osi->object_size_type][varno];
186 : }
187 :
188 : /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
189 : the whole object size. Use this for building size expressions based on size
190 : of VARNO. */
191 :
192 : static inline tree
193 25100 : object_sizes_get (struct object_size_info *osi, unsigned varno,
194 : bool whole = false)
195 : {
196 25100 : tree ret;
197 25100 : int object_size_type = osi->object_size_type;
198 :
199 25100 : if (whole)
200 5073 : ret = object_sizes[object_size_type][varno].wholesize;
201 : else
202 20027 : ret = object_sizes[object_size_type][varno].size;
203 :
204 25100 : if (object_size_type & OST_DYNAMIC)
205 : {
206 6977 : if (TREE_CODE (ret) == MODIFY_EXPR)
207 502 : return TREE_OPERAND (ret, 0);
208 6475 : else if (TREE_CODE (ret) == TREE_VEC)
209 392 : return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
210 : else
211 6083 : gcc_checking_assert (size_usable_p (ret));
212 : }
213 :
214 : return ret;
215 : }
216 :
217 : /* Set size for VARNO corresponding to OSI to VAL. */
218 :
219 : static inline void
220 12002 : object_sizes_initialize (struct object_size_info *osi, unsigned varno,
221 : tree val, tree wholeval)
222 : {
223 12002 : int object_size_type = osi->object_size_type;
224 :
225 12002 : object_sizes[object_size_type][varno].size = val;
226 12002 : object_sizes[object_size_type][varno].wholesize = wholeval;
227 12002 : }
228 :
229 : /* Return a MODIFY_EXPR for cases where SSA and EXPR have the same type. The
230 : TREE_VEC is returned only in case of PHI nodes. */
231 :
232 : static tree
233 518 : bundle_sizes (tree name, tree expr)
234 : {
235 518 : gcc_checking_assert (TREE_TYPE (name) == sizetype);
236 :
237 518 : if (TREE_CODE (expr) == TREE_VEC)
238 : {
239 251 : TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
240 251 : return expr;
241 : }
242 :
243 267 : gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
244 267 : return build2 (MODIFY_EXPR, sizetype, name, expr);
245 : }
246 :
/* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
   maximum.  For static sizes, each element of TREE_VEC is always INTEGER_CST
   throughout the computation.  For dynamic sizes, each element may either be a
   gimple variable, a MODIFY_EXPR or a TREE_VEC.  The MODIFY_EXPR is for
   expressions that need to be gimplified.  TREE_VECs are special, they're
   emitted only for GIMPLE_PHI and the PHI result variable is the last element
   of the vector.  Returns true if the recorded sizes changed.  */

static bool
object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
		  tree wholeval)
{
  int object_size_type = osi->object_size_type;
  object_size osize = object_sizes[object_size_type][varno];
  bool changed = true;

  tree oldval = osize.size;
  tree old_wholeval = osize.wholesize;

  if (object_size_type & OST_DYNAMIC)
    {
      if (bitmap_bit_p (osi->reexamine, varno))
	{
	  /* VARNO is being reexamined (dependency loop); attach the new
	     expressions to the temporary names recorded earlier.  */
	  val = bundle_sizes (oldval, val);
	  wholeval = bundle_sizes (old_wholeval, wholeval);
	}
      else
	{
	  /* First time VARNO is set: the stored values must still be the
	     initial sentinels.  */
	  gcc_checking_assert (size_initval_p (oldval, object_size_type));
	  gcc_checking_assert (size_initval_p (old_wholeval,
					       object_size_type));
	  /* For dynamic object sizes, all object sizes that are not gimple
	     variables will need to be gimplified.  */
	  if (wholeval != val && !size_usable_p (wholeval))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
	    }
	  if (!size_usable_p (val))
	    {
	      bitmap_set_bit (osi->reexamine, varno);
	      tree newval = bundle_sizes (make_ssa_name (sizetype), val);
	      /* Keep size and wholesize sharing the same node if they were
		 identical on entry.  */
	      if (val == wholeval)
		wholeval = newval;
	      val = newval;
	    }
	  /* If the new value is a temporary variable, mark it for
	     reexamination.  */
	  else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
	    bitmap_set_bit (osi->reexamine, varno);
	}
    }
  else
    {
      /* Static sizes: merge the new INTEGER_CSTs with the old ones, taking
	 the minimum for OST_MINIMUM and the maximum otherwise.  */
      enum tree_code code = (object_size_type & OST_MINIMUM
			     ? MIN_EXPR : MAX_EXPR);

      val = size_binop (code, val, oldval);
      wholeval = size_binop (code, wholeval, old_wholeval);
      /* Report whether the merge moved either of the two sizes.  */
      changed = (tree_int_cst_compare (val, oldval) != 0
		 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
    }

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;

  return changed;
}
315 :
316 : /* Set temporary SSA names for object size and whole size to resolve dependency
317 : loops in dynamic size computation. */
318 :
319 : static inline void
320 85 : object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
321 : {
322 85 : tree val = object_sizes_get (osi, varno);
323 :
324 85 : if (size_initval_p (val, osi->object_size_type))
325 : {
326 71 : val = make_ssa_name (sizetype);
327 71 : tree wholeval = make_ssa_name (sizetype);
328 71 : object_sizes_set (osi, varno, val, wholeval);
329 : }
330 85 : }
331 :
332 : /* Initialize OFFSET_LIMIT variable. */
333 : static void
334 3815 : init_offset_limit (void)
335 : {
336 3815 : if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
337 3815 : offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
338 : else
339 0 : offset_limit = -1;
340 3815 : offset_limit /= 2;
341 3815 : }
342 :
/* Bytes at end of the object with SZ from offset OFFSET.  If WHOLESIZE is not
   NULL_TREE, use it to get the net offset of the pointer, which should always
   be positive and hence, be within OFFSET_LIMIT for valid offsets.  When
   STRICT is false, an out-of-bounds constant offset yields SZ instead of
   zero.  */

static tree
size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE,
		 bool strict = true)
{
  gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));

  /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
     offset from the whole object.  */
  if (wholesize && wholesize != sz
      && (TREE_CODE (sz) != INTEGER_CST
	  || TREE_CODE (wholesize) != INTEGER_CST
	  || tree_int_cst_compare (sz, wholesize)))
    {
      gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
					       sizetype));

      /* Restructure SZ - OFFSET as
	 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
	 WHOLESIZE + OFFSET - SZ is only allowed to be positive.  */
      tree tmp = size_binop (MAX_EXPR, wholesize, sz);
      offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
      offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
      sz = tmp;
    }

  /* Safe to convert now, since a valid net offset should be non-negative.  */
  if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
    offset = fold_convert (sizetype, offset);

  if (TREE_CODE (offset) == INTEGER_CST)
    {
      if (integer_zerop (offset))
	return sz;

      /* Negative or too large offset even after adjustment, cannot be within
	 bounds of an object.  The exception here is when the base object size
	 has been overestimated (e.g. through PHI nodes or a COND_EXPR) and the
	 adjusted offset remains negative.  If the caller wants to be
	 permissive, return the base size.  */
      if (compare_tree_int (offset, offset_limit) > 0)
	{
	  if (strict)
	    return size_zero_node;
	  else
	    return sz;
	}
    }

  /* MAX_EXPR clamps the subtraction at zero so the result never wraps.  */
  return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
}
397 :
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  Recurses on the base of each handled component and adds
   (or, for negative array indices, subtracts) the component's offset.  */

static tree
compute_object_offset (tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  /* Reached VAR itself: offset is zero.  */
  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is the byte offset plus the byte part of the
	 bit offset.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR,
			component_ref_field_offset (expr),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    /* These do not change the offset; look through them.  */
    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (const_cast<tree> (expr));
      unit_size = array_ref_element_size (const_cast<tree> (expr));
      /* Normalize the index relative to the array's lower bound.  */
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      /* A negative constant index becomes a subtraction of its
	 absolute value.  */
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
468 :
/* Return true if CONTAINER has a field of type INNER at OFFSET, searching
   nested records/unions recursively.  */

static bool
inner_at_offset (tree container, tree inner, tree offset)
{
  gcc_assert (RECORD_OR_UNION_TYPE_P (container));

  for (tree t = TYPE_FIELDS (container); t; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) != FIELD_DECL)
	continue;

      /* Skip over fields at bit offsets that are not BITS_PER_UNIT aligned
	 to avoid an accidental truncated match with BYTE_POSITION below since
	 the address of such fields cannot be taken.  */
      if (wi::bit_and (wi::to_offset (DECL_FIELD_BIT_OFFSET (t)),
		       BITS_PER_UNIT - 1) != 0)
	continue;

      /* Fields are in increasing offset order; once past OFFSET (or if the
	 position isn't constant) there can be no match.  */
      tree byte_offset = byte_position (t);
      if (TREE_CODE (byte_offset) != INTEGER_CST
	  || tree_int_cst_lt (offset, byte_offset))
	return false;

      /* For an array, check the element type, otherwise the actual type.  This
	 deliberately does not support the case of jumping from a pointer to
	 the middle of an array to its containing struct.  */
      tree t_type = TREE_TYPE (t);
      if (((TREE_CODE (t_type) == ARRAY_TYPE && TREE_TYPE (t_type) == inner)
	   || t_type == inner)
	  && tree_int_cst_equal (byte_offset, offset))
	return true;

      /* Nested structure or union, adjust the expected offset and dive in.  */
      if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (t))
	  && inner_at_offset (TREE_TYPE (t), inner,
			      fold_build2 (MINUS_EXPR, sizetype, offset,
					   byte_offset)))
	return true;
    }

  return false;
}
512 :
/* For the input MEMREF of type MEMREF_TYPE, look for the presence of a field
   of BASE_TYPE at OFFSET and return an adjusted WHOLESIZE if found.  This
   supports pointers into a containing struct obtained by subtracting a
   constant from a field address.  */

static tree
get_wholesize_for_memref (tree memref, tree wholesize)
{
  tree base = TREE_OPERAND (memref, 0);
  tree offset = fold_convert (sizetype, TREE_OPERAND (memref, 1));
  tree memref_type = TREE_TYPE (memref);
  tree base_type = TREE_TYPE (base);

  if (POINTER_TYPE_P (base_type))
    base_type = TREE_TYPE ((base_type));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "wholesize_for_memref: ");
      print_generic_expr (dump_file, wholesize, dump_flags);
      fprintf (dump_file, ", offset: ");
      print_generic_expr (dump_file, offset, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* Only a constant offset that is >= OFFSET_LIMIT when seen as sizetype
     (i.e. presumably a negative offset that wrapped around) into a record
     or union is interesting here — otherwise keep WHOLESIZE as is.  */
  if (TREE_CODE (offset) != INTEGER_CST
      || compare_tree_int (offset, offset_limit) < 0
      || !RECORD_OR_UNION_TYPE_P (memref_type))
    return wholesize;

  /* Recover the magnitude of the (negative) offset.  */
  offset = fold_build1 (NEGATE_EXPR, sizetype, offset);

  /* If the containing type has a field of the pointed-to type at that
     offset, the whole object extends by that much before the pointer.  */
  if (inner_at_offset (memref_type, base_type, offset))
    wholesize = size_binop (PLUS_EXPR, wholesize, offset);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " new wholesize: ");
      print_generic_expr (dump_file, wholesize, dump_flags);
      fprintf (dump_file, "\n");
    }

  return wholesize;
}
555 :
/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  /* Only a RECORD_TYPE ending in a flexible array member needs special
     treatment; everything else is just DECL_SIZE_UNIT.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  /* A flexible array member is an array type with no TYPE_SIZE.  */
  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}
592 :
593 : /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
594 : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
595 : If unknown, return size_unknown (object_size_type). */
596 :
597 : static bool
598 25547 : addr_object_size (struct object_size_info *osi, const_tree ptr,
599 : int object_size_type, tree *psize, tree *pwholesize)
600 : {
601 25547 : tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
602 25547 : tree var_size, bytes, wholebytes;
603 :
604 25547 : gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
605 :
606 : /* Set to unknown and overwrite just before returning if the size
607 : could be determined. */
608 25547 : *psize = size_unknown (object_size_type);
609 25547 : if (pwholesize)
610 7019 : *pwholesize = size_unknown (object_size_type);
611 :
612 25547 : pt_var = TREE_OPERAND (ptr, 0);
613 42107 : while (handled_component_p (pt_var))
614 16560 : pt_var = TREE_OPERAND (pt_var, 0);
615 :
616 25547 : if (!pt_var)
617 : return false;
618 :
619 25547 : if (TREE_CODE (pt_var) == MEM_REF)
620 : {
621 5701 : tree sz, wholesize;
622 :
623 4766 : if (!osi || (object_size_type & OST_SUBOBJECT) != 0
624 7015 : || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
625 : {
626 4388 : compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
627 : object_size_type & ~OST_SUBOBJECT, &sz);
628 4388 : wholesize = get_wholesize_for_memref (pt_var, sz);
629 : }
630 : else
631 : {
632 1313 : tree var = TREE_OPERAND (pt_var, 0);
633 1313 : if (osi->pass == 0)
634 1313 : collect_object_sizes_for (osi, var);
635 2626 : if (bitmap_bit_p (computed[object_size_type],
636 1313 : SSA_NAME_VERSION (var)))
637 : {
638 1313 : sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
639 1313 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
640 1313 : wholesize = get_wholesize_for_memref (pt_var, wholesize);
641 : }
642 : else
643 0 : sz = wholesize = size_unknown (object_size_type);
644 : }
645 5701 : if (!size_unknown_p (sz, object_size_type))
646 3108 : sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
647 :
648 5701 : if (!size_unknown_p (sz, object_size_type)
649 5701 : && (TREE_CODE (sz) != INTEGER_CST
650 3054 : || compare_tree_int (sz, offset_limit) < 0))
651 : {
652 3098 : pt_var_size = sz;
653 3098 : pt_var_wholesize = wholesize;
654 : }
655 : }
656 19846 : else if (DECL_P (pt_var))
657 : {
658 39490 : pt_var_size = pt_var_wholesize
659 19745 : = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
660 19745 : if (!pt_var_size)
661 : return false;
662 : }
663 101 : else if (TREE_CODE (pt_var) == STRING_CST)
664 101 : pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
665 : else
666 : return false;
667 :
668 5802 : if (pt_var_size)
669 : {
670 : /* Validate the size determined above if it is a constant. */
671 22831 : if (TREE_CODE (pt_var_size) == INTEGER_CST
672 22831 : && compare_tree_int (pt_var_size, offset_limit) >= 0)
673 : return false;
674 : }
675 :
676 25398 : if (pt_var != TREE_OPERAND (ptr, 0))
677 : {
678 9187 : tree var;
679 :
680 9187 : if (object_size_type & OST_SUBOBJECT)
681 : {
682 4009 : var = TREE_OPERAND (ptr, 0);
683 :
684 4009 : while (var != pt_var
685 4009 : && TREE_CODE (var) != BIT_FIELD_REF
686 : && TREE_CODE (var) != COMPONENT_REF
687 : && TREE_CODE (var) != ARRAY_REF
688 : && TREE_CODE (var) != ARRAY_RANGE_REF
689 : && TREE_CODE (var) != REALPART_EXPR
690 4009 : && TREE_CODE (var) != IMAGPART_EXPR)
691 0 : var = TREE_OPERAND (var, 0);
692 4009 : if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
693 1763 : var = TREE_OPERAND (var, 0);
694 4009 : if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
695 3796 : || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
696 7785 : || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
697 1749 : && tree_int_cst_lt (pt_var_size,
698 1749 : TYPE_SIZE_UNIT (TREE_TYPE (var)))))
699 : var = pt_var;
700 3660 : else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
701 : {
702 : tree v = var;
703 : /* For &X->fld, compute object size if fld isn't a flexible array
704 : member. */
705 6302 : bool is_flexible_array_mem_ref = false;
706 6302 : while (v && v != pt_var)
707 3153 : switch (TREE_CODE (v))
708 : {
709 0 : case ARRAY_REF:
710 0 : if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
711 : {
712 0 : tree domain
713 0 : = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
714 0 : if (domain && TYPE_MAX_VALUE (domain))
715 : {
716 : v = NULL_TREE;
717 : break;
718 : }
719 : }
720 0 : v = TREE_OPERAND (v, 0);
721 0 : break;
722 : case REALPART_EXPR:
723 : case IMAGPART_EXPR:
724 : v = NULL_TREE;
725 : break;
726 3153 : case COMPONENT_REF:
727 : /* When the ref is not to an aggregate type, i.e, an array,
728 : a record or a union, it will not have flexible size,
729 : compute the object size directly. */
730 3153 : if (!AGGREGATE_TYPE_P (TREE_TYPE (v)))
731 : {
732 : v = NULL_TREE;
733 : break;
734 : }
735 : /* if the ref is to a record or union type, but the type
736 : does not include a flexible array recursively, compute
737 : the object size directly. */
738 2255 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (v)))
739 : {
740 58 : if (!TYPE_INCLUDES_FLEXARRAY (TREE_TYPE (v)))
741 : {
742 : v = NULL_TREE;
743 : break;
744 : }
745 : else
746 : {
747 16 : v = TREE_OPERAND (v, 0);
748 16 : break;
749 : }
750 : }
751 : /* Now the ref is to an array type. */
752 2197 : gcc_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
753 2197 : is_flexible_array_mem_ref = array_ref_flexible_size_p (v);
754 5439 : while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
755 2997 : if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
756 : != UNION_TYPE
757 2997 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
758 : != QUAL_UNION_TYPE)
759 : break;
760 : else
761 1045 : v = TREE_OPERAND (v, 0);
762 2197 : if (TREE_CODE (v) == COMPONENT_REF
763 2197 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
764 : == RECORD_TYPE)
765 : {
766 : /* compute object size only if v is not a
767 : flexible array member. */
768 1952 : if (!is_flexible_array_mem_ref)
769 : {
770 : v = NULL_TREE;
771 : break;
772 : }
773 949 : v = TREE_OPERAND (v, 0);
774 : }
775 1504 : while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
776 446 : if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
777 : != UNION_TYPE
778 446 : && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
779 : != QUAL_UNION_TYPE)
780 : break;
781 : else
782 310 : v = TREE_OPERAND (v, 0);
783 1194 : if (v != pt_var)
784 : v = NULL_TREE;
785 : else
786 3153 : v = pt_var;
787 : break;
788 : default:
789 6302 : v = pt_var;
790 : break;
791 : }
792 3149 : if (v == pt_var)
793 6597 : var = pt_var;
794 : }
795 : }
796 : else
797 : var = pt_var;
798 :
799 9187 : if (var != pt_var)
800 : {
801 2481 : var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
802 2481 : if (!TREE_CONSTANT (var_size))
803 0 : var_size = get_or_create_ssa_default_def (cfun, var_size);
804 2481 : if (!var_size)
805 : return false;
806 : }
807 6706 : else if (!pt_var_size)
808 : return false;
809 : else
810 : var_size = pt_var_size;
811 7910 : bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
812 7910 : if (bytes != error_mark_node)
813 : {
814 7909 : bytes = size_for_offset (var_size, bytes);
815 7909 : if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
816 : {
817 779 : tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
818 : pt_var);
819 779 : if (bytes2 != error_mark_node)
820 : {
821 779 : bytes2 = size_for_offset (pt_var_size, bytes2);
822 779 : bytes = size_binop (MIN_EXPR, bytes, bytes2);
823 : }
824 : }
825 : }
826 : else
827 1 : bytes = size_unknown (object_size_type);
828 :
829 7910 : wholebytes
830 7910 : = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
831 : }
832 16211 : else if (!pt_var_size)
833 : return false;
834 : else
835 : {
836 : bytes = pt_var_size;
837 : wholebytes = pt_var_wholesize;
838 : }
839 :
840 24095 : if (!size_unknown_p (bytes, object_size_type)
841 49529 : && size_valid_p (bytes, object_size_type)
842 23982 : && !size_unknown_p (bytes, object_size_type)
843 24095 : && size_valid_p (wholebytes, object_size_type))
844 : {
845 23982 : *psize = bytes;
846 23982 : if (pwholesize)
847 5704 : *pwholesize = wholebytes;
848 23982 : return true;
849 : }
850 :
851 : return false;
852 : }
853 :
/* Compute __builtin_object_size for a CALL to .ACCESS_WITH_SIZE,
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.

   The 2nd, 3rd, and 4th parameters of the call determine the size of
   the CALL:

   2nd argument REF_TO_SIZE: The reference to the size of the object,
   3rd argument TYPE_OF_SIZE + ACCESS_MODE: An integer constant with a pointer
   TYPE.
   The pointee TYPE of this pointer TYPE is the TYPE of the object referenced
   by REF_TO_SIZE.

   4th argument: The TYPE_SIZE_UNIT of the element TYPE of the array.  */

static tree
access_with_size_object_size (const gcall *call, int object_size_type)
{
  /* If not for dynamic object size, return.  */
  if ((object_size_type & OST_DYNAMIC) == 0)
    return size_unknown (object_size_type);
  gcc_assert (gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE));

  tree ref_to_size = gimple_call_arg (call, 1);
  tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (call, 2)));

  /* The 4th argument is the TYPE_SIZE_UNIT for the element of the original
     flexible array.  */
  tree element_size = gimple_call_arg (call, 3);

  /* Load the recorded element count through REF_TO_SIZE.  */
  tree size = fold_build2 (MEM_REF, type, ref_to_size,
			   build_int_cst (ptr_type_node, 0));

  /* If size is negative value, treat it as zero.  */
  if (!TYPE_UNSIGNED (type))
    {
      tree cond_expr = fold_build2 (LT_EXPR, boolean_type_node,
				    unshare_expr (size), build_zero_cst (type));
      size = fold_build3 (COND_EXPR, integer_type_node, cond_expr,
			  build_zero_cst (type), size);
    }

  /* Total size is element count times element size.  */
  size = size_binop (MULT_EXPR,
		     fold_convert (sizetype, size),
		     fold_convert (sizetype, element_size));

  /* The emitted load requires a virtual-operand SSA update after the pass.  */
  if (!todo)
    todo = TODO_update_ssa_only_virtuals;

  return size;
}
904 :
905 : /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
906 : Handles calls to functions declared with attribute alloc_size.
907 : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
908 : If unknown, return size_unknown (object_size_type). */
909 :
static tree
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  /* Prefer the decl's type (carries attributes from the declaration); fall
     back to the call's function type for indirect calls.  */
  tree calltype;
  tree callfn = gimple_call_fndecl (call);
  if (callfn)
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return size_unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  ARG1 is the (zero-based)
     size argument; ARG2, if non-negative, is a second factor as in
     calloc (nmemb, size).  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      /* Attribute positions are 1-based; convert to 0-based.  */
      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }
  else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	   && callfn
	   && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
    /* alloca-like builtins take the size as their first argument.  */
    arg1 = 0;

  /* Non-const arguments are OK here, let the caller handle constness.  */
  if (arg1 < 0
      || (unsigned) arg1 >= gimple_call_num_args (call)
      || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
    return size_unknown (object_size_type);

  /* Reject size arguments wider than sizetype; converting those could
     silently truncate.  */
  tree targ1 = gimple_call_arg (call, arg1);
  if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
      || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
    return size_unknown (object_size_type);
  targ1 = fold_convert (sizetype, targ1);
  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    {
      tree targ2 = gimple_call_arg (call, arg2);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
	  || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
	return size_unknown (object_size_type);
      targ2 = fold_convert (sizetype, targ2);
      /* Two-argument form: size is the product, as for calloc.  */
      bytes = size_binop (MULT_EXPR, targ1, targ2);
    }
  else
    bytes = targ1;

  return bytes ? bytes : size_unknown (object_size_type);
}
968 :
969 : /* Compute __builtin_object_size for CALL, which is a call to either
970 : BUILT_IN_STRDUP or BUILT_IN_STRNDUP; IS_STRNDUP indicates which it is.
971 : OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
972 : If unknown, return size_unknown (object_size_type). */
973 :
static tree
strdup_object_size (const gcall *call, int object_size_type, bool is_strndup)
{
  tree src = gimple_call_arg (call, 0);
  tree sz = size_unknown (object_size_type);
  tree n = NULL_TREE;

  /* For strndup, the result is bounded by N + 1 (N chars plus the NUL);
     this is folded in as a MIN_EXPR at the end.  */
  if (is_strndup)
    n = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
		     gimple_call_arg (call, 1));
  /* For strdup, simply emit strlen (SRC) + 1 and let the optimizer fold it the
     way it likes.  */
  else
    {
      tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
      if (strlen_fn)
	{
	  sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node,
			    build_call_expr (strlen_fn, 1, src));
	  /* Emitting a call requires a virtual-SSA update (file-scope TODO
	     accumulator).  */
	  todo = TODO_update_ssa_only_virtuals;
	}
    }

  /* In all other cases, return the size of SRC since the object size cannot
     exceed that.  We cannot do this for OST_MINIMUM unless SRC points into a
     string constant since otherwise the object size could go all the way down
     to zero.  */
  if (!size_valid_p (sz, object_size_type)
      || size_unknown_p (sz, object_size_type))
    {
      tree wholesrc = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR)
	wholesrc = get_base_address (TREE_OPERAND (src, 0));

      /* If the source points within a string constant, we try to get its
	 length.  */
      if (wholesrc && TREE_CODE (wholesrc) == STRING_CST)
	{
	  tree len = c_strlen (src, 0);
	  if (len)
	    sz = fold_build2 (PLUS_EXPR, sizetype, size_one_node, len);
	}

      /* For maximum estimate, our next best guess is the object size of the
	 source.  */
      if (size_unknown_p (sz, object_size_type)
	  && !(object_size_type & OST_MINIMUM))
	compute_builtin_object_size (src, object_size_type, &sz);
    }

  /* String duplication allocates at least one byte, so we should never fail
     for OST_MINIMUM.  */
  if ((!size_valid_p (sz, object_size_type)
       || size_unknown_p (sz, object_size_type))
      && (object_size_type & OST_MINIMUM))
    sz = size_one_node;

  /* Factor in the N.  */
  return n ? fold_build2 (MIN_EXPR, sizetype, n, sz) : sz;
}
1034 :
1035 : /* If object size is propagated from one of function's arguments directly
1036 : to its return value, return that argument for GIMPLE_CALL statement CALL.
1037 : Otherwise return NULL. */
1038 :
1039 : static tree
1040 2251 : pass_through_call (const gcall *call)
1041 : {
1042 2251 : unsigned rf = gimple_call_return_flags (call);
1043 2251 : if (rf & ERF_RETURNS_ARG)
1044 : {
1045 57 : unsigned argnum = rf & ERF_RETURN_ARG_MASK;
1046 57 : if (argnum < gimple_call_num_args (call))
1047 57 : return gimple_call_arg (call, argnum);
1048 : }
1049 :
1050 : /* __builtin_assume_aligned is intentionally not marked RET1. */
1051 2194 : if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
1052 0 : return gimple_call_arg (call, 0);
1053 :
1054 : return NULL_TREE;
1055 : }
1056 :
/* Emit PHI nodes for the SIZE and WHOLESIZE expressions computed for the
   PHI node STMT.  */
1058 :
static void
emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
{
  tree phires;
  gphi *wholephi = NULL;

  /* SIZE and WHOLESIZE are TREE_VECs holding one size expression per PHI
     argument, with the pre-created PHI result SSA name stored in the last
     element.  When the two differ, a separate PHI is needed for the
     whole-object size.  */
  if (wholesize != size)
    {
      phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
      wholephi = create_phi_node (phires, gimple_bb (stmt));
    }

  phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
  gphi *phi = create_phi_node (phires, gimple_bb (stmt));
  gphi *obj_phi = as_a <gphi *> (stmt);

  gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
  gcc_checking_assert (TREE_CODE (size) == TREE_VEC);

  /* Populate the new PHI(s): one argument per incoming edge of the object's
     PHI, gimplifying non-SSA size expressions onto the edge.  */
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      gimple_seq seq = NULL;
      tree wsz = TREE_VEC_ELT (wholesize, i);
      tree sz = TREE_VEC_ELT (size, i);

      /* If we built an expression, we will need to build statements
	 and insert them on the edge right away.  */
      if (TREE_CODE (wsz) != SSA_NAME)
	wsz = force_gimple_operand (wsz, &seq, true, NULL);
      if (TREE_CODE (sz) != SSA_NAME)
	{
	  gimple_seq s;
	  sz = force_gimple_operand (sz, &s, true, NULL);
	  gimple_seq_add_seq (&seq, s);
	}

      if (seq)
	gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);

      if (wholephi)
	add_phi_arg (wholephi, wsz,
		     gimple_phi_arg_edge (obj_phi, i),
		     gimple_phi_arg_location (obj_phi, i));

      add_phi_arg (phi, sz,
		   gimple_phi_arg_edge (obj_phi, i),
		   gimple_phi_arg_location (obj_phi, i));
    }
}
1108 :
1109 : /* Descend through EXPR and return size_unknown if it uses any SSA variable
1110 : object_size_set or object_size_set_temp generated, which turned out to be
1111 : size_unknown, as noted in UNKNOWNS. */
1112 :
static tree
propagate_unknowns (object_size_info *osi, tree expr, bitmap unknowns)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      /* An SSA name recorded in UNKNOWNS stands for a size that turned out
	 to be unknown.  */
      if (bitmap_bit_p (unknowns, SSA_NAME_VERSION (expr)))
	return size_unknown (object_size_type);
      return expr;

    case MIN_EXPR:
    case MAX_EXPR:
      {
	/* Both operands may hold size temporaries; check each.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	res = propagate_unknowns (osi, TREE_OPERAND (expr, 1), unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    case MODIFY_EXPR:
      {
	/* Only the RHS of an assignment can reference a size temporary.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;
	return expr;
      }
    case TREE_VEC:
      /* A PHI-argument vector (see phi_dynamic_object_size); any unknown
	 element makes the whole vector unknown.  */
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	{
	  tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i),
					 unknowns);
	  if (size_unknown_p (res, object_size_type))
	    return res;
	}
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* Only operand 0 can hold a size temporary here; operand 1 is an
	   offset.  */
	tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0),
				       unknowns);
	if (size_unknown_p (res, object_size_type))
	  return res;

	return expr;
      }
    default:
      return expr;
    }
}
1170 :
1171 : /* Walk through size expressions that need reexamination and generate
1172 : statements for them. */
1173 :
static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions.  Iterate to a fixed point
     because marking one size unknown can invalidate expressions that use
     it.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  bitmap unknowns = BITMAP_ALLOC (NULL);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
	{
	  object_size cur = object_sizes_get_raw (osi, i);

	  if (size_unknown_p (propagate_unknowns (osi, cur.size, unknowns),
			      object_size_type)
	      || size_unknown_p (propagate_unknowns (osi, cur.wholesize,
						     unknowns),
				 object_size_type))
	    {
	      /* Record the SSAs we're overwriting to propagate the
		 unknowns.  */
	      tree oldval = object_sizes_get (osi, i);
	      tree old_wholeval = object_sizes_get (osi, i, true);

	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (oldval));
	      bitmap_set_bit (unknowns, SSA_NAME_VERSION (old_wholeval));
	      object_sizes_initialize (osi, i,
				       size_unknown (object_size_type),
				       size_unknown (object_size_type));
	      bitmap_clear_bit (osi->reexamine, i);
	      changed = true;
	    }
	}
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);

  /* Release all unknowns.  Their temporaries are no longer needed.  */
  EXECUTE_IF_SET_IN_BITMAP (unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  BITMAP_FREE (unknowns);
  BITMAP_FREE (reexamine);

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
	emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
	{
	  tree size_expr = NULL_TREE;

	  /* Bundle wholesize in with the size to gimplify if needed.  */
	  if (osize.wholesize != osize.size
	      && !size_usable_p (osize.wholesize))
	    size_expr = size_binop (COMPOUND_EXPR,
				    osize.wholesize,
				    osize.size);
	  else if (!size_usable_p (osize.size))
	    size_expr = osize.size;

	  if (size_expr)
	    {
	      /* A GIMPLE_NOP definition means a default definition (e.g. a
		 parameter); insert at the start of the entry block's
		 successor instead.  */
	      gimple_stmt_iterator gsi;
	      if (code == GIMPLE_NOP)
		gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	      else
		gsi = gsi_for_stmt (stmt);

	      force_gimple_operand (size_expr, &seq, true, NULL);
	      gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
	    }
	}

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
			       object_sizes_get (osi, i),
			       object_sizes_get (osi, i, true));
    }
}
1269 :
1270 : /* Compute __builtin_object_size value for PTR and set *PSIZE to
1271 : the resulting value. If the declared object is known and PDECL
1272 : is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
1273 : is the second argument to __builtin_object_size.
1274 : Returns true on success and false when the object size could not
1275 : be determined. */
1276 :
bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     tree *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  /* A bare address of a decl/reference can be resolved directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  /* COMPUTED[TYPE] is only allocated while the objsz pass runs; outside of
     it, fall back to a cheap, best-effort computation.  */
  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      /* For static sizes only constant, in-range offsets are
		 handled; dynamic sizes can fold any offset in.  */
	      if (((object_size_type & OST_DYNAMIC)
		   || (tree_fits_shwi_p (offset)
		       && compare_tree_int (offset, offset_limit) <= 0))
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize))
		{
		  *psize = size_for_offset (*psize, offset);
		  return true;
		}
	    }
	}
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %s%sobject size for ",
		   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
		   (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
		   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);

      /* DEPTHS/STACK/TOS are only used by the static OST_MINIMUM loop
	 detection below; clear them up front for the static case.  */
      if (!(object_size_type & OST_DYNAMIC))
	{
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;
	}

      /* First pass: walk UD chains, compute object sizes that can be computed.
	 osi.reexamine bitmap at the end will contain versions of SSA_NAMES
	 that need to be reexamined.  For both static and dynamic size
	 computation, reexamination is for propagation across dependency loops.
	 The dynamic case has the additional use case where the computed
	 expression needs to be gimplified.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      if (object_size_type & OST_DYNAMIC)
	{
	  osi.pass = 1;
	  gimplify_size_expressions (&osi);
	  bitmap_clear (osi.reexamine);
	}

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & OST_MINIMUM)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever is still marked for reexamination has converged; mark it
	 computed so it isn't revisited.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (!object_sizes_unknown_p (object_size_type, i))
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %s%sobject size ",
			 ((object_size_type & OST_MINIMUM) ? "minimum"
			  : "maximum"),
			 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
			 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
		print_generic_expr (dump_file, object_sizes_get (&osi, i),
				    dump_flags);
		fprintf (dump_file, "\n");
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return !size_unknown_p (*psize, object_size_type);
}
1460 :
1461 : /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1462 :
1463 : static void
1464 8202 : expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1465 : {
1466 8202 : int object_size_type = osi->object_size_type;
1467 8202 : unsigned int varno = SSA_NAME_VERSION (ptr);
1468 8202 : tree bytes, wholesize;
1469 :
1470 8202 : gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1471 8202 : gcc_assert (osi->pass == 0);
1472 :
1473 8202 : if (TREE_CODE (value) == WITH_SIZE_EXPR)
1474 0 : value = TREE_OPERAND (value, 0);
1475 :
1476 : /* Pointer variables should have been handled by merge_object_sizes. */
1477 8202 : gcc_assert (TREE_CODE (value) != SSA_NAME
1478 : || !POINTER_TYPE_P (TREE_TYPE (value)));
1479 :
1480 8202 : if (TREE_CODE (value) == ADDR_EXPR)
1481 6675 : addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1482 : else
1483 1527 : bytes = wholesize = size_unknown (object_size_type);
1484 :
1485 8202 : object_sizes_set (osi, varno, bytes, wholesize);
1486 8202 : }
1487 :
1488 :
1489 : /* Compute object_sizes for PTR, defined to the result of a call. */
1490 :
1491 : static void
1492 2194 : call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1493 : {
1494 2194 : int object_size_type = osi->object_size_type;
1495 2194 : unsigned int varno = SSA_NAME_VERSION (ptr);
1496 2194 : tree bytes = NULL_TREE;
1497 :
1498 2194 : gcc_assert (is_gimple_call (call));
1499 :
1500 2194 : gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1501 2194 : gcc_assert (osi->pass == 0);
1502 :
1503 2194 : bool is_strdup = gimple_call_builtin_p (call, BUILT_IN_STRDUP);
1504 2194 : bool is_strndup = gimple_call_builtin_p (call, BUILT_IN_STRNDUP);
1505 2194 : bool is_access_with_size
1506 2194 : = gimple_call_internal_p (call, IFN_ACCESS_WITH_SIZE);
1507 2194 : if (is_strdup || is_strndup)
1508 160 : bytes = strdup_object_size (call, object_size_type, is_strndup);
1509 2034 : else if (is_access_with_size)
1510 174 : bytes = access_with_size_object_size (call, object_size_type);
1511 : else
1512 1860 : bytes = alloc_object_size (call, object_size_type);
1513 :
1514 2194 : if (!size_valid_p (bytes, object_size_type))
1515 365 : bytes = size_unknown (object_size_type);
1516 :
1517 2194 : object_sizes_set (osi, varno, bytes, bytes);
1518 2194 : }
1519 :
1520 :
1521 : /* Compute object_sizes for PTR, defined to an unknown value. */
1522 :
1523 : static void
1524 0 : unknown_object_size (struct object_size_info *osi, tree ptr)
1525 : {
1526 0 : int object_size_type = osi->object_size_type;
1527 0 : unsigned int varno = SSA_NAME_VERSION (ptr);
1528 :
1529 0 : gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1530 0 : gcc_checking_assert (osi->pass == 0);
1531 0 : tree bytes = size_unknown (object_size_type);
1532 :
1533 0 : object_sizes_set (osi, varno, bytes, bytes);
1534 0 : }
1535 :
1536 :
/* Merge the object size and whole size of ORIG into DEST.  Return true if
   the object size might need reexamination later.  */
1539 :
1540 : static bool
1541 2187 : merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1542 : {
1543 2187 : int object_size_type = osi->object_size_type;
1544 2187 : unsigned int varno = SSA_NAME_VERSION (dest);
1545 2187 : tree orig_bytes, wholesize;
1546 :
1547 2187 : if (object_sizes_unknown_p (object_size_type, varno))
1548 : return false;
1549 :
1550 2187 : if (osi->pass == 0)
1551 1352 : collect_object_sizes_for (osi, orig);
1552 :
1553 2187 : orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1554 2187 : wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1555 :
1556 2187 : if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1557 1056 : osi->changed = true;
1558 :
1559 2187 : return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1560 : }
1561 :
1562 :
1563 : /* Compute object_sizes for VAR, defined to the result of an assignment
1564 : with operator POINTER_PLUS_EXPR. Return true if the object size might
1565 : need reexamination later. */
1566 :
static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  /* Extract the base pointer OP0 and the byte offset OP1, either directly
     from a POINTER_PLUS_EXPR or from the MEM_REF inside &MEM[ptr + off].  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if ((TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, op0);

	  bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
	  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
	  reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
	}
      else
	{
	  /* op0 will be ADDR_EXPR here.  We should never come here during
	     reexamination.  */
	  gcc_checking_assert (osi->pass == 0);
	  addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
	}

      /* A statically known, non-negative offset within the limit.  */
      bool pos_offset = (size_valid_p (op1, 0)
			 && compare_tree_int (op1, offset_limit) <= 0);

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
	 since the wholesize could be non-zero and a negative offset could give
	 a non-zero size.  */
      if (size_unknown_p (bytes, 0))
	;
      /* In the static case, We want SIZE_FOR_OFFSET to go a bit easy on us if
	 it sees a negative offset since BYTES could have been
	 overestimated.  */
      else if ((object_size_type & OST_DYNAMIC)
	       || bytes != wholesize
	       || pos_offset)
	bytes = size_for_offset (bytes, op1, wholesize,
				 ((object_size_type & OST_DYNAMIC)
				  || pos_offset));
      /* In the static case, with a negative offset, the best estimate for
	 minimum size is size_unknown but for maximum size, the wholesize is a
	 better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
	bytes = size_unknown (object_size_type);
      else
	bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}
1650 :
1651 : /* Compute the dynamic object size for VAR. Return the result in SIZE and
1652 : WHOLESIZE. */
1653 :
1654 : static void
1655 349 : dynamic_object_size (struct object_size_info *osi, tree var,
1656 : tree *size, tree *wholesize)
1657 : {
1658 349 : int object_size_type = osi->object_size_type;
1659 :
1660 349 : if (TREE_CODE (var) == SSA_NAME)
1661 : {
1662 225 : unsigned varno = SSA_NAME_VERSION (var);
1663 :
1664 225 : collect_object_sizes_for (osi, var);
1665 225 : *size = object_sizes_get (osi, varno);
1666 225 : *wholesize = object_sizes_get (osi, varno, true);
1667 : }
1668 124 : else if (TREE_CODE (var) == ADDR_EXPR)
1669 123 : addr_object_size (osi, var, object_size_type, size, wholesize);
1670 : else
1671 1 : *size = *wholesize = size_unknown (object_size_type);
1672 349 : }
1673 :
1674 : /* Compute object_sizes for VAR, defined at STMT, which is
1675 : a COND_EXPR. Return true if the object size might need reexamination
1676 : later. */
1677 :
static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  /* In the dynamic case, build COND_EXPR size expressions mirroring the
     original condition instead of merging to a single estimate.  */
  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      /* Skip computing the else branch if the then branch is already
	 unknown; the result would be unknown either way.  */
      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      if (!size_unknown_p (then_size, object_size_type))
	dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
	  || size_unknown_p (else_size, object_size_type))
	cond_size = cond_wholesize = size_unknown (object_size_type);
      else
	{
	  cond_size = fold_build3 (COND_EXPR, sizetype,
				   gimple_assign_rhs1 (stmt),
				   then_size, else_size);
	  cond_wholesize = fold_build3 (COND_EXPR, sizetype,
					gimple_assign_rhs1 (stmt),
					then_wholesize, else_wholesize);
	}

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  /* Static case: merge both branches' sizes into VAR's estimate.  */
  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  /* The then branch may already have driven the size to unknown.  */
  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
1736 :
1737 : /* Find size of an object passed as a parameter to the function. */
1738 :
static void
parm_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  tree parm = SSA_NAME_VAR (var);

  /* Only the dynamic computation can exploit the access attribute below;
     otherwise treat the parameter like any other expression.  */
  if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      expr_object_size (osi, var, parm);
      return;
    }

  /* Look for access attribute.  */
  rdwr_map rdwr_idx;

  tree fndecl = cfun->decl;
  const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
  tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
  tree sz = NULL_TREE;

  /* If we have an access attribute with a usable size argument... */
  if (access && access->sizarg != UINT_MAX
      /* ... and either PARM is void * or has a type that is complete and has a
	 constant size... */
      && ((typesize && poly_int_tree_p (typesize))
	  || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
    {
      tree fnargs = DECL_ARGUMENTS (fndecl);
      tree arg = NULL_TREE;
      unsigned argpos = 0;

      /* ... then walk through the parameters to pick the size parameter and
	 safely scale it by the type size if needed.

	 TODO: we could also compute the size of VLAs where the size is
	 given by a function parameter.  */
      for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
	if (argpos == access->sizarg)
	  {
	    gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (arg)));
	    /* Use the default definition so the size is available at the
	       function entry.  */
	    sz = get_or_create_ssa_default_def (cfun, arg);
	    if (sz != NULL_TREE)
	      {
		sz = fold_convert (sizetype, sz);
		/* TYPESIZE is NULL only for void *, where the size argument
		   is already in bytes.  */
		if (typesize)
		  sz = size_binop (MULT_EXPR, sz, typesize);
	      }
	    break;
	  }
    }
  if (!sz)
    sz = size_unknown (object_size_type);

  /* For a parameter, object size and whole size coincide.  */
  object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
}
1794 :
/* Compute an object size expression for VAR, which is the result of a PHI
   node.  */

static void
phi_dynamic_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned i, num_args = gimple_phi_num_args (stmt);
  bool wholesize_needed = false;

  /* The extra space is for the PHI result at the end, which object_sizes_set
     sets for us.  */
  tree sizes = make_tree_vec (num_args + 1);
  tree wholesizes = make_tree_vec (num_args + 1);

  /* Bail out if the size of any of the PHI arguments cannot be
     determined.  */
  for (i = 0; i < num_args; i++)
    {
      edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
      /* Abnormal/EH edges cannot have size computations inserted on them.  */
      if (e->flags & EDGE_COMPLEX)
	break;

      tree rhs = gimple_phi_arg_def (stmt, i);
      tree size, wholesize;

      dynamic_object_size (osi, rhs, &size, &wholesize);

      if (size_unknown_p (size, object_size_type))
	break;

      /* Track whether any argument distinguishes its size from its
	 whole-object size; if none does, one PHI suffices for both.  */
      if (size != wholesize)
	wholesize_needed = true;

      TREE_VEC_ELT (sizes, i) = size;
      TREE_VEC_ELT (wholesizes, i) = wholesize;
    }

  /* I < NUM_ARGS means the loop above bailed out early.  */
  if (i < num_args)
    {
      ggc_free (sizes);
      ggc_free (wholesizes);
      sizes = wholesizes = size_unknown (object_size_type);
    }

  /* Point to the same TREE_VEC so that we can avoid emitting two PHI
     nodes.  */
  else if (!wholesize_needed)
    {
      ggc_free (wholesizes);
      wholesizes = sizes;
    }

  object_sizes_set (osi, varno, sizes, wholesizes);
}
1852 :
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is size_unknown (object_size_type).
   To differentiate addition from subtraction, ADDR_EXPR returns
   size_unknown (object_size_type) for all objects bigger than half of the
   address space, and constants less than half of the address space are
   considered addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  /* Nothing to do if a final answer is already recorded.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      /* bitmap_set_bit returns true only on a first visit.  */
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* Initialize to 0 for maximum size and M1U for minimum size so that
	     it gets immediately overridden.  */
	  object_sizes_initialize (osi, varno,
				   size_initval (object_size_type),
				   size_initval (object_size_type));
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  if (object_size_type & OST_DYNAMIC)
	    object_sizes_set_temp (osi, varno);

	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of statement that defines VAR.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	/* Pointer arithmetic, including &MEM_REF which encodes an offset.  */
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    /* Plain copies of pointer SSA names merge the source's sizes;
	       anything else is sized from the expression itself.  */
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* Calls like memcpy that return one of their arguments.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      unknown_object_size (osi, var);
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	parm_object_size (osi, var);
      else
	/* Uninitialized SSA names point nowhere.  */
	unknown_object_size (osi, var);
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Dynamic sizes build size expressions for PHIs separately.  */
	if (object_size_type & OST_DYNAMIC)
	  {
	    phi_dynamic_object_size (osi, var);
	    break;
	  }

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* Once the size is unknown, looking further cannot help.  */
	    if (object_sizes_unknown_p (object_size_type, varno))
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Dynamic sizes use placeholder temps to return an answer, so it is always
     safe to set COMPUTED for them.  */
  if ((object_size_type & OST_DYNAMIC)
      || !reexamine || object_sizes_unknown_p (object_size_type, varno))
    {
      bitmap_set_bit (computed[object_size_type], varno);
      if (!(object_size_type & OST_DYNAMIC))
	bitmap_clear_bit (osi->reexamine, varno);
      else if (reexamine)
	bitmap_set_bit (osi->reexamine, varno);
    }
  else
    {
      /* The size depends on a variable in a dependency cycle; queue it for
	 the later passes.  */
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
2029 :
2030 :
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH is incremented each time a nonzero constant is
   added to the pointer along the walk, so reaching an already-visited
   variable at a *different* depth means the pointer grows around a cycle.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      /* Pop stack entries down to VARNO, forcing each popped
		 variable's minimum size to zero and marking it final.  */
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes_set (osi, *sp, size_zero_node,
				object_sizes_get (osi, *sp, true));
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only variables queued for re-examination can be part of a cycle.  */
    return;

  /* Push VARNO on the walk stack before recursing.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    /* Plain copy: same depth.  */
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero addend bumps the depth; a zero addend is a copy.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Walk every incoming value at the same depth.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VARNO off the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
2130 :
2131 :
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree offset = gimple_assign_rhs2 (stmt);

      /* Skip non-positive offsets, and offsets above OFFSET_LIMIT which are
	 treated as pointer subtraction rather than addition.  */
      if (TREE_CODE (offset) != INTEGER_CST
	  || integer_zerop (offset) || compare_tree_int (offset, offset_limit) > 0)
	return;

      /* Seed the walk stack with the base pointer at depth 1 and start the
	 recursive cycle detection from VAR at depth 2 (VAR is one nonzero
	 addition past its base).  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
2164 :
2165 :
2166 : /* Initialize data structures for the object size computation. */
2167 :
2168 : void
2169 18304 : init_object_sizes (void)
2170 : {
2171 18304 : int object_size_type;
2172 :
2173 18304 : if (computed[0])
2174 : return;
2175 :
2176 24588 : for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2177 : {
2178 21856 : object_sizes_grow (object_size_type);
2179 21856 : computed[object_size_type] = BITMAP_ALLOC (NULL);
2180 : }
2181 :
2182 2732 : init_offset_limit ();
2183 : }
2184 :
2185 :
2186 : /* Destroy data structures after the object size computation. */
2187 :
2188 : void
2189 3456423 : fini_object_sizes (void)
2190 : {
2191 3456423 : int object_size_type;
2192 :
2193 31107807 : for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
2194 : {
2195 27651384 : object_sizes_release (object_size_type);
2196 27651384 : BITMAP_FREE (computed[object_size_type]);
2197 : }
2198 3456423 : }
2199 :
2200 : /* Dummy valueize function. */
2201 :
2202 : static tree
2203 17633 : do_valueize (tree t)
2204 : {
2205 17633 : return t;
2206 : }
2207 :
/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  /* The mode argument must be a usable constant.  */
  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  /* Only handle the subobject modes (1 and 3) here; whole-object modes keep
     their information through later optimization.  */
  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  /* Bail out unless a constant estimate is available now and fits the
     call's result type.  */
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  /* Redirect the call's result into a fresh temporary and compute
     LHS = MIN (or MAX for OST_MINIMUM) of that temporary and the constant
     estimate, so the later pass can only tighten the bound.  */
  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}
2248 :
/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  /* Let the builtin folder compute a (possibly non-constant) size
     expression; NULL means no expression could be found.  */
  tree result = fold_builtin_call_array (loc, result_type,
					 gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  /* Replace the call statement at I with the gimplified expression.  */
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}
2284 :
/* Execute the object size pass over FUN: fold every __builtin_object_size
   and __builtin_dynamic_object_size call.  When EARLY, only clamp subobject
   calls via early_object_sizes_execute_one instead of folding.  Returns the
   global TODO flags accumulated during the walk.  */

static unsigned int
object_sizes_execute (function *fun, bool early)
{
  todo = 0;
  /* SSA names whose uses we replace; candidates for dead code removal.  */
  auto_bitmap sdce_worklist;

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  bool dynamic = false;

	  gimple *call = gsi_stmt (i);
	  if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
	    dynamic = true;
	  else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Lazy setup: only pay for the data structures when at least one
	     relevant builtin call exists.  */
	  init_object_sizes ();

	  /* If early, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  Do the same for
	     __builtin_dynamic_object_size too.  */
	  if (early)
	    {
	      early_object_sizes_execute_one (&i, call);
	      continue;
	    }

	  if (dynamic)
	    {
	      if (dynamic_object_sizes_execute_one (&i, call))
		continue;
	      else
		{
		  /* If we could not find a suitable size expression, lower to
		     __builtin_object_size so that we may at least get a
		     constant lower or higher estimate.  */
		  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
		  gimple_call_set_fndecl (call, bosfn);
		  update_stmt (call);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, gimple_call_arg (call, 0),
					  dump_flags);
		      fprintf (dump_file,
			       ": Retrying as __builtin_object_size\n");
		    }
		}
	    }

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  /* No size could be computed: use the mode's failure value,
		     0 for minimum modes and (size_t)-1 for maximum modes.  */
		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      replace_uses_by (lhs, result);
	      /* Mark lhs as possibly DCEd.  */
	      bitmap_set_bit (sdce_worklist, SSA_NAME_VERSION (lhs));
	    }
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  /* Remove statements made dead by the propagation above.  */
  simple_dce_from_worklist (sdce_worklist);
  return todo;
}
2394 :
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  /* clone is provided so the pass can appear more than once in the
     pass pipeline.  */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    /* EARLY == false: fully fold the builtins.  */
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}
2434 :
/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    /* EARLY == true: only clamp subobject size calls with MIN/MAX.  */
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

/* Factory used by the pass manager to instantiate the early pass.  */

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}
|