Line data Source code
1 : /* Data flow functions for trees.
2 : Copyright (C) 2001-2026 Free Software Foundation, Inc.
3 : Contributed by Diego Novillo <dnovillo@redhat.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify
8 : it under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful,
13 : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : GNU General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "gimple.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "tree-pretty-print.h"
31 : #include "fold-const.h"
32 : #include "stor-layout.h"
33 : #include "langhooks.h"
34 : #include "gimple-iterator.h"
35 : #include "gimple-walk.h"
36 : #include "tree-dfa.h"
37 : #include "gimple-range.h"
38 :
39 : /* Build and maintain data flow information for trees. */
40 :
/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_defs;		/* Number of real DEF operands seen.  */
  long num_uses;		/* Number of real USE operands seen.  */
  long num_phis;		/* Number of PHI nodes.  */
  long num_phi_args;		/* Total number of PHI arguments.  */
  size_t max_num_phi_args;	/* Largest argument count on any single PHI.  */
  long num_vdefs;		/* Number of virtual definitions (VDEFs).  */
  long num_vuses;		/* Number of virtual uses (VUSEs).  */
};
52 :
53 :
54 : /* Local functions. */
55 : static void collect_dfa_stats (struct dfa_stats_d *);
56 :
57 :
58 : /*---------------------------------------------------------------------------
59 : Dataflow analysis (DFA) routines
60 : ---------------------------------------------------------------------------*/
61 :
62 : /* Renumber the gimple stmt uids in one block. The caller is responsible
63 : of calling set_gimple_stmt_max_uid (fun, 0) at some point. */
64 :
65 : void
66 73786047 : renumber_gimple_stmt_uids_in_block (struct function *fun, basic_block bb)
67 : {
68 73786047 : gimple_stmt_iterator bsi;
69 101115995 : for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
70 : {
71 27329948 : gimple *stmt = gsi_stmt (bsi);
72 27329948 : gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
73 : }
74 604700057 : for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
75 : {
76 457127963 : gimple *stmt = gsi_stmt (bsi);
77 457127963 : gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
78 : }
79 73786047 : }
80 :
81 : /* Renumber all of the gimple stmt uids. */
82 :
83 : void
84 6508384 : renumber_gimple_stmt_uids (struct function *fun)
85 : {
86 6508384 : basic_block bb;
87 :
88 6508384 : set_gimple_stmt_max_uid (fun, 0);
89 75948769 : FOR_ALL_BB_FN (bb, fun)
90 69440385 : renumber_gimple_stmt_uids_in_block (fun, bb);
91 6508384 : }
92 :
93 : /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
94 : in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
95 :
96 : void
97 633330 : renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
98 : {
99 633330 : int i;
100 :
101 633330 : set_gimple_stmt_max_uid (cfun, 0);
102 4640082 : for (i = 0; i < n_blocks; i++)
103 4006752 : renumber_gimple_stmt_uids_in_block (cfun, blocks[i]);
104 633330 : }
105 :
106 :
107 :
108 : /*---------------------------------------------------------------------------
109 : Debugging functions
110 : ---------------------------------------------------------------------------*/
111 :
/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  /* For an SSA name, dump points-to information for pointers and then
     continue with the underlying declaration (which may be NULL_TREE
     for anonymous SSA names).  */
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  /* Only mention the points-to UID when it differs from the DECL UID.  */
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  /* cfun may be NULL when dumping outside a function context.  */
  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
162 :
163 :
/* Dump variable VAR and its may-aliases to stderr.  Intended to be
   called from a debugger (DEBUG_FUNCTION keeps it from being
   eliminated).  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
171 :
172 :
/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  /* fmt_str prints the table header; fmt_str_1 prints a label, a count
     and a byte amount (PRsa is the format fragment matching the
     SIZE_AMOUNT macro arguments); fmt_str_3 prints a label and a byte
     amount only.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu" PRsa (11) "\n";
  const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Each entry estimates memory as instance count times per-instance
     size, and accumulates the estimate into TOTAL.  */
  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SIZE_AMOUNT (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SIZE_AMOUNT (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data",
	   SIZE_AMOUNT (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Only meaningful when at least one PHI was found.  */
  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: "
	     HOST_SIZE_T_PRINT_DEC ")\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (fmt_size_t) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
240 :
241 :
/* Dump DFA statistics on stderr.  Intended to be called from a
   debugger.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}
249 :
250 :
251 : /* Collect DFA statistics and store them in the structure pointed to by
252 : DFA_STATS_P. */
253 :
254 : static void
255 491 : collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
256 : {
257 491 : basic_block bb;
258 :
259 491 : gcc_assert (dfa_stats_p);
260 :
261 491 : memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
262 :
263 : /* Walk all the statements in the function counting references. */
264 8592 : FOR_EACH_BB_FN (bb, cfun)
265 : {
266 13466 : for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
267 5365 : gsi_next (&si))
268 : {
269 5365 : gphi *phi = si.phi ();
270 5365 : dfa_stats_p->num_phis++;
271 5365 : dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
272 5365 : if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
273 673 : dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
274 : }
275 :
276 30772 : for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
277 14570 : gsi_next (&si))
278 : {
279 14570 : gimple *stmt = gsi_stmt (si);
280 14570 : dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
281 14570 : dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
282 27138 : dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
283 27138 : dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
284 : }
285 : }
286 491 : }
287 :
288 :
289 : /*---------------------------------------------------------------------------
290 : Miscellaneous helpers
291 : ---------------------------------------------------------------------------*/
292 :
/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  /* IND/IN form a fake SSA name used only as a hash-table probe: the
     table hashes and compares on the underlying decl's DECL_UID, so
     only IND's uid field needs to be filled in.  */
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
313 :
/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  A NULL DEF removes any existing entry for VAR.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  /* Fake SSA name used as a hash probe; see ssa_default_def.  */
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      /* Removal: clear the default-def flag on the old name (if any)
	 and delete its slot from the table.  */
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
352 :
353 : /* Retrieve or create a default definition for VAR. */
354 :
355 : tree
356 26676341 : get_or_create_ssa_default_def (struct function *fn, tree var)
357 : {
358 26676341 : tree ddef = ssa_default_def (fn, var);
359 26676341 : if (ddef == NULL_TREE)
360 : {
361 10490616 : ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
362 10490616 : set_ssa_default_def (fn, var, ddef);
363 : }
364 26676341 : return ddef;
365 : }
366 :
367 :
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */

tree
get_ref_base_and_extent (tree exp, poly_int64 *poffset,
			 poly_int64 *psize,
			 poly_int64 *pmax_size,
			 bool *preverse)
{
  poly_offset_int bitsize = -1;
  poly_offset_int maxsize;
  tree size_tree = NULL_TREE;
  poly_offset_int bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    {
      size_tree = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
    }
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    bitsize = wi::to_poly_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  Each iteration peels one
     handled-component layer off EXP; the MEM_REF/TARGET_MEM_REF and
     default cases exit via the DONE label.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && poly_int_tree_p (this_offset))
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    /* Skip non-FIELD_DECL chain entries to find the next
		       field, if any.  */
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE (field);
			tree ssize = TYPE_SIZE (stype);
			if (fsize == NULL
			    || !poly_int_tree_p (fsize)
			    || ssize == NULL
			    || !poly_int_tree_p (ssize))
			  maxsize = -1;
			else if (known_size_p (maxsize))
			  {
			    /* Grow maxsize by the tail padding between the
			       end of FIELD and the end of the record.  */
			    poly_offset_int tem
			      = (wi::to_poly_offset (ssize)
				 - wi::to_poly_offset (fsize));
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		    /* A component ref with an adjacent field up in the
		       structure hierarchy constrains the size of any variable
		       array ref lower in the access hierarchy.  */
		    else
		      seen_variable_array_ref = false;
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (known_size_p (maxsize)
		    && csize
		    && poly_int_tree_p (csize))
		  maxsize = wi::to_poly_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (poly_int_tree_p (index)
		&& (low_bound = array_ref_low_bound (exp),
		    poly_int_tree_p (low_bound))
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		poly_offset_int woffset
		  = wi::sext (wi::to_poly_offset (index)
			      - wi::to_poly_offset (low_bound),
			      TYPE_PRECISION (sizetype));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (known_size_p (maxsize)
		    && asize
		    && poly_int_tree_p (asize))
		  maxsize = wi::to_poly_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;

		/* Consult the range query for the SSA index to tighten
		   maxsize and bit_offset where possible.  */
		int_range_max vr;
		range_query *query;
		query = get_range_query (cfun);

		if (TREE_CODE (index) == SSA_NAME
		    && (low_bound = array_ref_low_bound (exp),
			poly_int_tree_p (low_bound))
		    && (unit_size = array_ref_element_size (exp),
			TREE_CODE (unit_size) == INTEGER_CST)
		    && query->range_of_expr (vr, index)
		    && !vr.varying_p ()
		    && !vr.undefined_p ())
		  {
		    wide_int min = vr.lower_bound ();
		    wide_int max = vr.upper_bound ();
		    poly_offset_int lbound = wi::to_poly_offset (low_bound);
		    /* Try to constrain maxsize with range information.  */
		    offset_int omax
		      = offset_int::from (max, TYPE_SIGN (TREE_TYPE (index)));
		    if (wi::get_precision (max) <= ADDR_MAX_BITSIZE
			&& known_lt (lbound, omax))
		      {
			poly_offset_int rmaxsize;
			rmaxsize = (omax - lbound + 1)
			    * wi::to_offset (unit_size) << LOG2_BITS_PER_UNIT;
			if (!known_size_p (maxsize)
			    || known_lt (rmaxsize, maxsize))
			  {
			    /* If we know an upper bound below the declared
			       one this is no longer variable.  */
			    if (known_size_p (maxsize))
			      seen_variable_array_ref = false;
			    maxsize = rmaxsize;
			  }
		      }
		    /* Try to adjust bit_offset with range information.  */
		    offset_int omin
		      = offset_int::from (min, TYPE_SIGN (TREE_TYPE (index)));
		    if (wi::get_precision (min) <= ADDR_MAX_BITSIZE
			&& known_le (lbound, omin))
		      {
			poly_offset_int woffset
			  = wi::sext (omin - lbound,
				      TYPE_PRECISION (sizetype));
			woffset *= wi::to_offset (unit_size);
			woffset <<= LOG2_BITS_PER_UNIT;
			bit_offset += woffset;
			if (known_size_p (maxsize))
			  maxsize -= woffset;
		      }
		  }
	      }
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part starts one element size past the base.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	     struct { int length; int a[1]; } x;           x.a[d]
	     struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	     struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	     struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && known_size_p (maxsize)
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || !poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
		  || (maybe_eq
		      (bit_offset + maxsize,
		       wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  /* Fold the MEM_REF offset into bit_offset, but only if
		     the combined offset fits a HOST_WIDE_INT.  */
		  poly_offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  poly_int64 off_hwi;
		  if (off.to_shwi (&off_hwi))
		    {
		      bit_offset = off_hwi;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* Unknown or negative size: report everything as undetermined.  */
  if (!bitsize.to_shwi (psize) || maybe_lt (*psize, 0))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  if (!bit_offset.to_shwi (poffset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (VAR_P (exp)
	  && ((flag_unconstrained_commons && DECL_COMMON (exp))
	      || (DECL_EXTERNAL (exp) && seen_variable_array_ref)))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || !poly_int_tree_p (sz_tree)
		      || maybe_eq (bit_offset + maxsize,
				   wi::to_poly_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
	 base decl.  */
      else if (!known_size_p (maxsize)
	       && DECL_SIZE (exp)
	       && poly_int_tree_p (DECL_SIZE (exp)))
	maxsize = wi::to_poly_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
	 base type constant.  */
      if (!known_size_p (maxsize)
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp))))
	maxsize = (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  /* A maxsize that is negative, unrepresentable, or whose endpoint would
     overflow is reported as unknown.  */
  if (!maxsize.to_shwi (pmax_size)
      || maybe_lt (*pmax_size, 0)
      || !endpoint_representable_p (*poffset, *pmax_size))
    *pmax_size = -1;

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (!endpoint_representable_p (*poffset, *psize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
733 :
734 : /* Like get_ref_base_and_extent, but for cases in which we only care
735 : about constant-width accesses at constant offsets. Return null
736 : if the access is anything else. */
737 :
738 : tree
739 47963010 : get_ref_base_and_extent_hwi (tree exp, HOST_WIDE_INT *poffset,
740 : HOST_WIDE_INT *psize, bool *preverse)
741 : {
742 47963010 : poly_int64 offset, size, max_size;
743 47963010 : HOST_WIDE_INT const_offset, const_size;
744 47963010 : bool reverse;
745 47963010 : tree decl = get_ref_base_and_extent (exp, &offset, &size, &max_size,
746 : &reverse);
747 47963010 : if (!offset.is_constant (&const_offset)
748 47963010 : || !size.is_constant (&const_size)
749 47963010 : || const_offset < 0
750 47962252 : || !known_size_p (max_size)
751 47444515 : || maybe_ne (max_size, const_size))
752 : return NULL_TREE;
753 :
754 46514412 : *poffset = const_offset;
755 46514412 : *psize = const_size;
756 46514412 : *preverse = reverse;
757 46514412 : return decl;
758 : }
759 :
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, poly_int64 *poffset,
				 tree (*valueize) (tree))
{
  poly_int64 byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  Each iteration peels one
     component layer; any non-constant or sub-byte component bails out
     with NULL_TREE.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    poly_int64 this_byte_offset;
	    poly_uint64 this_bit_offset;
	    /* Reject bit-fields that do not start on a byte boundary.  */
	    if (!poly_int_tree_p (TREE_OPERAND (exp, 2), &this_bit_offset)
		|| !multiple_p (this_bit_offset, BITS_PER_UNIT,
				&this_byte_offset))
	      return NULL_TREE;
	    byte_offset += this_byte_offset;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    poly_int64 hthis_offset;

	    /* Bail out on variable field offsets or fields whose bit
	       offset is not byte-aligned.  */
	    if (!this_offset
		|| !poly_int_tree_p (this_offset, &hthis_offset)
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* Valueize the index and low bound where possible; each must
	       end up constant, otherwise the offset is unknown.  */
	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);
	    if (!poly_int_tree_p (index))
	      return NULL_TREE;
	    low_bound = array_ref_low_bound (exp);
	    if (valueize
		&& TREE_CODE (low_bound) == SSA_NAME)
	      low_bound = (*valueize) (low_bound);
	    if (!poly_int_tree_p (low_bound))
	      return NULL_TREE;
	    unit_size = array_ref_element_size (exp);
	    if (TREE_CODE (unit_size) != INTEGER_CST)
	      return NULL_TREE;

	    /* If the resulting bit-offset is constant, track it.  */
	    poly_offset_int woffset
	      = wi::sext (wi::to_poly_offset (index)
			  - wi::to_poly_offset (low_bound),
			  TYPE_PRECISION (sizetype));
	    woffset *= wi::to_offset (unit_size);
	    byte_offset += woffset.force_shwi ();
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  /* The imaginary part lives one element size past the base.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off]; a variable index
	       makes the offset unknown.  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    poly_offset_int off = mem_ref_offset (exp);
		    byte_offset += off.force_shwi ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
903 :
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  Convenience wrapper around
   get_addr_base_and_unit_offset_1 without SSA name valueization.  */

tree
get_addr_base_and_unit_offset (tree exp, poly_int64 *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
914 :
915 : /* Returns true if STMT references an SSA_NAME that has
916 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
917 :
918 : bool
919 17682759 : stmt_references_abnormal_ssa_name (gimple *stmt)
920 : {
921 17682759 : ssa_op_iter oi;
922 17682759 : use_operand_p use_p;
923 :
924 39541337 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
925 : {
926 21859308 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
927 : return true;
928 : }
929 :
930 : return false;
931 : }
932 :
933 : /* If STMT takes any abnormal PHI values as input, replace them with
934 : local copies. */
935 :
936 : void
937 1812 : replace_abnormal_ssa_names (gimple *stmt)
938 : {
939 1812 : ssa_op_iter oi;
940 1812 : use_operand_p use_p;
941 :
942 3615 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
943 : {
944 1803 : tree op = USE_FROM_PTR (use_p);
945 1803 : if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
946 : {
947 20 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
948 20 : tree new_name = make_ssa_name (TREE_TYPE (op));
949 20 : gassign *assign = gimple_build_assign (new_name, op);
950 20 : gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
951 20 : SET_USE (use_p, new_name);
952 : }
953 : }
954 1812 : }
955 :
/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree
{
  tree t;	/* The declaration encountered during the walk.  */
  int num;	/* Position in the walk order at which this entry was
		   recorded; serves as a stable tie-breaker when two
		   entries share the same DECL_UID.  */
};
962 :
963 :
964 : /* Compare two declarations references by their DECL_UID / sequence number.
965 : Called via qsort. */
966 :
967 : static int
968 364003 : compare_decls_by_uid (const void *pa, const void *pb)
969 : {
970 364003 : const numbered_tree *nt_a = ((const numbered_tree *)pa);
971 364003 : const numbered_tree *nt_b = ((const numbered_tree *)pb);
972 :
973 364003 : if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
974 296067 : return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
975 67936 : return nt_a->num - nt_b->num;
976 : }
977 :
978 : /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
979 : static tree
980 111878 : dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
981 : {
982 111878 : struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
983 111878 : vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
984 111878 : numbered_tree nt;
985 :
986 111878 : if (!DECL_P (*tp))
987 : return NULL_TREE;
988 20487 : nt.t = *tp;
989 20487 : nt.num = list->length ();
990 20487 : list->safe_push (nt);
991 20487 : *walk_subtrees = 0;
992 20487 : return NULL_TREE;
993 : }
994 :
995 : /* Find all the declarations used by the current function, sort them by uid,
996 : and emit the sorted list. Each declaration is tagged with a sequence
997 : number indicating when it was found during statement / tree walking,
998 : so that TDF_NOUID comparisons of anonymous declarations are still
999 : meaningful. Where a declaration was encountered more than once, we
1000 : emit only the sequence number of the first encounter.
1001 : FILE is the dump file where to output the list and FLAGS is as in
1002 : print_generic_expr. */
1003 : void
1004 3951 : dump_enumerated_decls (FILE *file, dump_flags_t flags)
1005 : {
1006 3951 : if (!cfun->cfg)
1007 4 : return;
1008 :
1009 3947 : basic_block bb;
1010 3947 : struct walk_stmt_info wi;
1011 3947 : auto_vec<numbered_tree, 40> decl_list;
1012 :
1013 3947 : memset (&wi, '\0', sizeof (wi));
1014 3947 : wi.info = (void *) &decl_list;
1015 15035 : FOR_EACH_BB_FN (bb, cfun)
1016 : {
1017 11088 : gimple_stmt_iterator gsi;
1018 :
1019 74654 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1020 52478 : if (!is_gimple_debug (gsi_stmt (gsi)))
1021 35149 : walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
1022 : }
1023 3947 : decl_list.qsort (compare_decls_by_uid);
1024 3947 : if (decl_list.length ())
1025 : {
1026 3545 : unsigned ix;
1027 3545 : numbered_tree *ntp;
1028 3545 : tree last = NULL_TREE;
1029 :
1030 3545 : fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
1031 : current_function_name ());
1032 31524 : FOR_EACH_VEC_ELT (decl_list, ix, ntp)
1033 : {
1034 20487 : if (ntp->t == last)
1035 7633 : continue;
1036 12854 : fprintf (file, "%d: ", ntp->num);
1037 12854 : print_generic_decl (file, ntp->t, flags);
1038 12854 : fprintf (file, "\n");
1039 12854 : last = ntp->t;
1040 : }
1041 : }
1042 3947 : }
|