Branch data Line data Source code
1 : : /* Generic SSA value propagation engine.
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : : Contributed by Diego Novillo <dnovillo@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it
8 : : under the terms of the GNU General Public License as published by the
9 : : Free Software Foundation; either version 3, or (at your option) any
10 : : later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "ssa.h"
28 : : #include "gimple-pretty-print.h"
29 : : #include "dumpfile.h"
30 : : #include "gimple-iterator.h"
31 : : #include "gimple-fold.h"
32 : : #include "tree-eh.h"
33 : : #include "gimplify.h"
34 : : #include "tree-cfg.h"
35 : : #include "tree-ssa.h"
36 : : #include "tree-ssa-propagate.h"
37 : : #include "domwalk.h"
38 : : #include "cfgloop.h"
39 : : #include "tree-cfgcleanup.h"
40 : : #include "cfganal.h"
41 : : #include "tree-ssa-dce.h"
42 : :
43 : : /* This file implements a generic value propagation engine based on
44 : : the same propagation used by the SSA-CCP algorithm [1].
45 : :
46 : : Propagation is performed by simulating the execution of every
47 : : statement that produces the value being propagated. Simulation
48 : : proceeds as follows:
49 : :
50 : : 1- Initially, all edges of the CFG are marked not executable and
51 : : the CFG worklist is seeded with the destinations of the edges
52 : : leaving the entry basic block.
53 : :
54 : : 2- Every statement S is simulated with a call to the call-back
55 : : function SSA_PROP_VISIT_STMT. This evaluation may produce 3
56 : : results:
57 : :
58 : : SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
59 : : interest and does not affect any of the work lists.
60 : : The statement may be simulated again if any of its input
61 : : operands change in future iterations of the simulator.
62 : :
63 : : SSA_PROP_VARYING: The value produced by S cannot be determined
64 : : at compile time. Further simulation of S is not required.
65 : : If S is a conditional jump, all the outgoing edges for the
66 : : block are considered executable and added to the work
67 : : list.
68 : :
69 : : SSA_PROP_INTERESTING: S produces a value that can be computed
70 : : at compile time. Its result can be propagated into the
71 : : statements that feed from S. Furthermore, if S is a
72 : : conditional jump, only the edge known to be taken is added
73 : : to the work list. Edges that are known not to execute are
74 : : never simulated.
75 : :
76 : : 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
77 : : return value from SSA_PROP_VISIT_PHI has the same semantics as
78 : : described in #2.
79 : :
80 : : 4- Two work lists are kept.  Statements are only added to these
81 : : lists if they produce one of SSA_PROP_INTERESTING or
82 : : SSA_PROP_VARYING.
83 : :
84 : : CFG_BLOCKS contains the list of blocks to be simulated.
85 : : Blocks are added to this list if their incoming edges are
86 : : found executable.
87 : :
88 : : SSA_EDGE_WORKLIST contains the list of statements that we
89 : : need to revisit.
90 : :
91 : : 5- Simulation terminates when both work lists are drained.
92 : :
93 : : Before calling ssa_propagate, it is important to use
94 : : prop_set_simulate_again to mark all the statements in the program that
95 : : should be simulated.  This initialization allows an implementation
96 : : to specify which statements should never be simulated.
97 : :
98 : : It is also important to compute def-use information before calling
99 : : ssa_propagate.
100 : :
101 : : References:
102 : :
103 : : [1] Constant propagation with conditional branches,
104 : : Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
105 : :
106 : : [2] Building an Optimizing Compiler,
107 : : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
108 : :
109 : : [3] Advanced Compiler Design and Implementation,
110 : : Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
111 : :
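/* Illustrative sketch: a minimal client of the propagation engine described
   above.  It assumes the ssa_propagation_engine interface declared in
   tree-ssa-propagate.h matches the virtuals invoked by simulate_stmt below
   (visit_stmt and visit_phi); the class name reachability_engine is
   hypothetical.  Returning SSA_PROP_VARYING everywhere degenerates the
   propagation into a plain reachability computation over EDGE_EXECUTABLE.

     #include "tree-ssa-propagate.h"

     class reachability_engine : public ssa_propagation_engine
     {
     public:
       enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) final override
       {
	 // No lattice: every definition is varying and every conditional
	 // may take any of its outgoing edges.
	 return SSA_PROP_VARYING;
       }
       enum ssa_prop_result visit_phi (gphi *) final override
       {
	 return SSA_PROP_VARYING;
       }
     };

   A pass would first mark the statements it wants simulated with
   prop_set_simulate_again (stmt, true), call
   reachability_engine ().ssa_propagate () and then inspect the
   EDGE_EXECUTABLE flags left on the CFG edges.  */
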
112 : : /* Worklist of control flow edge destinations.  This contains
113 : :    the CFG order number of the blocks so we can iterate in CFG
114 : :    order, i.e. reverse post-order (RPO), by visiting in bit-order,
115 : :    interleaved with the SSA edge worklist below.  */
116 : : static bitmap cfg_blocks;
117 : : static int *bb_to_cfg_order;
118 : : static int *cfg_order_to_bb;
119 : :
120 : : /* Worklist of SSA edges which will need reexamination as their
121 : :    definition has changed.  SSA edges are def-use edges in the SSA
122 : :    web.  For each D-U edge, we store the target statement or PHI node
123 : :    UID in a bitmap.  UIDs order stmts in execution order, so visiting
124 : :    the lowest set bit first visits statements in that order.  */
125 : : static bitmap ssa_edge_worklist;
126 : : static vec<gimple *> uid_to_stmt;
127 : :
128 : : /* Current RPO index in the iteration. */
129 : : static int curr_order;
130 : :
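/* Illustrative sketch of the scheduling policy implemented with these
   worklists: both act as ordered sets keyed by RPO index (blocks) or by a
   statement UID assigned during an RPO walk, and the driver always extracts
   the smallest pending key, mirroring the bitmap_first_set_bit calls in
   ssa_propagate below.  This standalone example uses std::set in place of
   GCC's sparse bitmaps and, for simplicity, represents a pending SSA edge
   directly by the RPO order of its statement's block.

     #include <cstdio>
     #include <set>

     int main ()
     {
       std::set<int> cfg_blocks = {0, 2, 5};   // pending block RPO orders
       std::set<int> ssa_edges = {2, 4};       // pending stmts, by block order

       while (!cfg_blocks.empty () || !ssa_edges.empty ())
	 {
	   int next_block = cfg_blocks.empty () ? -1 : *cfg_blocks.begin ();
	   int next_stmt = ssa_edges.empty () ? -1 : *ssa_edges.begin ();
	   // Prefer the entry with the smallest RPO order; ties go to the
	   // block so its PHIs and statements are (re-)simulated first.
	   if (next_block != -1
	       && (next_stmt == -1 || next_block <= next_stmt))
	     {
	       cfg_blocks.erase (cfg_blocks.begin ());
	       std::printf ("simulate block at RPO order %d\n", next_block);
	     }
	   else
	     {
	       ssa_edges.erase (ssa_edges.begin ());
	       std::printf ("simulate stmt in block at RPO order %d\n",
			    next_stmt);
	     }
	 }
     }
*/
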
131 : :
132 : : /* We have just defined a new value for VAR.  Add to the SSA edge
133 : :    worklist all immediate uses of VAR that still need to be simulated
134 : :    and whose containing block has already been visited.  */
135 : :
136 : : static void
137 : 281515671 : add_ssa_edge (tree var)
138 : : {
139 : 281515671 : imm_use_iterator iter;
140 : 281515671 : use_operand_p use_p;
141 : :
142 : 861879017 : FOR_EACH_IMM_USE_FAST (use_p, iter, var)
143 : : {
144 : 580363346 : gimple *use_stmt = USE_STMT (use_p);
145 : 580363346 : if (!prop_simulate_again_p (use_stmt))
146 : 245636784 : continue;
147 : :
148 : : /* If we have not yet simulated the block, wait for that to happen
149 : :    and do not add the stmt to the SSA edge worklist. */
150 : 334726562 : basic_block use_bb = gimple_bb (use_stmt);
151 : 334726562 : if (! (use_bb->flags & BB_VISITED))
152 : 138755714 : continue;
153 : :
154 : : /* If this is a use on a not yet executable edge do not bother to
155 : : queue it. */
156 : 195970848 : if (gimple_code (use_stmt) == GIMPLE_PHI
157 : 195970848 : && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
158 : 64545294 : & EDGE_EXECUTABLE))
159 : 5955467 : continue;
160 : :
161 : 190015381 : if (bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
162 : : {
163 : 175842730 : uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
164 : 175842730 : if (dump_file && (dump_flags & TDF_DETAILS))
165 : : {
166 : 0 : fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
167 : 0 : print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
168 : : }
169 : : }
170 : : }
171 : 281515671 : }
172 : :
173 : :
174 : : /* Add edge E to the control flow worklist. */
175 : :
176 : : static void
177 : 139836610 : add_control_edge (edge e)
178 : : {
179 : 139836610 : basic_block bb = e->dest;
180 : 139836610 : if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
181 : : return;
182 : :
183 : : /* If the edge has already been marked executable, skip it. */
184 : 124217248 : if (e->flags & EDGE_EXECUTABLE)
185 : : return;
186 : :
187 : 106852463 : e->flags |= EDGE_EXECUTABLE;
188 : :
189 : 106852463 : int bb_order = bb_to_cfg_order[bb->index];
190 : 106852463 : bitmap_set_bit (cfg_blocks, bb_order);
191 : :
192 : 106852463 : if (dump_file && (dump_flags & TDF_DETAILS))
193 : 61 : fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
194 : 61 : e->src->index, e->dest->index);
195 : : }
196 : :
197 : :
198 : : /* Simulate the execution of STMT and update the work lists accordingly. */
199 : :
200 : : void
201 : 802936614 : ssa_propagation_engine::simulate_stmt (gimple *stmt)
202 : : {
203 : 802936614 : enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
204 : 802936614 : edge taken_edge = NULL;
205 : 802936614 : tree output_name = NULL_TREE;
206 : :
207 : : /* Pull the stmt off the SSA edge worklist. */
208 : 802936614 : bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));
209 : :
210 : : /* Don't bother visiting statements that are already
211 : : considered varying by the propagator. */
212 : 802936614 : if (!prop_simulate_again_p (stmt))
213 : 571845861 : return;
214 : :
215 : 353694966 : if (gimple_code (stmt) == GIMPLE_PHI)
216 : : {
217 : 79807552 : val = visit_phi (as_a <gphi *> (stmt));
218 : 79807552 : output_name = gimple_phi_result (stmt);
219 : : }
220 : : else
221 : 273887414 : val = visit_stmt (stmt, &taken_edge, &output_name);
222 : :
223 : 353694966 : if (val == SSA_PROP_VARYING)
224 : : {
225 : 122604213 : prop_set_simulate_again (stmt, false);
226 : :
227 : : /* If the statement produced a new varying value, add the SSA
228 : : edges coming out of OUTPUT_NAME. */
229 : 122604213 : if (output_name)
230 : 73327198 : add_ssa_edge (output_name);
231 : :
232 : : /* If STMT transfers control out of its basic block, add
233 : : all outgoing edges to the work list. */
234 : 122604213 : if (stmt_ends_bb_p (stmt))
235 : : {
236 : 50788087 : edge e;
237 : 50788087 : edge_iterator ei;
238 : 50788087 : basic_block bb = gimple_bb (stmt);
239 : 131017811 : FOR_EACH_EDGE (e, ei, bb->succs)
240 : 80229724 : add_control_edge (e);
241 : : }
242 : 122604213 : return;
243 : : }
244 : 231090753 : else if (val == SSA_PROP_INTERESTING)
245 : : /* If the statement produced a new value, add the SSA edges coming
246 : : /* If the statement produced new value, add the SSA edges coming
247 : : out of OUTPUT_NAME. */
248 : 214710102 : if (output_name)
249 : 208188473 : add_ssa_edge (output_name);
250 : :
251 : : /* If we know which edge is going to be taken out of this block,
252 : : add it to the CFG work list. */
253 : 214710102 : if (taken_edge)
254 : 6521629 : add_control_edge (taken_edge);
255 : : }
256 : :
257 : : /* If there are no SSA uses on the stmt whose defs are simulated
258 : : again, then this stmt will never be visited again. */
259 : 231090753 : bool has_simulate_again_uses = false;
260 : 231090753 : use_operand_p use_p;
261 : 231090753 : ssa_op_iter iter;
262 : 231090753 : if (gimple_code (stmt) == GIMPLE_PHI)
263 : : {
264 : 66052693 : edge_iterator ei;
265 : 66052693 : edge e;
266 : 66052693 : tree arg;
267 : 102741450 : FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
268 : 100528152 : if (!(e->flags & EDGE_EXECUTABLE)
269 : 100528152 : || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
270 : 92403608 : && TREE_CODE (arg) == SSA_NAME
271 : 75945272 : && !SSA_NAME_IS_DEFAULT_DEF (arg)
272 : 75725019 : && prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
273 : : {
274 : : has_simulate_again_uses = true;
275 : : break;
276 : : }
277 : : }
278 : : else
279 : 203332985 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
280 : : {
281 : 163866155 : gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
282 : 163866155 : if (!gimple_nop_p (def_stmt)
283 : 163866155 : && prop_simulate_again_p (def_stmt))
284 : : {
285 : : has_simulate_again_uses = true;
286 : : break;
287 : : }
288 : : }
289 : 231090753 : if (!has_simulate_again_uses)
290 : : {
291 : 41680128 : if (dump_file && (dump_flags & TDF_DETAILS))
292 : 39 : fprintf (dump_file, "marking stmt to be not simulated again\n");
293 : 41680128 : prop_set_simulate_again (stmt, false);
294 : : }
295 : : }
296 : :
297 : :
298 : : /* Simulate the execution of BLOCK. Evaluate the statement associated
299 : : with each variable reference inside the block. */
300 : :
301 : : void
302 : 81366416 : ssa_propagation_engine::simulate_block (basic_block block)
303 : : {
304 : 81366416 : gimple_stmt_iterator gsi;
305 : :
306 : : /* There is nothing to do for the exit block. */
307 : 81366416 : if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
308 : 81366416 : return;
309 : :
310 : 81366416 : if (dump_file && (dump_flags & TDF_DETAILS))
311 : 57 : fprintf (dump_file, "\nSimulating block %d\n", block->index);
312 : :
313 : : /* Always simulate PHI nodes, even if we have simulated this block
314 : : before. */
315 : 124812523 : for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
316 : 43446107 : simulate_stmt (gsi_stmt (gsi));
317 : :
318 : : /* If this is the first time we've simulated this block, then we
319 : : must simulate each of its statements. */
320 : 81366416 : if (! (block->flags & BB_VISITED))
321 : : {
322 : 76135619 : gimple_stmt_iterator j;
323 : 76135619 : unsigned int normal_edge_count;
324 : 76135619 : edge e, normal_edge;
325 : 76135619 : edge_iterator ei;
326 : :
327 : 736030658 : for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
328 : 583759420 : simulate_stmt (gsi_stmt (j));
329 : :
330 : : /* Note that we have simulated this block. */
331 : 76135619 : block->flags |= BB_VISITED;
332 : :
333 : : /* We cannot predict when abnormal and EH edges will be executed, so
334 : : once a block is considered executable, we consider any
335 : : outgoing abnormal edges as executable.
336 : :
337 : : TODO: This is not exactly true.  Simplifying a statement might
338 : : prove it non-throwing, and a computed goto can be handled when
339 : : its destination is known.
340 : :
341 : : At the same time, if this block has only one successor that is
342 : : reached by non-abnormal edges, then add that successor to the
343 : : worklist. */
344 : 76135619 : normal_edge_count = 0;
345 : 76135619 : normal_edge = NULL;
346 : 183234057 : FOR_EACH_EDGE (e, ei, block->succs)
347 : : {
348 : 107098438 : if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
349 : 7481276 : add_control_edge (e);
350 : : else
351 : : {
352 : 99617162 : normal_edge_count++;
353 : 99617162 : normal_edge = e;
354 : : }
355 : : }
356 : :
357 : 76135619 : if (normal_edge_count == 1)
358 : 37625305 : add_control_edge (normal_edge);
359 : : }
360 : : }
361 : :
362 : :
363 : : /* Initialize local data structures and work lists. */
364 : :
365 : : static void
366 : 7978676 : ssa_prop_init (void)
367 : : {
368 : 7978676 : edge e;
369 : 7978676 : edge_iterator ei;
370 : 7978676 : basic_block bb;
371 : :
372 : : /* Worklists of SSA edges. */
373 : 7978676 : ssa_edge_worklist = BITMAP_ALLOC (NULL);
374 : 7978676 : bitmap_tree_view (ssa_edge_worklist);
375 : :
376 : : /* Worklist of basic-blocks. */
377 : 7978676 : bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
378 : 7978676 : cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
379 : 7978676 : int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
380 : : cfg_order_to_bb, false);
381 : 84584420 : for (int i = 0; i < n; ++i)
382 : 76605744 : bb_to_cfg_order[cfg_order_to_bb[i]] = i;
383 : 7978676 : cfg_blocks = BITMAP_ALLOC (NULL);
384 : :
385 : : /* Initially assume that every edge in the CFG is not executable
386 : : (including the edges coming out of the entry block).  Mark blocks
387 : : as not visited; blocks not yet visited will have all their statements
388 : : simulated once an incoming edge becomes executable. */
389 : 7978676 : set_gimple_stmt_max_uid (cfun, 0);
390 : 84584420 : for (int i = 0; i < n; ++i)
391 : : {
392 : 76605744 : gimple_stmt_iterator si;
393 : 76605744 : bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);
394 : :
395 : 108701467 : for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
396 : : {
397 : 32095723 : gimple *stmt = gsi_stmt (si);
398 : 32095723 : gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
399 : : }
400 : :
401 : 738534248 : for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
402 : : {
403 : 585322760 : gimple *stmt = gsi_stmt (si);
404 : 585322760 : gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
405 : : }
406 : :
407 : 76605744 : bb->flags &= ~BB_VISITED;
408 : 184142208 : FOR_EACH_EDGE (e, ei, bb->succs)
409 : 107536464 : e->flags &= ~EDGE_EXECUTABLE;
410 : : }
411 : 7978676 : uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun), true);
412 : 7978676 : }
413 : :
414 : :
415 : : /* Free allocated storage. */
416 : :
417 : : static void
418 : 7978676 : ssa_prop_fini (void)
419 : : {
420 : 7978676 : BITMAP_FREE (cfg_blocks);
421 : 7978676 : free (bb_to_cfg_order);
422 : 7978676 : free (cfg_order_to_bb);
423 : 7978676 : BITMAP_FREE (ssa_edge_worklist);
424 : 7978676 : uid_to_stmt.release ();
425 : 7978676 : }
426 : :
427 : :
428 : : /* Entry point to the propagation engine.
429 : :
430 : : The VISIT_STMT virtual function is called for every statement
431 : : visited and the VISIT_PHI virtual function is called for every PHI
432 : : node visited. */
433 : :
434 : : void
435 : 7978676 : ssa_propagation_engine::ssa_propagate (void)
436 : : {
437 : 7978676 : ssa_prop_init ();
438 : :
439 : 7978676 : curr_order = 0;
440 : :
441 : : /* Iterate until the worklists are empty. We iterate both blocks
442 : : and stmts in RPO order, prioritizing backedge processing.
443 : : Seed the algorithm by adding the successors of the entry block to the
444 : : edge worklist. */
445 : 7978676 : edge e;
446 : 7978676 : edge_iterator ei;
447 : 15957352 : FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
448 : : {
449 : 7978676 : e->flags &= ~EDGE_EXECUTABLE;
450 : 7978676 : add_control_edge (e);
451 : : }
452 : 265076179 : while (1)
453 : : {
454 : 265076179 : int next_block_order = (bitmap_empty_p (cfg_blocks)
455 : 265076179 : ? -1 : bitmap_first_set_bit (cfg_blocks));
456 : 265076179 : int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
457 : 265076179 : ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
458 : 265076179 : if (next_block_order == -1 && next_stmt_uid == -1)
459 : : break;
460 : :
461 : 257097503 : int next_stmt_bb_order = -1;
462 : 257097503 : gimple *next_stmt = NULL;
463 : 257097503 : if (next_stmt_uid != -1)
464 : : {
465 : 179580306 : next_stmt = uid_to_stmt[next_stmt_uid];
466 : 179580306 : next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
467 : : }
468 : :
469 : : /* Pull the next block to simulate off the worklist if it comes first. */
470 : 257097503 : if (next_block_order != -1
471 : 227705790 : && (next_stmt_bb_order == -1
472 : 227705790 : || next_block_order <= next_stmt_bb_order))
473 : : {
474 : 81366416 : curr_order = next_block_order;
475 : 81366416 : bitmap_clear_bit (cfg_blocks, next_block_order);
476 : 81366416 : basic_block bb
477 : 81366416 : = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
478 : 81366416 : simulate_block (bb);
479 : 81366416 : }
480 : : /* Else simulate from the SSA edge worklist. */
481 : : else
482 : : {
483 : 175731087 : curr_order = next_stmt_bb_order;
484 : 175731087 : if (dump_file && (dump_flags & TDF_DETAILS))
485 : : {
486 : 0 : fprintf (dump_file, "\nSimulating statement: ");
487 : 0 : print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
488 : : }
489 : 175731087 : simulate_stmt (next_stmt);
490 : : }
491 : : }
492 : :
493 : 7978676 : ssa_prop_fini ();
494 : 7978676 : }
495 : :
496 : : /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
497 : : is a non-volatile pointer dereference, a structure reference or a
498 : : reference to a single _DECL. Ignore volatile memory references
499 : : because they are not interesting for the optimizers. */
500 : :
501 : : bool
502 : 30962365 : stmt_makes_single_store (gimple *stmt)
503 : : {
504 : 30962365 : tree lhs;
505 : :
506 : 30962365 : if (gimple_code (stmt) != GIMPLE_ASSIGN
507 : 30962365 : && gimple_code (stmt) != GIMPLE_CALL)
508 : : return false;
509 : :
510 : 30976978 : if (!gimple_vdef (stmt))
511 : : return false;
512 : :
513 : 2607692 : lhs = gimple_get_lhs (stmt);
514 : :
515 : : /* A call statement may have a null LHS. */
516 : 2607692 : if (!lhs)
517 : : return false;
518 : :
519 : 2607692 : return (!TREE_THIS_VOLATILE (lhs)
520 : 2607692 : && (DECL_P (lhs)
521 : 2593079 : || REFERENCE_CLASS_P (lhs)));
522 : : }
523 : :
524 : :
525 : : /* Propagation statistics. */
526 : : struct prop_stats_d
527 : : {
528 : : long num_const_prop;
529 : : long num_copy_prop;
530 : : long num_stmts_folded;
531 : : };
532 : :
533 : : static struct prop_stats_d prop_stats;
534 : :
535 : : // range_query default methods to drive from a value_of_expr() rather than
536 : : // range_of_expr.
537 : :
538 : : tree
539 : 56689252 : substitute_and_fold_engine::value_on_edge (edge, tree expr)
540 : : {
541 : 56689252 : return value_of_expr (expr);
542 : : }
543 : :
544 : : tree
545 : 132285605 : substitute_and_fold_engine::value_of_stmt (gimple *stmt, tree name)
546 : : {
547 : 132285605 : if (!name)
548 : 0 : name = gimple_get_lhs (stmt);
549 : :
550 : 132285605 : gcc_checking_assert (!name || name == gimple_get_lhs (stmt));
551 : :
552 : 132285605 : if (name)
553 : 132285605 : return value_of_expr (name);
554 : : return NULL_TREE;
555 : : }
556 : :
557 : : bool
558 : 0 : substitute_and_fold_engine::range_of_expr (vrange &, tree, gimple *)
559 : : {
560 : 0 : return false;
561 : : }
562 : :
563 : : /* Replace USE references in statement STMT with the values returned by
564 : :    value_of_expr.  Return true if at least one reference was replaced. */
565 : :
566 : : bool
567 : 814592553 : substitute_and_fold_engine::replace_uses_in (gimple *stmt)
568 : : {
569 : 814592553 : bool replaced = false;
570 : 814592553 : use_operand_p use;
571 : 814592553 : ssa_op_iter iter;
572 : :
573 : 1194301157 : FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
574 : : {
575 : 379708604 : tree tuse = USE_FROM_PTR (use);
576 : 379708604 : tree val = value_of_expr (tuse, stmt);
577 : :
578 : 379708604 : if (val == tuse || val == NULL_TREE)
579 : 365716072 : continue;
580 : :
581 : 13992532 : if (gimple_code (stmt) == GIMPLE_ASM
582 : 13992532 : && !may_propagate_copy_into_asm (tuse))
583 : 0 : continue;
584 : :
585 : 13992532 : if (!may_propagate_copy (tuse, val))
586 : 485 : continue;
587 : :
588 : 13992047 : if (TREE_CODE (val) != SSA_NAME)
589 : 5383445 : prop_stats.num_const_prop++;
590 : : else
591 : 8608602 : prop_stats.num_copy_prop++;
592 : :
593 : 13992047 : propagate_value (use, val);
594 : :
595 : 13992047 : replaced = true;
596 : : }
597 : :
598 : 814592553 : return replaced;
599 : : }
600 : :
601 : :
602 : : /* Replace the arguments of PHI with the values returned by value_on_edge.
603 : :    Return true if at least one argument was replaced. */
604 : :
605 : : bool
606 : 24056207 : substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
607 : : {
608 : 24056207 : size_t i;
609 : 24056207 : bool replaced = false;
610 : :
611 : 79177934 : for (i = 0; i < gimple_phi_num_args (phi); i++)
612 : : {
613 : 55121727 : tree arg = gimple_phi_arg_def (phi, i);
614 : :
615 : 55121727 : if (TREE_CODE (arg) == SSA_NAME)
616 : : {
617 : 39178985 : edge e = gimple_phi_arg_edge (phi, i);
618 : 39178985 : tree val = value_on_edge (e, arg);
619 : :
620 : 39178985 : if (val && val != arg && may_propagate_copy (arg, val))
621 : : {
622 : 1199798 : if (TREE_CODE (val) != SSA_NAME)
623 : 31690 : prop_stats.num_const_prop++;
624 : : else
625 : 1168108 : prop_stats.num_copy_prop++;
626 : :
627 : 1199798 : propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
628 : 1199798 : replaced = true;
629 : :
630 : : /* If we propagated a copy and this argument flows
631 : : through an abnormal edge, update the replacement
632 : : accordingly. */
633 : 1199798 : if (TREE_CODE (val) == SSA_NAME
634 : 1168108 : && e->flags & EDGE_ABNORMAL
635 : 1199798 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
636 : : {
637 : : /* This can only occur for virtual operands, since
638 : : for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
639 : : would prevent replacement. */
640 : 0 : gcc_checking_assert (virtual_operand_p (val));
641 : 0 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
642 : : }
643 : : }
644 : : }
645 : : }
646 : :
647 : 24056207 : if (dump_file && (dump_flags & TDF_DETAILS))
648 : : {
649 : 146 : if (!replaced)
650 : 146 : fprintf (dump_file, "No folding possible\n");
651 : : else
652 : : {
653 : 0 : fprintf (dump_file, "Folded into: ");
654 : 0 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
655 : 0 : fprintf (dump_file, "\n");
656 : : }
657 : : }
658 : :
659 : 24056207 : return replaced;
660 : : }
661 : :
662 : :
663 : : class substitute_and_fold_dom_walker : public dom_walker
664 : : {
665 : : public:
666 : 12310382 : substitute_and_fold_dom_walker (cdi_direction direction,
667 : : class substitute_and_fold_engine *engine)
668 : 12310382 : : dom_walker (direction),
669 : 12310382 : something_changed (false),
670 : 12310382 : substitute_and_fold_engine (engine)
671 : : {
672 : 12310382 : dceworklist = BITMAP_ALLOC (NULL);
673 : 12310382 : stmts_to_fixup.create (0);
674 : 12310382 : need_eh_cleanup = BITMAP_ALLOC (NULL);
675 : 12310382 : need_ab_cleanup = BITMAP_ALLOC (NULL);
676 : 12310382 : }
677 : 12310382 : ~substitute_and_fold_dom_walker ()
678 : 12310382 : {
679 : 12310382 : BITMAP_FREE (dceworklist);
680 : 12310382 : stmts_to_fixup.release ();
681 : 12310382 : BITMAP_FREE (need_eh_cleanup);
682 : 12310382 : BITMAP_FREE (need_ab_cleanup);
683 : 12310382 : }
684 : :
685 : : edge before_dom_children (basic_block) final override;
686 : 122500739 : void after_dom_children (basic_block bb) final override
687 : : {
688 : 122500739 : substitute_and_fold_engine->post_fold_bb (bb);
689 : 122500739 : }
690 : :
691 : : bool something_changed;
692 : : bitmap dceworklist;
693 : : vec<gimple *> stmts_to_fixup;
694 : : bitmap need_eh_cleanup;
695 : : bitmap need_ab_cleanup;
696 : :
697 : : class substitute_and_fold_engine *substitute_and_fold_engine;
698 : :
699 : : private:
700 : : void foreach_new_stmt_in_bb (gimple_stmt_iterator old_gsi,
701 : : gimple_stmt_iterator new_gsi);
702 : : };
703 : :
704 : : /* Call post_new_stmt for each new statement that has been added
705 : : to the current BB. OLD_GSI is the statement iterator before the BB
706 : : changes occurred. NEW_GSI is the iterator which may contain new
707 : : statements. */
708 : :
709 : : void
710 : 14852120 : substitute_and_fold_dom_walker::foreach_new_stmt_in_bb
711 : : (gimple_stmt_iterator old_gsi,
712 : : gimple_stmt_iterator new_gsi)
713 : : {
714 : 14852120 : basic_block bb = gsi_bb (new_gsi);
715 : 14852120 : if (gsi_end_p (old_gsi))
716 : 3224728 : old_gsi = gsi_start_bb (bb);
717 : : else
718 : 13239756 : gsi_next (&old_gsi);
719 : 14912837 : while (gsi_stmt (old_gsi) != gsi_stmt (new_gsi))
720 : : {
721 : 60717 : gimple *stmt = gsi_stmt (old_gsi);
722 : 60717 : substitute_and_fold_engine->post_new_stmt (stmt);
723 : 60717 : gsi_next (&old_gsi);
724 : : }
725 : 14852120 : }
726 : :
727 : : bool
728 : 122500739 : substitute_and_fold_engine::propagate_into_phi_args (basic_block bb)
729 : : {
730 : 122500739 : edge e;
731 : 122500739 : edge_iterator ei;
732 : 122500739 : bool propagated = false;
733 : :
734 : : /* Visit BB successor PHI nodes and replace PHI args. */
735 : 289187450 : FOR_EACH_EDGE (e, ei, bb->succs)
736 : : {
737 : 166686711 : for (gphi_iterator gpi = gsi_start_phis (e->dest);
738 : 277910602 : !gsi_end_p (gpi); gsi_next (&gpi))
739 : : {
740 : 111223891 : gphi *phi = gpi.phi ();
741 : 111223891 : use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
742 : 111223891 : tree arg = USE_FROM_PTR (use_p);
743 : 179597178 : if (TREE_CODE (arg) != SSA_NAME
744 : 111223891 : || virtual_operand_p (arg))
745 : 68373287 : continue;
746 : 42850604 : tree val = value_on_edge (e, arg);
747 : 42850604 : if (val
748 : 3188511 : && is_gimple_min_invariant (val)
749 : 44797789 : && may_propagate_copy (arg, val))
750 : : {
751 : 1945184 : propagate_value (use_p, val);
752 : 1945184 : propagated = true;
753 : : }
754 : : }
755 : : }
756 : 122500739 : return propagated;
757 : : }
758 : :
759 : : edge
760 : 122500739 : substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
761 : : {
762 : 122500739 : substitute_and_fold_engine->pre_fold_bb (bb);
763 : :
764 : : /* Propagate known values into PHI nodes. */
765 : 122500739 : for (gphi_iterator i = gsi_start_phis (bb);
766 : 170129818 : !gsi_end_p (i);
767 : 47629079 : gsi_next (&i))
768 : : {
769 : 47629079 : gphi *phi = i.phi ();
770 : 47629079 : tree res = gimple_phi_result (phi);
771 : 95258158 : if (virtual_operand_p (res))
772 : 21290824 : continue;
773 : 26338255 : if (dump_file && (dump_flags & TDF_DETAILS))
774 : : {
775 : 154 : fprintf (dump_file, "Folding PHI node: ");
776 : 154 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
777 : : }
778 : 26338255 : if (res && TREE_CODE (res) == SSA_NAME)
779 : : {
780 : 26338255 : tree sprime = substitute_and_fold_engine->value_of_expr (res, phi);
781 : 28620303 : if (sprime
782 : 26338255 : && sprime != res
783 : 26338255 : && may_propagate_copy (res, sprime))
784 : : {
785 : 2282048 : if (dump_file && (dump_flags & TDF_DETAILS))
786 : : {
787 : 8 : fprintf (dump_file, "Queued PHI for removal. Folds to: ");
788 : 8 : print_generic_expr (dump_file, sprime);
789 : 8 : fprintf (dump_file, "\n");
790 : : }
791 : 2282048 : bitmap_set_bit (dceworklist, SSA_NAME_VERSION (res));
792 : : /* As this now constitutes a copy duplicate points-to
793 : : and range info appropriately. */
794 : 2282048 : if (TREE_CODE (sprime) == SSA_NAME)
795 : 1423967 : maybe_duplicate_ssa_info_at_copy (res, sprime);
796 : 2282048 : continue;
797 : : }
798 : : }
799 : 24056207 : something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
800 : : }
801 : :
802 : : /* Propagate known values into stmts.  In some cases this exposes
803 : : more trivially deletable stmts for the later DCE walk. */
804 : 245001478 : for (gimple_stmt_iterator i = gsi_start_bb (bb);
805 : 951951636 : !gsi_end_p (i);
806 : 829450897 : gsi_next (&i))
807 : : {
808 : 829450897 : bool did_replace;
809 : 829450897 : gimple *stmt = gsi_stmt (i);
810 : :
811 : 829450897 : substitute_and_fold_engine->pre_fold_stmt (stmt);
812 : :
813 : 829450897 : if (dump_file && (dump_flags & TDF_DETAILS))
814 : : {
815 : 1766 : fprintf (dump_file, "Folding statement: ");
816 : 1766 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
817 : : }
818 : :
819 : : /* There is no point propagating into a stmt whose value we know and
820 : : can propagate into all of its uses.  Mark it for removal instead. */
821 : 829450897 : tree lhs = gimple_get_lhs (stmt);
822 : 829450897 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
823 : : {
824 : 180051241 : tree sprime = substitute_and_fold_engine->value_of_stmt (stmt, lhs);
825 : 194909585 : if (sprime
826 : 180051241 : && sprime != lhs
827 : 14877283 : && may_propagate_copy (lhs, sprime)
828 : 14875811 : && !stmt_could_throw_p (cfun, stmt)
829 : 194913658 : && !gimple_has_side_effects (stmt))
830 : : {
831 : 14858344 : if (dump_file && (dump_flags & TDF_DETAILS))
832 : : {
833 : 56 : fprintf (dump_file, "Queued stmt for removal. Folds to: ");
834 : 56 : print_generic_expr (dump_file, sprime);
835 : 56 : fprintf (dump_file, "\n");
836 : : }
837 : 14858344 : bitmap_set_bit (dceworklist, SSA_NAME_VERSION (lhs));
838 : : /* As this now constitutes a copy duplicate points-to
839 : : and range info appropriately. */
840 : 14858344 : if (TREE_CODE (sprime) == SSA_NAME)
841 : 8871691 : maybe_duplicate_ssa_info_at_copy (lhs, sprime);
842 : 14858344 : continue;
843 : : }
844 : : }
845 : :
846 : : /* Replace the statement with its folded version and mark it
847 : : folded. */
848 : 814592553 : did_replace = false;
849 : 814592553 : gimple *old_stmt = stmt;
850 : 814592553 : bool was_noreturn = false;
851 : 814592553 : bool can_make_abnormal_goto = false;
852 : 814592553 : if (is_gimple_call (stmt))
853 : : {
854 : 52957152 : was_noreturn = gimple_call_noreturn_p (stmt);
855 : 52957152 : can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
856 : : }
857 : :
858 : : /* Replace real uses in the statement. */
859 : 814592553 : did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);
860 : :
861 : 814592553 : gimple_stmt_iterator prev_gsi = i;
862 : 814592553 : gsi_prev (&prev_gsi);
863 : :
864 : : /* If we made a replacement, fold the statement. */
865 : 814592553 : if (did_replace)
866 : : {
867 : 13166694 : fold_stmt (&i, follow_single_use_edges);
868 : 13166694 : stmt = gsi_stmt (i);
869 : 13166694 : gimple_set_modified (stmt, true);
870 : : }
871 : : /* Also fold if we want to fold all statements. */
872 : 801425859 : else if (substitute_and_fold_engine->fold_all_stmts
873 : 801425859 : && fold_stmt (&i, follow_single_use_edges))
874 : : {
875 : 0 : did_replace = true;
876 : 0 : stmt = gsi_stmt (i);
877 : 0 : gimple_set_modified (stmt, true);
878 : : }
879 : :
880 : : /* Some statements may be simplified using propagator
881 : : specific information. Do this before propagating
882 : : into the stmt to not disturb pass specific information. */
883 : 814592553 : update_stmt_if_modified (stmt);
884 : 814592553 : if (substitute_and_fold_engine->fold_stmt (&i))
885 : : {
886 : 2056030 : did_replace = true;
887 : 2056030 : prop_stats.num_stmts_folded++;
888 : 2056030 : stmt = gsi_stmt (i);
889 : 2056030 : gimple_set_modified (stmt, true);
890 : : }
891 : :
892 : : /* If this is a control statement on which the propagator left edges
893 : : unexecuted, force the condition in a way consistent with that.
894 : : See PR66945 for cases where the propagator can end up with a
895 : : different idea of a taken edge than folding (once undefined
896 : : behavior is involved). */
897 : 814592553 : if (gimple_code (stmt) == GIMPLE_COND)
898 : : {
899 : 42665508 : if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
900 : 42665508 : ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
901 : : {
902 : 413531 : if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
903 : 413531 : == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
904 : 96365 : gimple_cond_make_true (as_a <gcond *> (stmt));
905 : : else
906 : 317166 : gimple_cond_make_false (as_a <gcond *> (stmt));
907 : 413531 : gimple_set_modified (stmt, true);
908 : 413531 : did_replace = true;
909 : : }
910 : : }
911 : :
912 : : /* Now cleanup. */
913 : 814592553 : if (did_replace)
914 : : {
915 : 14852120 : foreach_new_stmt_in_bb (prev_gsi, i);
916 : :
917 : : /* If we cleaned up EH information from the statement,
918 : : remove EH edges. */
919 : 14852120 : if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
920 : 155894 : bitmap_set_bit (need_eh_cleanup, bb->index);
921 : :
922 : : /* If we turned a call with possible abnormal control transfer
923 : : into one that doesn't, remove abnormal edges. */
924 : 14852120 : if (can_make_abnormal_goto
925 : 14852120 : && !stmt_can_make_abnormal_goto (stmt))
926 : 3 : bitmap_set_bit (need_ab_cleanup, bb->index);
927 : :
928 : : /* If we turned a call that was not noreturn into a noreturn one,
929 : : schedule it for fixup. */
930 : 14852120 : if (!was_noreturn
931 : 14722331 : && is_gimple_call (stmt)
932 : 16290891 : && gimple_call_noreturn_p (stmt))
933 : 71 : stmts_to_fixup.safe_push (stmt);
934 : :
935 : 14852120 : if (gimple_assign_single_p (stmt))
936 : : {
937 : 3562961 : tree rhs = gimple_assign_rhs1 (stmt);
938 : :
939 : 3562961 : if (TREE_CODE (rhs) == ADDR_EXPR)
940 : 409890 : recompute_tree_invariant_for_addr_expr (rhs);
941 : : }
942 : :
943 : : /* Determine what needs to be done to update the SSA form. */
944 : 14852120 : update_stmt_if_modified (stmt);
945 : 14852120 : if (!is_gimple_debug (stmt))
946 : 10958676 : something_changed = true;
947 : : }
948 : :
949 : 814592553 : if (dump_file && (dump_flags & TDF_DETAILS))
950 : : {
951 : 1710 : if (did_replace)
952 : : {
953 : 145 : fprintf (dump_file, "Folded into: ");
954 : 145 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
955 : 145 : fprintf (dump_file, "\n");
956 : : }
957 : : else
958 : 1565 : fprintf (dump_file, "Not folded\n");
959 : : }
960 : : }
961 : :
962 : 122500739 : something_changed |= substitute_and_fold_engine->propagate_into_phi_args (bb);
963 : :
964 : 122500739 : return NULL;
965 : : }
966 : :
967 : :
968 : :
969 : : /* Perform final substitution and folding of propagated values.
970 : : Process the whole function if BLOCK is null, otherwise only
971 : : process the blocks that BLOCK dominates. In the latter case,
972 : : it is the caller's responsibility to ensure that dominator
973 : : information is available and up-to-date.
974 : :
975 : : Propagated values are obtained from the engine's value_of_expr,
976 : : value_on_edge and value_of_stmt hooks; every use for which a hook
977 : : returns a different, propagatable value is substituted and the
978 : : affected statement is re-folded.
979 : :
980 : : The engine's fold_stmt hook is invoked on each visited statement so
981 : : that derived engines can apply pass specific simplifications.
982 : :
983 : : Statements whose LHS has a known replacement value are not folded;
984 : : instead they are queued on a DCE worklist and, after the walk,
985 : : simple_dce_from_worklist removes the ones that are trivially dead.
986 : :
987 : : Return TRUE when something changed. */
988 : :
989 : : bool
990 : 12310382 : substitute_and_fold_engine::substitute_and_fold (basic_block block)
991 : : {
992 : 12310382 : if (dump_file && (dump_flags & TDF_DETAILS))
993 : 157 : fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
994 : :
995 : 12310382 : memset (&prop_stats, 0, sizeof (prop_stats));
996 : :
997 : : /* Don't call calculate_dominance_info when iterating over a subgraph.
998 : : Callers that are using the interface this way are likely to want to
999 : : iterate over several disjoint subgraphs, and it would be expensive
1000 : : in enable-checking builds to revalidate the whole dominance tree
1001 : : each time. */
1002 : 12310382 : if (block)
1003 : 1181 : gcc_assert (dom_info_state (CDI_DOMINATORS));
1004 : : else
1005 : 12309201 : calculate_dominance_info (CDI_DOMINATORS);
1006 : 12310382 : substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
1007 : 12310382 : walker.walk (block ? block : ENTRY_BLOCK_PTR_FOR_FN (cfun));
1008 : :
1009 : 12310382 : simple_dce_from_worklist (walker.dceworklist, walker.need_eh_cleanup);
1010 : 12310382 : if (!bitmap_empty_p (walker.need_eh_cleanup))
1011 : 36826 : gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
1012 : 12310382 : if (!bitmap_empty_p (walker.need_ab_cleanup))
1013 : 3 : gimple_purge_all_dead_abnormal_call_edges (walker.need_ab_cleanup);
1014 : :
1015 : : /* Fixup stmts that became noreturn calls. This may require splitting
1016 : : blocks and thus isn't possible during the dominator walk. Do this
1017 : : in reverse order so we don't inadvertently remove a stmt we want to
1018 : : fixup by visiting a dominating now noreturn call first. */
1019 : 12310453 : while (!walker.stmts_to_fixup.is_empty ())
1020 : : {
1021 : 71 : gimple *stmt = walker.stmts_to_fixup.pop ();
1022 : 77 : if (!gimple_bb (stmt))
1023 : 6 : continue;
1024 : 65 : if (dump_file && dump_flags & TDF_DETAILS)
1025 : : {
1026 : 0 : fprintf (dump_file, "Fixing up noreturn call ");
1027 : 0 : print_gimple_stmt (dump_file, stmt, 0);
1028 : 0 : fprintf (dump_file, "\n");
1029 : : }
1030 : 65 : fixup_noreturn_call (stmt);
1031 : : }
1032 : :
1033 : 12310382 : statistics_counter_event (cfun, "Constants propagated",
1034 : 12310382 : prop_stats.num_const_prop);
1035 : 12310382 : statistics_counter_event (cfun, "Copies propagated",
1036 : 12310382 : prop_stats.num_copy_prop);
1037 : 12310382 : statistics_counter_event (cfun, "Statements folded",
1038 : 12310382 : prop_stats.num_stmts_folded);
1039 : :
1040 : 12310382 : return walker.something_changed;
1041 : 12310382 : }
1042 : :
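/* Illustrative sketch of a user of the engine above, under the assumption
   that substitute_and_fold_engine, as declared in tree-ssa-propagate.h,
   exposes the value_of_expr virtual used by the walker and can be default
   constructed; lookup_lattice_value is a hypothetical per-pass helper.

     #include "tree-ssa-propagate.h"

     // Hypothetical lattice query filled in by a prior propagation step;
     // returns a constant or copy for NAME, or NULL_TREE if none is known.
     extern tree lookup_lattice_value (tree name);

     class example_folder : public substitute_and_fold_engine
     {
     public:
       tree value_of_expr (tree name, gimple * = NULL) final override
       {
	 // The default value_on_edge and value_of_stmt methods above both
	 // funnel into this hook.
	 if (TREE_CODE (name) != SSA_NAME)
	   return NULL_TREE;
	 return lookup_lattice_value (name);
       }
     };

   Once propagation has filled the lattice, a pass would call
   example_folder ().substitute_and_fold (NULL) to substitute the values,
   re-fold the affected statements and let the queued trivially dead
   statements be removed.  */
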
1043 : :
1044 : : /* Return true if we may propagate ORIG into DEST, false otherwise.
1045 : : If DEST_NOT_ABNORMAL_PHI_EDGE_P is true then assume the propagation does
1046 : : not happen into a PHI argument which flows in from an abnormal edge,
1047 : : which relaxes some constraints. */
1048 : :
1049 : : bool
1050 : 153113044 : may_propagate_copy (tree dest, tree orig, bool dest_not_abnormal_phi_edge_p)
1051 : : {
1052 : 153113044 : tree type_d = TREE_TYPE (dest);
1053 : 153113044 : tree type_o = TREE_TYPE (orig);
1054 : :
1055 : : /* If ORIG is a default definition which flows in from an abnormal edge
1056 : : then the copy can be propagated. It is important that we do so to avoid
1057 : : uninitialized copies. */
1058 : 153113044 : if (TREE_CODE (orig) == SSA_NAME
1059 : 106402747 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
1060 : 27977 : && SSA_NAME_IS_DEFAULT_DEF (orig)
1061 : 153116360 : && (SSA_NAME_VAR (orig) == NULL_TREE
1062 : 3316 : || VAR_P (SSA_NAME_VAR (orig))))
1063 : : ;
1064 : : /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
1065 : : be propagated. */
1066 : 153110127 : else if (TREE_CODE (orig) == SSA_NAME
1067 : 153110127 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1068 : : return false;
1069 : : /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
1070 : : propagated. If we know we do not propagate into such a PHI argument this
1071 : : does not apply. */
1072 : 153085067 : else if (!dest_not_abnormal_phi_edge_p
1073 : 79812578 : && TREE_CODE (dest) == SSA_NAME
1074 : 232897645 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
1075 : : return false;
1076 : :
1077 : : /* Do not copy between types for which we *do* need a conversion. */
1078 : 153071727 : if (!useless_type_conversion_p (type_d, type_o))
1079 : : return false;
1080 : :
1081 : : /* Generally propagating virtual operands is not ok as that may
1082 : : create overlapping life-ranges. */
1083 : 153070277 : if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
1084 : : return false;
1085 : :
1086 : : /* Keep lhs of [[gnu::musttail]] calls as is, those need to be still
1087 : : tail callable. */
1088 : 152581798 : if (TREE_CODE (dest) == SSA_NAME
1089 : 152421875 : && is_gimple_call (SSA_NAME_DEF_STMT (dest))
1090 : 153885663 : && gimple_call_must_tail_p (as_a <gcall *> (SSA_NAME_DEF_STMT (dest))))
1091 : : return false;
1092 : :
1093 : : /* Anything else is OK. */
1094 : : return true;
1095 : : }
1096 : :
1097 : : /* Like may_propagate_copy, but use as the destination expression
1098 : : the principal expression (typically, the RHS) contained in
1099 : : statement DEST. This is more efficient when working with the
1100 : : gimple tuples representation. */
1101 : :
1102 : : bool
1103 : 334718 : may_propagate_copy_into_stmt (gimple *dest, tree orig)
1104 : : {
1105 : 334718 : tree type_d;
1106 : 334718 : tree type_o;
1107 : :
1108 : : /* If the statement is a switch or a single-rhs assignment,
1109 : : then the expression to be replaced by the propagation may
1110 : : be an SSA_NAME. Fortunately, there is an explicit tree
1111 : : for the expression, so we delegate to may_propagate_copy. */
1112 : :
1113 : 334718 : if (gimple_assign_single_p (dest))
1114 : 159951 : return may_propagate_copy (gimple_assign_rhs1 (dest), orig, true);
1115 : 174767 : else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
1116 : 0 : return may_propagate_copy (gimple_switch_index (dest_swtch), orig, true);
1117 : :
1118 : : /* In other cases, the expression is not materialized, so there
1119 : : is no destination to pass to may_propagate_copy. On the other
1120 : : hand, the expression cannot be an SSA_NAME, so the analysis
1121 : : is much simpler. */
1122 : :
1123 : 174767 : if (TREE_CODE (orig) == SSA_NAME
1124 : 174767 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1125 : : return false;
1126 : :
1127 : 174767 : if (is_gimple_assign (dest))
1128 : 145199 : type_d = TREE_TYPE (gimple_assign_lhs (dest));
1129 : 29568 : else if (gimple_code (dest) == GIMPLE_COND)
1130 : 29005 : type_d = boolean_type_node;
1131 : 563 : else if (is_gimple_call (dest)
1132 : 563 : && gimple_call_lhs (dest) != NULL_TREE)
1133 : 563 : type_d = TREE_TYPE (gimple_call_lhs (dest));
1134 : : else
1135 : 0 : gcc_unreachable ();
1136 : :
1137 : 174767 : type_o = TREE_TYPE (orig);
1138 : :
1139 : 174767 : if (!useless_type_conversion_p (type_d, type_o))
1140 : : return false;
1141 : :
1142 : : return true;
1143 : : }
1144 : :
1145 : : /* Similarly, but we know that we're propagating into an ASM_EXPR. */
1146 : :
1147 : : bool
1148 : 12456 : may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
1149 : : {
1150 : 12456 : return true;
1151 : : }
1152 : :
1153 : :
1154 : : /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
1155 : :
1156 : : Use this version when not const/copy propagating values. For example,
1157 : : PRE uses this version when building expressions as they would appear
1158 : : in specific blocks taking into account actions of PHI nodes.
1159 : :
1160 : : The statement in which an expression has been replaced should be
1161 : : folded using fold_stmt_inplace. */
1162 : :
1163 : : void
1164 : 55499283 : replace_exp (use_operand_p op_p, tree val)
1165 : : {
1166 : 55499283 : if (TREE_CODE (val) == SSA_NAME || CONSTANT_CLASS_P (val))
1167 : 51489106 : SET_USE (op_p, val);
1168 : : else
1169 : 4010177 : SET_USE (op_p, unshare_expr (val));
1170 : 55499283 : }
1171 : :
1172 : :
1173 : : /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1174 : : into the operand pointed to by OP_P.
1175 : :
1176 : : Use this version for const/copy propagation as it will perform additional
1177 : : checks to ensure validity of the const/copy propagation. */
1178 : :
1179 : : void
1180 : 48575850 : propagate_value (use_operand_p op_p, tree val)
1181 : : {
1182 : 48575850 : if (flag_checking)
1183 : : {
1184 : 48575370 : bool ab = (is_a <gphi *> (USE_STMT (op_p))
1185 : 65148794 : && (gimple_phi_arg_edge (as_a <gphi *> (USE_STMT (op_p)),
1186 : 16573424 : PHI_ARG_INDEX_FROM_USE (op_p))
1187 : 16573424 : ->flags & EDGE_ABNORMAL));
1188 : 48575370 : gcc_assert (may_propagate_copy (USE_FROM_PTR (op_p), val, !ab));
1189 : : }
1190 : 48575850 : replace_exp (op_p, val);
1191 : 48575850 : }
1192 : :
1193 : :
1194 : : /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1195 : : into the tree pointed to by OP_P.
1196 : :
1197 : : Use this version for const/copy propagation when SSA operands are not
1198 : : Use this version for const/copy propagation when SSA operands are not
1199 : : available.  It does not verify the validity of the propagation nor
1200 : : update any operand information.
1201 : : Be sure to mark the stmt as modified. */
1202 : : void
1203 : 370783 : propagate_tree_value (tree *op_p, tree val)
1204 : : {
1205 : 370783 : if (TREE_CODE (val) == SSA_NAME)
1206 : 334691 : *op_p = val;
1207 : : else
1208 : 36092 : *op_p = unshare_expr (val);
1209 : 370783 : }
1210 : :
1211 : :
1212 : : /* Like propagate_tree_value, but use as the operand to replace
1213 : : the principal expression (typically, the RHS) contained in the
1214 : : statement referenced by iterator GSI. Note that it is not
1215 : : always possible to update the statement in-place, so a new
1216 : : statement may be created to replace the original. */
1217 : :
1218 : : void
1219 : 370783 : propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1220 : : {
1221 : 370783 : gimple *stmt = gsi_stmt (*gsi);
1222 : :
1223 : 370783 : if (is_gimple_assign (stmt))
1224 : : {
1225 : 338543 : tree expr = NULL_TREE;
1226 : 338543 : if (gimple_assign_single_p (stmt))
1227 : 188836 : expr = gimple_assign_rhs1 (stmt);
1228 : 338543 : propagate_tree_value (&expr, val);
1229 : 338543 : gimple_assign_set_rhs_from_tree (gsi, expr);
1230 : : }
1231 : 32240 : else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
1232 : : {
1233 : 31460 : tree lhs = NULL_TREE;
1234 : 31460 : tree rhs = build_zero_cst (TREE_TYPE (val));
1235 : 31460 : propagate_tree_value (&lhs, val);
1236 : 31460 : gimple_cond_set_code (cond_stmt, NE_EXPR);
1237 : 31460 : gimple_cond_set_lhs (cond_stmt, lhs);
1238 : 31460 : gimple_cond_set_rhs (cond_stmt, rhs);
1239 : : }
1240 : 780 : else if (is_gimple_call (stmt)
1241 : 780 : && gimple_call_lhs (stmt) != NULL_TREE)
1242 : : {
1243 : 780 : tree expr = NULL_TREE;
1244 : 780 : propagate_tree_value (&expr, val);
1245 : 780 : replace_call_with_value (gsi, expr);
1246 : : }
1247 : 0 : else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
1248 : 0 : propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
1249 : : else
1250 : 0 : gcc_unreachable ();
1251 : 370783 : }
1252 : :
1253 : : /* Check exits of each loop in FUN, walk over loop closed PHIs in
1254 : : each exit basic block and propagate degenerate PHIs. */
1255 : :
1256 : : unsigned
1257 : 241712 : clean_up_loop_closed_phi (function *fun)
1258 : : {
1259 : 241712 : gphi *phi;
1260 : 241712 : tree rhs;
1261 : 241712 : tree lhs;
1262 : 241712 : gphi_iterator gsi;
1263 : :
1264 : : /* Avoid possibly quadratic work when scanning for loop exits across
1265 : : all loops of a nest. */
1266 : 241712 : if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1267 : : return 0;
1268 : :
1269 : : /* replace_uses_by might purge dead EH edges and we want it to also
1270 : : remove dominated blocks. */
1271 : 241712 : calculate_dominance_info (CDI_DOMINATORS);
1272 : :
1273 : : /* Walk over the loops in the function. */
1274 : 1363533 : for (auto loop : loops_list (fun, 0))
1275 : : {
1276 : : /* Check each exit edge of the loop. */
1277 : 638397 : auto_vec<edge> exits = get_loop_exit_edges (loop);
1278 : 3149945 : for (edge e : exits)
1279 : 2261359 : if (single_pred_p (e->dest))
1280 : : /* Walk over loop-closed PHIs. */
1281 : 2223828 : for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);)
1282 : : {
1283 : 1209721 : phi = gsi.phi ();
1284 : 1209721 : rhs = gimple_phi_arg_def (phi, 0);
1285 : 1209721 : lhs = gimple_phi_result (phi);
1286 : :
1287 : 2419018 : if (virtual_operand_p (rhs))
1288 : : {
1289 : 614895 : imm_use_iterator iter;
1290 : 614895 : use_operand_p use_p;
1291 : 614895 : gimple *stmt;
1292 : :
1293 : 1442227 : FOR_EACH_IMM_USE_STMT (stmt, iter, lhs)
1294 : 2495332 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1295 : 1448895 : SET_USE (use_p, rhs);
1296 : :
1297 : 614895 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1298 : 48 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
1299 : 614895 : remove_phi_node (&gsi, true);
1300 : : }
1301 : 594826 : else if (may_propagate_copy (lhs, rhs))
1302 : : {
1303 : : /* Dump details. */
1304 : 594790 : if (dump_file && (dump_flags & TDF_DETAILS))
1305 : : {
1306 : 3 : fprintf (dump_file, " Replacing '");
1307 : 3 : print_generic_expr (dump_file, lhs, dump_flags);
1308 : 3 : fprintf (dump_file, "' with '");
1309 : 3 : print_generic_expr (dump_file, rhs, dump_flags);
1310 : 3 : fprintf (dump_file, "'\n");
1311 : : }
1312 : :
1313 : 594790 : replace_uses_by (lhs, rhs);
1314 : 594790 : remove_phi_node (&gsi, true);
1315 : : }
1316 : : else
1317 : 36 : gsi_next (&gsi);
1318 : : }
1319 : 638397 : }
1320 : :
1321 : 241712 : return 0;
1322 : : }
|