Line data Source code
1 : /* Generic SSA value propagation engine.
2 : Copyright (C) 2004-2026 Free Software Foundation, Inc.
3 : Contributed by Diego Novillo <dnovillo@redhat.com>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by the
9 : Free Software Foundation; either version 3, or (at your option) any
10 : later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "ssa.h"
28 : #include "gimple-pretty-print.h"
29 : #include "dumpfile.h"
30 : #include "gimple-iterator.h"
31 : #include "gimple-fold.h"
32 : #include "tree-eh.h"
33 : #include "gimplify.h"
34 : #include "tree-cfg.h"
35 : #include "tree-ssa.h"
36 : #include "tree-ssa-propagate.h"
37 : #include "domwalk.h"
38 : #include "cfgloop.h"
39 : #include "tree-cfgcleanup.h"
40 : #include "cfganal.h"
41 : #include "tree-ssa-dce.h"
42 :
43 : /* This file implements a generic value propagation engine based on
44 : the same propagation used by the SSA-CCP algorithm [1].
45 :
46 : Propagation is performed by simulating the execution of every
47 : statement that produces the value being propagated. Simulation
48 : proceeds as follows:
49 :
50 : 1- Initially, all edges of the CFG are marked not executable and
51 : the CFG worklist is seeded with all the statements in the entry
52 : basic block (block 0).
53 :
54 : 2- Every statement S is simulated with a call to the call-back
55 : function SSA_PROP_VISIT_STMT. This evaluation may produce 3
56 : results:
57 :
58 : SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
59 : interest and does not affect any of the work lists.
60 : The statement may be simulated again if any of its input
61 : operands change in future iterations of the simulator.
62 :
63 : SSA_PROP_VARYING: The value produced by S cannot be determined
64 : at compile time. Further simulation of S is not required.
65 : If S is a conditional jump, all the outgoing edges for the
66 : block are considered executable and added to the work
67 : list.
68 :
69 : SSA_PROP_INTERESTING: S produces a value that can be computed
70 : at compile time. Its result can be propagated into the
71 : statements that feed from S. Furthermore, if S is a
72 : conditional jump, only the edge known to be taken is added
73 : to the work list. Edges that are known not to execute are
74 : never simulated.
75 :
76 : 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
77 : return value from SSA_PROP_VISIT_PHI has the same semantics as
78 : described in #2.
79 :
   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

	 CFG_BLOCKS contains the list of blocks to be simulated.
	 Blocks are added to this list if their incoming edges are
	 found executable.

	 SSA_EDGE_WORKLIST contains the list of statements that we
	 need to revisit.

   5- Simulation terminates when both work lists are drained.
92 :
93 : Before calling ssa_propagate, it is important to clear
94 : prop_simulate_again_p for all the statements in the program that
95 : should be simulated. This initialization allows an implementation
96 : to specify which statements should never be simulated.
97 :
98 : It is also important to compute def-use information before calling
99 : ssa_propagate.
100 :
101 : References:
102 :
103 : [1] Constant propagation with conditional branches,
104 : Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
105 :
106 : [2] Building an Optimizing Compiler,
107 : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
108 :
109 : [3] Advanced Compiler Design and Implementation,
110 : Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
111 :
/* Worklist of control flow edge destinations.  This contains
   the CFG order number of the blocks so we can iterate in CFG
   order by visiting in bit-order.
   NOTE(review): the original comment said "we use two worklists to
   first make forward progress before iterating", but only a single
   bitmap is declared here -- that remark looks stale; confirm
   against revision history.  */
static bitmap cfg_blocks;
/* Mappings between basic-block index and its position in the
   reverse post-order computed by ssa_prop_init, both directions.  */
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs order stmts in execution order.
   NOTE(review): likewise only one bitmap exists here although the
   original comment mentioned two worklists -- confirm.  */
static bitmap ssa_edge_worklist;
/* Map from statement UID back to the statement, used to translate
   SSA_EDGE_WORKLIST bits into statements to simulate.  */
static vec<gimple *> uid_to_stmt;

/* Current RPO index in the iteration.  */
static int curr_order;
130 :
131 :
/* We have just defined a new value for VAR.  Add all statements with
   immediate uses of VAR to the SSA edge worklist so they are
   re-simulated, skipping uses that cannot contribute yet.
   (The former IS_VARYING parameter and the separate VARYING /
   INTERESTING worklists no longer exist; there is a single
   SSA_EDGE_WORKLIST.)  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Statements the propagator has finished with are never queued
	 again.  */
      if (!prop_simulate_again_p (use_stmt))
	continue;

      /* If we did not yet simulate the block wait for this to happen
         and do not add the stmt to the SSA edge worklist.  */
      basic_block use_bb = gimple_bb (use_stmt);
      if (! (use_bb->flags & BB_VISITED))
	continue;

      /* If this is a use on a not yet executable edge do not bother to
	 queue it.  */
      if (gimple_code (use_stmt) == GIMPLE_PHI
	  && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
	       & EDGE_EXECUTABLE))
	continue;

      /* bitmap_set_bit returns true only when the bit was newly set,
	 so each stmt is recorded and dumped at most once per queuing.  */
      if (bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}
172 :
173 :
174 : /* Add edge E to the control flow worklist. */
175 :
176 : static void
177 135257363 : add_control_edge (edge e)
178 : {
179 135257363 : basic_block bb = e->dest;
180 135257363 : if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
181 : return;
182 :
183 : /* If the edge had already been executed, skip it. */
184 119787685 : if (e->flags & EDGE_EXECUTABLE)
185 : return;
186 :
187 103613291 : e->flags |= EDGE_EXECUTABLE;
188 :
189 103613291 : int bb_order = bb_to_cfg_order[bb->index];
190 103613291 : bitmap_set_bit (cfg_blocks, bb_order);
191 :
192 103613291 : if (dump_file && (dump_flags & TDF_DETAILS))
193 61 : fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
194 61 : e->src->index, e->dest->index);
195 : }
196 :
197 :
/* Simulate the execution of STMT and update the work lists accordingly.
   Calls the visit_stmt / visit_phi virtual hooks and, depending on the
   result, queues SSA uses and/or control edges; statements proven to
   never change again have their simulate-again flag cleared.  */

void
ssa_propagation_engine::simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      /* Varying is final in the lattice: never simulate again.  */
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will be never visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      /* For PHIs an argument on a not-yet-executable incoming edge may
	 still change the result later, so it counts as a live use.  */
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }
  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}
296 :
297 :
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  PHIs are always
   re-simulated; ordinary statements only on the first visit (the
   BB_VISITED flag records that).  */

void
ssa_propagation_engine::simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying statement might
	 prove it non-throwing and also computed goto can be handled
	 when destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
361 :
362 :
/* Initialize local data structures and work lists.  Computes a reverse
   post-order over the CFG, assigns statement UIDs in that order (so
   UID order matches simulation order), clears BB_VISITED and
   EDGE_EXECUTABLE everywhere, and allocates the worklists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklist of SSA edges (keyed by statement UID).  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);
  bitmap_tree_view (ssa_edge_worklist);

  /* Worklist of basic-blocks, plus the two-way mapping between block
     index and its position in the reverse post-order.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);

  /* Initially assume that every edge in the CFG is not executable.
     (including the edges coming out of the entry block).  Mark blocks
     as not visited, blocks not yet visited will have all their statements
     simulated once an incoming edge gets executable.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      /* PHIs first, then ordinary stmts, matching simulate_block's
	 visitation order.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun), true);
}
413 :
414 :
415 : /* Free allocated storage. */
416 :
417 : static void
418 7901671 : ssa_prop_fini (void)
419 : {
420 7901671 : BITMAP_FREE (cfg_blocks);
421 7901671 : free (bb_to_cfg_order);
422 7901671 : free (cfg_order_to_bb);
423 7901671 : BITMAP_FREE (ssa_edge_worklist);
424 7901671 : uid_to_stmt.release ();
425 7901671 : }
426 :
427 :
/* Entry point to the propagation engine.

   The VISIT_STMT virtual function is called for every statement
   visited and the VISIT_PHI virtual function is called for every PHI
   node visited.  */

void
ssa_propagation_engine::ssa_propagate (void)
{
  ssa_prop_init ();

  curr_order = 0;

  /* Iterate until the worklists are empty.  We iterate both blocks
     and stmts in RPO order, prioritizing backedge processing.
     Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
  while (1)
    {
      /* Peek at the earliest (lowest RPO / UID) pending block and
	 pending stmt; -1 means the respective worklist is empty.  */
      int next_block_order = (bitmap_empty_p (cfg_blocks)
			      ? -1 : bitmap_first_set_bit (cfg_blocks));
      int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
			   ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
      if (next_block_order == -1 && next_stmt_uid == -1)
	break;

      int next_stmt_bb_order = -1;
      gimple *next_stmt = NULL;
      if (next_stmt_uid != -1)
	{
	  next_stmt = uid_to_stmt[next_stmt_uid];
	  next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
	}

      /* Pull the next block to simulate off the worklist if it comes first.  */
      if (next_block_order != -1
	  && (next_stmt_bb_order == -1
	      || next_block_order <= next_stmt_bb_order))
	{
	  curr_order = next_block_order;
	  bitmap_clear_bit (cfg_blocks, next_block_order);
	  basic_block bb
	    = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
	  simulate_block (bb);
	}
      /* Else simulate from the SSA edge worklist.  */
      else
	{
	  curr_order = next_stmt_bb_order;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nSimulating statement: ");
	      print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
	    }
	  simulate_stmt (next_stmt);
	}
    }

  ssa_prop_fini ();
}
495 :
496 : /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
497 : is a non-volatile pointer dereference, a structure reference or a
498 : reference to a single _DECL. Ignore volatile memory references
499 : because they are not interesting for the optimizers. */
500 :
501 : bool
502 30498035 : stmt_makes_single_store (gimple *stmt)
503 : {
504 30498035 : tree lhs;
505 :
506 30498035 : if (gimple_code (stmt) != GIMPLE_ASSIGN
507 30498035 : && gimple_code (stmt) != GIMPLE_CALL)
508 : return false;
509 :
510 30512651 : if (!gimple_vdef (stmt))
511 : return false;
512 :
513 2501706 : lhs = gimple_get_lhs (stmt);
514 :
515 : /* A call statement may have a null LHS. */
516 2501706 : if (!lhs)
517 : return false;
518 :
519 2501706 : return (!TREE_THIS_VOLATILE (lhs)
520 2501706 : && (DECL_P (lhs)
521 2487090 : || REFERENCE_CLASS_P (lhs)));
522 : }
523 :
524 :
525 : /* Propagation statistics. */
526 : struct prop_stats_d
527 : {
528 : long num_const_prop;
529 : long num_copy_prop;
530 : long num_stmts_folded;
531 : };
532 :
533 : static struct prop_stats_d prop_stats;
534 :
535 : // range_query default methods to drive from a value_of_expr() ranther than
536 : // range_of_expr.
537 :
538 : tree
539 55895255 : substitute_and_fold_engine::value_on_edge (edge, tree expr)
540 : {
541 55895255 : return value_of_expr (expr);
542 : }
543 :
544 : tree
545 130619751 : substitute_and_fold_engine::value_of_stmt (gimple *stmt, tree name)
546 : {
547 130619751 : if (!name)
548 0 : name = gimple_get_lhs (stmt);
549 :
550 130619751 : gcc_checking_assert (!name || name == gimple_get_lhs (stmt));
551 :
552 130619751 : if (name)
553 130619751 : return value_of_expr (name);
554 : return NULL_TREE;
555 : }
556 :
557 : bool
558 0 : substitute_and_fold_engine::range_of_expr (vrange &, tree, gimple *)
559 : {
560 0 : return false;
561 : }
562 :
563 : /* Replace USE references in statement STMT with the values stored in
564 : PROP_VALUE. Return true if at least one reference was replaced. */
565 :
566 : bool
567 798134583 : substitute_and_fold_engine::replace_uses_in (gimple *stmt)
568 : {
569 798134583 : bool replaced = false;
570 798134583 : use_operand_p use;
571 798134583 : ssa_op_iter iter;
572 :
573 1174490673 : FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
574 : {
575 376356090 : tree tuse = USE_FROM_PTR (use);
576 376356090 : tree val = value_of_expr (tuse, stmt);
577 :
578 376356090 : if (val == tuse || val == NULL_TREE)
579 362055607 : continue;
580 :
581 14300483 : if (!may_propagate_copy (tuse, val))
582 512 : continue;
583 :
584 14299971 : if (TREE_CODE (val) != SSA_NAME)
585 5476954 : prop_stats.num_const_prop++;
586 : else
587 8823017 : prop_stats.num_copy_prop++;
588 :
589 14299971 : propagate_value (use, val);
590 :
591 14299971 : replaced = true;
592 : }
593 :
594 798134583 : return replaced;
595 : }
596 :
597 :
/* Replace SSA-name arguments of PHI with the values reported by the
   engine's value_on_edge hook.  Return true if any argument was
   replaced.  */

bool
substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
{
  size_t i;
  bool replaced = false;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  edge e = gimple_phi_arg_edge (phi, i);
	  tree val = value_on_edge (e, arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}
657 :
658 :
/* Dominator walker performing the actual substitution and folding,
   collecting cleanup work (DCE candidates, EH/abnormal edge removal,
   noreturn fixups) as it goes.  */

class substitute_and_fold_dom_walker : public dom_walker
{
public:
  substitute_and_fold_dom_walker (cdi_direction direction,
				  class substitute_and_fold_engine *engine)
    : dom_walker (direction),
      something_changed (false),
      substitute_and_fold_engine (engine)
  {
    dceworklist = BITMAP_ALLOC (NULL);
    stmts_to_fixup.create (0);
    need_eh_cleanup = BITMAP_ALLOC (NULL);
    need_ab_cleanup = BITMAP_ALLOC (NULL);
  }
  ~substitute_and_fold_dom_walker ()
  {
    BITMAP_FREE (dceworklist);
    stmts_to_fixup.release ();
    BITMAP_FREE (need_eh_cleanup);
    BITMAP_FREE (need_ab_cleanup);
  }

  edge before_dom_children (basic_block) final override;
  void after_dom_children (basic_block bb) final override
  {
    substitute_and_fold_engine->post_fold_bb (bb);
  }

  /* Set when a non-debug statement was changed during the walk.  */
  bool something_changed;
  /* SSA_NAME_VERSIONs of defs queued for removal by dead-code
     elimination.  */
  bitmap dceworklist;
  /* Calls that became noreturn during folding and need CFG fixup.  */
  vec<gimple *> stmts_to_fixup;
  /* Indices of blocks whose EH edges may need removal.  */
  bitmap need_eh_cleanup;
  /* Indices of blocks whose abnormal edges may need removal.  */
  bitmap need_ab_cleanup;

  /* The engine supplying values and pass-specific folding hooks.  */
  class substitute_and_fold_engine *substitute_and_fold_engine;

private:
  /* Report statements newly inserted between OLD_GSI and NEW_GSI.  */
  void foreach_new_stmt_in_bb (gimple_stmt_iterator old_gsi,
			       gimple_stmt_iterator new_gsi);
};
699 :
/* Call post_new_stmt for each new statement that has been added
   to the current BB.  OLD_GSI is the statement iterator before the BB
   changes occurred.  NEW_GSI is the iterator which may contain new
   statements.  */

void
substitute_and_fold_dom_walker::foreach_new_stmt_in_bb
				(gimple_stmt_iterator old_gsi,
				 gimple_stmt_iterator new_gsi)
{
  basic_block bb = gsi_bb (new_gsi);
  /* Position OLD_GSI on the first possibly-new statement: if it was
     the end iterator the whole block may be new, otherwise new stmts
     start right after it.  */
  if (gsi_end_p (old_gsi))
    old_gsi = gsi_start_bb (bb);
  else
    gsi_next (&old_gsi);
  /* Walk forward until we reach the statement NEW_GSI points at.  */
  while (gsi_stmt (old_gsi) != gsi_stmt (new_gsi))
    {
      gimple *stmt = gsi_stmt (old_gsi);
      substitute_and_fold_engine->post_new_stmt (stmt);
      gsi_next (&old_gsi);
    }
}
722 :
723 : bool
724 118945006 : substitute_and_fold_engine::propagate_into_phi_args (basic_block bb)
725 : {
726 118945006 : edge e;
727 118945006 : edge_iterator ei;
728 118945006 : bool propagated = false;
729 :
730 : /* Visit BB successor PHI nodes and replace PHI args. */
731 280837022 : FOR_EACH_EDGE (e, ei, bb->succs)
732 : {
733 161892016 : for (gphi_iterator gpi = gsi_start_phis (e->dest);
734 269572859 : !gsi_end_p (gpi); gsi_next (&gpi))
735 : {
736 107680843 : gphi *phi = gpi.phi ();
737 107680843 : use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
738 107680843 : tree arg = USE_FROM_PTR (use_p);
739 173568319 : if (TREE_CODE (arg) != SSA_NAME
740 107680843 : || virtual_operand_p (arg))
741 65887476 : continue;
742 41793367 : tree val = value_on_edge (e, arg);
743 41793367 : if (val
744 3114993 : && is_gimple_min_invariant (val)
745 43693598 : && may_propagate_copy (arg, val))
746 : {
747 1898315 : propagate_value (use_p, val);
748 1898315 : propagated = true;
749 : }
750 : }
751 : }
752 118945006 : return propagated;
753 : }
754 :
/* Visit BB: propagate known values into its PHIs and statements, fold
   what changed, and record needed EH/abnormal/noreturn cleanups.
   Always returns NULL (no dominator-walk edge pruning).  */

edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  substitute_and_fold_engine->pre_fold_bb (bb);

  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding PHI node: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	}
      if (res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->value_of_expr (res, phi);
	  /* A known value for the whole PHI result lets us queue the
	     PHI for removal instead of touching its arguments.  */
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Queued PHI for removal. Folds to: ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, "\n");
		}
	      bitmap_set_bit (dceworklist, SSA_NAME_VERSION (res));
	      /* As this now constitutes a copy duplicate points-to
		 and range info appropriately.  */
	      if (TREE_CODE (sprime) == SSA_NAME)
		maybe_duplicate_ssa_info_at_copy (res, sprime);
	      continue;
	    }
	}
      something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
    }

  /* Propagate known values into stmts.  In some case it exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);

      substitute_and_fold_engine->pre_fold_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      /* No point propagating into a stmt we have a value for we
	 can propagate into all uses.  Mark it for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->value_of_stmt (stmt, lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (cfun, stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Queued stmt for removal. Folds to: ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, "\n");
		}
	      bitmap_set_bit (dceworklist, SSA_NAME_VERSION (lhs));
	      /* As this now constitutes a copy duplicate points-to
		 and range info appropriately.  */
	      if (TREE_CODE (sprime) == SSA_NAME)
		maybe_duplicate_ssa_info_at_copy (lhs, sprime);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      gimple *old_stmt = stmt;
      bool was_noreturn = false;
      bool can_make_abnormal_goto = false;
      /* Remember call properties before folding so cleanup below can
	 detect transitions.  */
      if (is_gimple_call (stmt))
	{
	  was_noreturn = gimple_call_noreturn_p (stmt);
	  can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
	}

      /* Replace real uses in the statement.  */
      did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);

      /* Remember the position before folding so newly inserted stmts
	 can be reported via foreach_new_stmt_in_bb.  */
      gimple_stmt_iterator prev_gsi = i;
      gsi_prev (&prev_gsi);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  update_stmt (stmt);
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}
      /* Also fold if we want to fold all statements.  */
      else if (substitute_and_fold_engine->fold_all_stmts
	       && fold_stmt (&i, follow_single_use_edges))
	{
	  did_replace = true;
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      update_stmt_if_modified (stmt);
      if (substitute_and_fold_engine->fold_stmt (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* If this is a control statement the propagator left edges
	 unexecuted on force the condition in a way consistent with
	 that.  See PR66945 for cases where the propagator can end
	 up with a different idea of a taken edge than folding
	 (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      gimple_set_modified (stmt, true);
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  foreach_new_stmt_in_bb (prev_gsi, i);

	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a call with possible abnormal control transfer
	     into one that doesn't, remove abnormal edges.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);

	  /* If we turned a not noreturn call into a noreturn one
	     schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt_if_modified (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }

  something_changed |= substitute_and_fold_engine->propagate_into_phi_args (bb);

  return NULL;
}
963 :
964 :
965 :
966 : /* Perform final substitution and folding of propagated values.
967 : Process the whole function if BLOCK is null, otherwise only
968 : process the blocks that BLOCK dominates. In the latter case,
969 : it is the caller's responsibility to ensure that dominator
970 : information is available and up-to-date.
971 :
972 : PROP_VALUE[I] contains the single value that should be substituted
973 : at every use of SSA name N_I. If PROP_VALUE is NULL, no values are
974 : substituted.
975 :
976 : If FOLD_FN is non-NULL the function will be invoked on all statements
977 : before propagating values for pass specific simplification.
978 :
979 : DO_DCE is true if trivially dead stmts can be removed.
980 :
981 : If DO_DCE is true, the statements within a BB are walked from
982 : last to first element. Otherwise we scan from first to last element.
983 :
984 : Return TRUE when something changed. */
985 :
bool
substitute_and_fold_engine::substitute_and_fold (basic_block block)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  /* Reset per-invocation propagation statistics; they are reported
     via statistics_counter_event at the end.  */
  memset (&prop_stats, 0, sizeof (prop_stats));

  /* Don't call calculate_dominance_info when iterating over a subgraph.
     Callers that are using the interface this way are likely to want to
     iterate over several disjoint subgraphs, and it would be expensive
     in enable-checking builds to revalidate the whole dominance tree
     each time.  */
  if (block)
    gcc_assert (dom_info_state (CDI_DOMINATORS));
  else
    calculate_dominance_info (CDI_DOMINATORS);
  /* The dom walker does the actual per-statement substitution and
     folding, recording cleanup work in its member state.  */
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
  walker.walk (block ? block : ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove statements the walker queued as trivially dead; this may
     add further blocks to need_eh_cleanup.  */
  simple_dce_from_worklist (walker.dceworklist, walker.need_eh_cleanup);
  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
  if (!bitmap_empty_p (walker.need_ab_cleanup))
    gimple_purge_all_dead_abnormal_call_edges (walker.need_ab_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertedly remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      /* A stmt may have been removed from its block by a dominating
	 fixup; skip it in that case.  */
      if (!gimple_bb (stmt))
	continue;
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);

  return walker.something_changed;
}
1039 :
1040 :
1041 : /* Return true if we may propagate ORIG into DEST, false otherwise.
1042 : If DEST_NOT_ABNORMAL_PHI_EDGE_P is true then assume the propagation does
1043 : not happen into a PHI argument which flows in from an abnormal edge
1044 : which relaxes some constraints. */
1045 :
bool
may_propagate_copy (tree dest, tree orig, bool dest_not_abnormal_phi_edge_p)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || VAR_P (SSA_NAME_VAR (orig))))
    ;	/* Intentionally empty: fall through to the remaining checks.  */
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  If we know we do not propagate into such a PHI argument this
     does not apply.  */
  else if (!dest_not_abnormal_phi_edge_p
	   && TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Keep lhs of [[gnu::musttail]] calls as is, those need to be still
     tail callable.  */
  if (TREE_CODE (dest) == SSA_NAME
      && is_gimple_call (SSA_NAME_DEF_STMT (dest))
      && gimple_call_must_tail_p (as_a <gcall *> (SSA_NAME_DEF_STMT (dest))))
    return false;

  /* Anything else is OK.  */
  return true;
}
1093 :
1094 : /* Like may_propagate_copy, but use as the destination expression
1095 : the principal expression (typically, the RHS) contained in
1096 : statement DEST. This is more efficient when working with the
1097 : gimple tuples representation. */
1098 :
1099 : bool
1100 380883 : may_propagate_copy_into_stmt (gimple *dest, tree orig)
1101 : {
1102 380883 : tree type_d;
1103 380883 : tree type_o;
1104 :
1105 : /* If the statement is a switch or a single-rhs assignment,
1106 : then the expression to be replaced by the propagation may
1107 : be an SSA_NAME. Fortunately, there is an explicit tree
1108 : for the expression, so we delegate to may_propagate_copy. */
1109 :
1110 380883 : if (gimple_assign_single_p (dest))
1111 185630 : return may_propagate_copy (gimple_assign_rhs1 (dest), orig, true);
1112 195253 : else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
1113 0 : return may_propagate_copy (gimple_switch_index (dest_swtch), orig, true);
1114 :
1115 : /* In other cases, the expression is not materialized, so there
1116 : is no destination to pass to may_propagate_copy. On the other
1117 : hand, the expression cannot be an SSA_NAME, so the analysis
1118 : is much simpler. */
1119 :
1120 195253 : if (TREE_CODE (orig) == SSA_NAME
1121 195253 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1122 : return false;
1123 :
1124 195253 : if (is_gimple_assign (dest))
1125 166164 : type_d = TREE_TYPE (gimple_assign_lhs (dest));
1126 29089 : else if (gimple_code (dest) == GIMPLE_COND)
1127 24246 : type_d = boolean_type_node;
1128 4843 : else if (is_gimple_call (dest)
1129 4843 : && gimple_call_lhs (dest) != NULL_TREE)
1130 4843 : type_d = TREE_TYPE (gimple_call_lhs (dest));
1131 : else
1132 0 : gcc_unreachable ();
1133 :
1134 195253 : type_o = TREE_TYPE (orig);
1135 :
1136 195253 : if (!useless_type_conversion_p (type_d, type_o))
1137 : return false;
1138 :
1139 : return true;
1140 : }
1141 :
1142 : /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
1143 :
1144 : Use this version when not const/copy propagating values. For example,
1145 : PRE uses this version when building expressions as they would appear
1146 : in specific blocks taking into account actions of PHI nodes.
1147 :
1148 : The statement in which an expression has been replaced should be
1149 : folded using fold_stmt_inplace. */
1150 :
1151 : void
1152 54286709 : replace_exp (use_operand_p op_p, tree val)
1153 : {
1154 54286709 : if (TREE_CODE (val) == SSA_NAME || CONSTANT_CLASS_P (val))
1155 50483856 : SET_USE (op_p, val);
1156 : else
1157 3802853 : SET_USE (op_p, unshare_expr (val));
1158 54286709 : }
1159 :
1160 :
1161 : /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1162 : into the operand pointed to by OP_P.
1163 :
1164 : Use this version for const/copy propagation as it will perform additional
1165 : checks to ensure validity of the const/copy propagation. */
1166 :
1167 : void
1168 48096801 : propagate_value (use_operand_p op_p, tree val)
1169 : {
1170 48096801 : if (flag_checking)
1171 : {
1172 48096331 : bool ab = (is_a <gphi *> (USE_STMT (op_p))
1173 64073330 : && (gimple_phi_arg_edge (as_a <gphi *> (USE_STMT (op_p)),
1174 15976999 : PHI_ARG_INDEX_FROM_USE (op_p))
1175 15976999 : ->flags & EDGE_ABNORMAL));
1176 48096331 : gcc_assert (may_propagate_copy (USE_FROM_PTR (op_p), val, !ab));
1177 : }
1178 48096801 : replace_exp (op_p, val);
1179 48096801 : }
1180 :
1181 :
1182 : /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1183 : into the tree pointed to by OP_P.
1184 :
1185 : Use this version for const/copy propagation when SSA operands are not
1186 : available. It will perform the additional checks to ensure validity of
1187 : the const/copy propagation, but will not update any operand information.
1188 : Be sure to mark the stmt as modified. */
1189 :
1190 : void
1191 408619 : propagate_tree_value (tree *op_p, tree val)
1192 : {
1193 408619 : if (TREE_CODE (val) == SSA_NAME)
1194 367494 : *op_p = val;
1195 : else
1196 41125 : *op_p = unshare_expr (val);
1197 408619 : }
1198 :
1199 :
1200 : /* Like propagate_tree_value, but use as the operand to replace
1201 : the principal expression (typically, the RHS) contained in the
1202 : statement referenced by iterator GSI. Note that it is not
1203 : always possible to update the statement in-place, so a new
1204 : statement may be created to replace the original. */
1205 :
1206 : void
1207 408619 : propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1208 : {
1209 408619 : gimple *stmt = gsi_stmt (*gsi);
1210 :
1211 408619 : if (is_gimple_assign (stmt))
1212 : {
1213 374030 : tree expr = NULL_TREE;
1214 374030 : if (gimple_assign_single_p (stmt))
1215 204176 : expr = gimple_assign_rhs1 (stmt);
1216 374030 : propagate_tree_value (&expr, val);
1217 374030 : gimple_assign_set_rhs_from_tree (gsi, expr);
1218 : }
1219 34589 : else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
1220 : {
1221 27688 : tree lhs = NULL_TREE;
1222 27688 : tree rhs = build_zero_cst (TREE_TYPE (val));
1223 27688 : propagate_tree_value (&lhs, val);
1224 27688 : gimple_cond_set_code (cond_stmt, NE_EXPR);
1225 27688 : gimple_cond_set_lhs (cond_stmt, lhs);
1226 27688 : gimple_cond_set_rhs (cond_stmt, rhs);
1227 : }
1228 6901 : else if (is_gimple_call (stmt)
1229 6901 : && gimple_call_lhs (stmt) != NULL_TREE)
1230 : {
1231 6901 : tree expr = NULL_TREE;
1232 6901 : propagate_tree_value (&expr, val);
1233 6901 : replace_call_with_value (gsi, expr);
1234 : }
1235 0 : else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
1236 0 : propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
1237 : else
1238 0 : gcc_unreachable ();
1239 408619 : }
1240 :
1241 : /* Check exits of each loop in FUN, walk over loop closed PHIs in
1242 : each exit basic block and propagate degenerate PHIs. */
1243 :
unsigned
clean_up_loop_closed_phi (function *fun)
{
  gphi *phi;
  tree rhs;
  tree lhs;
  gphi_iterator gsi;

  /* Avoid possibly quadratic work when scanning for loop exits across
     all loops of a nest.  */
  if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
    return 0;

  /* replace_uses_by might purge dead EH edges and we want it to also
     remove dominated blocks.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Walk over loop in function.  */
  for (auto loop : loops_list (fun, 0))
    {
      /* Check each exit edge of loop.  */
      auto_vec<edge> exits = get_loop_exit_edges (loop);
      for (edge e : exits)
	if (single_pred_p (e->dest))
	  /* Walk over loop-closed PHIs.  With a single predecessor the
	     PHIs are degenerate: argument 0 is the sole incoming value.  */
	  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);)
	    {
	      phi = gsi.phi ();
	      rhs = gimple_phi_arg_def (phi, 0);
	      lhs = gimple_phi_result (phi);

	      if (virtual_operand_p (rhs))
		{
		  /* Virtual operands are rejected by may_propagate_copy,
		     so replace their uses by hand.  */
		  imm_use_iterator iter;
		  use_operand_p use_p;
		  gimple *stmt;

		  FOR_EACH_IMM_USE_STMT (stmt, iter, lhs)
		    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		      SET_USE (use_p, rhs);

		  /* Preserve the abnormal-PHI marker on the value that
		     now reaches those uses.  */
		  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
		    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
		  /* remove_phi_node advances GSI to the next PHI.  */
		  remove_phi_node (&gsi, true);
		}
	      else if (may_propagate_copy (lhs, rhs))
		{
		  /* Dump details.  */
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, " Replacing '");
		      print_generic_expr (dump_file, lhs, dump_flags);
		      fprintf (dump_file, "' with '");
		      print_generic_expr (dump_file, rhs, dump_flags);
		      fprintf (dump_file, "'\n");
		    }

		  replace_uses_by (lhs, rhs);
		  /* remove_phi_node advances GSI to the next PHI.  */
		  remove_phi_node (&gsi, true);
		}
	      else
		/* Propagation not possible; keep the PHI and move on.  */
		gsi_next (&gsi);
	    }
    }

  return 0;
}
|