Branch data Line data Source code
1 : : /* Generic SSA value propagation engine.
2 : : Copyright (C) 2004-2025 Free Software Foundation, Inc.
3 : : Contributed by Diego Novillo <dnovillo@redhat.com>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it
8 : : under the terms of the GNU General Public License as published by the
9 : : Free Software Foundation; either version 3, or (at your option) any
10 : : later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "ssa.h"
28 : : #include "gimple-pretty-print.h"
29 : : #include "dumpfile.h"
30 : : #include "gimple-iterator.h"
31 : : #include "gimple-fold.h"
32 : : #include "tree-eh.h"
33 : : #include "gimplify.h"
34 : : #include "tree-cfg.h"
35 : : #include "tree-ssa.h"
36 : : #include "tree-ssa-propagate.h"
37 : : #include "domwalk.h"
38 : : #include "cfgloop.h"
39 : : #include "tree-cfgcleanup.h"
40 : : #include "cfganal.h"
41 : : #include "tree-ssa-dce.h"
42 : :
43 : : /* This file implements a generic value propagation engine based on
44 : : the same propagation used by the SSA-CCP algorithm [1].
45 : :
46 : : Propagation is performed by simulating the execution of every
47 : : statement that produces the value being propagated. Simulation
48 : : proceeds as follows:
49 : :
50 : : 1- Initially, all edges of the CFG are marked not executable and
51 : : the CFG worklist is seeded with all the statements in the entry
52 : : basic block (block 0).
53 : :
54 : : 2- Every statement S is simulated with a call to the call-back
55 : : function SSA_PROP_VISIT_STMT. This evaluation may produce 3
56 : : results:
57 : :
58 : : SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
59 : : interest and does not affect any of the work lists.
60 : : The statement may be simulated again if any of its input
61 : : operands change in future iterations of the simulator.
62 : :
63 : : SSA_PROP_VARYING: The value produced by S cannot be determined
64 : : at compile time. Further simulation of S is not required.
65 : : If S is a conditional jump, all the outgoing edges for the
66 : : block are considered executable and added to the work
67 : : list.
68 : :
69 : : SSA_PROP_INTERESTING: S produces a value that can be computed
70 : : at compile time. Its result can be propagated into the
71 : : statements that feed from S. Furthermore, if S is a
72 : : conditional jump, only the edge known to be taken is added
73 : : to the work list. Edges that are known not to execute are
74 : : never simulated.
75 : :
76 : : 3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI. The
77 : : return value from SSA_PROP_VISIT_PHI has the same semantics as
78 : : described in #2.
79 : :
80 : : 4- Two work lists are kept. Statements are only added to these
81 : : lists if they produce one of SSA_PROP_INTERESTING or
82 : : SSA_PROP_VARYING.
83 : :
84 : : CFG_BLOCKS contains the list of blocks to be simulated.
85 : : Blocks are added to this list if their incoming edges are
86 : : found executable.
87 : :
88 : : SSA_EDGE_WORKLIST contains the list of statements that we
89 : : need to revisit.
90 : :
91 : : 5- Simulation terminates when both work lists are drained.
92 : :
93 : : Before calling ssa_propagate, it is important to initialize
94 : : prop_simulate_again_p for every statement in the program: set it
95 : : for the statements that should be simulated and clear it for the
96 : : statements that should never be simulated.
97 : :
98 : : It is also important to compute def-use information before calling
99 : : ssa_propagate.
100 : :
101 : : References:
102 : :
103 : : [1] Constant propagation with conditional branches,
104 : : Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
105 : :
106 : : [2] Building an Optimizing Compiler,
107 : : Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
108 : :
109 : : [3] Advanced Compiler Design and Implementation,
110 : : Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
111 : :
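For illustration only, a minimal sketch that is not part of this file: a client pass typically derives from ssa_propagation_engine, decides up front which statements may be simulated, and then runs the engine. The names degenerate_engine and degenerate_prop_init below are hypothetical; the hooks and helpers they use (visit_stmt, visit_phi, prop_set_simulate_again, ssa_propagate) are the interface exercised by the code in this file.

/* Hypothetical, degenerate client: every visited definition is
   immediately considered varying.  A real pass would evaluate the
   statement in its own lattice here.  */
class degenerate_engine : public ssa_propagation_engine
{
 public:
  enum ssa_prop_result visit_stmt (gimple *stmt, edge *,
				   tree *output_p) final override
  {
    tree lhs = gimple_get_lhs (stmt);
    /* Report the SSA definition (if any) so its uses are requeued.  */
    *output_p = (lhs && TREE_CODE (lhs) == SSA_NAME) ? lhs : NULL_TREE;
    return SSA_PROP_VARYING;
  }
  enum ssa_prop_result visit_phi (gphi *) final override
  {
    return SSA_PROP_VARYING;
  }
};

/* Mark every statement as simulatable; a real pass would restrict
   this to the statements it is interested in.  */
static void
degenerate_prop_init (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator si = gsi_start_phis (bb);
	   !gsi_end_p (si); gsi_next (&si))
	prop_set_simulate_again (gsi_stmt (si), true);
      for (gimple_stmt_iterator si = gsi_start_bb (bb);
	   !gsi_end_p (si); gsi_next (&si))
	prop_set_simulate_again (gsi_stmt (si), true);
    }
}

/* Usage:
     degenerate_prop_init ();
     degenerate_engine engine;
     engine.ssa_propagate ();  */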
112 : : /* Worklist of control flow edge destinations. This contains
113 : : the CFG order number of the blocks so we can iterate in CFG
114 : : order by visiting in bit-order, i.e. in the reverse post-order
115 : : computed below. */
116 : : static bitmap cfg_blocks;
117 : : static int *bb_to_cfg_order;
118 : : static int *cfg_order_to_bb;
119 : :
120 : : /* Worklist of SSA edges which will need reexamination because their
121 : : definition has changed. SSA edges are def-use edges in the SSA
122 : : web. For each D-U edge, we store the UID of the target statement
123 : : or PHI node in a bitmap; UIDs order the statements in execution
124 : : order. */
125 : : static bitmap ssa_edge_worklist;
126 : : static vec<gimple *> uid_to_stmt;
127 : :
128 : : /* Current RPO index in the iteration. */
129 : : static int curr_order;
130 : :
131 : :
132 : : /* We have just defined a new value for VAR. Add the statements
133 : : using VAR that may be simulated again to the SSA edge
134 : : worklist. */
135 : :
136 : : static void
137 : 261109289 : add_ssa_edge (tree var)
138 : : {
139 : 261109289 : imm_use_iterator iter;
140 : 261109289 : use_operand_p use_p;
141 : :
142 : 792545208 : FOR_EACH_IMM_USE_FAST (use_p, iter, var)
143 : : {
144 : 531435919 : gimple *use_stmt = USE_STMT (use_p);
145 : 531435919 : if (!prop_simulate_again_p (use_stmt))
146 : 222778379 : continue;
147 : :
148 : : /* If we did not yet simulate the block, wait for this to happen
149 : : and do not add the stmt to the SSA edge worklist. */
150 : 308657540 : basic_block use_bb = gimple_bb (use_stmt);
151 : 308657540 : if (! (use_bb->flags & BB_VISITED))
152 : 126840125 : continue;
153 : :
154 : : /* If this is a use on a not yet executable edge do not bother to
155 : : queue it. */
156 : 181817415 : if (gimple_code (use_stmt) == GIMPLE_PHI
157 : 181817415 : && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
158 : 58794036 : & EDGE_EXECUTABLE))
159 : 5418792 : continue;
160 : :
161 : 176398623 : if (bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
162 : : {
163 : 163269539 : uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
164 : 163269539 : if (dump_file && (dump_flags & TDF_DETAILS))
165 : : {
166 : 0 : fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
167 : 0 : print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
168 : : }
169 : : }
170 : : }
171 : 261109289 : }
172 : :
173 : :
174 : : /* Add edge E to the control flow worklist. */
175 : :
176 : : static void
177 : 128326121 : add_control_edge (edge e)
178 : : {
179 : 128326121 : basic_block bb = e->dest;
180 : 128326121 : if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
181 : : return;
182 : :
183 : : /* If the edge had already been executed, skip it. */
184 : 113638425 : if (e->flags & EDGE_EXECUTABLE)
185 : : return;
186 : :
187 : 97685898 : e->flags |= EDGE_EXECUTABLE;
188 : :
189 : 97685898 : int bb_order = bb_to_cfg_order[bb->index];
190 : 97685898 : bitmap_set_bit (cfg_blocks, bb_order);
191 : :
192 : 97685898 : if (dump_file && (dump_flags & TDF_DETAILS))
193 : 53 : fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
194 : 53 : e->src->index, e->dest->index);
195 : : }
196 : :
197 : :
198 : : /* Simulate the execution of STMT and update the work lists accordingly. */
199 : :
200 : : void
201 : 702256643 : ssa_propagation_engine::simulate_stmt (gimple *stmt)
202 : : {
203 : 702256643 : enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
204 : 702256643 : edge taken_edge = NULL;
205 : 702256643 : tree output_name = NULL_TREE;
206 : :
207 : : /* Pull the stmt off the SSA edge worklist. */
208 : 702256643 : bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));
209 : :
210 : : /* Don't bother visiting statements that are already
211 : : considered varying by the propagator. */
212 : 702256643 : if (!prop_simulate_again_p (stmt))
213 : 487897926 : return;
214 : :
215 : 327169266 : if (gimple_code (stmt) == GIMPLE_PHI)
216 : : {
217 : 72680759 : val = visit_phi (as_a <gphi *> (stmt));
218 : 72680759 : output_name = gimple_phi_result (stmt);
219 : : }
220 : : else
221 : 254488507 : val = visit_stmt (stmt, &taken_edge, &output_name);
222 : :
223 : 327169266 : if (val == SSA_PROP_VARYING)
224 : : {
225 : 112810549 : prop_set_simulate_again (stmt, false);
226 : :
227 : : /* If the statement produced a new varying value, add the SSA
228 : : edges coming out of OUTPUT_NAME. */
229 : 112810549 : if (output_name)
230 : 67937507 : add_ssa_edge (output_name);
231 : :
232 : : /* If STMT transfers control out of its basic block, add
233 : : all outgoing edges to the work list. */
234 : 112810549 : if (stmt_ends_bb_p (stmt))
235 : : {
236 : 46315006 : edge e;
237 : 46315006 : edge_iterator ei;
238 : 46315006 : basic_block bb = gimple_bb (stmt);
239 : 119529271 : FOR_EACH_EDGE (e, ei, bb->succs)
240 : 73214265 : add_control_edge (e);
241 : : }
242 : 112810549 : return;
243 : : }
244 : 214358717 : else if (val == SSA_PROP_INTERESTING)
245 : : {
246 : : /* If the statement produced a new value, add the SSA edges coming
247 : : out of OUTPUT_NAME. */
248 : 199290451 : if (output_name)
249 : 193171782 : add_ssa_edge (output_name);
250 : :
251 : : /* If we know which edge is going to be taken out of this block,
252 : : add it to the CFG work list. */
253 : 199290451 : if (taken_edge)
254 : 6118669 : add_control_edge (taken_edge);
255 : : }
256 : :
257 : : /* If there are no SSA uses on the stmt whose defs are simulated
258 : : again then this stmt will never be visited again. */
259 : 214358717 : bool has_simulate_again_uses = false;
260 : 214358717 : use_operand_p use_p;
261 : 214358717 : ssa_op_iter iter;
262 : 214358717 : if (gimple_code (stmt) == GIMPLE_PHI)
263 : : {
264 : 60319591 : edge_iterator ei;
265 : 60319591 : edge e;
266 : 60319591 : tree arg;
267 : 94362772 : FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
268 : 92420662 : if (!(e->flags & EDGE_EXECUTABLE)
269 : 92420662 : || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
270 : 85049278 : && TREE_CODE (arg) == SSA_NAME
271 : 69997704 : && !SSA_NAME_IS_DEFAULT_DEF (arg)
272 : 69796707 : && prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
273 : : {
274 : : has_simulate_again_uses = true;
275 : : break;
276 : : }
277 : : }
278 : : else
279 : 189769956 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
280 : : {
281 : 153148390 : gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
282 : 153148390 : if (!gimple_nop_p (def_stmt)
283 : 153148390 : && prop_simulate_again_p (def_stmt))
284 : : {
285 : : has_simulate_again_uses = true;
286 : : break;
287 : : }
288 : : }
289 : 214358717 : if (!has_simulate_again_uses)
290 : : {
291 : 38563676 : if (dump_file && (dump_flags & TDF_DETAILS))
292 : 31 : fprintf (dump_file, "marking stmt to be not simulated again\n");
293 : 38563676 : prop_set_simulate_again (stmt, false);
294 : : }
295 : : }
296 : :
297 : :
298 : : /* Simulate the execution of BLOCK. Evaluate the statement associated
299 : : with each variable reference inside the block. */
300 : :
301 : : void
302 : 74344278 : ssa_propagation_engine::simulate_block (basic_block block)
303 : : {
304 : 74344278 : gimple_stmt_iterator gsi;
305 : :
306 : : /* There is nothing to do for the exit block. */
307 : 74344278 : if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
308 : 74344278 : return;
309 : :
310 : 74344278 : if (dump_file && (dump_flags & TDF_DETAILS))
311 : 51 : fprintf (dump_file, "\nSimulating block %d\n", block->index);
312 : :
313 : : /* Always simulate PHI nodes, even if we have simulated this block
314 : : before. */
315 : 113789780 : for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
316 : 39445502 : simulate_stmt (gsi_stmt (gsi));
317 : :
318 : : /* If this is the first time we've simulated this block, then we
319 : : must simulate each of its statements. */
320 : 74344278 : if (! (block->flags & BB_VISITED))
321 : : {
322 : 69524601 : gimple_stmt_iterator j;
323 : 69524601 : unsigned int normal_edge_count;
324 : 69524601 : edge e, normal_edge;
325 : 69524601 : edge_iterator ei;
326 : :
327 : 638690250 : for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
328 : 499641048 : simulate_stmt (gsi_stmt (j));
329 : :
330 : : /* Note that we have simulated this block. */
331 : 69524601 : block->flags |= BB_VISITED;
332 : :
333 : : /* We cannot predict when abnormal and EH edges will be executed, so
334 : : once a block is considered executable, we consider any
335 : : outgoing abnormal edges as executable.
336 : :
337 : : TODO: This is not exactly true. Simplifying a statement might
338 : : prove it non-throwing, and a computed goto can be handled
339 : : when its destination is known.
340 : :
341 : : At the same time, if this block has only one successor that is
342 : : reached by non-abnormal edges, then add that successor to the
343 : : worklist. */
344 : 69524601 : normal_edge_count = 0;
345 : 69524601 : normal_edge = NULL;
346 : 167401418 : FOR_EACH_EDGE (e, ei, block->succs)
347 : : {
348 : 97876817 : if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
349 : 6822964 : add_control_edge (e);
350 : : else
351 : : {
352 : 91053853 : normal_edge_count++;
353 : 91053853 : normal_edge = e;
354 : : }
355 : : }
356 : :
357 : 69524601 : if (normal_edge_count == 1)
358 : 34671700 : add_control_edge (normal_edge);
359 : : }
360 : : }
361 : :
362 : :
363 : : /* Initialize local data structures and work lists. */
364 : :
365 : : static void
366 : 7498523 : ssa_prop_init (void)
367 : : {
368 : 7498523 : edge e;
369 : 7498523 : edge_iterator ei;
370 : 7498523 : basic_block bb;
371 : :
372 : : /* Worklists of SSA edges. */
373 : 7498523 : ssa_edge_worklist = BITMAP_ALLOC (NULL);
374 : 7498523 : bitmap_tree_view (ssa_edge_worklist);
375 : :
376 : : /* Worklist of basic-blocks. */
377 : 7498523 : bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
378 : 7498523 : cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
379 : 7498523 : int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
380 : : cfg_order_to_bb, false);
381 : 77412432 : for (int i = 0; i < n; ++i)
382 : 69913909 : bb_to_cfg_order[cfg_order_to_bb[i]] = i;
383 : 7498523 : cfg_blocks = BITMAP_ALLOC (NULL);
384 : :
385 : : /* Initially assume that every edge in the CFG is not executable
386 : : (including the edges coming out of the entry block). Mark blocks
387 : : as not visited; blocks not yet visited will have all their statements
388 : : simulated once an incoming edge becomes executable. */
389 : 7498523 : set_gimple_stmt_max_uid (cfun, 0);
390 : 77412432 : for (int i = 0; i < n; ++i)
391 : : {
392 : 69913909 : gimple_stmt_iterator si;
393 : 69913909 : bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);
394 : :
395 : 98901409 : for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
396 : : {
397 : 28987500 : gimple *stmt = gsi_stmt (si);
398 : 28987500 : gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
399 : : }
400 : :
401 : 640681691 : for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
402 : : {
403 : 500853873 : gimple *stmt = gsi_stmt (si);
404 : 500853873 : gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
405 : : }
406 : :
407 : 69913909 : bb->flags &= ~BB_VISITED;
408 : 168168016 : FOR_EACH_EDGE (e, ei, bb->succs)
409 : 98254107 : e->flags &= ~EDGE_EXECUTABLE;
410 : : }
411 : 7498523 : uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun), true);
412 : 7498523 : }
413 : :
414 : :
415 : : /* Free allocated storage. */
416 : :
417 : : static void
418 : 7498523 : ssa_prop_fini (void)
419 : : {
420 : 7498523 : BITMAP_FREE (cfg_blocks);
421 : 7498523 : free (bb_to_cfg_order);
422 : 7498523 : free (cfg_order_to_bb);
423 : 7498523 : BITMAP_FREE (ssa_edge_worklist);
424 : 7498523 : uid_to_stmt.release ();
425 : 7498523 : }
426 : :
427 : :
428 : : /* Entry point to the propagation engine.
429 : :
430 : : The VISIT_STMT virtual function is called for every statement
431 : : visited and the VISIT_PHI virtual function is called for every PHI
432 : : node visited. */
433 : :
434 : : void
435 : 7498523 : ssa_propagation_engine::ssa_propagate (void)
436 : : {
437 : 7498523 : ssa_prop_init ();
438 : :
439 : 7498523 : curr_order = 0;
440 : :
441 : : /* Iterate until the worklists are empty. We iterate both blocks
442 : : and stmts in RPO order, prioritizing backedge processing.
443 : : Seed the algorithm by adding the successors of the entry block to the
444 : : edge worklist. */
445 : 7498523 : edge e;
446 : 7498523 : edge_iterator ei;
447 : 14997046 : FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
448 : : {
449 : 7498523 : e->flags &= ~EDGE_EXECUTABLE;
450 : 7498523 : add_control_edge (e);
451 : : }
452 : 245012894 : while (1)
453 : : {
454 : 245012894 : int next_block_order = (bitmap_empty_p (cfg_blocks)
455 : 245012894 : ? -1 : bitmap_first_set_bit (cfg_blocks));
456 : 245012894 : int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
457 : 245012894 : ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
458 : 245012894 : if (next_block_order == -1 && next_stmt_uid == -1)
459 : : break;
460 : :
461 : 237514371 : int next_stmt_bb_order = -1;
462 : 237514371 : gimple *next_stmt = NULL;
463 : 237514371 : if (next_stmt_uid != -1)
464 : : {
465 : 166722431 : next_stmt = uid_to_stmt[next_stmt_uid];
466 : 166722431 : next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
467 : : }
468 : :
469 : : /* Pull the next block to simulate off the worklist if it comes first. */
470 : 237514371 : if (next_block_order != -1
471 : 209470601 : && (next_stmt_bb_order == -1
472 : 209470601 : || next_block_order <= next_stmt_bb_order))
473 : : {
474 : 74344278 : curr_order = next_block_order;
475 : 74344278 : bitmap_clear_bit (cfg_blocks, next_block_order);
476 : 74344278 : basic_block bb
477 : 74344278 : = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
478 : 74344278 : simulate_block (bb);
479 : 74344278 : }
480 : : /* Else simulate from the SSA edge worklist. */
481 : : else
482 : : {
483 : 163170093 : curr_order = next_stmt_bb_order;
484 : 163170093 : if (dump_file && (dump_flags & TDF_DETAILS))
485 : : {
486 : 0 : fprintf (dump_file, "\nSimulating statement: ");
487 : 0 : print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
488 : : }
489 : 163170093 : simulate_stmt (next_stmt);
490 : : }
491 : : }
492 : :
493 : 7498523 : ssa_prop_fini ();
494 : 7498523 : }
495 : :
496 : : /* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
497 : : is a non-volatile pointer dereference, a structure reference or a
498 : : reference to a single _DECL. Ignore volatile memory references
499 : : because they are not interesting for the optimizers. */
500 : :
501 : : bool
502 : 25622104 : stmt_makes_single_store (gimple *stmt)
503 : : {
504 : 25622104 : tree lhs;
505 : :
506 : 25622104 : if (gimple_code (stmt) != GIMPLE_ASSIGN
507 : 25622104 : && gimple_code (stmt) != GIMPLE_CALL)
508 : : return false;
509 : :
510 : 25636041 : if (!gimple_vdef (stmt))
511 : : return false;
512 : :
513 : 2446769 : lhs = gimple_get_lhs (stmt);
514 : :
515 : : /* A call statement may have a null LHS. */
516 : 2446769 : if (!lhs)
517 : : return false;
518 : :
519 : 2446769 : return (!TREE_THIS_VOLATILE (lhs)
520 : 2446769 : && (DECL_P (lhs)
521 : 2432832 : || REFERENCE_CLASS_P (lhs)));
522 : : }
523 : :
524 : :
525 : : /* Propagation statistics. */
526 : : struct prop_stats_d
527 : : {
528 : : long num_const_prop;
529 : : long num_copy_prop;
530 : : long num_stmts_folded;
531 : : };
532 : :
533 : : static struct prop_stats_d prop_stats;
534 : :
535 : : // range_query default methods to drive from a value_of_expr() rather than
536 : : // range_of_expr.
537 : :
538 : : tree
539 : 50774323 : substitute_and_fold_engine::value_on_edge (edge, tree expr)
540 : : {
541 : 50774323 : return value_of_expr (expr);
542 : : }
543 : :
544 : : tree
545 : 123175248 : substitute_and_fold_engine::value_of_stmt (gimple *stmt, tree name)
546 : : {
547 : 123175248 : if (!name)
548 : 0 : name = gimple_get_lhs (stmt);
549 : :
550 : 123175248 : gcc_checking_assert (!name || name == gimple_get_lhs (stmt));
551 : :
552 : 123175248 : if (name)
553 : 123175248 : return value_of_expr (name);
554 : : return NULL_TREE;
555 : : }
556 : :
557 : : bool
558 : 0 : substitute_and_fold_engine::range_of_expr (vrange &, tree, gimple *)
559 : : {
560 : 0 : return false;
561 : : }
562 : :
563 : : /* Replace USE references in statement STMT with the values returned
564 : : by value_of_expr. Return true if at least one reference was replaced. */
565 : :
566 : : bool
567 : 695162084 : substitute_and_fold_engine::replace_uses_in (gimple *stmt)
568 : : {
569 : 695162084 : bool replaced = false;
570 : 695162084 : use_operand_p use;
571 : 695162084 : ssa_op_iter iter;
572 : :
573 : 1038434734 : FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
574 : : {
575 : 343272650 : tree tuse = USE_FROM_PTR (use);
576 : 343272650 : tree val = value_of_expr (tuse, stmt);
577 : :
578 : 343272650 : if (val == tuse || val == NULL_TREE)
579 : 330793265 : continue;
580 : :
581 : 12479385 : if (gimple_code (stmt) == GIMPLE_ASM
582 : 12479385 : && !may_propagate_copy_into_asm (tuse))
583 : 0 : continue;
584 : :
585 : 12479385 : if (!may_propagate_copy (tuse, val))
586 : 450 : continue;
587 : :
588 : 12478935 : if (TREE_CODE (val) != SSA_NAME)
589 : 4673233 : prop_stats.num_const_prop++;
590 : : else
591 : 7805702 : prop_stats.num_copy_prop++;
592 : :
593 : 12478935 : propagate_value (use, val);
594 : :
595 : 12478935 : replaced = true;
596 : : }
597 : :
598 : 695162084 : return replaced;
599 : : }
600 : :
601 : :
602 : : /* Replace propagated values into all the arguments for PHI using the
603 : : values returned by value_on_edge. Return true if any argument was replaced. */
604 : :
605 : : bool
606 : 21704167 : substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
607 : : {
608 : 21704167 : size_t i;
609 : 21704167 : bool replaced = false;
610 : :
611 : 71486340 : for (i = 0; i < gimple_phi_num_args (phi); i++)
612 : : {
613 : 49782173 : tree arg = gimple_phi_arg_def (phi, i);
614 : :
615 : 49782173 : if (TREE_CODE (arg) == SSA_NAME)
616 : : {
617 : 35172593 : edge e = gimple_phi_arg_edge (phi, i);
618 : 35172593 : tree val = value_on_edge (e, arg);
619 : :
620 : 35172593 : if (val && val != arg && may_propagate_copy (arg, val))
621 : : {
622 : 1056083 : if (TREE_CODE (val) != SSA_NAME)
623 : 30288 : prop_stats.num_const_prop++;
624 : : else
625 : 1025795 : prop_stats.num_copy_prop++;
626 : :
627 : 1056083 : propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
628 : 1056083 : replaced = true;
629 : :
630 : : /* If we propagated a copy and this argument flows
631 : : through an abnormal edge, update the replacement
632 : : accordingly. */
633 : 1056083 : if (TREE_CODE (val) == SSA_NAME
634 : 1025795 : && e->flags & EDGE_ABNORMAL
635 : 1056083 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
636 : : {
637 : : /* This can only occur for virtual operands, since
638 : : for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
639 : : would prevent replacement. */
640 : 0 : gcc_checking_assert (virtual_operand_p (val));
641 : 0 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
642 : : }
643 : : }
644 : : }
645 : : }
646 : :
647 : 21704167 : if (dump_file && (dump_flags & TDF_DETAILS))
648 : : {
649 : 144 : if (!replaced)
650 : 144 : fprintf (dump_file, "No folding possible\n");
651 : : else
652 : : {
653 : 0 : fprintf (dump_file, "Folded into: ");
654 : 0 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
655 : 0 : fprintf (dump_file, "\n");
656 : : }
657 : : }
658 : :
659 : 21704167 : return replaced;
660 : : }
661 : :
662 : :
663 : : class substitute_and_fold_dom_walker : public dom_walker
664 : : {
665 : : public:
666 : 11500316 : substitute_and_fold_dom_walker (cdi_direction direction,
667 : : class substitute_and_fold_engine *engine)
668 : 11500316 : : dom_walker (direction),
669 : 11500316 : something_changed (false),
670 : 11500316 : substitute_and_fold_engine (engine)
671 : : {
672 : 11500316 : dceworklist = BITMAP_ALLOC (NULL);
673 : 11500316 : stmts_to_fixup.create (0);
674 : 11500316 : need_eh_cleanup = BITMAP_ALLOC (NULL);
675 : 11500316 : need_ab_cleanup = BITMAP_ALLOC (NULL);
676 : 11500316 : }
677 : 11500316 : ~substitute_and_fold_dom_walker ()
678 : 11500316 : {
679 : 11500316 : BITMAP_FREE (dceworklist);
680 : 11500316 : stmts_to_fixup.release ();
681 : 11500316 : BITMAP_FREE (need_eh_cleanup);
682 : 11500316 : BITMAP_FREE (need_ab_cleanup);
683 : 11500316 : }
684 : :
685 : : edge before_dom_children (basic_block) final override;
686 : 112118334 : void after_dom_children (basic_block bb) final override
687 : : {
688 : 112118334 : substitute_and_fold_engine->post_fold_bb (bb);
689 : 112118334 : }
690 : :
691 : : bool something_changed;
692 : : bitmap dceworklist;
693 : : vec<gimple *> stmts_to_fixup;
694 : : bitmap need_eh_cleanup;
695 : : bitmap need_ab_cleanup;
696 : :
697 : : class substitute_and_fold_engine *substitute_and_fold_engine;
698 : :
699 : : private:
700 : : void foreach_new_stmt_in_bb (gimple_stmt_iterator old_gsi,
701 : : gimple_stmt_iterator new_gsi);
702 : : };
703 : :
704 : : /* Call post_new_stmt for each new statement that has been added
705 : : to the current BB. OLD_GSI is the statement iterator before the BB
706 : : changes occurred. NEW_GSI is the iterator which may contain new
707 : : statements. */
708 : :
709 : : void
710 : 14286538 : substitute_and_fold_dom_walker::foreach_new_stmt_in_bb
711 : : (gimple_stmt_iterator old_gsi,
712 : : gimple_stmt_iterator new_gsi)
713 : : {
714 : 14286538 : basic_block bb = gsi_bb (new_gsi);
715 : 14286538 : if (gsi_end_p (old_gsi))
716 : 3230106 : old_gsi = gsi_start_bb (bb);
717 : : else
718 : 12671485 : gsi_next (&old_gsi);
719 : 14354542 : while (gsi_stmt (old_gsi) != gsi_stmt (new_gsi))
720 : : {
721 : 68004 : gimple *stmt = gsi_stmt (old_gsi);
722 : 68004 : substitute_and_fold_engine->post_new_stmt (stmt);
723 : 68004 : gsi_next (&old_gsi);
724 : : }
725 : 14286538 : }
726 : :
727 : : bool
728 : 112118334 : substitute_and_fold_engine::propagate_into_phi_args (basic_block bb)
729 : : {
730 : 112118334 : edge e;
731 : 112118334 : edge_iterator ei;
732 : 112118334 : bool propagated = false;
733 : :
734 : : /* Visit BB successor PHI nodes and replace PHI args. */
735 : 264783847 : FOR_EACH_EDGE (e, ei, bb->succs)
736 : : {
737 : 152665513 : for (gphi_iterator gpi = gsi_start_phis (e->dest);
738 : 253572133 : !gsi_end_p (gpi); gsi_next (&gpi))
739 : : {
740 : 100906620 : gphi *phi = gpi.phi ();
741 : 100906620 : use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
742 : 100906620 : tree arg = USE_FROM_PTR (use_p);
743 : 163363203 : if (TREE_CODE (arg) != SSA_NAME
744 : 100906620 : || virtual_operand_p (arg))
745 : 62456583 : continue;
746 : 38450037 : tree val = value_on_edge (e, arg);
747 : 38450037 : if (val
748 : 2865716 : && is_gimple_min_invariant (val)
749 : 40237063 : && may_propagate_copy (arg, val))
750 : : {
751 : 1785217 : propagate_value (use_p, val);
752 : 1785217 : propagated = true;
753 : : }
754 : : }
755 : : }
756 : 112118334 : return propagated;
757 : : }
758 : :
759 : : edge
760 : 112118334 : substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
761 : : {
762 : 112118334 : substitute_and_fold_engine->pre_fold_bb (bb);
763 : :
764 : : /* Propagate known values into PHI nodes. */
765 : 112118334 : for (gphi_iterator i = gsi_start_phis (bb);
766 : 155174092 : !gsi_end_p (i);
767 : 43055758 : gsi_next (&i))
768 : : {
769 : 43055758 : gphi *phi = i.phi ();
770 : 43055758 : tree res = gimple_phi_result (phi);
771 : 86111516 : if (virtual_operand_p (res))
772 : 19391986 : continue;
773 : 23663772 : if (dump_file && (dump_flags & TDF_DETAILS))
774 : : {
775 : 152 : fprintf (dump_file, "Folding PHI node: ");
776 : 152 : print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
777 : : }
778 : 23663772 : if (res && TREE_CODE (res) == SSA_NAME)
779 : : {
780 : 23663772 : tree sprime = substitute_and_fold_engine->value_of_expr (res, phi);
781 : 25623377 : if (sprime
782 : 23663772 : && sprime != res
783 : 23663772 : && may_propagate_copy (res, sprime))
784 : : {
785 : 1959605 : if (dump_file && (dump_flags & TDF_DETAILS))
786 : : {
787 : 8 : fprintf (dump_file, "Queued PHI for removal. Folds to: ");
788 : 8 : print_generic_expr (dump_file, sprime);
789 : 8 : fprintf (dump_file, "\n");
790 : : }
791 : 1959605 : bitmap_set_bit (dceworklist, SSA_NAME_VERSION (res));
792 : : /* As this now constitutes a copy duplicate points-to
793 : : and range info appropriately. */
794 : 1959605 : if (TREE_CODE (sprime) == SSA_NAME)
795 : 1234525 : maybe_duplicate_ssa_info_at_copy (res, sprime);
796 : 1959605 : continue;
797 : : }
798 : : }
799 : 21704167 : something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
800 : : }
801 : :
802 : : /* Propagate known values into stmts. In some case it exposes
803 : : more trivially deletable stmts to walk backward. */
804 : 224236668 : for (gimple_stmt_iterator i = gsi_start_bb (bb);
805 : 820878367 : !gsi_end_p (i);
806 : 708760033 : gsi_next (&i))
807 : : {
808 : 708760033 : bool did_replace;
809 : 708760033 : gimple *stmt = gsi_stmt (i);
810 : :
811 : 708760033 : substitute_and_fold_engine->pre_fold_stmt (stmt);
812 : :
813 : 708760033 : if (dump_file && (dump_flags & TDF_DETAILS))
814 : : {
815 : 1760 : fprintf (dump_file, "Folding statement: ");
816 : 1760 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
817 : : }
818 : :
819 : : /* There is no point propagating into a stmt whose value we know and
820 : : can propagate into all of its uses. Mark it for removal instead. */
821 : 708760033 : tree lhs = gimple_get_lhs (stmt);
822 : 708760033 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
823 : : {
824 : 168017870 : tree sprime = substitute_and_fold_engine->value_of_stmt (stmt, lhs);
825 : 181615819 : if (sprime
826 : 168017870 : && sprime != lhs
827 : 13616290 : && may_propagate_copy (lhs, sprime)
828 : 13615001 : && !stmt_could_throw_p (cfun, stmt)
829 : 181619735 : && !gimple_has_side_effects (stmt))
830 : : {
831 : 13597949 : if (dump_file && (dump_flags & TDF_DETAILS))
832 : : {
833 : 50 : fprintf (dump_file, "Queued stmt for removal. Folds to: ");
834 : 50 : print_generic_expr (dump_file, sprime);
835 : 50 : fprintf (dump_file, "\n");
836 : : }
837 : 13597949 : bitmap_set_bit (dceworklist, SSA_NAME_VERSION (lhs));
838 : : /* As this now constitutes a copy duplicate points-to
839 : : and range info appropriately. */
840 : 13597949 : if (TREE_CODE (sprime) == SSA_NAME)
841 : 8083161 : maybe_duplicate_ssa_info_at_copy (lhs, sprime);
842 : 13597949 : continue;
843 : : }
844 : : }
845 : :
846 : : /* Replace the statement with its folded version and mark it
847 : : folded. */
848 : 695162084 : did_replace = false;
849 : 695162084 : gimple *old_stmt = stmt;
850 : 695162084 : bool was_noreturn = false;
851 : 695162084 : bool can_make_abnormal_goto = false;
852 : 695162084 : if (is_gimple_call (stmt))
853 : : {
854 : 49644904 : was_noreturn = gimple_call_noreturn_p (stmt);
855 : 49644904 : can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
856 : : }
857 : :
858 : : /* Replace real uses in the statement. */
859 : 695162084 : did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);
860 : :
861 : 695162084 : gimple_stmt_iterator prev_gsi = i;
862 : 695162084 : gsi_prev (&prev_gsi);
863 : :
864 : : /* If we made a replacement, fold the statement. */
865 : 695162084 : if (did_replace)
866 : : {
867 : 11707763 : fold_stmt (&i, follow_single_use_edges);
868 : 11707763 : stmt = gsi_stmt (i);
869 : 11707763 : gimple_set_modified (stmt, true);
870 : : }
871 : : /* Also fold if we want to fold all statements. */
872 : 683454321 : else if (substitute_and_fold_engine->fold_all_stmts
873 : 683454321 : && fold_stmt (&i, follow_single_use_edges))
874 : : {
875 : 0 : did_replace = true;
876 : 0 : stmt = gsi_stmt (i);
877 : 0 : gimple_set_modified (stmt, true);
878 : : }
879 : :
880 : : /* Some statements may be simplified using propagator
881 : : specific information. Do this before propagating
882 : : into the stmt to not disturb pass specific information. */
883 : 695162084 : update_stmt_if_modified (stmt);
884 : 695162084 : if (substitute_and_fold_engine->fold_stmt (&i))
885 : : {
886 : 2969572 : did_replace = true;
887 : 2969572 : prop_stats.num_stmts_folded++;
888 : 2969572 : stmt = gsi_stmt (i);
889 : 2969572 : gimple_set_modified (stmt, true);
890 : : }
891 : :
892 : : /* If this is a control statement on which the propagator left edges
893 : : unexecuted, force the condition in a way consistent with
894 : : that. See PR66945 for cases where the propagator can end
895 : : up with a different idea of a taken edge than folding
896 : : (once undefined behavior is involved). */
897 : 695162084 : if (gimple_code (stmt) == GIMPLE_COND)
898 : : {
899 : 38880912 : if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
900 : 38880912 : ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
901 : : {
902 : 344146 : if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
903 : 344146 : == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
904 : 79065 : gimple_cond_make_true (as_a <gcond *> (stmt));
905 : : else
906 : 265081 : gimple_cond_make_false (as_a <gcond *> (stmt));
907 : 344146 : gimple_set_modified (stmt, true);
908 : 344146 : did_replace = true;
909 : : }
910 : : }
911 : :
912 : : /* Now cleanup. */
913 : 695162084 : if (did_replace)
914 : : {
915 : 14286538 : foreach_new_stmt_in_bb (prev_gsi, i);
916 : :
917 : : /* If we cleaned up EH information from the statement,
918 : : remove EH edges. */
919 : 14286538 : if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
920 : 155449 : bitmap_set_bit (need_eh_cleanup, bb->index);
921 : :
922 : : /* If we turned a call with possible abnormal control transfer
923 : : into one that doesn't, remove abnormal edges. */
924 : 14286538 : if (can_make_abnormal_goto
925 : 14286538 : && !stmt_can_make_abnormal_goto (stmt))
926 : 3 : bitmap_set_bit (need_ab_cleanup, bb->index);
927 : :
928 : : /* If we turned a not noreturn call into a noreturn one
929 : : schedule it for fixup. */
930 : 14286538 : if (!was_noreturn
931 : 14163539 : && is_gimple_call (stmt)
932 : 15602629 : && gimple_call_noreturn_p (stmt))
933 : 53 : stmts_to_fixup.safe_push (stmt);
934 : :
935 : 14286538 : if (gimple_assign_single_p (stmt))
936 : : {
937 : 3169437 : tree rhs = gimple_assign_rhs1 (stmt);
938 : :
939 : 3169437 : if (TREE_CODE (rhs) == ADDR_EXPR)
940 : 338406 : recompute_tree_invariant_for_addr_expr (rhs);
941 : : }
942 : :
943 : : /* Determine what needs to be done to update the SSA form. */
944 : 14286538 : update_stmt_if_modified (stmt);
945 : 14286538 : if (!is_gimple_debug (stmt))
946 : 10959202 : something_changed = true;
947 : : }
948 : :
949 : 695162084 : if (dump_file && (dump_flags & TDF_DETAILS))
950 : : {
951 : 1710 : if (did_replace)
952 : : {
953 : 154 : fprintf (dump_file, "Folded into: ");
954 : 154 : print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
955 : 154 : fprintf (dump_file, "\n");
956 : : }
957 : : else
958 : 1556 : fprintf (dump_file, "Not folded\n");
959 : : }
960 : : }
961 : :
962 : 112118334 : something_changed |= substitute_and_fold_engine->propagate_into_phi_args (bb);
963 : :
964 : 112118334 : return NULL;
965 : : }
966 : :
967 : :
968 : :
969 : : /* Perform final substitution and folding of propagated values.
970 : : Process the whole function if BLOCK is null, otherwise only
971 : : process the blocks that BLOCK dominates. In the latter case,
972 : : it is the caller's responsibility to ensure that dominator
973 : : information is available and up-to-date.
974 : :
975 : : The values to substitute are obtained from the engine's
976 : : value_of_expr, value_of_stmt and value_on_edge hooks; when a hook
977 : : returns NULL_TREE, no value is substituted for that use.
978 : :
979 : : The engine's fold_stmt hook is invoked on every statement so that
980 : : pass specific simplifications can be applied.
981 : :
982 : : Statements and PHIs whose value could be propagated into all of
983 : : their uses are queued and removed by simple_dce_from_worklist.
984 : : Blocks are visited in dominator order and the statements within
985 : : a block from first to last.
986 : :
987 : : Return TRUE when something changed. */
988 : :
989 : : bool
990 : 11500316 : substitute_and_fold_engine::substitute_and_fold (basic_block block)
991 : : {
992 : 11500316 : if (dump_file && (dump_flags & TDF_DETAILS))
993 : 157 : fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
994 : :
995 : 11500316 : memset (&prop_stats, 0, sizeof (prop_stats));
996 : :
997 : : /* Don't call calculate_dominance_info when iterating over a subgraph.
998 : : Callers that are using the interface this way are likely to want to
999 : : iterate over several disjoint subgraphs, and it would be expensive
1000 : : in enable-checking builds to revalidate the whole dominance tree
1001 : : each time. */
1002 : 11500316 : if (block)
1003 : 1161 : gcc_assert (dom_info_state (CDI_DOMINATORS));
1004 : : else
1005 : 11499155 : calculate_dominance_info (CDI_DOMINATORS);
1006 : 11500316 : substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
1007 : 11500316 : walker.walk (block ? block : ENTRY_BLOCK_PTR_FOR_FN (cfun));
1008 : :
1009 : 11500316 : simple_dce_from_worklist (walker.dceworklist, walker.need_eh_cleanup);
1010 : 11500316 : if (!bitmap_empty_p (walker.need_eh_cleanup))
1011 : 36438 : gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);
1012 : 11500316 : if (!bitmap_empty_p (walker.need_ab_cleanup))
1013 : 3 : gimple_purge_all_dead_abnormal_call_edges (walker.need_ab_cleanup);
1014 : :
1015 : : /* Fixup stmts that became noreturn calls. This may require splitting
1016 : : blocks and thus isn't possible during the dominator walk. Do this
1017 : : in reverse order so we don't inadvertently remove a stmt we want to
1018 : : fixup by visiting a dominating now noreturn call first. */
1019 : 11500369 : while (!walker.stmts_to_fixup.is_empty ())
1020 : : {
1021 : 53 : gimple *stmt = walker.stmts_to_fixup.pop ();
1022 : 0 : if (dump_file && dump_flags & TDF_DETAILS)
1023 : : {
1024 : 0 : fprintf (dump_file, "Fixing up noreturn call ");
1025 : 0 : print_gimple_stmt (dump_file, stmt, 0);
1026 : 0 : fprintf (dump_file, "\n");
1027 : : }
1028 : 53 : fixup_noreturn_call (stmt);
1029 : : }
1030 : :
1031 : 11500316 : statistics_counter_event (cfun, "Constants propagated",
1032 : 11500316 : prop_stats.num_const_prop);
1033 : 11500316 : statistics_counter_event (cfun, "Copies propagated",
1034 : 11500316 : prop_stats.num_copy_prop);
1035 : 11500316 : statistics_counter_event (cfun, "Statements folded",
1036 : 11500316 : prop_stats.num_stmts_folded);
1037 : :
1038 : 11500316 : return walker.something_changed;
1039 : 11500316 : }
1040 : :
1041 : :
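For illustration only, a minimal sketch that is not part of this file: a pass that has finished its propagation phase typically derives from substitute_and_fold_engine, answers the value queries from its own lattice, and lets the dominator walk above do the substitution, folding and dead-code removal. The class name const_folder and its lattice member are hypothetical; value_of_expr and substitute_and_fold are the interface used by the code above, and the default-constructibility of the base engine is assumed.

/* Hypothetical folder: values are looked up in a simple map that the
   propagation phase is assumed to have filled in.  */
class const_folder : public substitute_and_fold_engine
{
 public:
  /* SSA name -> known constant or copy (hypothetical lattice).  */
  hash_map<tree, tree> lattice;

  tree value_of_expr (tree expr, gimple *) final override
  {
    if (TREE_CODE (expr) != SSA_NAME)
      return NULL_TREE;
    tree *val = lattice.get (expr);
    return val ? *val : NULL_TREE;
  }
};

/* Usage, once the lattice has been populated:
     const_folder folder;
     bool changed = folder.substitute_and_fold (NULL);  */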
1042 : : /* Return true if we may propagate ORIG into DEST, false otherwise.
1043 : : If DEST_NOT_ABNORMAL_PHI_EDGE_P is true then assume the propagation does
1044 : : not happen into a PHI argument which flows in from an abnormal edge
1045 : : which relaxes some constraints. */
1046 : :
1047 : : bool
1048 : 137755859 : may_propagate_copy (tree dest, tree orig, bool dest_not_abnormal_phi_edge_p)
1049 : : {
1050 : 137755859 : tree type_d = TREE_TYPE (dest);
1051 : 137755859 : tree type_o = TREE_TYPE (orig);
1052 : :
1053 : : /* If ORIG is a default definition which flows in from an abnormal edge
1054 : : then the copy can be propagated. It is important that we do so to avoid
1055 : : uninitialized copies. */
1056 : 137755859 : if (TREE_CODE (orig) == SSA_NAME
1057 : 95710723 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
1058 : 26257 : && SSA_NAME_IS_DEFAULT_DEF (orig)
1059 : 137759029 : && (SSA_NAME_VAR (orig) == NULL_TREE
1060 : 3170 : || VAR_P (SSA_NAME_VAR (orig))))
1061 : : ;
1062 : : /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
1063 : : be propagated. */
1064 : 137753086 : else if (TREE_CODE (orig) == SSA_NAME
1065 : 137753086 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1066 : : return false;
1067 : : /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
1068 : : propagated. If we know we do not propagate into such a PHI argument this
1069 : : does not apply. */
1070 : 137729602 : else if (!dest_not_abnormal_phi_edge_p
1071 : 72464534 : && TREE_CODE (dest) == SSA_NAME
1072 : 210194136 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
1073 : : return false;
1074 : :
1075 : : /* Do not copy between types for which we *do* need a conversion. */
1076 : 137717667 : if (!useless_type_conversion_p (type_d, type_o))
1077 : : return false;
1078 : :
1079 : : /* Generally propagating virtual operands is not ok as that may
1080 : : create overlapping life-ranges. */
1081 : 137716231 : if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
1082 : : return false;
1083 : :
1084 : : /* Keep lhs of [[gnu::musttail]] calls as is, those need to be still
1085 : : tail callable. */
1086 : 137282064 : if (TREE_CODE (dest) == SSA_NAME
1087 : 137151672 : && is_gimple_call (SSA_NAME_DEF_STMT (dest))
1088 : 138473688 : && gimple_call_must_tail_p (as_a <gcall *> (SSA_NAME_DEF_STMT (dest))))
1089 : : return false;
1090 : :
1091 : : /* Anything else is OK. */
1092 : : return true;
1093 : : }
1094 : :
1095 : : /* Like may_propagate_copy, but use as the destination expression
1096 : : the principal expression (typically, the RHS) contained in
1097 : : statement DEST. This is more efficient when working with the
1098 : : gimple tuples representation. */
1099 : :
1100 : : bool
1101 : 301399 : may_propagate_copy_into_stmt (gimple *dest, tree orig)
1102 : : {
1103 : 301399 : tree type_d;
1104 : 301399 : tree type_o;
1105 : :
1106 : : /* If the statement is a switch or a single-rhs assignment,
1107 : : then the expression to be replaced by the propagation may
1108 : : be an SSA_NAME. Fortunately, there is an explicit tree
1109 : : for the expression, so we delegate to may_propagate_copy. */
1110 : :
1111 : 301399 : if (gimple_assign_single_p (dest))
1112 : 130420 : return may_propagate_copy (gimple_assign_rhs1 (dest), orig, true);
1113 : 170979 : else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
1114 : 0 : return may_propagate_copy (gimple_switch_index (dest_swtch), orig, true);
1115 : :
1116 : : /* In other cases, the expression is not materialized, so there
1117 : : is no destination to pass to may_propagate_copy. On the other
1118 : : hand, the expression cannot be an SSA_NAME, so the analysis
1119 : : is much simpler. */
1120 : :
1121 : 170979 : if (TREE_CODE (orig) == SSA_NAME
1122 : 170979 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
1123 : : return false;
1124 : :
1125 : 170979 : if (is_gimple_assign (dest))
1126 : 141562 : type_d = TREE_TYPE (gimple_assign_lhs (dest));
1127 : 29417 : else if (gimple_code (dest) == GIMPLE_COND)
1128 : 28855 : type_d = boolean_type_node;
1129 : 562 : else if (is_gimple_call (dest)
1130 : 562 : && gimple_call_lhs (dest) != NULL_TREE)
1131 : 562 : type_d = TREE_TYPE (gimple_call_lhs (dest));
1132 : : else
1133 : 0 : gcc_unreachable ();
1134 : :
1135 : 170979 : type_o = TREE_TYPE (orig);
1136 : :
1137 : 170979 : if (!useless_type_conversion_p (type_d, type_o))
1138 : : return false;
1139 : :
1140 : : return true;
1141 : : }
1142 : :
1143 : : /* Similarly, but we know that we're propagating into an ASM_EXPR. */
1144 : :
1145 : : bool
1146 : 11489 : may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
1147 : : {
1148 : 11489 : return true;
1149 : : }
1150 : :
1151 : :
1152 : : /* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
1153 : :
1154 : : Use this version when not const/copy propagating values. For example,
1155 : : PRE uses this version when building expressions as they would appear
1156 : : in specific blocks taking into account actions of PHI nodes.
1157 : :
1158 : : The statement in which an expression has been replaced should be
1159 : : folded using fold_stmt_inplace. */
1160 : :
1161 : : void
1162 : 49387423 : replace_exp (use_operand_p op_p, tree val)
1163 : : {
1164 : 49387423 : if (TREE_CODE (val) == SSA_NAME || CONSTANT_CLASS_P (val))
1165 : 46144752 : SET_USE (op_p, val);
1166 : : else
1167 : 3242671 : SET_USE (op_p, unshare_expr (val));
1168 : 49387423 : }
1169 : :
1170 : :
1171 : : /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1172 : : into the operand pointed to by OP_P.
1173 : :
1174 : : Use this version for const/copy propagation as it will perform additional
1175 : : checks to ensure validity of the const/copy propagation. */
1176 : :
1177 : : void
1178 : 43259206 : propagate_value (use_operand_p op_p, tree val)
1179 : : {
1180 : 43259206 : if (flag_checking)
1181 : : {
1182 : 43258734 : bool ab = (is_a <gphi *> (USE_STMT (op_p))
1183 : 58124588 : && (gimple_phi_arg_edge (as_a <gphi *> (USE_STMT (op_p)),
1184 : 14865854 : PHI_ARG_INDEX_FROM_USE (op_p))
1185 : 14865854 : ->flags & EDGE_ABNORMAL));
1186 : 43258734 : gcc_assert (may_propagate_copy (USE_FROM_PTR (op_p), val, !ab));
1187 : : }
1188 : 43259206 : replace_exp (op_p, val);
1189 : 43259206 : }
1190 : :
1191 : :
1192 : : /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
1193 : : into the tree pointed to by OP_P.
1194 : :
1195 : : Use this version for const/copy propagation when SSA operands are not
1196 : : available. It will perform the additional checks to ensure validity of
1197 : : the const/copy propagation, but will not update any operand information.
1198 : : Be sure to mark the stmt as modified. */
1199 : :
1200 : : void
1201 : 338599 : propagate_tree_value (tree *op_p, tree val)
1202 : : {
1203 : 338599 : if (TREE_CODE (val) == SSA_NAME)
1204 : 301372 : *op_p = val;
1205 : : else
1206 : 37227 : *op_p = unshare_expr (val);
1207 : 338599 : }
1208 : :
1209 : :
1210 : : /* Like propagate_tree_value, but use as the operand to replace
1211 : : the principal expression (typically, the RHS) contained in the
1212 : : statement referenced by iterator GSI. Note that it is not
1213 : : always possible to update the statement in-place, so a new
1214 : : statement may be created to replace the original. */
1215 : :
1216 : : void
1217 : 338599 : propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
1218 : : {
1219 : 338599 : gimple *stmt = gsi_stmt (*gsi);
1220 : :
1221 : 338599 : if (is_gimple_assign (stmt))
1222 : : {
1223 : 304145 : tree expr = NULL_TREE;
1224 : 304145 : if (gimple_assign_single_p (stmt))
1225 : 158318 : expr = gimple_assign_rhs1 (stmt);
1226 : 304145 : propagate_tree_value (&expr, val);
1227 : 304145 : gimple_assign_set_rhs_from_tree (gsi, expr);
1228 : : }
1229 : 34454 : else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
1230 : : {
1231 : 33662 : tree lhs = NULL_TREE;
1232 : 33662 : tree rhs = build_zero_cst (TREE_TYPE (val));
1233 : 33662 : propagate_tree_value (&lhs, val);
1234 : 33662 : gimple_cond_set_code (cond_stmt, NE_EXPR);
1235 : 33662 : gimple_cond_set_lhs (cond_stmt, lhs);
1236 : 33662 : gimple_cond_set_rhs (cond_stmt, rhs);
1237 : : }
1238 : 792 : else if (is_gimple_call (stmt)
1239 : 792 : && gimple_call_lhs (stmt) != NULL_TREE)
1240 : : {
1241 : 792 : tree expr = NULL_TREE;
1242 : 792 : propagate_tree_value (&expr, val);
1243 : 792 : replace_call_with_value (gsi, expr);
1244 : : }
1245 : 0 : else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
1246 : 0 : propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
1247 : : else
1248 : 0 : gcc_unreachable ();
1249 : 338599 : }
1250 : :
1251 : : /* Check exits of each loop in FUN, walk over loop closed PHIs in
1252 : : each exit basic block and propagate degenerate PHIs. */
1253 : :
1254 : : unsigned
1255 : 227894 : clean_up_loop_closed_phi (function *fun)
1256 : : {
1257 : 227894 : gphi *phi;
1258 : 227894 : tree rhs;
1259 : 227894 : tree lhs;
1260 : 227894 : gphi_iterator gsi;
1261 : :
1262 : : /* Avoid possibly quadratic work when scanning for loop exits across
1263 : : all loops of a nest. */
1264 : 227894 : if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
1265 : : return 0;
1266 : :
1267 : : /* replace_uses_by might purge dead EH edges and we want it to also
1268 : : remove dominated blocks. */
1269 : 227894 : calculate_dominance_info (CDI_DOMINATORS);
1270 : :
1271 : : /* Walk over each loop in the function. */
1272 : 1270834 : for (auto loop : loops_list (fun, 0))
1273 : : {
1274 : : /* Check each exit edge of the loop. */
1275 : 587152 : auto_vec<edge> exits = get_loop_exit_edges (loop);
1276 : 2909680 : for (edge e : exits)
1277 : 2110997 : if (single_pred_p (e->dest))
1278 : : /* Walk over loop-closed PHIs. */
1279 : 2051040 : for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);)
1280 : : {
1281 : 1100511 : phi = gsi.phi ();
1282 : 1100511 : rhs = gimple_phi_arg_def (phi, 0);
1283 : 1100511 : lhs = gimple_phi_result (phi);
1284 : :
1285 : 2200718 : if (virtual_operand_p (rhs))
1286 : : {
1287 : 580858 : imm_use_iterator iter;
1288 : 580858 : use_operand_p use_p;
1289 : 580858 : gimple *stmt;
1290 : :
1291 : 1362414 : FOR_EACH_IMM_USE_STMT (stmt, iter, lhs)
1292 : 2357008 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1293 : 1368584 : SET_USE (use_p, rhs);
1294 : :
1295 : 580858 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1296 : 48 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
1297 : 580858 : remove_phi_node (&gsi, true);
1298 : : }
1299 : 519653 : else if (may_propagate_copy (lhs, rhs))
1300 : : {
1301 : : /* Dump details. */
1302 : 519617 : if (dump_file && (dump_flags & TDF_DETAILS))
1303 : : {
1304 : 3 : fprintf (dump_file, " Replacing '");
1305 : 3 : print_generic_expr (dump_file, lhs, dump_flags);
1306 : 3 : fprintf (dump_file, "' with '");
1307 : 3 : print_generic_expr (dump_file, rhs, dump_flags);
1308 : 3 : fprintf (dump_file, "'\n");
1309 : : }
1310 : :
1311 : 519617 : replace_uses_by (lhs, rhs);
1312 : 519617 : remove_phi_node (&gsi, true);
1313 : : }
1314 : : else
1315 : 36 : gsi_next (&gsi);
1316 : : }
1317 : 587152 : }
1318 : :
1319 : 227894 : return 0;
1320 : : }
|