Line data Source code
1 : /* Backward propagation of indirect loads through PHIs.
2 : Copyright (C) 2007-2026 Free Software Foundation, Inc.
3 : Contributed by Richard Guenther <rguenther@suse.de>
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify
8 : it under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful,
13 : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : GNU General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "tree-pass.h"
28 : #include "ssa.h"
29 : #include "gimple-pretty-print.h"
30 : #include "fold-const.h"
31 : #include "tree-eh.h"
32 : #include "gimplify.h"
33 : #include "gimple-iterator.h"
34 : #include "stor-layout.h"
35 : #include "tree-ssa-loop.h"
36 : #include "tree-cfg.h"
37 : #include "tree-ssa-dce.h"
38 : #include "cfgloop.h"
39 :
40 : /* This pass propagates indirect loads through the PHI node for its
41 : address to make the load source possibly non-addressable and to
42 : allow for PHI optimization to trigger.
43 :
44 : For example the pass changes
45 :
46 : # addr_1 = PHI <&a, &b>
47 : tmp_1 = *addr_1;
48 :
49 : to
50 :
51 : # tmp_1 = PHI <a, b>
52 :
53 : but also handles more complex scenarios like
54 :
55 : D.2077_2 = &this_1(D)->a1;
56 : ...
57 :
58 : # b_12 = PHI <&c(2), D.2077_2(3)>
59 : D.2114_13 = *b_12;
60 : ...
61 :
62 : # b_15 = PHI <b_12(4), &b(5)>
63 : D.2080_5 = &this_1(D)->a0;
64 : ...
65 :
66 : # b_18 = PHI <D.2080_5(6), &c(7)>
67 : ...
68 :
69 : # b_21 = PHI <b_15(8), b_18(9)>
70 : D.2076_8 = *b_21;
71 :
72 : where the addresses loaded are defined by PHIs itself.
73 : The above happens for
74 :
75 : std::max(std::min(a0, c), std::min(std::max(a1, c), b))
76 :
77 : where this pass transforms it to a form later PHI optimization
78 : recognizes and transforms it to the simple
79 :
80 : D.2109_10 = this_1(D)->a1;
81 : D.2110_11 = c;
82 : D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
83 : D.2115_14 = b;
84 : D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
85 : D.2119_16 = this_1(D)->a0;
86 : D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
87 : D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;
88 :
89 : The pass does a dominator walk processing loads using a basic-block
90 : local analysis and stores the result for use by transformations on
91 : dominated basic-blocks. */
92 :
93 :
/* Structure to keep track of the value of a dereferenced PHI result
   and the virtual operand used for that dereference.  Indexed by
   SSA name version of the PHI result that was dereferenced.  */

struct phiprop_d
{
  /* The value loaded from *PHI-result, i.e. the result of the PHI
     node inserted by phiprop_insert_phi (an SSA name, or an
     aggregate lvalue for the aggregate copy case).  */
  tree value;
  /* The virtual operand that was in use at the original load; used
     to verify later candidate loads see the same memory state.  */
  tree vuse;
};
102 :
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  USE_STMT must be a
   MEM_REF load of PHI's result.  PHIVN (of size N, indexed by SSA
   version) records previously created dereference values that can be
   reused for PHI arguments.  PHI's result is queued in DCE_SSA_NAMES
   for later removal by simple_dce_from_worklist.  Loads of the
   individual PHI arguments are inserted on the incoming edges (the
   caller must commit edge insertions).  Returns the lhs of USE_STMT,
   which becomes the result of the newly created PHI.  */

static tree
phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
		    struct phiprop_d *phivn, size_t n,
		    bitmap dce_ssa_names)
{
  tree res;
  gphi *new_phi = NULL;
  edge_iterator ei;
  edge e;
  tree phi_result = PHI_RESULT (phi);
  bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (phi_result));

  gcc_assert (is_gimple_assign (use_stmt)
	      && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  For the aggregate copy case the
     lhs is not an SSA name and no PHI is created; only the
     per-edge copies below are emitted.  */
  res = gimple_assign_lhs (use_stmt);
  if (TREE_CODE (res) == SSA_NAME)
    new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0);
    }

  gphi *vphi = get_virtual_phi (bb);

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gassign *tmp;
      location_t locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      /* Walk the SSA copy/address chain until we either find an SSA
	 name we already created a dereference value for (recorded in
	 PHIVN) or reach a non-SSA operand such as &var.
	 propagate_with_phi has verified this walk terminates in one
	 of those.  */
      while (TREE_CODE (old_arg) == SSA_NAME
	     && (SSA_NAME_VERSION (old_arg) >= n
		 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
	  old_arg = gimple_assign_rhs1 (def_stmt);
	  locus = gimple_location (def_stmt);
	}

      if (TREE_CODE (old_arg) == SSA_NAME)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
	      fprintf (dump_file, " reusing PHI result ");
	      print_generic_expr (dump_file,
				  phivn[SSA_NAME_VERSION (old_arg)].value);
	      fprintf (dump_file, "\n");
	    }
	  /* Reuse a formerly created dereference.  */
	  new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
	}
      else
	{
	  tree rhs = gimple_assign_rhs1 (use_stmt);
	  gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
	  tree vuse = NULL_TREE;
	  if (TREE_CODE (res) == SSA_NAME)
	    {
	      new_var = make_ssa_name (TREE_TYPE (rhs));
	      /* The edge-inserted load sees the memory state incoming
		 on this edge: the virtual PHI argument if BB has a
		 virtual PHI, otherwise the VUSE of the original load.  */
	      if (vphi)
		vuse = PHI_ARG_DEF_FROM_EDGE (vphi, e);
	      else
		vuse = gimple_vuse (use_stmt);
	    }
	  else
	    /* For the aggregate copy case updating virtual operands
	       we'd have to possibly insert a virtual PHI and we have
	       to split the existing VUSE lifetime.  Leave that to
	       the generic SSA updating.  */
	    new_var = unshare_expr (res);
	  if (!is_gimple_min_invariant (old_arg))
	    old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  else
	    old_arg = unshare_expr (old_arg);
	  /* Emit the load *old_arg (preserving the offset of the
	     original MEM_REF) on the incoming edge.  */
	  tmp = gimple_build_assign (new_var,
				     fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  old_arg,
						  TREE_OPERAND (rhs, 1)));
	  gimple_set_location (tmp, locus);
	  if (vuse)
	    gimple_set_vuse (tmp, vuse);

	  gsi_insert_on_edge (e, tmp);
	  update_stmt (tmp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
	      fprintf (dump_file, " inserting load ");
	      print_gimple_stmt (dump_file, tmp, 0);
	    }
	}

      if (new_phi)
	add_phi_arg (new_phi, new_var, e, locus);
    }

  if (new_phi)
    {
      update_stmt (new_phi);

      if (dump_file && (dump_flags & TDF_DETAILS))
	print_gimple_stmt (dump_file, new_phi, 0);
    }

  return res;
}
225 :
226 : /* Verify if *idx is available at *DATA. */
227 :
228 : static bool
229 48 : chk_uses (tree, tree *idx, void *data)
230 : {
231 48 : basic_block dom = (basic_block) data;
232 48 : if (TREE_CODE (*idx) == SSA_NAME)
233 30 : return (SSA_NAME_IS_DEFAULT_DEF (*idx)
234 30 : || ! dominated_by_p (CDI_DOMINATORS,
235 18 : gimple_bb (SSA_NAME_DEF_STMT (*idx)), dom));
236 : return true;
237 : }
238 :
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
	# p_2 = PHI <&x, &y>
      <Lx>:;
	p_3 = p_2;
	z_2 = *p_3;
   and converts it to
	# z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N is used to track
   past transformation results.  VPHI is the virtual PHI node in BB
   if there is one.  SSA names made dead by the transform are queued
   in DCE_SSA_NAMES for removal via simple_dce_from_worklist by the
   caller.  We need to be especially careful here with aliasing
   issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, gphi *vphi, gphi *phi,
		    struct phiprop_d *phivn, size_t n, bitmap dce_ssa_names)
{
  tree ptr = PHI_RESULT (phi);
  gimple *use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  bool changed;
  tree type = NULL_TREE;

  /* Only pointer PHIs whose pointed-to type is a register type or at
     least has a non-BLK mode are interesting; everything else cannot
     be turned into a (register or fixed-mode aggregate) load.  */
  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr)))
	  && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr))) == BLKmode))
    return false;

  tree up_vuse = NULL_TREE;
  bool canpossible_trap = false;
  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
	 created a value for or we reach a definition of the form
	 ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
	     && !SSA_NAME_IS_DEFAULT_DEF (arg)
	     && (SSA_NAME_VERSION (arg) >= n
		 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
	  if (!gimple_assign_single_p (def_stmt))
	    return false;
	  arg = gimple_assign_rhs1 (def_stmt);
	}
      if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  /* Remember whether dereferencing any argument could trap;
	     in that case loads may only be replaced, not moved
	     speculatively (checked below via post-dominance).  */
	  tree decl = TREE_OPERAND (arg, 0);
	  if (!canpossible_trap)
	    canpossible_trap = tree_could_trap_p (decl);
	}
      /* When we have an SSA name see if we previously encountered a
	 dereference of it.  */
      else if (TREE_CODE (arg) == SSA_NAME
	       && SSA_NAME_VERSION (arg) < n
	       && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
	       && (!type
		   || types_compatible_p
			(type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value))))
	{
	  /* The dereference should be under the VUSE that's active in BB.
	     If the BB has no virtual PHI then record the common "incoming"
	     vuse.  */
	  if (vphi)
	    up_vuse = gimple_phi_arg_def (vphi, phi_arg_index_from_use (arg_p));
	  if (!up_vuse)
	    up_vuse = phivn[SSA_NAME_VERSION (arg)].vuse;
	  else if (up_vuse != phivn[SSA_NAME_VERSION (arg)].vuse)
	    return false;
	}
      else
	return false;
      if (!type
	  && TREE_CODE (arg) == SSA_NAME)
	type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
	 && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  changed = false;
  auto_vec<gimple*> delayed_uses;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple *def_stmt;
      tree vuse;
      bool delay = false;

      /* Check whether this is a load of *ptr.  */
      if (!(is_gimple_assign (use_stmt)
	    && gimple_assign_rhs_code (use_stmt) == MEM_REF
	    && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
	    && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
	    && (!type
		|| types_compatible_p
		     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
	    /* We cannot replace a load that may throw or is volatile.
	       For volatiles the transform can change the number of
	       executions if the load is inside a loop but the address
	       computations outside (PR91812).  We could relax this
	       if we guard against that appropriately.  For loads that can
	       throw we could relax things if the moved loads all are
	       known to not throw.  */
	    && !stmt_can_throw_internal (cfun, use_stmt)
	    && !gimple_has_volatile_ops (use_stmt)))
	continue;

      bool aggregate = false;
      if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt))))
	aggregate = true;

      /* Post-dominance info is computed lazily, only when we actually
	 hit a potentially-trapping or aggregate load.  */
      if ((canpossible_trap || aggregate)
	  && !dom_info_available_p (cfun, CDI_POST_DOMINATORS))
	calculate_dominance_info (CDI_POST_DOMINATORS);

      /* Only replace loads in blocks that post-dominate the PHI node.
	 That makes sure we don't end up speculating loads that could
	 trap and that aggregate copies do not happen speculatively.  */
      if ((canpossible_trap || aggregate)
	  && !dominated_by_p (CDI_POST_DOMINATORS,
			      bb, gimple_bb (use_stmt)))
	delay = true;

      /* Amend the post-dominance check for SSA cycles, we need to
	 make sure each PHI result value is dereferenced.
	 We only want to delay this if we don't insert a phi.  */
      if (!(gimple_bb (use_stmt) == bb
	    || (!(bb->flags & BB_IRREDUCIBLE_LOOP)
		&& !(gimple_bb (use_stmt)->flags & BB_IRREDUCIBLE_LOOP)
		&& (bb->loop_father == gimple_bb (use_stmt)->loop_father
		    || flow_loop_nested_p (bb->loop_father,
					   gimple_bb (use_stmt)->loop_father)))))
	delay = true;

      /* Check if we can move the loads.  This is when the virtual use
	 is the same as the one active at the start of BB which we know
	 either from its virtual PHI def or from the common incoming
	 VUSE.  If neither is present make sure the def stmt of the virtual
	 use is in a different basic block dominating BB.  When the
	 def is an edge-inserted one we know it dominates us.  */
      vuse = gimple_vuse (use_stmt);
      if (vphi)
	{
	  if (vuse != gimple_phi_result (vphi))
	    goto next;
	}
      else if (up_vuse)
	{
	  if (vuse != up_vuse)
	    goto next;
	}
      else
	{
	  def_stmt = SSA_NAME_DEF_STMT (vuse);
	  if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
	      && (gimple_bb (def_stmt) == bb
		  || !dominated_by_p (CDI_DOMINATORS,
				      bb, gimple_bb (def_stmt))))
	    goto next;
	}

      /* Found a proper dereference with an aggregate copy.  Just
	 insert aggregate copies on the edges instead.  */
      if (aggregate)
	{
	  /* Aggregate copies are too hard to handle if delayed.  */
	  if (delay)
	    goto next;
	  /* We rely below on the copy having a VDEF.  */
	  if (!gimple_vdef (use_stmt))
	    goto next;

	  /* As we replicate the lhs on each incoming edge all
	     used SSA names have to be available there.  */
	  if (! for_each_index (gimple_assign_lhs_ptr (use_stmt),
				chk_uses,
				get_immediate_dominator (CDI_DOMINATORS,
							 gimple_bb (phi))))
	    goto next;

	  gimple *vuse_stmt;
	  imm_use_iterator vui;
	  use_operand_p vuse_p;
	  /* In order to move the aggregate copies earlier, make sure
	     there are no statements that could read from memory
	     aliasing the lhs in between the start of bb and use_stmt.
	     As we require use_stmt to have a VDEF above, loads after
	     use_stmt will use a different virtual SSA_NAME.  When
	     we reach an edge inserted load the constraints we place
	     on processing guarantee that program order is preserved
	     so we can avoid checking those.  */
	  FOR_EACH_IMM_USE_FAST (vuse_p, vui, vuse)
	    {
	      vuse_stmt = USE_STMT (vuse_p);
	      if (vuse_stmt == use_stmt)
		continue;
	      if (!gimple_bb (vuse_stmt)
		  || !dominated_by_p (CDI_DOMINATORS,
				      gimple_bb (vuse_stmt), bb))
		continue;
	      if (ref_maybe_used_by_stmt_p (vuse_stmt,
					    gimple_assign_lhs (use_stmt)))
		goto next;
	    }

	  phiprop_insert_phi (bb, phi, use_stmt, phivn, n, dce_ssa_names);

	  /* Remove the old stmt.  The PHI and possibly its dependencies
	     will be removed later via simple_dce_from_worklist.  */
	  gsi = gsi_for_stmt (use_stmt);
	  /* Unlinking the VDEF here is fine as we are sure that we process
	     stmts in execution order due to aggregate copies having VDEFs
	     and we emit loads on the edges in the very same order.
	     We get multiple copies (or intermediate register loads) handled
	     only by walking PHIs or immediate uses in a lucky order though,
	     so we could signal the caller to re-start iterating over PHIs
	     when we come here which would make it quadratic in the number
	     of PHIs.  */
	  unlink_stmt_vdef (use_stmt);
	  gsi_remove (&gsi, true);

	  changed = true;
	}
      /* Further replacements are easy, just make a copy out of the
	 load.  */
      else if (phi_inserted)
	{
	  gimple_assign_set_rhs1 (use_stmt, res);
	  update_stmt (use_stmt);
	  changed = true;
	}
      /* Delayed loads are rewritten after the loop once we know a PHI
	 was inserted.  */
      else if (delay)
	delayed_uses.safe_push (use_stmt);
      /* Found a proper dereference.  Insert a phi node if this
	 is the first load transformation.  */
      else
	{
	  res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n, dce_ssa_names);
	  type = TREE_TYPE (res);

	  /* Remember the value we created for *ptr.  */
	  phivn[SSA_NAME_VERSION (ptr)].value = res;
	  phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

	  /* Remove the old stmt.  The PHI and possibly its dependencies
	     will be removed later via simple_dce_from_worklist.  */
	  gsi = gsi_for_stmt (use_stmt);
	  gsi_remove (&gsi, true);

	  phi_inserted = true;
	  changed = true;
	}

next:;
      /* Continue searching for a proper dereference.  */
    }

  /* Update the delayed uses if there are any
     as now we know this is safe to do.  */
  if (phi_inserted)
    for (auto use_stmt : delayed_uses)
      {
	/* The type must match that of the inserted PHI.  */
	if (!types_compatible_p (type, TREE_TYPE (gimple_assign_lhs (use_stmt))))
	  continue;
	gimple_assign_set_rhs1 (use_stmt, res);
	update_stmt (use_stmt);
      }

  return changed;
}
524 :
/* Main entry for phiprop pass.  */

namespace {

const pass_data pass_data_phiprop =
{
  GIMPLE_PASS, /* type */
  "phiprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHIPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; gated on -ftree-phiprop (flag_tree_phiprop).  */

class pass_phiprop : public gimple_opt_pass
{
public:
  pass_phiprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phiprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_phiprop (m_ctxt); }
  bool gate (function *) final override { return flag_tree_phiprop; }
  unsigned int execute (function *) final override;

}; // class pass_phiprop

/* Execute the pass on FUN: walk the dominator tree in preorder and
   run propagate_with_phi on every PHI, then commit pending edge
   insertions and DCE the SSA names made dead by the transforms.
   Returns TODO_update_ssa_only_virtuals if anything changed.  */

unsigned int
pass_phiprop::execute (function *fun)
{
  struct phiprop_d *phivn;
  bool did_something = false;
  basic_block bb;
  gphi_iterator gsi;
  unsigned i;
  size_t n;
  auto_bitmap dce_ssa_names;

  calculate_dominance_info (CDI_DOMINATORS);

  /* PHIVN is indexed by SSA version; names with version >= N are
     created during the pass and are never recorded in it.  */
  n = num_ssa_names;
  phivn = XCNEWVEC (struct phiprop_d, n);

  /* Walk the dominator tree in preorder.  */
  auto_vec<basic_block> bbs
    = get_all_dominated_blocks (CDI_DOMINATORS,
				single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* Since we're going to move dereferences across predecessor
	 edges avoid blocks with abnormal predecessors.  */
      if (bb_has_abnormal_pred (bb))
	continue;
      gphi *vphi = get_virtual_phi (bb);
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	did_something |= propagate_with_phi (bb, vphi, gsi.phi (),
					     phivn, n, dce_ssa_names);
    }

  if (did_something)
    {
      gsi_commit_edge_inserts ();
      simple_dce_from_worklist (dce_ssa_names);
    }

  free (phivn);

  /* Post-dominators are computed lazily by propagate_with_phi.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  return did_something ? TODO_update_ssa_only_virtuals : 0;
}

} // anon namespace
602 :
603 : gimple_opt_pass *
604 288775 : make_pass_phiprop (gcc::context *ctxt)
605 : {
606 288775 : return new pass_phiprop (ctxt);
607 : }
|