Branch data Line data Source code
1 : : /* Backward propagation of indirect loads through PHIs.
2 : : Copyright (C) 2007-2025 Free Software Foundation, Inc.
3 : : Contributed by Richard Guenther <rguenther@suse.de>
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify
8 : : it under the terms of the GNU General Public License as published by
9 : : the Free Software Foundation; either version 3, or (at your option)
10 : : any later version.
11 : :
12 : : GCC is distributed in the hope that it will be useful,
13 : : but WITHOUT ANY WARRANTY; without even the implied warranty of
14 : : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 : : GNU General Public License for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "ssa.h"
29 : : #include "gimple-pretty-print.h"
30 : : #include "fold-const.h"
31 : : #include "tree-eh.h"
32 : : #include "gimplify.h"
33 : : #include "gimple-iterator.h"
34 : : #include "stor-layout.h"
35 : : #include "tree-ssa-loop.h"
36 : : #include "tree-cfg.h"
37 : : #include "tree-ssa-dce.h"
38 : : #include "cfgloop.h"
39 : :
40 : : /* This pass propagates indirect loads backwards through the PHI nodes
41 : : that define their addresses, possibly making the loaded variables
42 : : non-addressable and allowing PHI optimization to trigger.
43 : :
44 : : For example the pass changes
45 : :
46 : : # addr_1 = PHI <&a, &b>
47 : : tmp_1 = *addr_1;
48 : :
49 : : to
50 : :
51 : : # tmp_1 = PHI <a, b>
52 : :
53 : : but also handles more complex scenarios like
54 : :
55 : : D.2077_2 = &this_1(D)->a1;
56 : : ...
57 : :
58 : : # b_12 = PHI <&c(2), D.2077_2(3)>
59 : : D.2114_13 = *b_12;
60 : : ...
61 : :
62 : : # b_15 = PHI <b_12(4), &b(5)>
63 : : D.2080_5 = &this_1(D)->a0;
64 : : ...
65 : :
66 : : # b_18 = PHI <D.2080_5(6), &c(7)>
67 : : ...
68 : :
69 : : # b_21 = PHI <b_15(8), b_18(9)>
70 : : D.2076_8 = *b_21;
71 : :
72 : : where the addresses loaded are themselves defined by PHIs.
73 : : The above happens for
74 : :
75 : : std::max(std::min(a0, c), std::min(std::max(a1, c), b))
76 : :
77 : : where this pass transforms the loads into a form that later PHI
78 : : optimization recognizes and simplifies to
79 : :
80 : : D.2109_10 = this_1(D)->a1;
81 : : D.2110_11 = c;
82 : : D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
83 : : D.2115_14 = b;
84 : : D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
85 : : D.2119_16 = this_1(D)->a0;
86 : : D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
87 : : D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;
88 : :
89 : : The pass performs a dominator walk, processing loads with a
90 : : basic-block local analysis and storing the results for use by
91 : : transformations in dominated basic-blocks. */
92 : :
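/* As a concrete illustration (an editorial sketch, not part of this
   translation unit; the exact SSA names and whether phiopt later folds
   the result depend on compiler version and options), C source of the
   form

     int f (int c, int a, int b)
     {
       int *p = c ? &a : &b;
       return *p;
     }

   gimplifies into a PHI of &a and &b whose result is dereferenced.
   This pass replaces that dereference with a PHI of the values loaded
   from a and b, after which a and b may no longer need to be
   addressable.  */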
93 : :
94 : : /* Structure to keep track of the value of a dereferenced PHI result
95 : : and the virtual operand used for that dereference. */
96 : :
97 : : struct phiprop_d
98 : : {
99 : : tree value;
100 : : tree vuse;
101 : : };
102 : :
103 : : /* Insert a new phi node for the dereference of PHI at basic_block
104 : : BB with the virtual operands from USE_STMT. */
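/* For instance (an editorial sketch; the SSA name numbers are made up),
   given

     # addr_1 = PHI <&a(E1), &b(E2)>
     tmp_2 = *addr_1;

   this inserts the loads _3 = a on edge E1 and _4 = b on edge E2 and
   builds # tmp_2 = PHI <_3(E1), _4(E2)> in BB, reusing dereferences
   recorded in PHIVN where an argument is itself a PHI result.  */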
105 : :
106 : : static tree
107 : 15945 : phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
108 : : struct phiprop_d *phivn, size_t n,
109 : : bitmap dce_ssa_names)
110 : : {
111 : 15945 : tree res;
112 : 15945 : gphi *new_phi = NULL;
113 : 15945 : edge_iterator ei;
114 : 15945 : edge e;
115 : 15945 : tree phi_result = PHI_RESULT (phi);
116 : 15945 : bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (phi_result));
117 : :
118 : 15945 : gcc_assert (is_gimple_assign (use_stmt)
119 : : && gimple_assign_rhs_code (use_stmt) == MEM_REF);
120 : :
121 : : /* Build a new PHI node to replace the definition of
122 : : the indirect reference lhs. */
123 : 15945 : res = gimple_assign_lhs (use_stmt);
124 : 15945 : if (TREE_CODE (res) == SSA_NAME)
125 : 15797 : new_phi = create_phi_node (res, bb);
126 : :
127 : 15945 : if (dump_file && (dump_flags & TDF_DETAILS))
128 : : {
129 : 6 : fprintf (dump_file, "Inserting PHI for result of load ");
130 : 6 : print_gimple_stmt (dump_file, use_stmt, 0);
131 : : }
132 : :
133 : 15945 : gphi *vphi = get_virtual_phi (bb);
134 : :
135 : : /* Add a PHI argument for each edge, inserting loads of the
136 : : addressable operands on those edges. */
137 : 46377 : FOR_EACH_EDGE (e, ei, bb->preds)
138 : : {
139 : 30432 : tree old_arg, new_var;
140 : 30432 : gassign *tmp;
141 : 30432 : location_t locus;
142 : :
143 : 30432 : old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
144 : 30432 : locus = gimple_phi_arg_location_from_edge (phi, e);
145 : 30432 : while (TREE_CODE (old_arg) == SSA_NAME
146 : 31886 : && (SSA_NAME_VERSION (old_arg) >= n
147 : 1546 : || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
148 : : {
149 : 1454 : gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
150 : 1454 : old_arg = gimple_assign_rhs1 (def_stmt);
151 : 1454 : locus = gimple_location (def_stmt);
152 : : }
153 : :
154 : 30432 : if (TREE_CODE (old_arg) == SSA_NAME)
155 : : {
156 : 92 : if (dump_file && (dump_flags & TDF_DETAILS))
157 : : {
158 : 0 : fprintf (dump_file, " for edge defining ");
159 : 0 : print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
160 : 0 : fprintf (dump_file, " reusing PHI result ");
161 : 0 : print_generic_expr (dump_file,
162 : 0 : phivn[SSA_NAME_VERSION (old_arg)].value);
163 : 0 : fprintf (dump_file, "\n");
164 : : }
165 : : /* Reuse a formerly created dereference. */
166 : 92 : new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
167 : : }
168 : : else
169 : : {
170 : 30340 : tree rhs = gimple_assign_rhs1 (use_stmt);
171 : 30340 : gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
172 : 30340 : tree vuse = NULL_TREE;
173 : 30340 : if (TREE_CODE (res) == SSA_NAME)
174 : : {
175 : 29990 : new_var = make_ssa_name (TREE_TYPE (rhs));
176 : 29990 : if (vphi)
177 : 90 : vuse = PHI_ARG_DEF_FROM_EDGE (vphi, e);
178 : : else
179 : 29900 : vuse = gimple_vuse (use_stmt);
180 : : }
181 : : else
182 : : /* For the aggregate copy case, updating virtual operands
183 : : would require possibly inserting a virtual PHI and
184 : : splitting the existing VUSE lifetime. Leave that to
185 : : the generic SSA updating. */
186 : 350 : new_var = unshare_expr (res);
187 : 30340 : if (!is_gimple_min_invariant (old_arg))
188 : 1431 : old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
189 : : else
190 : 28909 : old_arg = unshare_expr (old_arg);
191 : 30340 : tmp = gimple_build_assign (new_var,
192 : 30340 : fold_build2 (MEM_REF, TREE_TYPE (rhs),
193 : : old_arg,
194 : : TREE_OPERAND (rhs, 1)));
195 : 30340 : gimple_set_location (tmp, locus);
196 : 30340 : if (vuse)
197 : 29990 : gimple_set_vuse (tmp, vuse);
198 : :
199 : 30340 : gsi_insert_on_edge (e, tmp);
200 : 30340 : update_stmt (tmp);
201 : :
202 : 30340 : if (dump_file && (dump_flags & TDF_DETAILS))
203 : : {
204 : 12 : fprintf (dump_file, " for edge defining ");
205 : 12 : print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
206 : 12 : fprintf (dump_file, " inserting load ");
207 : 12 : print_gimple_stmt (dump_file, tmp, 0);
208 : : }
209 : : }
210 : :
211 : 30432 : if (new_phi)
212 : 30082 : add_phi_arg (new_phi, new_var, e, locus);
213 : : }
214 : :
215 : 15945 : if (new_phi)
216 : : {
217 : 15797 : update_stmt (new_phi);
218 : :
219 : 15797 : if (dump_file && (dump_flags & TDF_DETAILS))
220 : 6 : print_gimple_stmt (dump_file, new_phi, 0);
221 : : }
222 : :
223 : 15945 : return res;
224 : : }
225 : :
226 : : /* Verify that *IDX is available at the basic block DATA. */
227 : :
228 : : static bool
229 : 48 : chk_uses (tree, tree *idx, void *data)
230 : : {
231 : 48 : basic_block dom = (basic_block) data;
232 : 48 : if (TREE_CODE (*idx) == SSA_NAME)
233 : 30 : return (SSA_NAME_IS_DEFAULT_DEF (*idx)
234 : 30 : || ! dominated_by_p (CDI_DOMINATORS,
235 : 18 : gimple_bb (SSA_NAME_DEF_STMT (*idx)), dom));
236 : : return true;
237 : : }
238 : :
239 : : /* Propagate dereferences of the result of PHI in BB back to its
240 : : arguments. For now this matches
241 : : # p_2 = PHI <&x, &y>
242 : : <Lx>:;
243 : : p_3 = p_2;
244 : : z_2 = *p_3;
245 : : and converts it to
246 : : # z_2 = PHI <x, y>
247 : : <Lx>:;
248 : : Returns true if a transformation was done and edge insertions
249 : : need to be committed. Global data PHIVN and N are used to track
250 : : past transformation results. VPHI is the virtual PHI node in BB
251 : : if there is one. We need to be especially careful here
252 : : with aliasing issues as we are moving memory reads. */
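/* An illustrative (editorial) example of why the VUSE checks below are
   needed: in

     # p_2 = PHI <&x, &y>
     x = 1;
     z_2 = *p_2;

   the store to x clobbers memory between the PHI and the load, so the
   load's VUSE no longer matches the one active at the start of BB and
   the transformation is not applied; hoisting loads of x and y onto the
   incoming edges would read the value of x before the store.  */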
253 : :
254 : : static bool
255 : 7817898 : propagate_with_phi (basic_block bb, gphi *vphi, gphi *phi,
256 : : struct phiprop_d *phivn, size_t n, bitmap dce_ssa_names)
257 : : {
258 : 7817898 : tree ptr = PHI_RESULT (phi);
259 : 7817898 : gimple *use_stmt;
260 : 7817898 : tree res = NULL_TREE;
261 : 7817898 : gimple_stmt_iterator gsi;
262 : 7817898 : imm_use_iterator ui;
263 : 7817898 : use_operand_p arg_p, use;
264 : 7817898 : ssa_op_iter i;
265 : 7817898 : bool phi_inserted;
266 : 7817898 : bool changed;
267 : 7817898 : tree type = NULL_TREE;
268 : :
269 : 14880994 : if (!POINTER_TYPE_P (TREE_TYPE (ptr))
270 : 7844058 : || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr)))
271 : 349414 : && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr))) == BLKmode))
272 : 7292429 : return false;
273 : :
274 : 525469 : tree up_vuse = NULL_TREE;
275 : 525469 : bool canpossible_trap = false;
276 : : /* Check if we can "cheaply" dereference all phi arguments. */
277 : 644690 : FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
278 : : {
279 : 613166 : tree arg = USE_FROM_PTR (arg_p);
280 : : /* Walk the ssa chain until we reach a ssa name we already
281 : : created a value for or we reach a definition of the form
282 : : ssa_name_n = &var; */
283 : 613166 : while (TREE_CODE (arg) == SSA_NAME
284 : 510218 : && !SSA_NAME_IS_DEFAULT_DEF (arg)
285 : 1290222 : && (SSA_NAME_VERSION (arg) >= n
286 : 455192 : || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
287 : : {
288 : 455094 : gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
289 : 455094 : if (!gimple_assign_single_p (def_stmt))
290 : : return false;
291 : 221854 : arg = gimple_assign_rhs1 (def_stmt);
292 : : }
293 : 379926 : if (TREE_CODE (arg) == ADDR_EXPR)
294 : : {
295 : 119115 : tree decl = TREE_OPERAND (arg, 0);
296 : 119115 : if (!canpossible_trap)
297 : 116230 : canpossible_trap = tree_could_trap_p (decl);
298 : : }
299 : : /* When we have an SSA name see if we previously encountered a
300 : : dereference of it. */
301 : 260811 : else if (TREE_CODE (arg) == SSA_NAME
302 : 55124 : && SSA_NAME_VERSION (arg) < n
303 : 55124 : && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
304 : 260919 : && (!type
305 : 3 : || types_compatible_p
306 : 3 : (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value))))
307 : : {
308 : : /* The dereference should be under the VUSE that's active in BB.
309 : : If the BB has no virtual PHI then record the common "incoming"
310 : : vuse. */
311 : 108 : if (vphi)
312 : 3 : up_vuse = gimple_phi_arg_def (vphi, phi_arg_index_from_use (arg_p));
313 : 108 : if (!up_vuse)
314 : 102 : up_vuse = phivn[SSA_NAME_VERSION (arg)].vuse;
315 : 6 : else if (up_vuse != phivn[SSA_NAME_VERSION (arg)].vuse)
316 : : return false;
317 : : }
318 : : else
319 : 260703 : return false;
320 : 119221 : if (!type
321 : 119162 : && TREE_CODE (arg) == SSA_NAME)
322 : 103 : type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
323 : : }
324 : :
325 : : /* Find a dereferencing use. First follow (single use) ssa
326 : : copy chains for ptr. */
327 : 32602 : while (single_imm_use (ptr, &use, &use_stmt)
328 : 32602 : && gimple_assign_ssa_name_copy_p (use_stmt))
329 : 1078 : ptr = gimple_assign_lhs (use_stmt);
330 : :
331 : : /* Replace the first dereference of *ptr if there is one and if we
332 : : can move the loads to the place of the ptr phi node. */
333 : 31524 : phi_inserted = false;
334 : 31524 : changed = false;
335 : 31524 : auto_vec<gimple*> delayed_uses;
336 : 78424 : FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
337 : : {
338 : 46900 : gimple *def_stmt;
339 : 46900 : tree vuse;
340 : 46900 : bool delay = false;
341 : :
342 : 46900 : if (canpossible_trap
343 : 46900 : && !dom_info_available_p (cfun, CDI_POST_DOMINATORS))
344 : 2340 : calculate_dominance_info (CDI_POST_DOMINATORS);
345 : :
346 : : /* Only replace loads in blocks that post-dominate the PHI node. That
347 : : makes sure we don't end up speculating trapping loads. */
348 : 46900 : if (canpossible_trap
349 : 46900 : && !dominated_by_p (CDI_POST_DOMINATORS,
350 : 8315 : bb, gimple_bb (use_stmt)))
351 : : delay = true;
352 : :
353 : : /* Amend the post-dominance check for SSA cycles: we need to
354 : : make sure each PHI result value is dereferenced.
355 : : We only want to delay this if we don't insert a phi. */
356 : 46900 : if (!(gimple_bb (use_stmt) == bb
357 : 10531 : || (!(bb->flags & BB_IRREDUCIBLE_LOOP)
358 : 10531 : && !(gimple_bb (use_stmt)->flags & BB_IRREDUCIBLE_LOOP)
359 : 10530 : && (bb->loop_father == gimple_bb (use_stmt)->loop_father
360 : 2061 : || flow_loop_nested_p (bb->loop_father,
361 : 2061 : gimple_bb (use_stmt)->loop_father)))))
362 : : delay = true;
363 : :
364 : : /* Check whether this is a load of *ptr. */
365 : 46900 : if (!(is_gimple_assign (use_stmt)
366 : 24138 : && gimple_assign_rhs_code (use_stmt) == MEM_REF
367 : 17219 : && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
368 : 17219 : && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
369 : 17203 : && (!type
370 : 109 : || types_compatible_p
371 : 109 : (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
372 : : /* We cannot replace a load that may throw or is volatile.
373 : : For volatiles the transform can change the number of
374 : : executions if the load is inside a loop but the address
375 : : computations outside (PR91812). We could relax this
376 : : if we guard against that appropriately. For loads that can
377 : : throw we could relax things if the moved loads all are
378 : : known to not throw. */
379 : 17203 : && !stmt_can_throw_internal (cfun, use_stmt)
380 : 33952 : && !gimple_has_volatile_ops (use_stmt)))
381 : 29933 : continue;
382 : :
383 : : /* Check if we can move the loads. This is when the virtual use
384 : : is the same as the one active at the start of BB which we know
385 : : either from its virtual PHI def or from the common incoming
386 : : VUSE. If neither is present make sure the def stmt of the virtual
387 : : use is in a different basic block dominating BB. When the
388 : : def is an edge-inserted one we know it dominates us. */
389 : 16967 : vuse = gimple_vuse (use_stmt);
390 : 16967 : if (vphi)
391 : : {
392 : 242 : if (vuse != gimple_phi_result (vphi))
393 : 146 : goto next;
394 : : }
395 : 16725 : else if (up_vuse)
396 : : {
397 : 109 : if (vuse != up_vuse)
398 : 20 : goto next;
399 : : }
400 : : else
401 : : {
402 : 16616 : def_stmt = SSA_NAME_DEF_STMT (vuse);
403 : 16616 : if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
404 : 16616 : && (gimple_bb (def_stmt) == bb
405 : 14843 : || !dominated_by_p (CDI_DOMINATORS,
406 : 14843 : bb, gimple_bb (def_stmt))))
407 : 534 : goto next;
408 : : }
409 : :
410 : : /* Found a proper dereference with an aggregate copy. Just
411 : : insert aggregate copies on the edges instead. */
412 : 16267 : if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt))))
413 : : {
414 : : /* Aggregate copies are too hard to handle if delayed. */
415 : 193 : if (delay)
416 : 45 : goto next;
417 : 356 : if (!gimple_vdef (use_stmt))
418 : 0 : goto next;
419 : :
420 : : /* As we replicate the lhs on each incoming edge all
421 : : used SSA names have to be available there. */
422 : 178 : if (! for_each_index (gimple_assign_lhs_ptr (use_stmt),
423 : : chk_uses,
424 : 178 : get_immediate_dominator (CDI_DOMINATORS,
425 : : gimple_bb (phi))))
426 : 18 : goto next;
427 : :
428 : 160 : gimple *vuse_stmt;
429 : 160 : imm_use_iterator vui;
430 : 160 : use_operand_p vuse_p;
431 : : /* In order to move the aggregate copies earlier, make sure
432 : : there are no statements that could read from memory
433 : : aliasing the lhs in between the start of bb and use_stmt.
434 : : As we require use_stmt to have a VDEF above, loads after
435 : : use_stmt will use a different virtual SSA_NAME. When
436 : : we reach an edge-inserted load, the constraints we place
437 : : on processing guarantee that program order is preserved,
438 : : so we can avoid checking those. */
439 : 1008 : FOR_EACH_IMM_USE_FAST (vuse_p, vui, vuse)
440 : : {
441 : 700 : vuse_stmt = USE_STMT (vuse_p);
442 : 700 : if (vuse_stmt == use_stmt)
443 : 155 : continue;
444 : 545 : if (!gimple_bb (vuse_stmt)
445 : 1066 : || !dominated_by_p (CDI_DOMINATORS,
446 : 521 : gimple_bb (vuse_stmt), bb))
447 : 533 : continue;
448 : 12 : if (ref_maybe_used_by_stmt_p (vuse_stmt,
449 : : gimple_assign_lhs (use_stmt)))
450 : 12 : goto next;
451 : 12 : }
452 : :
453 : 148 : phiprop_insert_phi (bb, phi, use_stmt, phivn, n, dce_ssa_names);
454 : :
455 : : /* Remove the old stmt. The PHI and possibly its dependencies
456 : : will be removed later via simple_dce_from_worklist. */
457 : 148 : gsi = gsi_for_stmt (use_stmt);
458 : : /* Unlinking the VDEF here is fine as we are sure that we process
459 : : stmts in execution order due to aggregate copies having VDEFs
460 : : and we emit loads on the edges in the very same order.
461 : : Multiple copies (or intermediate register loads) are only
462 : : handled if PHIs or immediate uses happen to be walked in a
463 : : lucky order; we could signal the caller to re-start iterating
464 : : over PHIs when we come here, but that would make the pass
465 : : quadratic in the number of PHIs. */
466 : 148 : unlink_stmt_vdef (use_stmt);
467 : 148 : gsi_remove (&gsi, true);
468 : :
469 : 148 : changed = true;
470 : : }
471 : : /* Further replacements are easy, just make a copy out of the
472 : : load. */
473 : 16074 : else if (phi_inserted)
474 : : {
475 : 0 : gimple_assign_set_rhs1 (use_stmt, res);
476 : 0 : update_stmt (use_stmt);
477 : 0 : changed = true;
478 : : }
479 : 16074 : else if (delay)
480 : 277 : delayed_uses.safe_push (use_stmt);
481 : : /* Found a proper dereference. Insert a phi node if this
482 : : is the first load transformation. */
483 : : else
484 : : {
485 : 15797 : res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n, dce_ssa_names);
486 : 15797 : type = TREE_TYPE (res);
487 : :
488 : : /* Remember the value we created for *ptr. */
489 : 15797 : phivn[SSA_NAME_VERSION (ptr)].value = res;
490 : 15797 : phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;
491 : :
492 : : /* Remove the old stmt. The PHI and possibly its dependencies
493 : : will be removed later via simple_dce_from_worklist. */
494 : 15797 : gsi = gsi_for_stmt (use_stmt);
495 : 15797 : gsi_remove (&gsi, true);
496 : :
497 : 15797 : phi_inserted = true;
498 : 15797 : changed = true;
499 : : }
500 : :
501 : 46900 : next:;
502 : : /* Continue searching for a proper dereference. */
503 : 31524 : }
504 : :
505 : : /* Update the delayed uses, if there are any,
506 : : now that we know this is safe to do. */
507 : 31524 : if (phi_inserted)
508 : 16346 : for (auto use_stmt : delayed_uses)
509 : : {
510 : : /* The type must match that of the inserted PHI. */
511 : 253 : if (!types_compatible_p (type, TREE_TYPE (gimple_assign_lhs (use_stmt))))
512 : 6 : continue;
513 : 247 : gimple_assign_set_rhs1 (use_stmt, res);
514 : 247 : update_stmt (use_stmt);
515 : : }
516 : :
517 : 31524 : return changed;
518 : 31524 : }
519 : :
520 : : /* Main entry for phiprop pass. */
521 : :
522 : : namespace {
523 : :
524 : : const pass_data pass_data_phiprop =
525 : : {
526 : : GIMPLE_PASS, /* type */
527 : : "phiprop", /* name */
528 : : OPTGROUP_NONE, /* optinfo_flags */
529 : : TV_TREE_PHIPROP, /* tv_id */
530 : : ( PROP_cfg | PROP_ssa ), /* properties_required */
531 : : 0, /* properties_provided */
532 : : 0, /* properties_destroyed */
533 : : 0, /* todo_flags_start */
534 : : 0, /* todo_flags_finish */
535 : : };
536 : :
537 : : class pass_phiprop : public gimple_opt_pass
538 : : {
539 : : public:
540 : 580066 : pass_phiprop (gcc::context *ctxt)
541 : 1160132 : : gimple_opt_pass (pass_data_phiprop, ctxt)
542 : : {}
543 : :
544 : : /* opt_pass methods: */
545 : 290033 : opt_pass * clone () final override { return new pass_phiprop (m_ctxt); }
546 : 3542403 : bool gate (function *) final override { return flag_tree_phiprop; }
547 : : unsigned int execute (function *) final override;
548 : :
549 : : }; // class pass_phiprop
550 : :
551 : : unsigned int
552 : 3542202 : pass_phiprop::execute (function *fun)
553 : : {
554 : 3542202 : struct phiprop_d *phivn;
555 : 3542202 : bool did_something = false;
556 : 3542202 : basic_block bb;
557 : 3542202 : gphi_iterator gsi;
558 : 3542202 : unsigned i;
559 : 3542202 : size_t n;
560 : 3542202 : auto_bitmap dce_ssa_names;
561 : :
562 : 3542202 : calculate_dominance_info (CDI_DOMINATORS);
563 : :
564 : 3542202 : n = num_ssa_names;
565 : 3542202 : phivn = XCNEWVEC (struct phiprop_d, n);
566 : :
567 : : /* Walk the dominator tree in preorder. */
568 : 3542202 : auto_vec<basic_block> bbs
569 : : = get_all_dominated_blocks (CDI_DOMINATORS,
570 : 3542202 : single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
571 : 28846157 : FOR_EACH_VEC_ELT (bbs, i, bb)
572 : : {
573 : : /* Since we're going to move dereferences across predecessor
574 : : edges avoid blocks with abnormal predecessors. */
575 : 25303955 : if (bb_has_abnormal_pred (bb))
576 : 8476 : continue;
577 : 25295479 : gphi *vphi = get_virtual_phi (bb);
578 : 33113377 : for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
579 : 7817898 : did_something |= propagate_with_phi (bb, vphi, gsi.phi (),
580 : : phivn, n, dce_ssa_names);
581 : : }
582 : :
583 : 3542202 : if (did_something)
584 : : {
585 : 13515 : gsi_commit_edge_inserts ();
586 : 13515 : simple_dce_from_worklist (dce_ssa_names);
587 : : }
588 : :
589 : 3542202 : free (phivn);
590 : :
591 : 3542202 : free_dominance_info (CDI_POST_DOMINATORS);
592 : :
593 : 3542202 : return did_something ? TODO_update_ssa_only_virtuals : 0;
594 : 3542202 : }
595 : :
596 : : } // anon namespace
597 : :
598 : : gimple_opt_pass *
599 : 290033 : make_pass_phiprop (gcc::context *ctxt)
600 : : {
601 : 290033 : return new pass_phiprop (ctxt);
602 : : }