Line data Source code
1 : /* Control flow graph analysis code for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* This file contains various simple utilities to analyze the CFG. */
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "backend.h"
26 : #include "cfghooks.h"
27 : #include "timevar.h"
28 : #include "cfganal.h"
29 : #include "cfgloop.h"
30 : #include "diagnostic.h"
31 :
namespace {
/* Store the data structures necessary for depth-first search.
   Used by connect_infinite_loops_to_exit to walk the reverse CFG;
   the member definitions live elsewhere in this file.  */
class depth_first_search
{
public:
  depth_first_search ();

  /* Run (or resume) the search.  Callers treat a NULL return as
     "no unvisited block was found" -- see connect_infinite_loops_to_exit.  */
  basic_block execute (basic_block);
  /* Register a starting point for the search.  */
  void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}
50 :
51 : /* Mark the back edges in DFS traversal.
52 : Return nonzero if a loop (natural or otherwise) is present.
53 : Inspired by Depth_First_Search_PP described in:
54 :
55 : Advanced Compiler Design and Implementation
56 : Steven Muchnick
57 : Morgan Kaufmann, 1997
58 :
59 : and heavily borrowed from pre_and_rev_post_order_compute. */
60 :
bool
mark_dfs_back_edges (struct function *fun)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (fun));
  post = XCNEWVEC (int, last_basic_block_for_fn (fun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      /* Every edge starts out assumed-forward; back edges are re-marked
	 below when the DFS numbers prove otherwise.  */
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fun) && ! bitmap_bit_p (visited,
								 dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was preordered no later than SRC and has no postorder
	     number yet (i.e. it is still on the DFS stack), so SRC->DEST
	     closes a cycle: mark it as a back edge.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (fun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}
138 :
/* Overload of the above operating on the current function, CFUN.  */

bool
mark_dfs_back_edges (void)
{
  return mark_dfs_back_edges (cfun);
}
144 :
/* Verify that the EDGE_DFS_BACK flags are up to date, triggering an
   internal error if recomputing them disagrees with the saved flags.  */
146 :
147 : void
148 0 : verify_marked_backedges (struct function *fun)
149 : {
150 0 : auto_edge_flag saved_dfs_back (fun);
151 0 : basic_block bb;
152 0 : edge e;
153 0 : edge_iterator ei;
154 :
155 : // Save all the back edges...
156 0 : FOR_EACH_BB_FN (bb, fun)
157 0 : FOR_EACH_EDGE (e, ei, bb->succs)
158 : {
159 0 : if (e->flags & EDGE_DFS_BACK)
160 : {
161 0 : e->flags |= saved_dfs_back;
162 0 : e->flags &= ~EDGE_DFS_BACK;
163 : }
164 : }
165 :
166 : // ...and verify that recalculating them agrees with the saved ones.
167 0 : mark_dfs_back_edges ();
168 0 : FOR_EACH_BB_FN (bb, fun)
169 0 : FOR_EACH_EDGE (e, ei, bb->succs)
170 : {
171 0 : if (((e->flags & EDGE_DFS_BACK) != 0)
172 0 : != ((e->flags & saved_dfs_back) != 0))
173 0 : internal_error ("%<verify_marked_backedges%> failed");
174 :
175 0 : e->flags &= ~saved_dfs_back;
176 : }
177 0 : }
178 :
179 : /* Find unreachable blocks. An unreachable block will have 0 in
180 : the reachable bit in block->flags. A nonzero value indicates the
181 : block is reachable. */
182 :
183 : void
184 77107484 : find_unreachable_blocks (void)
185 : {
186 77107484 : edge e;
187 77107484 : edge_iterator ei;
188 77107484 : basic_block *tos, *worklist, bb;
189 :
190 77107484 : tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
191 :
192 : /* Clear all the reachability flags. */
193 :
194 1082955330 : FOR_EACH_BB_FN (bb, cfun)
195 1005847846 : bb->flags &= ~BB_REACHABLE;
196 :
197 : /* Add our starting points to the worklist. Almost always there will
198 : be only one. It isn't inconceivable that we might one day directly
199 : support Fortran alternate entry points. */
200 :
201 154214968 : FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
202 : {
203 77107484 : *tos++ = e->dest;
204 :
205 : /* Mark the block reachable. */
206 77107484 : e->dest->flags |= BB_REACHABLE;
207 : }
208 :
209 : /* Iterate: find everything reachable from what we've already seen. */
210 :
211 1089046867 : while (tos != worklist)
212 : {
213 1011939383 : basic_block b = *--tos;
214 :
215 2473310893 : FOR_EACH_EDGE (e, ei, b->succs)
216 : {
217 1461371510 : basic_block dest = e->dest;
218 :
219 1461371510 : if (!(dest->flags & BB_REACHABLE))
220 : {
221 934831899 : *tos++ = dest;
222 934831899 : dest->flags |= BB_REACHABLE;
223 : }
224 : }
225 : }
226 :
227 77107484 : free (worklist);
228 77107484 : }
229 :
230 : /* Verify that there are no unreachable blocks in the current function. */
231 :
void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  /* find_unreachable_blocks set BB_REACHABLE on every block reachable
     from the entry; any block left unmarked is a CFG inconsistency.  */
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}
241 :
242 :
243 : /* Functions to access an edge list with a vector representation.
244 : Enough data is kept such that given an index number, the
245 : pred and succ that edge represents can be determined, or
246 : given a pred and a succ, its index number can be returned.
247 : This allows algorithms which consume a lot of memory to
248 : represent the normally full matrix of edge (pred,succ) with a
249 : single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
250 : wasted space in the client code due to sparse flow graphs. */
251 :
252 : /* This functions initializes the edge list. Basically the entire
253 : flowgraph is processed, and all edges are assigned a number,
254 : and the data structure is filled in. */
255 :
struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  Entry is included, exit is not (its succs
     are empty anyway), so every edge is counted exactly once.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  /* Reuse NUM_EDGES as the fill cursor for the second pass.  */
  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
288 :
289 : /* This function free's memory associated with an edge list. */
290 :
291 : void
292 494536 : free_edge_list (struct edge_list *elist)
293 : {
294 494536 : if (elist)
295 : {
296 494536 : free (elist->index_to_edge);
297 494536 : free (elist);
298 : }
299 494536 : }
300 :
301 : /* This function provides debug output showing an edge list. */
302 :
DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  /* Print each edge as "index - edge(pred,succ)", spelling out the
     special entry/exit blocks by name rather than index.  */
  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}
325 :
326 : /* This function provides an internal consistency check of an edge list,
327 : verifying that all edges are present, and that there are no
328 : extra edges. */
329 :
DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  /* First direction: every edge in the CFG must be present in ELIST and
     its stored pred/succ must match the real edge.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  This is an expensive check!
     It iterates over every (pred, succ) pair of blocks, so it is
     quadratic in the number of basic blocks.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	/* Look for the edge in both the succ and pred vectors; either
	   occurrence proves the CFG edge exists.  */
	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
394 :
395 :
396 : /* Functions to compute control dependences. */
397 :
398 : /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  /* The entry block is never control dependent on anything.  */
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (&control_dependence_map[bb->index], edge_index);
}
408 :
409 : /* Clear all control dependences for block BB. */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  /* Drop every recorded edge dependence for BB in one go.  */
  bitmap_clear (&control_dependence_map[bb->index]);
}
415 :
416 : /* Determine all blocks' control dependences on the given edge with edge_list
417 : EL index EDGE_INDEX, ala Morgan, Section 3.6. */
418 :
void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Walk up the postdominator tree from the edge's destination; every
     block strictly below the edge source's immediate postdominator is
     control dependent on this edge.  */
  ending_block = get_immediate_dominator (CDI_POST_DOMINATORS,
					  get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = get_immediate_dominator (CDI_POST_DOMINATORS,
						current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}
437 :
438 : /* Record all blocks' control dependences on all edges in the edge
439 : list EL, ala Morgan, Section 3.6. */
440 :
control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	/* For abnormal edges, we don't make current_block control
	   dependent because instructions that throw are always necessary
	   anyway.  NUM_EDGES is decremented to keep it equal to the
	   number of (src, dest) pairs actually pushed onto m_el.  */
	if (e->flags & EDGE_ABNORMAL)
	  {
	    num_edges--;
	    continue;
	  }
	m_el.quick_push (std::make_pair (e->src->index, e->dest->index));
      }

  /* One dependence bitmap per basic block, all allocated on a private
     obstack released by the destructor.  */
  bitmap_obstack_initialize (&m_bitmaps);
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  control_dependence_map.quick_grow_cleared (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    bitmap_initialize (&control_dependence_map[i], &m_bitmaps);
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}
479 :
480 : /* Free control dependences and the associated edge list. */
481 :
control_dependences::~control_dependences ()
{
  /* The individual bitmaps live on m_bitmaps, so releasing the obstack
     last frees all of their storage at once.  */
  control_dependence_map.release ();
  m_el.release ();
  bitmap_obstack_release (&m_bitmaps);
}
488 :
489 : /* Returns the bitmap of edges the basic-block I is dependent on. */
490 :
bitmap
control_dependences::get_edges_dependent_on (int i)
{
  /* The bitmap is indexed by edge-list position (see m_el).  */
  return &control_dependence_map[i];
}
496 :
497 : /* Returns the edge source with index I from the edge list. */
498 :
basic_block
control_dependences::get_edge_src (int i)
{
  /* m_el stores (src index, dest index) pairs; map back to the block.  */
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}
504 :
505 : /* Returns the edge destination with index I from the edge list. */
506 :
basic_block
control_dependences::get_edge_dest (int i)
{
  /* m_el stores (src index, dest index) pairs; map back to the block.  */
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}
512 :
513 :
514 : /* Given PRED and SUCC blocks, return the edge which connects the blocks.
515 : If no such edge exists, return NULL. */
516 :
517 : edge
518 828468596 : find_edge (basic_block pred, basic_block succ)
519 : {
520 828468596 : edge e;
521 828468596 : edge_iterator ei;
522 :
523 2299918706 : if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
524 : {
525 911315250 : FOR_EACH_EDGE (e, ei, pred->succs)
526 697797637 : if (e->dest == succ)
527 : return e;
528 : }
529 : else
530 : {
531 228835553 : FOR_EACH_EDGE (e, ei, succ->preds)
532 141454878 : if (e->src == pred)
533 : return e;
534 : }
535 :
536 : return NULL;
537 : }
538 :
539 : /* This routine will determine what, if any, edge there is between
540 : a specified predecessor and successor. */
541 :
542 : int
543 38 : find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
544 : {
545 38 : int x;
546 :
547 332 : for (x = 0; x < NUM_EDGES (edge_list); x++)
548 332 : if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
549 60 : && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
550 : return x;
551 :
552 : return (EDGE_INDEX_NO_EDGE);
553 : }
554 :
555 : /* This routine will remove any fake predecessor edges for a basic block.
556 : When the edge is removed, it is also removed from whatever successor
557 : list it is in. */
558 :
static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* Manual iteration: remove_edge deletes the edge from the vector we
     are walking, so the iterator is only advanced when the current edge
     is kept.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}
573 :
574 : /* This routine will remove all fake edges from the flow graph. If
575 : we remove all fake successors, it will automatically remove all
576 : fake predecessors. */
577 :
void
remove_fake_edges (void)
{
  basic_block bb;

  /* Iterate over all blocks after the entry block (including the exit
     block, since FOR_BB_BETWEEN with a NULL end visits it too).  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}
586 :
587 : /* This routine will remove all fake edges to the EXIT_BLOCK. */
588 :
void
remove_fake_exit_edges (void)
{
  /* Fake edges inserted by add_noreturn_fake_exit_edges and
     connect_infinite_loops_to_exit all target the exit block, so
     stripping its fake predecessors undoes them.  */
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}
594 :
595 :
596 : /* This function will add a fake edge between any block which has no
597 : successors, and the exit block. Some data flow equations require these
598 : edges to exist. */
599 :
void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  /* A block with no successors (e.g. ending in a noreturn call) gets a
     single fake edge to the exit block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}
609 :
610 : /* This function adds a fake edge between any noreturn block and
611 : infinite loops to the exit block. Some optimizations require a path
612 : from each node to the exit node.
613 :
614 : See also Morgan, Figure 3.10, pp. 82-83.
615 :
616 : The current implementation is ugly, not attempting to minimize the
617 : number of inserted fake edges. To reduce the number of fake edges
618 : to insert, add fake edges from _innermost_ loops containing only
619 : nodes not reachable from the exit block. */
620 :
void
connect_infinite_loops_to_exit (void)
{
  /* First add fake exits to noreturn blocks, this is required to
     discover only truly infinite loops below.  */
  add_noreturn_fake_exit_edges ();

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  Each
     iteration connects one region (an infinite loop) to the exit and
     resumes the reverse DFS from the new connection point.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      /* The fake edge is never taken; keep profile data consistent.  */
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}
648 :
649 : /* Compute reverse top sort order. This is computing a post order
650 : numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then
651 : ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is
652 : true, unreachable blocks are deleted. */
653 :
int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  /* EXIT is first in a post order (it has no successors).  */
  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  /* SRC's last outgoing edge has been examined, so SRC itself
	     is finished and receives its postorder number.  */
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    /* ENTRY and EXIT were not numbered but are still reachable;
       account for them when comparing against n_basic_blocks.  */
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  /* Grab the successor first; delete_basic_block frees B.  */
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}
741 :
742 :
743 : /* Helper routine for inverted_rev_post_order_compute
744 : flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth first search in dominance.cc.
746 : BB has to belong to a region of CFG
747 : unreachable by inverted traversal from the exit.
748 : i.e. there's no control flow path from ENTRY to EXIT
749 : that contains this BB.
750 : This can happen in two cases - if there's an infinite loop
751 : or if there's a block that has no successor
752 : (call to a function with no return).
753 : Some RTL passes deal with this condition by
754 : calling connect_infinite_loops_to_exit () and/or
755 : add_noreturn_fake_exit_edges ().
756 : However, those methods involve modifying the CFG itself
757 : which may not be desirable.
758 : Hence, we deal with the infinite loop/no return cases
759 : by identifying a unique basic block that can reach all blocks
760 : in such a region by inverted traversal.
761 : This function returns a basic block that guarantees
762 : that all blocks in the region are reachable
763 : by starting an inverted traversal from the returned block. */
764 :
basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      /* A block with no successors is the dead end we want.  */
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      /* bitmap_set_bit returns false when the bit was already set:
	 we have cycled without finding a successor-less block, so give
	 up and return the last block considered.  */
      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  /* Prefer the loop exit if one was found, otherwise fall back
	     to the first successor.  */
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }
}
797 :
798 :
799 : /* Compute the reverse top sort order of the inverted CFG
800 : i.e. starting from the exit block and following the edges backward
801 : (from successors to predecessors).
802 : This ordering can be used for forward dataflow problems among others.
803 :
804 : Optionally if START_POINTS is specified, start from exit block and all
805 : basic blocks in START_POINTS. This is used by CD-DCE.
806 :
807 : This function assumes that all blocks in the CFG are reachable
808 : from the ENTRY (but not necessarily from EXIT).
809 :
810 : If there's an infinite loop,
811 : a simple inverted traversal starting from the blocks
812 : with no successors can't visit all blocks.
813 : To solve this problem, we first do inverted traversal
814 : starting from the blocks with no successor.
815 : And if there's any block left that's not visited by the regular
816 : inverted traversal from EXIT,
817 : those blocks are in such problematic region.
818 : Among those, we find one block that has
819 : any visited predecessor (which is an entry into such a region),
820 : and start looking for a "dead end" from that block
821 : and do another inverted traversal from that block. */
822 :
int
inverted_rev_post_order_compute (struct function *fn,
				 int *rev_post_order,
				 sbitmap *start_points)
{
  basic_block bb;

  /* Fill REV_POST_ORDER from the back; the last slot is reserved for
     the first block visited by the inverted traversal.  */
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      /* Seed the traversal from the caller-provided start points (plus
	 the exit block) instead of the successor-less blocks.  */
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		rev_post_order[rev_post_order_num--] = pred->index;
	    }
	  else
	    {
	      /* BB's last incoming edge has been examined, so BB is
		 finished in the inverted traversal and gets its slot.  */
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		rev_post_order[rev_post_order_num--] = bb->index;

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    /* BB is an entry into an unreachable region; restart
		       the traversal from a dead end inside it.  */
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case the below while fires is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
  return n_basic_blocks_for_fn (fn);
}
965 :
/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is a really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  /* REV_POST_ORDER is filled from the back; postorder completion
     numbers are assigned downwards from the last slot.  */
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      /* ENTRY_BLOCK is first in preorder; EXIT_BLOCK is last in
	 reverse postorder.  Record both up front.  */
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    /* Entry and exit are not emitted, so fewer slots are needed.  */
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Push the first edge on to the stack.  The stack holds edge
     iterators; the top iterator is the next edge to consider from
     the block currently being expanded.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! (dest->flags & visited))
	{
	  /* Mark that we have visited the destination.  */
	  dest->flags |= visited;

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  /* Clear the temporarily allocated flag.  Whichever array is
     available records the visited blocks, so iterate it instead of
     all blocks.  NOTE(review): this reads the first PRE_ORDER_NUM
     slots of REV_POST_ORDER which is filled from the back; with
     unreachable blocks and no PRE_ORDER this relies on the low slots
     still naming visited blocks - confirm against callers.  */
  if (!rev_post_order)
    rev_post_order = pre_order;
  for (int i = 0; i < pre_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags &= ~visited;

  return pre_order_num;
}
1070 :
1071 : /* Like pre_and_rev_post_order_compute_fn but operating on the
1072 : current function and asserting that all nodes were visited. */
1073 :
1074 : int
1075 81211310 : pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
1076 : bool include_entry_exit)
1077 : {
1078 81211310 : int pre_order_num
1079 81211310 : = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
1080 : include_entry_exit);
1081 81211310 : if (include_entry_exit)
1082 : /* The number of nodes visited should be the number of blocks. */
1083 37658302 : gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
1084 : else
1085 : /* The number of nodes visited should be the number of blocks minus
1086 : the entry and exit blocks which are not visited here. */
1087 43553008 : gcc_assert (pre_order_num
1088 : == (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));
1089 :
1090 81211310 : return pre_order_num;
1091 : }
1092 :
1093 :
1094 : /* Per basic-block data for rev_post_order_and_mark_dfs_back_seme,
1095 : element of a sparsely populated array indexed by basic-block number. */
1096 : typedef auto_vec<int, 2> scc_exit_vec_t;
1097 : struct rpoamdbs_bb_data {
1098 : int depth;
1099 : int bb_to_pre;
1100 : /* The basic-block index of the SCC entry of the block visited first
1101 : (the SCC leader). */
1102 : int scc;
1103 : /* The index into the RPO array where the blocks SCC entries end
1104 : (only valid for the SCC leader). */
1105 : int scc_end;
1106 : /* The indexes of the exits destinations of this SCC (only valid
1107 : for the SCC leader). Initialized upon discovery of SCC leaders. */
1108 : scc_exit_vec_t scc_exits;
1109 : };
1110 :
1111 : /* Tag H as a header of B, weaving H and its loop header list into the
1112 : current loop header list of B. */
1113 :
1114 : static void
1115 122039143 : tag_header (int b, int h, rpoamdbs_bb_data *bb_data)
1116 : {
1117 122039143 : if (h == -1 || b == h)
1118 : return;
1119 : int cur1 = b;
1120 : int cur2 = h;
1121 32236343 : while (bb_data[cur1].scc != -1)
1122 : {
1123 6183761 : int ih = bb_data[cur1].scc;
1124 6183761 : if (ih == cur2)
1125 : return;
1126 873388 : if (bb_data[ih].depth < bb_data[cur2].depth)
1127 : {
1128 322135 : bb_data[cur1].scc = cur2;
1129 322135 : cur1 = cur2;
1130 322135 : cur2 = ih;
1131 : }
1132 : else
1133 : cur1 = ih;
1134 : }
1135 26052582 : bb_data[cur1].scc = cur2;
1136 : }
1137 :
1138 : /* Comparator for a sort of two edges destinations E1 and E2 after their index
1139 : in the PRE array as specified by BB_TO_PRE. */
1140 :
1141 : static int
1142 45085247 : cmp_edge_dest_pre (const void *e1_, const void *e2_, void *data_)
1143 : {
1144 45085247 : const int *e1 = (const int *)e1_;
1145 45085247 : const int *e2 = (const int *)e2_;
1146 45085247 : rpoamdbs_bb_data *bb_data = (rpoamdbs_bb_data *)data_;
1147 45085247 : return (bb_data[*e1].bb_to_pre - bb_data[*e2].bb_to_pre);
1148 : }
1149 :
/* Compute the reverse completion number of a depth first search
   on the SEME region denoted by the ENTRY edge and the EXIT_BBS set of
   exit block indexes and store it in the array REV_POST_ORDER.
   Also sets the EDGE_DFS_BACK edge flags according to this visitation
   order.
   Returns the number of nodes visited.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   If FOR_ITERATION is true then compute an RPO where SCCs form a
   contiguous region in the RPO array.
   *TOPLEVEL_SCC_EXTENTS if not NULL is filled with pairs of
   *REV_POST_ORDER indexes denoting extents of the toplevel SCCs in
   this region.

   Both DFS walks below express recursion with explicit stacks and
   goto labels (find_loops / dfs_rpo are the "recursive calls",
   ret_from_* the return points) to avoid native stack overflow on
   large CFGs.  */

int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
				       bitmap exit_bbs, bool for_iteration,
				       int *rev_post_order,
				       vec<std::pair<int, int> >
					 *toplevel_scc_extents)
{
  int rev_post_order_num = 0;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Lazily initialized per-BB data for the two DFS walks below.
     Note this is raw memory - elements are only constructed (via
     placement new of scc_exits) for discovered SCC headers.  */
  rpoamdbs_bb_data *bb_data
    = XNEWVEC (rpoamdbs_bb_data, last_basic_block_for_fn (fn));

  /* First DFS walk, loop discovery according to
     A New Algorithm for Identifying Loops in Decompilation
     by Tao Wei, Jian Mao, Wei Zou and You Chen of the Institute of
     Computer Science and Technology of the Peking University.  */
  auto_vec<edge_iterator, 20> ei_stack (n_basic_blocks_for_fn (fn) + 1);
  auto_bb_flag is_header (fn);
  int depth = 1;
  unsigned n_sccs = 0;

  basic_block dest = entry->dest;
  edge_iterator ei;
  int pre_num = 0;

  /* DFS process DEST.  */
find_loops:
  /* Assign preorder number and (positive == on-stack) depth.  */
  bb_data[dest->index].bb_to_pre = pre_num++;
  bb_data[dest->index].depth = depth;
  bb_data[dest->index].scc = -1;
  depth++;
  gcc_assert ((dest->flags & (is_header|visited)) == 0);
  dest->flags |= visited;
  ei = ei_start (dest->succs);
  while (!ei_end_p (ei))
    {
      /* Walking the edge clears any stale back-edge marking.  */
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
      if (bitmap_bit_p (exit_bbs, ei_edge (ei)->dest->index))
	/* Edges leaving the region are ignored.  */
	;
      else if (!(ei_edge (ei)->dest->flags & visited))
	{
	  ei_stack.quick_push (ei);
	  dest = ei_edge (ei)->dest;
	  /* DFS recurse on DEST.  */
	  goto find_loops;

ret_from_find_loops:
	  /* Return point of DFS recursion.  Propagate the header the
	     child discovered up to the parent.  */
	  ei = ei_stack.pop ();
	  dest = ei_edge (ei)->src;
	  int header = bb_data[ei_edge (ei)->dest->index].scc;
	  tag_header (dest->index, header, bb_data);
	  depth = bb_data[dest->index].depth + 1;
	}
      else
	{
	  if (bb_data[ei_edge (ei)->dest->index].depth > 0) /* on the stack */
	    {
	      /* Edge to a block on the DFS stack: a back edge, its
		 destination is an SCC header.  */
	      ei_edge (ei)->flags |= EDGE_DFS_BACK;
	      n_sccs++;
	      ei_edge (ei)->dest->flags |= is_header;
	      /* Construct the header's exit vector in the raw
		 bb_data storage.  */
	      ::new (&bb_data[ei_edge (ei)->dest->index].scc_exits)
		auto_vec<int, 2> ();
	      tag_header (dest->index, ei_edge (ei)->dest->index, bb_data);
	    }
	  else if (bb_data[ei_edge (ei)->dest->index].scc == -1)
	    /* Finished block not belonging to any SCC.  */
	    ;
	  else
	    {
	      int header = bb_data[ei_edge (ei)->dest->index].scc;
	      if (bb_data[header].depth > 0)
		tag_header (dest->index, header, bb_data);
	      else
		{
		  /* A re-entry into an existing loop.  Walk the header
		     chain upwards for one still on the stack.  */
		  /* ???  Need to mark is_header?  */
		  while (bb_data[header].scc != -1)
		    {
		      header = bb_data[header].scc;
		      if (bb_data[header].depth > 0)
			{
			  tag_header (dest->index, header, bb_data);
			  break;
			}
		    }
		}
	    }
	}
      ei_next (&ei);
    }
  /* All successors processed: record DEST in (reversed) postorder.  */
  rev_post_order[rev_post_order_num++] = dest->index;
  /* not on the stack anymore */
  bb_data[dest->index].depth = -bb_data[dest->index].depth;
  if (!ei_stack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_find_loops;

  /* Optimize for no SCCs found or !for_iteration.  */
  if (n_sccs == 0 || !for_iteration)
    {
      /* Clear the temporarily allocated flags.  */
      for (int i = 0; i < rev_post_order_num; ++i)
	BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
	  &= ~(is_header|visited);
      /* And swap elements: the array was filled in postorder, reverse
	 it in place to get the RPO.  */
      for (int i = 0; i < rev_post_order_num/2; ++i)
	std::swap (rev_post_order[i], rev_post_order[rev_post_order_num-i-1]);
      XDELETEVEC (bb_data);

      return rev_post_order_num;
    }

  /* Next find SCC exits, clear the visited flag and compute an upper bound
     for the edge stack below.  */
  unsigned edge_count = 0;
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      int bb = rev_post_order[i];
      BASIC_BLOCK_FOR_FN (fn, bb)->flags &= ~visited;
      edge e;
      FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fn, bb)->succs)
	{
	  if (bitmap_bit_p (exit_bbs, e->dest->index))
	    continue;
	  edge_count++;
	  /* if e is an exit from e->src, record it for
	     bb_data[e->src].scc.  */
	  int src_scc = e->src->index;
	  if (!(e->src->flags & is_header))
	    src_scc = bb_data[src_scc].scc;
	  if (src_scc == -1)
	    continue;
	  int dest_scc = e->dest->index;
	  if (!(e->dest->flags & is_header))
	    dest_scc = bb_data[dest_scc].scc;
	  if (src_scc == dest_scc)
	    continue;
	  /* When dest_scc is nested inside src_scc it's not an
	     exit.  */
	  int tem_dest_scc = dest_scc;
	  unsigned dest_scc_depth = 0;
	  while (tem_dest_scc != -1)
	    {
	      dest_scc_depth++;
	      if ((tem_dest_scc = bb_data[tem_dest_scc].scc) == src_scc)
		break;
	    }
	  if (tem_dest_scc != -1)
	    continue;
	  /* When src_scc is nested inside dest_scc record an
	     exit from the outermost SCC this edge exits.  */
	  int tem_src_scc = src_scc;
	  unsigned src_scc_depth = 0;
	  while (tem_src_scc != -1)
	    {
	      if (bb_data[tem_src_scc].scc == dest_scc)
		{
		  edge_count++;
		  bb_data[tem_src_scc].scc_exits.safe_push (e->dest->index);
		  break;
		}
	      tem_src_scc = bb_data[tem_src_scc].scc;
	      src_scc_depth++;
	    }
	  /* Else find the outermost SCC this edge exits (exits
	     from the inner SCCs are not important for the DFS
	     walk adjustment).  Do so by computing the common
	     ancestor SCC where the immediate child towards the source
	     SCC is the exited SCC.  */
	  if (tem_src_scc == -1)
	    {
	      edge_count++;
	      /* Equalize chain depths first, then ascend both chains
		 in lockstep until their parents meet.  */
	      while (src_scc_depth > dest_scc_depth)
		{
		  src_scc = bb_data[src_scc].scc;
		  src_scc_depth--;
		}
	      while (dest_scc_depth > src_scc_depth)
		{
		  dest_scc = bb_data[dest_scc].scc;
		  dest_scc_depth--;
		}
	      while (bb_data[src_scc].scc != bb_data[dest_scc].scc)
		{
		  src_scc = bb_data[src_scc].scc;
		  dest_scc = bb_data[dest_scc].scc;
		}
	      bb_data[src_scc].scc_exits.safe_push (e->dest->index);
	    }
	}
    }

  /* Now the second DFS walk to compute a RPO where the extent of SCCs
     is minimized thus SCC members are adjacent in the RPO array.
     This is done by performing a DFS walk computing RPO with first visiting
     extra direct edges from SCC entry to its exits.
     That simulates a DFS walk over the graph with SCCs collapsed and
     walking the SCCs themselves only when all outgoing edges from the
     SCCs have been visited.
     SCC_END[scc-header-index] is the position in the RPO array of the
     last member of the SCC.  */
  auto_vec<std::pair<basic_block, basic_block>, 20> estack (edge_count + 1);
  /* The RPO array is filled from the back this time.  */
  int idx = rev_post_order_num;
  basic_block edest;
  dest = entry->dest;

  /* DFS process DEST.  */
dfs_rpo:
  gcc_checking_assert ((dest->flags & visited) == 0);
  /* Verify we enter SCCs through the same header and SCC nesting appears
     the same.  */
  gcc_assert (bb_data[dest->index].scc == -1
	      || (BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc)->flags
		  & visited));
  dest->flags |= visited;
  bb_data[dest->index].scc_end = -1;
  if ((dest->flags & is_header)
      && !bb_data[dest->index].scc_exits.is_empty ())
    {
      /* Push the all SCC exits as outgoing edges from its header to
	 be visited first.
	 To process exits in the same relative order as in the first
	 DFS walk sort them after their destination PRE order index.  */
      gcc_sort_r (&bb_data[dest->index].scc_exits[0],
		  bb_data[dest->index].scc_exits.length (),
		  sizeof (int), cmp_edge_dest_pre, bb_data);
      /* Process edges in reverse to match previous DFS walk order.  */
      for (int i = bb_data[dest->index].scc_exits.length () - 1; i >= 0; --i)
	estack.quick_push (std::make_pair
	  (dest, BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc_exits[i])));
    }
  else
    {
      if (dest->flags & is_header)
	bb_data[dest->index].scc_end = idx - 1;
      /* Push the edge vector in reverse to match the iteration order
	 from the DFS walk above.  */
      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	  estack.quick_push (std::make_pair (dest,
					     EDGE_SUCC (dest, i)->dest));
    }
  /* Process the edges pushed for DEST (they sit on top of ESTACK with
     DEST as their recorded source).  */
  while (!estack.is_empty ()
	 && estack.last ().first == dest)
    {
      edest = estack.last ().second;
      if (!(edest->flags & visited))
	{
	  dest = edest;
	  /* DFS recurse on DEST.  */
	  goto dfs_rpo;

ret_from_dfs_rpo:
	  /* Return point of DFS recursion.  */
	  dest = estack.last ().first;
	}
      estack.pop ();
      /* If we processed all SCC exits from DEST mark the SCC end
	 since all RPO entries up to DEST itself will now belong
	 to its SCC.  The special-case of no SCC exits is already
	 dealt with above.  */
      if (dest->flags & is_header
	  /* When the last exit edge was processed mark the SCC end
	     and push the regular edges.  */
	  && bb_data[dest->index].scc_end == -1
	  && (estack.is_empty ()
	      || estack.last ().first != dest))
	{
	  bb_data[dest->index].scc_end = idx - 1;
	  /* Push the edge vector in reverse to match the iteration order
	     from the DFS walk above.  */
	  for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	    if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	      estack.quick_push (std::make_pair (dest,
						 EDGE_SUCC (dest, i)->dest));
	}
    }
  rev_post_order[--idx] = dest->index;
  if (!estack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_dfs_rpo;

  /* Each SCC extends are from the position of the header inside
     the RPO array up to RPO array index scc_end[header-index].  */
  if (toplevel_scc_extents)
    for (int i = 0; i < rev_post_order_num; i++)
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
	if (bb->flags & is_header)
	  {
	    toplevel_scc_extents->safe_push
	      (std::make_pair (i, bb_data[bb->index].scc_end));
	    /* Skip over the SCC body - only toplevel SCCs are
	       reported.  */
	    i = bb_data[bb->index].scc_end;
	  }
      }

  /* Clear the temporarily allocated flags and free memory.  Headers
     need their placement-new'ed exit vectors destroyed explicitly.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
      if (bb->flags & is_header)
	bb_data[bb->index].scc_exits.~scc_exit_vec_t ();
      bb->flags &= ~(visited|is_header);
    }

  XDELETEVEC (bb_data);

  return rev_post_order_num;
}
1479 :
1480 :
1481 :
1482 : /* Compute the depth first search order on the _reverse_ graph and
1483 : store it in the array DFS_ORDER, marking the nodes visited in VISITED.
1484 : Returns the number of nodes visited.
1485 :
1486 : The computation is split into three pieces:
1487 :
1488 : flow_dfs_compute_reverse_init () creates the necessary data
1489 : structures.
1490 :
1491 : flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1492 : structures. The block will start the search.
1493 :
1494 : flow_dfs_compute_reverse_execute () continues (or starts) the
1495 : search using the block on the top of the stack, stopping when the
1496 : stack is empty.
1497 :
1498 : flow_dfs_compute_reverse_finish () destroys the necessary data
1499 : structures.
1500 :
1501 : Thus, the user will probably call ..._init(), call ..._add_bb() to
1502 : add a beginning basic block to the stack, call ..._execute(),
1503 : possibly add another bb to the stack and again call ..._execute(),
1504 : ..., and finally call _finish(). */
1505 :
/* Initialize the data structures used for depth-first search on the
   reverse graph.  The stack starts out empty and no block is marked
   visited; seed the walk with add_bb before calling execute.  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  /* auto_sbitmap does not zero its bits; clear them explicitly.  */
  bitmap_clear (m_visited_blocks);
}
1518 :
1519 : /* Add the specified basic block to the top of the dfs data
1520 : structures. When the search continues, it will start at the
1521 : block. */
1522 :
1523 : void
1524 66004792 : depth_first_search::add_bb (basic_block bb)
1525 : {
1526 66004792 : m_stack.quick_push (bb);
1527 66004792 : bitmap_set_bit (m_visited_blocks, bb->index);
1528 66004792 : }
1529 :
1530 : /* Continue the depth-first search through the reverse graph starting with the
1531 : block at the stack's top and ending when the stack is empty. Visited nodes
1532 : are marked. Returns an unvisited basic block, or NULL if there is none
1533 : available. */
1534 :
1535 : basic_block
1536 5526398 : depth_first_search::execute (basic_block last_unvisited)
1537 : {
1538 5526398 : basic_block bb;
1539 5526398 : edge e;
1540 5526398 : edge_iterator ei;
1541 :
1542 71531190 : while (!m_stack.is_empty ())
1543 : {
1544 66004792 : bb = m_stack.pop ();
1545 :
1546 : /* Perform depth-first search on adjacent vertices. */
1547 147417275 : FOR_EACH_EDGE (e, ei, bb->preds)
1548 81412483 : if (!bitmap_bit_p (m_visited_blocks, e->src->index))
1549 60478394 : add_bb (e->src);
1550 : }
1551 :
1552 : /* Determine if there are unvisited basic blocks. */
1553 71531190 : FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
1554 66029978 : if (!bitmap_bit_p (m_visited_blocks, bb->index))
1555 : return bb;
1556 :
1557 : return NULL;
1558 : }
1559 :
1560 : /* Performs dfs search from BB over vertices satisfying PREDICATE;
1561 : if REVERSE, go against direction of edges. Returns number of blocks
1562 : found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
1563 : int
1564 221381539 : dfs_enumerate_from (basic_block bb, int reverse,
1565 : bool (*predicate) (const_basic_block, const void *),
1566 : basic_block *rslt, int rslt_max, const void *data)
1567 : {
1568 221381539 : basic_block *st, lbb;
1569 221381539 : int sp = 0, tv = 0;
1570 :
1571 221381539 : auto_bb_flag visited (cfun);
1572 :
1573 : #define MARK_VISITED(BB) ((BB)->flags |= visited)
1574 : #define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
1575 : #define VISITED_P(BB) (((BB)->flags & visited) != 0)
1576 :
1577 221381539 : st = XNEWVEC (basic_block, rslt_max);
1578 221381539 : rslt[tv++] = st[sp++] = bb;
1579 221381539 : MARK_VISITED (bb);
1580 1448831131 : while (sp)
1581 : {
1582 1227449592 : edge e;
1583 1227449592 : edge_iterator ei;
1584 1227449592 : lbb = st[--sp];
1585 1227449592 : if (reverse)
1586 : {
1587 3005835341 : FOR_EACH_EDGE (e, ei, lbb->preds)
1588 1780098702 : if (!VISITED_P (e->src) && predicate (e->src, data))
1589 : {
1590 1005291190 : gcc_assert (tv != rslt_max);
1591 1005291190 : rslt[tv++] = st[sp++] = e->src;
1592 1005291190 : MARK_VISITED (e->src);
1593 : }
1594 : }
1595 : else
1596 : {
1597 4185384 : FOR_EACH_EDGE (e, ei, lbb->succs)
1598 2472431 : if (!VISITED_P (e->dest) && predicate (e->dest, data))
1599 : {
1600 776863 : gcc_assert (tv != rslt_max);
1601 776863 : rslt[tv++] = st[sp++] = e->dest;
1602 776863 : MARK_VISITED (e->dest);
1603 : }
1604 : }
1605 : }
1606 221381539 : free (st);
1607 1448831131 : for (sp = 0; sp < tv; sp++)
1608 1227449592 : UNMARK_VISITED (rslt[sp]);
1609 221381539 : return tv;
1610 : #undef MARK_VISITED
1611 : #undef UNMARK_VISITED
1612 : #undef VISITED_P
1613 221381539 : }
1614 :
1615 :
1616 : /* Compute dominance frontiers, ala Harvey, Ferrante, et al.
1617 :
1618 : This algorithm can be found in Timothy Harvey's PhD thesis, at
1619 : http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
1620 : dominance algorithms.
1621 :
1622 : First, we identify each join point, j (any node with more than one
1623 : incoming edge is a join point).
1624 :
1625 : We then examine each predecessor, p, of j and walk up the dominator tree
1626 : starting at p.
1627 :
1628 : We stop the walk when we reach j's immediate dominator - j is in the
1629 : dominance frontier of each of the nodes in the walk, except for j's
1630 : immediate dominator. Intuitively, all of the rest of j's dominators are
1631 : shared by j's predecessors as well.
1632 : Since they dominate j, they will not have j in their dominance frontiers.
1633 :
1634 : The number of nodes touched by this algorithm is equal to the size
1635 : of the dominance frontiers, no more, no less.
1636 : */
1637 :
1638 : void
1639 11921658 : compute_dominance_frontiers (bitmap_head *frontiers)
1640 : {
1641 11921658 : timevar_push (TV_DOM_FRONTIERS);
1642 :
1643 11921658 : edge p;
1644 11921658 : edge_iterator ei;
1645 11921658 : basic_block b;
1646 190963202 : FOR_EACH_BB_FN (b, cfun)
1647 : {
1648 220811988 : if (EDGE_COUNT (b->preds) >= 2)
1649 : {
1650 41770444 : basic_block domsb = get_immediate_dominator (CDI_DOMINATORS, b);
1651 157212561 : FOR_EACH_EDGE (p, ei, b->preds)
1652 : {
1653 115442117 : basic_block runner = p->src;
1654 115442117 : if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1655 9085 : continue;
1656 :
1657 320778922 : while (runner != domsb)
1658 : {
1659 235413774 : if (!bitmap_set_bit (&frontiers[runner->index], b->index))
1660 : break;
1661 205345890 : runner = get_immediate_dominator (CDI_DOMINATORS, runner);
1662 : }
1663 : }
1664 : }
1665 : }
1666 :
1667 11921658 : timevar_pop (TV_DOM_FRONTIERS);
1668 11921658 : }
1669 :
1670 : /* Given a set of blocks with variable definitions (DEF_BLOCKS),
1671 : return a bitmap with all the blocks in the iterated dominance
1672 : frontier of the blocks in DEF_BLOCKS. DFS contains dominance
1673 : frontier information as returned by compute_dominance_frontiers.
1674 :
1675 : The resulting set of blocks are the potential sites where PHI nodes
1676 : are needed. The caller is responsible for freeing the memory
1677 : allocated for the return value. */
1678 :
1679 : bitmap
1680 23837129 : compute_idf (bitmap def_blocks, bitmap_head *dfs)
1681 : {
1682 23837129 : bitmap_iterator bi, bi2;
1683 23837129 : unsigned bb_index, i;
1684 23837129 : bitmap phi_insertion_points;
1685 :
1686 23837129 : phi_insertion_points = BITMAP_ALLOC (NULL);
1687 :
1688 : /* The initial workset is the DEF_BLOCKs, process that first,
1689 : seeding phi_insertion_points as the start of the worklist
1690 : for the iteration which then also serves as a visited set. */
1691 88146204 : EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi2)
1692 : {
1693 : /* Since the registration of NEW -> OLD name mappings is done
1694 : separately from the call to update_ssa, when updating the SSA
1695 : form, the basic blocks where new and/or old names are defined
1696 : may have disappeared by CFG cleanup calls. In this case,
1697 : we may pull a non-existing block from the work stack. */
1698 64309075 : gcc_checking_assert (bb_index
1699 : < (unsigned) last_basic_block_for_fn (cfun));
1700 :
1701 : /* The population counts of the dominance frontiers is low
1702 : compared to that of phi_insertion_points which approaches
1703 : the IDF and of work_set which is at most that of the IDF
1704 : as well. That makes iterating over the DFS bitmap preferential
1705 : to whole bitmap operations involving also phi_insertion_points. */
1706 130597184 : EXECUTE_IF_SET_IN_BITMAP (&dfs[bb_index], 0, i, bi)
1707 66288109 : bitmap_set_bit (phi_insertion_points, i);
1708 : }
1709 :
1710 : /* Seed the work set with the initial phi_insertion_points. */
1711 23837129 : auto_vec<int, 64> work_set (n_basic_blocks_for_fn (cfun));
1712 57503406 : EXECUTE_IF_SET_IN_BITMAP (phi_insertion_points, 0, i, bi)
1713 33666277 : work_set.quick_push (i);
1714 :
1715 : /* Pop a block off the workset, add every block that appears in
1716 : the original block's DF that we have not already processed to
1717 : the workset. Iterate until the workset is empty. Blocks
1718 : which are added to the workset are potential sites for
1719 : PHI nodes. */
1720 73379335 : while (!work_set.is_empty ())
1721 : {
1722 49542206 : bb_index = work_set.pop ();
1723 49542206 : gcc_checking_assert (bb_index
1724 : < (unsigned) last_basic_block_for_fn (cfun));
1725 104728753 : EXECUTE_IF_SET_IN_BITMAP (&dfs[bb_index], 0, i, bi)
1726 55186547 : if (bitmap_set_bit (phi_insertion_points, i))
1727 15875929 : work_set.quick_push (i);
1728 : }
1729 :
1730 23837129 : return phi_insertion_points;
1731 23837129 : }
1732 :
1733 : /* Intersection and union of preds/succs for sbitmap based data flow
1734 : solvers. All four functions defined below take the same arguments:
1735 : B is the basic block to perform the operation for. DST is the
1736 : target sbitmap, i.e. the result. SRC is an sbitmap vector of size
1737 : last_basic_block so that it can be indexed with basic block indices.
1738 : DST may be (but does not have to be) SRC[B->index]. */
1739 :
1740 : /* Set the bitmap DST to the intersection of SRC of successors of
1741 : basic block B. */
1742 :
1743 : void
1744 10507374 : bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
1745 : {
1746 10507374 : unsigned int set_size = dst->size;
1747 10507374 : edge e;
1748 10507374 : unsigned ix;
1749 :
1750 10510340 : for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1751 : {
1752 10432041 : e = EDGE_SUCC (b, ix);
1753 10432041 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1754 2966 : continue;
1755 :
1756 10429075 : bitmap_copy (dst, src[e->dest->index]);
1757 10429075 : break;
1758 : }
1759 :
1760 10507374 : if (e == 0)
1761 75333 : bitmap_ones (dst);
1762 : else
1763 16871654 : for (++ix; ix < EDGE_COUNT (b->succs); ix++)
1764 : {
1765 6439613 : unsigned int i;
1766 6439613 : SBITMAP_ELT_TYPE *p, *r;
1767 :
1768 6439613 : e = EDGE_SUCC (b, ix);
1769 6439613 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1770 0 : continue;
1771 :
1772 6439613 : p = src[e->dest->index]->elms;
1773 6439613 : r = dst->elms;
1774 25065205 : for (i = 0; i < set_size; i++)
1775 18625592 : *r++ &= *p++;
1776 : }
1777 10507374 : }
1778 :
1779 : /* Set the bitmap DST to the intersection of SRC of predecessors of
1780 : basic block B. */
1781 :
1782 : void
1783 56304190 : bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
1784 : {
1785 56304190 : unsigned int set_size = dst->size;
1786 56304190 : edge e;
1787 56304190 : unsigned ix;
1788 :
1789 56304190 : for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1790 : {
1791 56304190 : e = EDGE_PRED (b, ix);
1792 56304190 : if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1793 0 : continue;
1794 :
1795 56304190 : bitmap_copy (dst, src[e->src->index]);
1796 56304190 : break;
1797 : }
1798 :
1799 56304190 : if (e == 0)
1800 0 : bitmap_ones (dst);
1801 : else
1802 138943880 : for (++ix; ix < EDGE_COUNT (b->preds); ix++)
1803 : {
1804 82639690 : unsigned int i;
1805 82639690 : SBITMAP_ELT_TYPE *p, *r;
1806 :
1807 82639690 : e = EDGE_PRED (b, ix);
1808 82639690 : if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1809 0 : continue;
1810 :
1811 82639690 : p = src[e->src->index]->elms;
1812 82639690 : r = dst->elms;
1813 741512372 : for (i = 0; i < set_size; i++)
1814 658872682 : *r++ &= *p++;
1815 : }
1816 56304190 : }
1817 :
1818 : /* Set the bitmap DST to the union of SRC of successors of
1819 : basic block B. */
1820 :
1821 : void
1822 0 : bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
1823 : {
1824 0 : unsigned int set_size = dst->size;
1825 0 : edge e;
1826 0 : unsigned ix;
1827 :
1828 0 : for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1829 : {
1830 0 : e = EDGE_SUCC (b, ix);
1831 0 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1832 0 : continue;
1833 :
1834 0 : bitmap_copy (dst, src[e->dest->index]);
1835 0 : break;
1836 : }
1837 :
1838 0 : if (ix == EDGE_COUNT (b->succs))
1839 0 : bitmap_clear (dst);
1840 : else
1841 0 : for (ix++; ix < EDGE_COUNT (b->succs); ix++)
1842 : {
1843 0 : unsigned int i;
1844 0 : SBITMAP_ELT_TYPE *p, *r;
1845 :
1846 0 : e = EDGE_SUCC (b, ix);
1847 0 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1848 0 : continue;
1849 :
1850 0 : p = src[e->dest->index]->elms;
1851 0 : r = dst->elms;
1852 0 : for (i = 0; i < set_size; i++)
1853 0 : *r++ |= *p++;
1854 : }
1855 0 : }
1856 :
1857 : /* Set the bitmap DST to the union of SRC of predecessors of
1858 : basic block B. */
1859 :
1860 : void
1861 0 : bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
1862 : {
1863 0 : unsigned int set_size = dst->size;
1864 0 : edge e;
1865 0 : unsigned ix;
1866 :
1867 0 : for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1868 : {
1869 0 : e = EDGE_PRED (b, ix);
1870 0 : if (e->src== ENTRY_BLOCK_PTR_FOR_FN (cfun))
1871 0 : continue;
1872 :
1873 0 : bitmap_copy (dst, src[e->src->index]);
1874 0 : break;
1875 : }
1876 :
1877 0 : if (ix == EDGE_COUNT (b->preds))
1878 0 : bitmap_clear (dst);
1879 : else
1880 0 : for (ix++; ix < EDGE_COUNT (b->preds); ix++)
1881 : {
1882 0 : unsigned int i;
1883 0 : SBITMAP_ELT_TYPE *p, *r;
1884 :
1885 0 : e = EDGE_PRED (b, ix);
1886 0 : if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1887 0 : continue;
1888 :
1889 0 : p = src[e->src->index]->elms;
1890 0 : r = dst->elms;
1891 0 : for (i = 0; i < set_size; i++)
1892 0 : *r++ |= *p++;
1893 : }
1894 0 : }
1895 :
1896 : /* Returns the list of basic blocks in the function in an order that guarantees
1897 : that if a block X has just a single predecessor Y, then Y is after X in the
1898 : ordering. */
1899 :
1900 : basic_block *
1901 7590512 : single_pred_before_succ_order (void)
1902 : {
1903 7590512 : basic_block x, y;
1904 7590512 : basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
1905 7590512 : unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
1906 7590512 : unsigned np, i;
1907 7590512 : auto_sbitmap visited (last_basic_block_for_fn (cfun));
1908 :
1909 : #define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
1910 : #define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
1911 :
1912 7590512 : bitmap_clear (visited);
1913 :
1914 7590512 : MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
1915 70408788 : FOR_EACH_BB_FN (x, cfun)
1916 : {
1917 62818276 : if (VISITED_P (x))
1918 1890704 : continue;
1919 :
1920 : /* Walk the predecessors of x as long as they have precisely one
1921 : predecessor and add them to the list, so that they get stored
1922 : after x. */
1923 1890704 : for (y = x, np = 1;
1924 110318238 : single_pred_p (y) && !VISITED_P (single_pred (y));
1925 1890704 : y = single_pred (y))
1926 1890704 : np++;
1927 60927572 : for (y = x, i = n - np;
1928 110318238 : single_pred_p (y) && !VISITED_P (single_pred (y));
1929 1890704 : y = single_pred (y), i++)
1930 : {
1931 1890704 : order[i] = y;
1932 1890704 : MARK_VISITED (y);
1933 : }
1934 60927572 : order[i] = y;
1935 60927572 : MARK_VISITED (y);
1936 :
1937 60927572 : gcc_assert (i == n - 1);
1938 : n -= np;
1939 : }
1940 :
1941 7590512 : gcc_assert (n == 0);
1942 7590512 : return order;
1943 :
1944 : #undef MARK_VISITED
1945 : #undef VISITED_P
1946 7590512 : }
1947 :
1948 : /* Ignoring loop backedges, if BB has precisely one incoming edge then
1949 : return that edge. Otherwise return NULL.
1950 :
1951 : When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
1952 : as executable. */
1953 :
1954 : edge
1955 97123919 : single_pred_edge_ignoring_loop_edges (basic_block bb,
1956 : bool ignore_not_executable)
1957 : {
1958 97123919 : edge retval = NULL;
1959 97123919 : edge e;
1960 97123919 : edge_iterator ei;
1961 :
1962 191028253 : FOR_EACH_EDGE (e, ei, bb->preds)
1963 : {
1964 : /* A loop back edge can be identified by the destination of
1965 : the edge dominating the source of the edge. */
1966 109080113 : if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1967 5050473 : continue;
1968 :
1969 : /* We can safely ignore edges that are not executable. */
1970 104029640 : if (ignore_not_executable
1971 25321359 : && (e->flags & EDGE_EXECUTABLE) == 0)
1972 100145 : continue;
1973 :
1974 : /* If we have already seen a non-loop edge, then we must have
1975 : multiple incoming non-loop edges and thus we return NULL. */
1976 103929495 : if (retval)
1977 : return NULL;
1978 :
1979 : /* This is the first non-loop incoming edge we have found. Record
1980 : it. */
1981 : retval = e;
1982 : }
1983 :
1984 : return retval;
1985 : }
|