Line data Source code
1 : /* Control flow graph analysis code for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* This file contains various simple utilities to analyze the CFG. */
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "backend.h"
26 : #include "cfghooks.h"
27 : #include "timevar.h"
28 : #include "cfganal.h"
29 : #include "cfgloop.h"
30 : #include "diagnostic.h"
31 :
namespace {
/* Store the data structures necessary for depth-first search.
   Used by connect_infinite_loops_to_exit to walk the reverse graph
   (the member function definitions are not in this fragment).  */
class depth_first_search
{
public:
  depth_first_search ();

  /* Run (or resume) the search; presumably returns an unvisited block
     or NULL once every block has been seen -- confirm against the
     out-of-line definition.  */
  basic_block execute (basic_block);
  /* Register basic block BB as a starting point for the search.  */
  void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}
50 :
51 : /* Mark the back edges in DFS traversal.
52 : Return nonzero if a loop (natural or otherwise) is present.
53 : Inspired by Depth_First_Search_PP described in:
54 :
55 : Advanced Compiler Design and Implementation
56 : Steven Muchnick
57 : Morgan Kaufmann, 1997
58 :
59 : and heavily borrowed from pre_and_rev_post_order_compute. */
60 :
bool
mark_dfs_back_edges (struct function *fun)
{
  /* Preorder and postorder numbers, indexed by block index; a zero
     postorder entry means the block's DFS subtree is still open.  */
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (fun));
  post = XCNEWVEC (int, last_basic_block_for_fn (fun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      /* Every edge we look at gets its flag cleared first; it is
	 re-set below only if it turns out to be a back edge.  */
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fun) && ! bitmap_bit_p (visited,
								 dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited.  The edge is a back edge iff
	     DEST is an ancestor of SRC in the DFS tree: DEST was
	     entered before SRC (pre comparison) and its subtree is
	     not yet closed (post still zero).  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (fun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  /* When we are about to pop SRC's last edge, SRC's subtree
	     is complete and it receives its postorder number.  */
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}
138 :
/* Overload operating on the current function.  */

bool
mark_dfs_back_edges (void)
{
  return mark_dfs_back_edges (cfun);
}
144 :
/* Verify that EDGE_DFS_BACK is up to date for FUN; fail with an
   internal error if the recomputed back edges disagree.  */
146 :
147 : void
148 0 : verify_marked_backedges (struct function *fun)
149 : {
150 0 : auto_edge_flag saved_dfs_back (fun);
151 0 : basic_block bb;
152 0 : edge e;
153 0 : edge_iterator ei;
154 :
155 : // Save all the back edges...
156 0 : FOR_EACH_BB_FN (bb, fun)
157 0 : FOR_EACH_EDGE (e, ei, bb->succs)
158 : {
159 0 : if (e->flags & EDGE_DFS_BACK)
160 : {
161 0 : e->flags |= saved_dfs_back;
162 0 : e->flags &= ~EDGE_DFS_BACK;
163 : }
164 : }
165 :
166 : // ...and verify that recalculating them agrees with the saved ones.
167 0 : mark_dfs_back_edges ();
168 0 : FOR_EACH_BB_FN (bb, fun)
169 0 : FOR_EACH_EDGE (e, ei, bb->succs)
170 : {
171 0 : if (((e->flags & EDGE_DFS_BACK) != 0)
172 0 : != ((e->flags & saved_dfs_back) != 0))
173 0 : internal_error ("%<verify_marked_backedges%> failed");
174 :
175 0 : e->flags &= ~saved_dfs_back;
176 : }
177 0 : }
178 :
179 : /* Find unreachable blocks. An unreachable block will have 0 in
180 : the reachable bit in block->flags. A nonzero value indicates the
181 : block is reachable. */
182 :
183 : void
184 77207590 : find_unreachable_blocks (void)
185 : {
186 77207590 : edge e;
187 77207590 : edge_iterator ei;
188 77207590 : basic_block *tos, *worklist, bb;
189 :
190 77207590 : tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
191 :
192 : /* Clear all the reachability flags. */
193 :
194 1085771338 : FOR_EACH_BB_FN (bb, cfun)
195 1008563748 : bb->flags &= ~BB_REACHABLE;
196 :
197 : /* Add our starting points to the worklist. Almost always there will
198 : be only one. It isn't inconceivable that we might one day directly
199 : support Fortran alternate entry points. */
200 :
201 154415180 : FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
202 : {
203 77207590 : *tos++ = e->dest;
204 :
205 : /* Mark the block reachable. */
206 77207590 : e->dest->flags |= BB_REACHABLE;
207 : }
208 :
209 : /* Iterate: find everything reachable from what we've already seen. */
210 :
211 1091854185 : while (tos != worklist)
212 : {
213 1014646595 : basic_block b = *--tos;
214 :
215 2480147347 : FOR_EACH_EDGE (e, ei, b->succs)
216 : {
217 1465500752 : basic_block dest = e->dest;
218 :
219 1465500752 : if (!(dest->flags & BB_REACHABLE))
220 : {
221 937439005 : *tos++ = dest;
222 937439005 : dest->flags |= BB_REACHABLE;
223 : }
224 : }
225 : }
226 :
227 77207590 : free (worklist);
228 77207590 : }
229 :
230 : /* Verify that there are no unreachable blocks in the current function. */
231 :
void
verify_no_unreachable_blocks (void)
{
  /* Recompute BB_REACHABLE for every block, then require it to be
     set everywhere.  */
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}
241 :
242 :
243 : /* Functions to access an edge list with a vector representation.
244 : Enough data is kept such that given an index number, the
245 : pred and succ that edge represents can be determined, or
246 : given a pred and a succ, its index number can be returned.
247 : This allows algorithms which consume a lot of memory to
248 : represent the normally full matrix of edge (pred,succ) with a
249 : single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
250 : wasted space in the client code due to sparse flow graphs. */
251 :
252 : /* This functions initializes the edge list. Basically the entire
253 : flowgraph is processed, and all edges are assigned a number,
254 : and the data structure is filled in. */
255 :
256 : struct edge_list *
257 495611 : create_edge_list (void)
258 : {
259 495611 : struct edge_list *elist;
260 495611 : edge e;
261 495611 : int num_edges;
262 495611 : basic_block bb;
263 495611 : edge_iterator ei;
264 :
265 : /* Determine the number of edges in the flow graph by counting successor
266 : edges on each basic block. */
267 495611 : num_edges = 0;
268 10659635 : FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
269 : EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
270 : {
271 20327445 : num_edges += EDGE_COUNT (bb->succs);
272 : }
273 :
274 495611 : elist = XNEW (struct edge_list);
275 495611 : elist->num_edges = num_edges;
276 495611 : elist->index_to_edge = XNEWVEC (edge, num_edges);
277 :
278 495611 : num_edges = 0;
279 :
280 : /* Follow successors of blocks, and register these edges. */
281 10659635 : FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
282 : EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
283 25430555 : FOR_EACH_EDGE (e, ei, bb->succs)
284 15266531 : elist->index_to_edge[num_edges++] = e;
285 :
286 495611 : return elist;
287 : }
288 :
289 : /* This function free's memory associated with an edge list. */
290 :
291 : void
292 495611 : free_edge_list (struct edge_list *elist)
293 : {
294 495611 : if (elist)
295 : {
296 495611 : free (elist->index_to_edge);
297 495611 : free (elist);
298 : }
299 495611 : }
300 :
301 : /* This function provides debug output showing an edge list. */
302 :
303 : DEBUG_FUNCTION void
304 0 : print_edge_list (FILE *f, struct edge_list *elist)
305 : {
306 0 : int x;
307 :
308 0 : fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
309 0 : n_basic_blocks_for_fn (cfun), elist->num_edges);
310 :
311 0 : for (x = 0; x < elist->num_edges; x++)
312 : {
313 0 : fprintf (f, " %-4d - edge(", x);
314 0 : if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
315 0 : fprintf (f, "entry,");
316 : else
317 0 : fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
318 :
319 0 : if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
320 0 : fprintf (f, "exit)\n");
321 : else
322 0 : fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
323 : }
324 0 : }
325 :
326 : /* This function provides an internal consistency check of an edge list,
327 : verifying that all edges are present, and that there are no
328 : extra edges. */
329 :
DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  /* Pass 1: every edge in the CFG must have an index, and that index
     must map back to the same (pred, succ) pair.  Problems are
     reported to F rather than aborting.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  This is an expensive check!
     Pass 2 iterates over all (P, S) block pairs and requires the list
     and the CFG to agree on whether an edge exists.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	/* Look for the edge in both the successor vector of P and the
	   predecessor vector of S -- either is evidence it exists.  */
	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
394 :
395 :
396 : /* Functions to compute control dependences. */
397 :
398 : /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  /* The entry block is silently ignored; the exit block must never
     appear here.  */
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (&control_dependence_map[bb->index], edge_index);
}
408 :
409 : /* Clear all control dependences for block BB. */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  /* Reset BB's row in the map; the bitmap itself stays allocated on
     the class's obstack.  */
  bitmap_clear (&control_dependence_map[bb->index]);
}
415 :
/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the recorded edge list, ala Morgan, Section 3.6.  */
418 :
void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Walk up the postdominator tree starting at the edge's destination;
     every block visited strictly before reaching the immediate
     postdominator of the edge's source is control dependent on the
     edge.  */
  ending_block = get_immediate_dominator (CDI_POST_DOMINATORS,
					  get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = get_immediate_dominator (CDI_POST_DOMINATORS,
						current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}
437 :
/* Record all blocks' control dependences on all edges of the current
   function's CFG, ala Morgan, Section 3.6.  */
440 :
control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	/* For abnormal edges, we don't make current_block control
	   dependent because instructions that throw are always necessary
	   anyway.  */
	if (e->flags & EDGE_ABNORMAL)
	  {
	    /* Keep NUM_EDGES in sync with the number of recorded
	       edges; it bounds the find_control_dependence loop
	       below.  */
	    num_edges--;
	    continue;
	  }
	/* Edges are stored as (source index, destination index)
	   pairs, not as edge pointers.  */
	m_el.quick_push (std::make_pair (e->src->index, e->dest->index));
      }

  /* One dependence bitmap per basic block, all allocated on a private
     obstack released by the destructor.  */
  bitmap_obstack_initialize (&m_bitmaps);
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  control_dependence_map.quick_grow_cleared (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    bitmap_initialize (&control_dependence_map[i], &m_bitmaps);
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}
479 :
480 : /* Free control dependences and the associated edge list. */
481 :
control_dependences::~control_dependences ()
{
  /* The individual bitmaps live on M_BITMAPS, so releasing the
     obstack frees them all at once.  */
  control_dependence_map.release ();
  m_el.release ();
  bitmap_obstack_release (&m_bitmaps);
}
488 :
489 : /* Returns the bitmap of edges the basic-block I is dependent on. */
490 :
bitmap
control_dependences::get_edges_dependent_on (int i)
{
  /* Row I of the map: the set of edge indices block I depends on.  */
  return &control_dependence_map[i];
}
496 :
497 : /* Returns the edge source with index I from the edge list. */
498 :
499 : basic_block
500 142847969 : control_dependences::get_edge_src (int i)
501 : {
502 142847969 : return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
503 : }
504 :
505 : /* Returns the edge destination with index I from the edge list. */
506 :
basic_block
control_dependences::get_edge_dest (int i)
{
  /* M_EL stores (src index, dest index) pairs; translate back to a
     block.  */
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}
512 :
513 :
514 : /* Given PRED and SUCC blocks, return the edge which connects the blocks.
515 : If no such edge exists, return NULL. */
516 :
edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  /* Scan whichever of the two edge vectors is shorter.  */
  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}
538 :
539 : /* This routine will determine what, if any, edge there is between
540 : a specified predecessor and successor. */
541 :
542 : int
543 38 : find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
544 : {
545 38 : int x;
546 :
547 332 : for (x = 0; x < NUM_EDGES (edge_list); x++)
548 332 : if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
549 60 : && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
550 : return x;
551 :
552 : return (EDGE_INDEX_NO_EDGE);
553 : }
554 :
555 : /* This routine will remove any fake predecessor edges for a basic block.
556 : When the edge is removed, it is also removed from whatever successor
557 : list it is in. */
558 :
559 : static void
560 6979423 : remove_fake_predecessors (basic_block bb)
561 : {
562 6979423 : edge e;
563 6979423 : edge_iterator ei;
564 :
565 18219832 : for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
566 : {
567 11240409 : if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
568 4238126 : remove_edge (e);
569 : else
570 7002283 : ei_next (&ei);
571 : }
572 6979423 : }
573 :
574 : /* This routine will remove all fake edges from the flow graph. If
575 : we remove all fake successors, it will automatically remove all
576 : fake predecessors. */
577 :
void
remove_fake_edges (void)
{
  basic_block bb;

  /* Walk every block except the entry block (which can have no
     predecessors) and strip its fake predecessor edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}
586 :
587 : /* This routine will remove all fake edges to the EXIT_BLOCK. */
588 :
void
remove_fake_exit_edges (void)
{
  /* Only the exit block's predecessors need scrubbing here.  */
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}
594 :
595 :
596 : /* This function will add a fake edge between any block which has no
597 : successors, and the exit block. Some data flow equations require these
598 : edges to exist. */
599 :
void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  /* Any block with no successors (e.g. ending in a noreturn call)
     gets a fake edge to the exit block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}
609 :
610 : /* This function adds a fake edge between any noreturn block and
611 : infinite loops to the exit block. Some optimizations require a path
612 : from each node to the exit node.
613 :
614 : See also Morgan, Figure 3.10, pp. 82-83.
615 :
616 : The current implementation is ugly, not attempting to minimize the
617 : number of inserted fake edges. To reduce the number of fake edges
618 : to insert, add fake edges from _innermost_ loops containing only
619 : nodes not reachable from the exit block. */
620 :
void
connect_infinite_loops_to_exit (void)
{
  /* First add fake exits to noreturn blocks, this is required to
     discover only truly infinite loops below.  */
  add_noreturn_fake_exit_edges ();

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  Each
     iteration connects one dead end inside an infinite loop to the
     exit block, then resumes the reverse search from there.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      /* The fake edge can never be taken, so give it zero
	 probability.  */
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}
648 :
649 : /* Compute reverse top sort order. This is computing a post order
650 : numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then
651 : ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is
652 : true, unreachable blocks are deleted. */
653 :
int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    /* A block without successors is numbered immediately.  */
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  /* SRC receives its postorder number once its last outgoing
	     edge is about to be popped, i.e. when all successors have
	     been processed.  */
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    /* ENTRY and EXIT were not numbered but are still counted in
       n_basic_blocks_for_fn, hence the + 2 for the comparison
       below.  */
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  /* Fetch the successor first; deleting B invalidates it.  */
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}
741 :
742 :
743 : /* Helper routine for inverted_rev_post_order_compute
744 : flow_dfs_compute_reverse_execute, and the reverse-CFG
depth first search in dominance.cc.
746 : BB has to belong to a region of CFG
747 : unreachable by inverted traversal from the exit.
748 : i.e. there's no control flow path from ENTRY to EXIT
749 : that contains this BB.
750 : This can happen in two cases - if there's an infinite loop
751 : or if there's a block that has no successor
752 : (call to a function with no return).
753 : Some RTL passes deal with this condition by
754 : calling connect_infinite_loops_to_exit () and/or
755 : add_noreturn_fake_exit_edges ().
756 : However, those methods involve modifying the CFG itself
757 : which may not be desirable.
758 : Hence, we deal with the infinite loop/no return cases
759 : by identifying a unique basic block that can reach all blocks
760 : in such a region by inverted traversal.
761 : This function returns a basic block that guarantees
762 : that all blocks in the region are reachable
763 : by starting an inverted traversal from the returned block. */
764 :
basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      /* A block with no successors is the dead end we want.  */
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      /* bitmap_set_bit returns false if the bit was already set:
	 we have walked into a cycle without finding a dead end, so
	 fall back to the most recently reached block BB.  */
      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  /* Prefer a loop exit edge; otherwise take the first
	     successor.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }
}
797 :
798 :
799 : /* Compute the reverse top sort order of the inverted CFG
800 : i.e. starting from the exit block and following the edges backward
801 : (from successors to predecessors).
802 : This ordering can be used for forward dataflow problems among others.
803 :
804 : Optionally if START_POINTS is specified, start from exit block and all
805 : basic blocks in START_POINTS. This is used by CD-DCE.
806 :
807 : This function assumes that all blocks in the CFG are reachable
808 : from the ENTRY (but not necessarily from EXIT).
809 :
810 : If there's an infinite loop,
811 : a simple inverted traversal starting from the blocks
812 : with no successors can't visit all blocks.
813 : To solve this problem, we first do inverted traversal
814 : starting from the blocks with no successor.
815 : And if there's any block left that's not visited by the regular
816 : inverted traversal from EXIT,
817 : those blocks are in such problematic region.
818 : Among those, we find one block that has
819 : any visited predecessor (which is an entry into such a region),
820 : and start looking for a "dead end" from that block
821 : and do another inverted traversal from that block. */
822 :
int
inverted_rev_post_order_compute (struct function *fn,
				 int *rev_post_order,
				 sbitmap *start_points)
{
  basic_block bb;

  /* REV_POST_ORDER is filled from the back towards the front.  */
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      /* Seed the traversal from the caller-supplied blocks plus the
	 exit block.  */
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the predecessor.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		rev_post_order[rev_post_order_num--] = pred->index;
	    }
	  else
	    {
	      /* BB is numbered when its last incoming edge is about
		 to be popped, i.e. all predecessors are done.  */
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		rev_post_order[rev_post_order_num--] = bb->index;

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    /* BB is an entry into an unvisited region; restart
		       the traversal from a dead end reachable from it.  */
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case the below while fires is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
  return n_basic_blocks_for_fn (fn);
}
965 :
966 : /* Compute the depth first search order of FN and store in the array
967 : PRE_ORDER if nonzero. If REV_POST_ORDER is nonzero, return the
968 : reverse completion number for each node. Returns the number of nodes
969 : visited. A depth first search tries to get as far away from the starting
970 : point as quickly as possible.
971 :
972 : In case the function has unreachable blocks the number of nodes
973 : visited does not include them.
974 :
   pre_order is really a preorder numbering of the graph.
976 : rev_post_order is really a reverse postorder numbering of the graph. */
977 :
978 : int
979 99000177 : pre_and_rev_post_order_compute_fn (struct function *fn,
980 : int *pre_order, int *rev_post_order,
981 : bool include_entry_exit)
982 : {
983 99000177 : int pre_order_num = 0;
984 99000177 : int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;
985 :
986 : /* Allocate stack for back-tracking up CFG. */
987 99000177 : auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);
988 :
989 99000177 : if (include_entry_exit)
990 : {
991 37762738 : if (pre_order)
992 0 : pre_order[pre_order_num] = ENTRY_BLOCK;
993 37762738 : pre_order_num++;
994 37762738 : if (rev_post_order)
995 37762738 : rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
996 : }
997 : else
998 61237439 : rev_post_order_num -= NUM_FIXED_BLOCKS;
999 :
1000 : /* BB flag to track nodes that have been visited. */
1001 99000177 : auto_bb_flag visited (fn);
1002 :
1003 : /* Push the first edge on to the stack. */
1004 99000177 : stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));
1005 :
1006 2909700592 : while (!stack.is_empty ())
1007 : {
1008 2810700415 : basic_block src;
1009 2810700415 : basic_block dest;
1010 :
1011 : /* Look at the edge on the top of the stack. */
1012 2810700415 : edge_iterator ei = stack.last ();
1013 2810700415 : src = ei_edge (ei)->src;
1014 2810700415 : dest = ei_edge (ei)->dest;
1015 :
1016 : /* Check if the edge destination has been visited yet. */
1017 2810700415 : if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
1018 2810700415 : && ! (dest->flags & visited))
1019 : {
1020 : /* Mark that we have visited the destination. */
1021 1113013311 : dest->flags |= visited;
1022 :
1023 1113013311 : if (pre_order)
1024 0 : pre_order[pre_order_num] = dest->index;
1025 :
1026 1113013311 : pre_order_num++;
1027 :
1028 1113013311 : if (EDGE_COUNT (dest->succs) > 0)
1029 : /* Since the DEST node has been visited for the first
1030 : time, check its successors. */
1031 1041776498 : stack.quick_push (ei_start (dest->succs));
1032 71236813 : else if (rev_post_order)
1033 : /* There are no successors for the DEST node so assign
1034 : its reverse completion number. */
1035 71236813 : rev_post_order[rev_post_order_num--] = dest->index;
1036 : }
1037 : else
1038 : {
1039 1697687104 : if (ei_one_before_end_p (ei)
1040 1140776675 : && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
1041 2739463602 : && rev_post_order)
1042 : /* There are no more successors for the SRC node
1043 : so assign its reverse completion number. */
1044 1041776498 : rev_post_order[rev_post_order_num--] = src->index;
1045 :
1046 1697687104 : if (!ei_one_before_end_p (ei))
1047 556910429 : ei_next (&stack.last ());
1048 : else
1049 1140776675 : stack.pop ();
1050 : }
1051 : }
1052 :
1053 99000177 : if (include_entry_exit)
1054 : {
1055 37762738 : if (pre_order)
1056 0 : pre_order[pre_order_num] = EXIT_BLOCK;
1057 37762738 : pre_order_num++;
1058 37762738 : if (rev_post_order)
1059 37762738 : rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
1060 : }
1061 :
1062 : /* Clear the temporarily allocated flag. */
1063 99000177 : if (!rev_post_order)
1064 : rev_post_order = pre_order;
1065 1287538964 : for (int i = 0; i < pre_order_num; ++i)
1066 1188538787 : BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags &= ~visited;
1067 :
1068 99000177 : return pre_order_num;
1069 99000177 : }
1070 :
1071 : /* Like pre_and_rev_post_order_compute_fn but operating on the
1072 : current function and asserting that all nodes were visited. */
1073 :
1074 : int
1075 81362013 : pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
1076 : bool include_entry_exit)
1077 : {
1078 81362013 : int pre_order_num
1079 81362013 : = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
1080 : include_entry_exit);
1081 81362013 : if (include_entry_exit)
1082 : /* The number of nodes visited should be the number of blocks. */
1083 37762593 : gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
1084 : else
1085 : /* The number of nodes visited should be the number of blocks minus
1086 : the entry and exit blocks which are not visited here. */
1087 43599420 : gcc_assert (pre_order_num
1088 : == (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));
1089 :
1090 81362013 : return pre_order_num;
1091 : }
1092 :
1093 :
1094 : /* Per basic-block data for rev_post_order_and_mark_dfs_back_seme,
1095 : element of a sparsely populated array indexed by basic-block number. */
1096 : typedef auto_vec<int, 2> scc_exit_vec_t;
1097 : struct rpoamdbs_bb_data {
1098 : int depth;
1099 : int bb_to_pre;
1100 : /* The basic-block index of the SCC entry of the block visited first
1101 : (the SCC leader). */
1102 : int scc;
1103 : /* The index into the RPO array where the blocks SCC entries end
1104 : (only valid for the SCC leader). */
1105 : int scc_end;
1106 : /* The indexes of the exits destinations of this SCC (only valid
1107 : for the SCC leader). Initialized upon discovery of SCC leaders. */
1108 : scc_exit_vec_t scc_exits;
1109 : };
1110 :
1111 : /* Tag H as a header of B, weaving H and its loop header list into the
1112 : current loop header list of B. */
1113 :
1114 : static void
1115 122271850 : tag_header (int b, int h, rpoamdbs_bb_data *bb_data)
1116 : {
1117 122271850 : if (h == -1 || b == h)
1118 : return;
1119 : int cur1 = b;
1120 : int cur2 = h;
1121 32387414 : while (bb_data[cur1].scc != -1)
1122 : {
1123 6206531 : int ih = bb_data[cur1].scc;
1124 6206531 : if (ih == cur2)
1125 : return;
1126 881394 : if (bb_data[ih].depth < bb_data[cur2].depth)
1127 : {
1128 324573 : bb_data[cur1].scc = cur2;
1129 324573 : cur1 = cur2;
1130 324573 : cur2 = ih;
1131 : }
1132 : else
1133 : cur1 = ih;
1134 : }
1135 26180883 : bb_data[cur1].scc = cur2;
1136 : }
1137 :
1138 : /* Comparator for a sort of two edges destinations E1 and E2 after their index
1139 : in the PRE array as specified by BB_TO_PRE. */
1140 :
1141 : static int
1142 45509301 : cmp_edge_dest_pre (const void *e1_, const void *e2_, void *data_)
1143 : {
1144 45509301 : const int *e1 = (const int *)e1_;
1145 45509301 : const int *e2 = (const int *)e2_;
1146 45509301 : rpoamdbs_bb_data *bb_data = (rpoamdbs_bb_data *)data_;
1147 45509301 : return (bb_data[*e1].bb_to_pre - bb_data[*e2].bb_to_pre);
1148 : }
1149 :
1150 : /* Compute the reverse completion number of a depth first search
1151 : on the SEME region denoted by the ENTRY edge and the EXIT_BBS set of
1152 : exit block indexes and store it in the array REV_POST_ORDER.
1153 : Also sets the EDGE_DFS_BACK edge flags according to this visitation
1154 : order.
1155 : Returns the number of nodes visited.
1156 :
1157 : In case the function has unreachable blocks the number of nodes
1158 : visited does not include them.
1159 :
1160 : If FOR_ITERATION is true then compute an RPO where SCCs form a
1161 : contiguous region in the RPO array.
1162 : *TOPLEVEL_SCC_EXTENTS if not NULL is filled with pairs of
1163 : *REV_POST_ORDER indexes denoting extents of the toplevel SCCs in
1164 : this region. */
1165 :
1166 : int
1167 13275187 : rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
1168 : bitmap exit_bbs, bool for_iteration,
1169 : int *rev_post_order,
1170 : vec<std::pair<int, int> >
1171 : *toplevel_scc_extents)
1172 : {
1173 13275187 : int rev_post_order_num = 0;
1174 :
1175 : /* BB flag to track nodes that have been visited. */
1176 13275187 : auto_bb_flag visited (fn);
1177 :
1178 : /* Lazily initialized per-BB data for the two DFS walks below. */
1179 13275187 : rpoamdbs_bb_data *bb_data
1180 13275187 : = XNEWVEC (rpoamdbs_bb_data, last_basic_block_for_fn (fn));
1181 :
1182 : /* First DFS walk, loop discovery according to
1183 : A New Algorithm for Identifying Loops in Decompilation
1184 : by Tao Wei, Jian Mao, Wei Zou and You Chen of the Institute of
1185 : Computer Science and Technology of the Peking University. */
1186 13275187 : auto_vec<edge_iterator, 20> ei_stack (n_basic_blocks_for_fn (fn) + 1);
1187 13275187 : auto_bb_flag is_header (fn);
1188 13275187 : int depth = 1;
1189 13275187 : unsigned n_sccs = 0;
1190 :
1191 13275187 : basic_block dest = entry->dest;
1192 13275187 : edge_iterator ei;
1193 13275187 : int pre_num = 0;
1194 :
1195 : /* DFS process DEST. */
1196 122185332 : find_loops:
1197 122185332 : bb_data[dest->index].bb_to_pre = pre_num++;
1198 122185332 : bb_data[dest->index].depth = depth;
1199 122185332 : bb_data[dest->index].scc = -1;
1200 122185332 : depth++;
1201 122185332 : gcc_assert ((dest->flags & (is_header|visited)) == 0);
1202 122185332 : dest->flags |= visited;
1203 122185332 : ei = ei_start (dest->succs);
1204 292262713 : while (!ei_end_p (ei))
1205 : {
1206 170077381 : ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
1207 170077381 : if (bitmap_bit_p (exit_bbs, ei_edge (ei)->dest->index))
1208 : ;
1209 156250050 : else if (!(ei_edge (ei)->dest->flags & visited))
1210 : {
1211 108910145 : ei_stack.quick_push (ei);
1212 108910145 : dest = ei_edge (ei)->dest;
1213 : /* DFS recurse on DEST. */
1214 108910145 : goto find_loops;
1215 :
1216 108910145 : ret_from_find_loops:
1217 : /* Return point of DFS recursion. */
1218 217820290 : ei = ei_stack.pop ();
1219 108910145 : dest = ei_edge (ei)->src;
1220 108910145 : int header = bb_data[ei_edge (ei)->dest->index].scc;
1221 108910145 : tag_header (dest->index, header, bb_data);
1222 108910145 : depth = bb_data[dest->index].depth + 1;
1223 : }
1224 : else
1225 : {
1226 47339905 : if (bb_data[ei_edge (ei)->dest->index].depth > 0) /* on the stack */
1227 : {
1228 6967352 : ei_edge (ei)->flags |= EDGE_DFS_BACK;
1229 6967352 : n_sccs++;
1230 6967352 : ei_edge (ei)->dest->flags |= is_header;
1231 6967352 : ::new (&bb_data[ei_edge (ei)->dest->index].scc_exits)
1232 6967352 : auto_vec<int, 2> ();
1233 6967352 : tag_header (dest->index, ei_edge (ei)->dest->index, bb_data);
1234 : }
1235 40372553 : else if (bb_data[ei_edge (ei)->dest->index].scc == -1)
1236 : ;
1237 : else
1238 : {
1239 6412073 : int header = bb_data[ei_edge (ei)->dest->index].scc;
1240 6412073 : if (bb_data[header].depth > 0)
1241 6380729 : tag_header (dest->index, header, bb_data);
1242 : else
1243 : {
1244 : /* A re-entry into an existing loop. */
1245 : /* ??? Need to mark is_header? */
1246 44291 : while (bb_data[header].scc != -1)
1247 : {
1248 26571 : header = bb_data[header].scc;
1249 26571 : if (bb_data[header].depth > 0)
1250 : {
1251 13624 : tag_header (dest->index, header, bb_data);
1252 13624 : break;
1253 : }
1254 : }
1255 : }
1256 : }
1257 : }
1258 170077381 : ei_next (&ei);
1259 : }
1260 122185332 : rev_post_order[rev_post_order_num++] = dest->index;
1261 : /* not on the stack anymore */
1262 122185332 : bb_data[dest->index].depth = -bb_data[dest->index].depth;
1263 122185332 : if (!ei_stack.is_empty ())
1264 : /* Return from DFS recursion. */
1265 108910145 : goto ret_from_find_loops;
1266 :
1267 : /* Optimize for no SCCs found or !for_iteration. */
1268 13275187 : if (n_sccs == 0 || !for_iteration)
1269 : {
1270 : /* Clear the temporarily allocated flags. */
1271 86521825 : for (int i = 0; i < rev_post_order_num; ++i)
1272 149473960 : BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
1273 74736980 : &= ~(is_header|visited);
1274 : /* And swap elements. */
1275 44195578 : for (int i = 0; i < rev_post_order_num/2; ++i)
1276 32410733 : std::swap (rev_post_order[i], rev_post_order[rev_post_order_num-i-1]);
1277 11784845 : XDELETEVEC (bb_data);
1278 :
1279 11784845 : return rev_post_order_num;
1280 : }
1281 :
1282 : /* Next find SCC exits, clear the visited flag and compute an upper bound
1283 : for the edge stack below. */
1284 : unsigned edge_count = 0;
1285 48938694 : for (int i = 0; i < rev_post_order_num; ++i)
1286 : {
1287 47448352 : int bb = rev_post_order[i];
1288 47448352 : BASIC_BLOCK_FOR_FN (fn, bb)->flags &= ~visited;
1289 47448352 : edge e;
1290 115567578 : FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fn, bb)->succs)
1291 : {
1292 68119226 : if (bitmap_bit_p (exit_bbs, e->dest->index))
1293 1590138 : continue;
1294 66529088 : edge_count++;
1295 : /* if e is an exit from e->src, record it for
1296 : bb_data[e->src].scc. */
1297 66529088 : int src_scc = e->src->index;
1298 66529088 : if (!(e->src->flags & is_header))
1299 58571273 : src_scc = bb_data[src_scc].scc;
1300 66529088 : if (src_scc == -1)
1301 35348478 : continue;
1302 31180610 : int dest_scc = e->dest->index;
1303 31180610 : if (!(e->dest->flags & is_header))
1304 25943021 : dest_scc = bb_data[dest_scc].scc;
1305 31180610 : if (src_scc == dest_scc)
1306 22833029 : continue;
1307 : /* When dest_scc is nested insde src_scc it's not an
1308 : exit. */
1309 : int tem_dest_scc = dest_scc;
1310 : unsigned dest_scc_depth = 0;
1311 10194066 : while (tem_dest_scc != -1)
1312 : {
1313 2808988 : dest_scc_depth++;
1314 2808988 : if ((tem_dest_scc = bb_data[tem_dest_scc].scc) == src_scc)
1315 : break;
1316 : }
1317 8347581 : if (tem_dest_scc != -1)
1318 962503 : continue;
1319 : /* When src_scc is nested inside dest_scc record an
1320 : exit from the outermost SCC this edge exits. */
1321 : int tem_src_scc = src_scc;
1322 : unsigned src_scc_depth = 0;
1323 8076466 : while (tem_src_scc != -1)
1324 : {
1325 7990409 : if (bb_data[tem_src_scc].scc == dest_scc)
1326 : {
1327 7299021 : edge_count++;
1328 7299021 : bb_data[tem_src_scc].scc_exits.safe_push (e->dest->index);
1329 7299021 : break;
1330 : }
1331 691388 : tem_src_scc = bb_data[tem_src_scc].scc;
1332 691388 : src_scc_depth++;
1333 : }
1334 : /* Else find the outermost SCC this edge exits (exits
1335 : from the inner SCCs are not important for the DFS
1336 : walk adjustment). Do so by computing the common
1337 : ancestor SCC where the immediate child it to the source
1338 : SCC is the exited SCC. */
1339 7385078 : if (tem_src_scc == -1)
1340 : {
1341 86057 : edge_count++;
1342 87100 : while (src_scc_depth > dest_scc_depth)
1343 : {
1344 1043 : src_scc = bb_data[src_scc].scc;
1345 1043 : src_scc_depth--;
1346 : }
1347 86239 : while (dest_scc_depth > src_scc_depth)
1348 : {
1349 182 : dest_scc = bb_data[dest_scc].scc;
1350 182 : dest_scc_depth--;
1351 : }
1352 86071 : while (bb_data[src_scc].scc != bb_data[dest_scc].scc)
1353 : {
1354 : src_scc = bb_data[src_scc].scc;
1355 : dest_scc = bb_data[dest_scc].scc;
1356 : }
1357 86057 : bb_data[src_scc].scc_exits.safe_push (e->dest->index);
1358 : }
1359 : }
1360 : }
1361 :
1362 : /* Now the second DFS walk to compute a RPO where the extent of SCCs
1363 : is minimized thus SCC members are adjacent in the RPO array.
1364 : This is done by performing a DFS walk computing RPO with first visiting
1365 : extra direct edges from SCC entry to its exits.
1366 : That simulates a DFS walk over the graph with SCCs collapsed and
1367 : walking the SCCs themselves only when all outgoing edges from the
1368 : SCCs have been visited.
1369 : SCC_END[scc-header-index] is the position in the RPO array of the
1370 : last member of the SCC. */
1371 2980684 : auto_vec<std::pair<basic_block, basic_block>, 20> estack (edge_count + 1);
1372 1490342 : int idx = rev_post_order_num;
1373 1490342 : basic_block edest;
1374 1490342 : dest = entry->dest;
1375 :
1376 : /* DFS process DEST. */
1377 47448352 : dfs_rpo:
1378 47448352 : gcc_checking_assert ((dest->flags & visited) == 0);
1379 : /* Verify we enter SCCs through the same header and SCC nesting appears
1380 : the same. */
1381 47448352 : gcc_assert (bb_data[dest->index].scc == -1
1382 : || (BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc)->flags
1383 : & visited));
1384 47448352 : dest->flags |= visited;
1385 47448352 : bb_data[dest->index].scc_end = -1;
1386 47448352 : if ((dest->flags & is_header)
1387 47448352 : && !bb_data[dest->index].scc_exits.is_empty ())
1388 : {
1389 : /* Push the all SCC exits as outgoing edges from its header to
1390 : be visited first.
1391 : To process exits in the same relative order as in the first
1392 : DFS walk sort them after their destination PRE order index. */
1393 3986738 : gcc_sort_r (&bb_data[dest->index].scc_exits[0],
1394 3986738 : bb_data[dest->index].scc_exits.length (),
1395 : sizeof (int), cmp_edge_dest_pre, bb_data);
1396 : /* Process edges in reverse to match previous DFS walk order. */
1397 15358554 : for (int i = bb_data[dest->index].scc_exits.length () - 1; i >= 0; --i)
1398 14770156 : estack.quick_push (std::make_pair
1399 7385078 : (dest, BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc_exits[i])));
1400 : }
1401 : else
1402 : {
1403 43461614 : if (dest->flags & is_header)
1404 150692 : bb_data[dest->index].scc_end = idx - 1;
1405 : /* Push the edge vector in reverse to match the iteration order
1406 : from the DFS walk above. */
1407 146611019 : for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
1408 60321171 : if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
1409 58731319 : estack.quick_push (std::make_pair (dest,
1410 58731319 : EDGE_SUCC (dest, i)->dest));
1411 : }
1412 119872176 : while (!estack.is_empty ()
1413 241234694 : && estack.last ().first == dest)
1414 : {
1415 73914166 : edest = estack.last ().second;
1416 73914166 : if (!(edest->flags & visited))
1417 : {
1418 45958010 : dest = edest;
1419 : /* DFS recurse on DEST. */
1420 45958010 : goto dfs_rpo;
1421 :
1422 45958010 : ret_from_dfs_rpo:
1423 : /* Return point of DFS recursion. */
1424 45958010 : dest = estack.last ().first;
1425 : }
1426 73914166 : estack.pop ();
1427 : /* If we processed all SCC exits from DEST mark the SCC end
1428 : since all RPO entries up to DEST itself will now belong
1429 : to its SCC. The special-case of no SCC exits is already
1430 : dealt with above. */
1431 73914166 : if (dest->flags & is_header
1432 : /* When the last exit edge was processed mark the SCC end
1433 : and push the regular edges. */
1434 15342893 : && bb_data[dest->index].scc_end == -1
1435 81296591 : && (estack.is_empty ()
1436 7382425 : || estack.last ().first != dest))
1437 : {
1438 3986738 : bb_data[dest->index].scc_end = idx - 1;
1439 : /* Push the edge vector in reverse to match the iteration order
1440 : from the DFS walk above. */
1441 15771531 : for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
1442 7798055 : if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
1443 7797769 : estack.quick_push (std::make_pair (dest,
1444 7797769 : EDGE_SUCC (dest, i)->dest));
1445 : }
1446 : }
1447 47448352 : rev_post_order[--idx] = dest->index;
1448 47448352 : if (!estack.is_empty ())
1449 : /* Return from DFS recursion. */
1450 45958010 : goto ret_from_dfs_rpo;
1451 :
1452 : /* Each SCC extends are from the position of the header inside
1453 : the RPO array up to RPO array index scc_end[header-index]. */
1454 1490342 : if (toplevel_scc_extents)
1455 9127223 : for (int i = 0; i < rev_post_order_num; i++)
1456 : {
1457 8650414 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
1458 8650414 : if (bb->flags & is_header)
1459 : {
1460 978181 : toplevel_scc_extents->safe_push
1461 978181 : (std::make_pair (i, bb_data[bb->index].scc_end));
1462 978181 : i = bb_data[bb->index].scc_end;
1463 : }
1464 : }
1465 :
1466 : /* Clear the temporarily allocated flags and free memory. */
1467 48938694 : for (int i = 0; i < rev_post_order_num; ++i)
1468 : {
1469 47448352 : basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
1470 47448352 : if (bb->flags & is_header)
1471 4137430 : bb_data[bb->index].scc_exits.~scc_exit_vec_t ();
1472 47448352 : bb->flags &= ~(visited|is_header);
1473 : }
1474 :
1475 1490342 : XDELETEVEC (bb_data);
1476 :
1477 1490342 : return rev_post_order_num;
1478 13275187 : }
1479 :
1480 :
1481 :
/* Compute the depth first search order on the _reverse_ graph,
   marking the nodes visited as it goes.

   The computation is split across the depth_first_search class:

   the constructor creates the necessary data structures;

   add_bb () adds a basic block to the data structures.  The block
   will start the search;

   execute () continues (or starts) the search using the block on the
   top of the stack, stopping when the stack is empty and returning an
   unvisited block, if any;

   the destructor releases the data structures.

   Thus, the user will construct the object, call add_bb () to add a
   beginning basic block to the stack, call execute (), possibly add
   another bb to the stack and again call execute (), ..., and let the
   object go out of scope.  */
1505 :
/* Initialize the data structures used for depth-first search on the
   reverse graph.  The stack starts out empty; seed it with add_bb
   before calling execute.  */
1511 :
1512 5520638 : depth_first_search::depth_first_search () :
1513 5520638 : m_stack (n_basic_blocks_for_fn (cfun)),
1514 5520638 : m_visited_blocks (last_basic_block_for_fn (cfun))
1515 : {
1516 5520638 : bitmap_clear (m_visited_blocks);
1517 5520638 : }
1518 :
1519 : /* Add the specified basic block to the top of the dfs data
1520 : structures. When the search continues, it will start at the
1521 : block. */
1522 :
1523 : void
1524 66201254 : depth_first_search::add_bb (basic_block bb)
1525 : {
1526 66201254 : m_stack.quick_push (bb);
1527 66201254 : bitmap_set_bit (m_visited_blocks, bb->index);
1528 66201254 : }
1529 :
1530 : /* Continue the depth-first search through the reverse graph starting with the
1531 : block at the stack's top and ending when the stack is empty. Visited nodes
1532 : are marked. Returns an unvisited basic block, or NULL if there is none
1533 : available. */
1534 :
1535 : basic_block
1536 5545801 : depth_first_search::execute (basic_block last_unvisited)
1537 : {
1538 5545801 : basic_block bb;
1539 5545801 : edge e;
1540 5545801 : edge_iterator ei;
1541 :
1542 71747055 : while (!m_stack.is_empty ())
1543 : {
1544 66201254 : bb = m_stack.pop ();
1545 :
1546 : /* Perform depth-first search on adjacent vertices. */
1547 147836556 : FOR_EACH_EDGE (e, ei, bb->preds)
1548 81635302 : if (!bitmap_bit_p (m_visited_blocks, e->src->index))
1549 60655453 : add_bb (e->src);
1550 : }
1551 :
1552 : /* Determine if there are unvisited basic blocks. */
1553 71747055 : FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
1554 66226417 : if (!bitmap_bit_p (m_visited_blocks, bb->index))
1555 : return bb;
1556 :
1557 : return NULL;
1558 : }
1559 :
1560 : /* Performs dfs search from BB over vertices satisfying PREDICATE;
1561 : if REVERSE, go against direction of edges. Returns number of blocks
1562 : found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
1563 : int
1564 221593910 : dfs_enumerate_from (basic_block bb, int reverse,
1565 : bool (*predicate) (const_basic_block, const void *),
1566 : basic_block *rslt, int rslt_max, const void *data)
1567 : {
1568 221593910 : basic_block *st, lbb;
1569 221593910 : int sp = 0, tv = 0;
1570 :
1571 221593910 : auto_bb_flag visited (cfun);
1572 :
1573 : #define MARK_VISITED(BB) ((BB)->flags |= visited)
1574 : #define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
1575 : #define VISITED_P(BB) (((BB)->flags & visited) != 0)
1576 :
1577 221593910 : st = XNEWVEC (basic_block, rslt_max);
1578 221593910 : rslt[tv++] = st[sp++] = bb;
1579 221593910 : MARK_VISITED (bb);
1580 1452400289 : while (sp)
1581 : {
1582 1230806379 : edge e;
1583 1230806379 : edge_iterator ei;
1584 1230806379 : lbb = st[--sp];
1585 1230806379 : if (reverse)
1586 : {
1587 3013735653 : FOR_EACH_EDGE (e, ei, lbb->preds)
1588 1784640906 : if (!VISITED_P (e->src) && predicate (e->src, data))
1589 : {
1590 1008436215 : gcc_assert (tv != rslt_max);
1591 1008436215 : rslt[tv++] = st[sp++] = e->src;
1592 1008436215 : MARK_VISITED (e->src);
1593 : }
1594 : }
1595 : else
1596 : {
1597 4182121 : FOR_EACH_EDGE (e, ei, lbb->succs)
1598 2470489 : if (!VISITED_P (e->dest) && predicate (e->dest, data))
1599 : {
1600 776254 : gcc_assert (tv != rslt_max);
1601 776254 : rslt[tv++] = st[sp++] = e->dest;
1602 776254 : MARK_VISITED (e->dest);
1603 : }
1604 : }
1605 : }
1606 221593910 : free (st);
1607 1452400289 : for (sp = 0; sp < tv; sp++)
1608 1230806379 : UNMARK_VISITED (rslt[sp]);
1609 221593910 : return tv;
1610 : #undef MARK_VISITED
1611 : #undef UNMARK_VISITED
1612 : #undef VISITED_P
1613 221593910 : }
1614 :
1615 :
1616 : /* Compute dominance frontiers, ala Harvey, Ferrante, et al.
1617 :
1618 : This algorithm can be found in Timothy Harvey's PhD thesis, at
1619 : http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
1620 : dominance algorithms.
1621 :
1622 : First, we identify each join point, j (any node with more than one
1623 : incoming edge is a join point).
1624 :
1625 : We then examine each predecessor, p, of j and walk up the dominator tree
1626 : starting at p.
1627 :
1628 : We stop the walk when we reach j's immediate dominator - j is in the
1629 : dominance frontier of each of the nodes in the walk, except for j's
1630 : immediate dominator. Intuitively, all of the rest of j's dominators are
1631 : shared by j's predecessors as well.
1632 : Since they dominate j, they will not have j in their dominance frontiers.
1633 :
1634 : The number of nodes touched by this algorithm is equal to the size
1635 : of the dominance frontiers, no more, no less.
1636 : */
1637 :
1638 : void
1639 11938319 : compute_dominance_frontiers (bitmap_head *frontiers)
1640 : {
1641 11938319 : timevar_push (TV_DOM_FRONTIERS);
1642 :
1643 11938319 : edge p;
1644 11938319 : edge_iterator ei;
1645 11938319 : basic_block b;
1646 191337525 : FOR_EACH_BB_FN (b, cfun)
1647 : {
1648 221269941 : if (EDGE_COUNT (b->preds) >= 2)
1649 : {
1650 41870735 : basic_block domsb = get_immediate_dominator (CDI_DOMINATORS, b);
1651 157478005 : FOR_EACH_EDGE (p, ei, b->preds)
1652 : {
1653 115607270 : basic_block runner = p->src;
1654 115607270 : if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1655 9029 : continue;
1656 :
1657 321514591 : while (runner != domsb)
1658 : {
1659 235953175 : if (!bitmap_set_bit (&frontiers[runner->index], b->index))
1660 : break;
1661 205916350 : runner = get_immediate_dominator (CDI_DOMINATORS, runner);
1662 : }
1663 : }
1664 : }
1665 : }
1666 :
1667 11938319 : timevar_pop (TV_DOM_FRONTIERS);
1668 11938319 : }
1669 :
1670 : /* Given a set of blocks with variable definitions (DEF_BLOCKS),
1671 : return a bitmap with all the blocks in the iterated dominance
1672 : frontier of the blocks in DEF_BLOCKS. DFS contains dominance
1673 : frontier information as returned by compute_dominance_frontiers.
1674 :
1675 : The resulting set of blocks are the potential sites where PHI nodes
1676 : are needed. The caller is responsible for freeing the memory
1677 : allocated for the return value. */
1678 :
1679 : bitmap
1680 23820280 : compute_idf (bitmap def_blocks, bitmap_head *dfs)
1681 : {
1682 23820280 : bitmap_iterator bi, bi2;
1683 23820280 : unsigned bb_index, i;
1684 23820280 : bitmap phi_insertion_points;
1685 :
1686 23820280 : phi_insertion_points = BITMAP_ALLOC (NULL);
1687 :
1688 : /* The initial workset is the DEF_BLOCKs, process that first,
1689 : seeding phi_insertion_points as the start of the worklist
1690 : for the iteration which then also serves as a visited set. */
1691 88058493 : EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi2)
1692 : {
1693 : /* Since the registration of NEW -> OLD name mappings is done
1694 : separately from the call to update_ssa, when updating the SSA
1695 : form, the basic blocks where new and/or old names are defined
1696 : may have disappeared by CFG cleanup calls. In this case,
1697 : we may pull a non-existing block from the work stack. */
1698 64238213 : gcc_checking_assert (bb_index
1699 : < (unsigned) last_basic_block_for_fn (cfun));
1700 :
1701 : /* The population counts of the dominance frontiers is low
1702 : compared to that of phi_insertion_points which approaches
1703 : the IDF and of work_set which is at most that of the IDF
1704 : as well. That makes iterating over the DFS bitmap preferential
1705 : to whole bitmap operations involving also phi_insertion_points. */
1706 130613255 : EXECUTE_IF_SET_IN_BITMAP (&dfs[bb_index], 0, i, bi)
1707 66375042 : bitmap_set_bit (phi_insertion_points, i);
1708 : }
1709 :
1710 : /* Seed the work set with the initial phi_insertion_points. */
1711 23820280 : auto_vec<int, 64> work_set (n_basic_blocks_for_fn (cfun));
1712 57561263 : EXECUTE_IF_SET_IN_BITMAP (phi_insertion_points, 0, i, bi)
1713 33740983 : work_set.quick_push (i);
1714 :
1715 : /* Pop a block off the workset, add every block that appears in
1716 : the original block's DF that we have not already processed to
1717 : the workset. Iterate until the workset is empty. Blocks
1718 : which are added to the workset are potential sites for
1719 : PHI nodes. */
1720 73590738 : while (!work_set.is_empty ())
1721 : {
1722 49770458 : bb_index = work_set.pop ();
1723 49770458 : gcc_checking_assert (bb_index
1724 : < (unsigned) last_basic_block_for_fn (cfun));
1725 105375615 : EXECUTE_IF_SET_IN_BITMAP (&dfs[bb_index], 0, i, bi)
1726 55605157 : if (bitmap_set_bit (phi_insertion_points, i))
1727 16029475 : work_set.quick_push (i);
1728 : }
1729 :
1730 23820280 : return phi_insertion_points;
1731 23820280 : }
1732 :
1733 : /* Intersection and union of preds/succs for sbitmap based data flow
1734 : solvers. All four functions defined below take the same arguments:
1735 : B is the basic block to perform the operation for. DST is the
1736 : target sbitmap, i.e. the result. SRC is an sbitmap vector of size
1737 : last_basic_block so that it can be indexed with basic block indices.
1738 : DST may be (but does not have to be) SRC[B->index]. */
1739 :
1740 : /* Set the bitmap DST to the intersection of SRC of successors of
1741 : basic block B. */
1742 :
1743 : void
1744 10530886 : bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
1745 : {
1746 10530886 : unsigned int set_size = dst->size;
1747 10530886 : edge e;
1748 10530886 : unsigned ix;
1749 :
1750 10533840 : for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1751 : {
1752 10455630 : e = EDGE_SUCC (b, ix);
1753 10455630 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1754 2954 : continue;
1755 :
1756 10452676 : bitmap_copy (dst, src[e->dest->index]);
1757 10452676 : break;
1758 : }
1759 :
1760 10530886 : if (e == 0)
1761 75256 : bitmap_ones (dst);
1762 : else
1763 16909942 : for (++ix; ix < EDGE_COUNT (b->succs); ix++)
1764 : {
1765 6454312 : unsigned int i;
1766 6454312 : SBITMAP_ELT_TYPE *p, *r;
1767 :
1768 6454312 : e = EDGE_SUCC (b, ix);
1769 6454312 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1770 0 : continue;
1771 :
1772 6454312 : p = src[e->dest->index]->elms;
1773 6454312 : r = dst->elms;
1774 25135952 : for (i = 0; i < set_size; i++)
1775 18681640 : *r++ &= *p++;
1776 : }
1777 10530886 : }
1778 :
1779 : /* Set the bitmap DST to the intersection of SRC of predecessors of
1780 : basic block B. */
1781 :
1782 : void
1783 56489745 : bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
1784 : {
1785 56489745 : unsigned int set_size = dst->size;
1786 56489745 : edge e;
1787 56489745 : unsigned ix;
1788 :
1789 56489745 : for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1790 : {
1791 56489745 : e = EDGE_PRED (b, ix);
1792 56489745 : if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1793 0 : continue;
1794 :
1795 56489745 : bitmap_copy (dst, src[e->src->index]);
1796 56489745 : break;
1797 : }
1798 :
1799 56489745 : if (e == 0)
1800 0 : bitmap_ones (dst);
1801 : else
1802 139269371 : for (++ix; ix < EDGE_COUNT (b->preds); ix++)
1803 : {
1804 82779626 : unsigned int i;
1805 82779626 : SBITMAP_ELT_TYPE *p, *r;
1806 :
1807 82779626 : e = EDGE_PRED (b, ix);
1808 82779626 : if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1809 0 : continue;
1810 :
1811 82779626 : p = src[e->src->index]->elms;
1812 82779626 : r = dst->elms;
1813 742311106 : for (i = 0; i < set_size; i++)
1814 659531480 : *r++ &= *p++;
1815 : }
1816 56489745 : }
1817 :
1818 : /* Set the bitmap DST to the union of SRC of successors of
1819 : basic block B. */
1820 :
1821 : void
1822 0 : bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
1823 : {
1824 0 : unsigned int set_size = dst->size;
1825 0 : edge e;
1826 0 : unsigned ix;
1827 :
1828 0 : for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1829 : {
1830 0 : e = EDGE_SUCC (b, ix);
1831 0 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1832 0 : continue;
1833 :
1834 0 : bitmap_copy (dst, src[e->dest->index]);
1835 0 : break;
1836 : }
1837 :
1838 0 : if (ix == EDGE_COUNT (b->succs))
1839 0 : bitmap_clear (dst);
1840 : else
1841 0 : for (ix++; ix < EDGE_COUNT (b->succs); ix++)
1842 : {
1843 0 : unsigned int i;
1844 0 : SBITMAP_ELT_TYPE *p, *r;
1845 :
1846 0 : e = EDGE_SUCC (b, ix);
1847 0 : if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1848 0 : continue;
1849 :
1850 0 : p = src[e->dest->index]->elms;
1851 0 : r = dst->elms;
1852 0 : for (i = 0; i < set_size; i++)
1853 0 : *r++ |= *p++;
1854 : }
1855 0 : }
1856 :
1857 : /* Set the bitmap DST to the union of SRC of predecessors of
1858 : basic block B. */
1859 :
1860 : void
1861 0 : bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
1862 : {
1863 0 : unsigned int set_size = dst->size;
1864 0 : edge e;
1865 0 : unsigned ix;
1866 :
1867 0 : for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1868 : {
1869 0 : e = EDGE_PRED (b, ix);
1870 0 : if (e->src== ENTRY_BLOCK_PTR_FOR_FN (cfun))
1871 0 : continue;
1872 :
1873 0 : bitmap_copy (dst, src[e->src->index]);
1874 0 : break;
1875 : }
1876 :
1877 0 : if (ix == EDGE_COUNT (b->preds))
1878 0 : bitmap_clear (dst);
1879 : else
1880 0 : for (ix++; ix < EDGE_COUNT (b->preds); ix++)
1881 : {
1882 0 : unsigned int i;
1883 0 : SBITMAP_ELT_TYPE *p, *r;
1884 :
1885 0 : e = EDGE_PRED (b, ix);
1886 0 : if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1887 0 : continue;
1888 :
1889 0 : p = src[e->src->index]->elms;
1890 0 : r = dst->elms;
1891 0 : for (i = 0; i < set_size; i++)
1892 0 : *r++ |= *p++;
1893 : }
1894 0 : }
1895 :
1896 : /* Returns the list of basic blocks in the function in an order that guarantees
1897 : that if a block X has just a single predecessor Y, then Y is after X in the
1898 : ordering. */
1899 :
basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  /* ORDER is caller-freed and has room for every block in the
     function, though only the non-fixed blocks are stored into it.  */
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  /* N counts the blocks not yet placed; it is decremented as chains
     are emitted and must reach zero at the end.  */
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  /* Marking ENTRY visited stops the chain walks below from running
     past the start of the function.  */
  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      /* First pass: only count the length NP of the single-pred chain
	 ending at X, so we know where in ORDER the chain starts.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      /* Second pass: store the chain into ORDER[n - np .. n - 1],
	 walking from X up through its single predecessors so each
	 block lands before its unique successor.  */
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      /* Y is the chain's head (its predecessor is absent, shared, or
	 already visited); it takes the last slot of this chain.  */
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  /* Every non-fixed block must have been placed exactly once.  */
  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}
1947 :
1948 : /* Ignoring loop backedges, if BB has precisely one incoming edge then
1949 : return that edge. Otherwise return NULL.
1950 :
1951 : When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
1952 : as executable. */
1953 :
1954 : edge
1955 97347350 : single_pred_edge_ignoring_loop_edges (basic_block bb,
1956 : bool ignore_not_executable)
1957 : {
1958 97347350 : edge retval = NULL;
1959 97347350 : edge e;
1960 97347350 : edge_iterator ei;
1961 :
1962 191445150 : FOR_EACH_EDGE (e, ei, bb->preds)
1963 : {
1964 : /* A loop back edge can be identified by the destination of
1965 : the edge dominating the source of the edge. */
1966 109305369 : if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1967 5061161 : continue;
1968 :
1969 : /* We can safely ignore edges that are not executable. */
1970 104244208 : if (ignore_not_executable
1971 25386550 : && (e->flags & EDGE_EXECUTABLE) == 0)
1972 89475 : continue;
1973 :
1974 : /* If we have already seen a non-loop edge, then we must have
1975 : multiple incoming non-loop edges and thus we return NULL. */
1976 104154733 : if (retval)
1977 : return NULL;
1978 :
1979 : /* This is the first non-loop incoming edge we have found. Record
1980 : it. */
1981 : retval = e;
1982 : }
1983 :
1984 : return retval;
1985 : }
|