Line data Source code
1 : /* Exception handling semantics and decomposition for trees.
2 : Copyright (C) 2003-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify
7 : it under the terms of the GNU General Public License as published by
8 : the Free Software Foundation; either version 3, or (at your option)
9 : any later version.
10 :
11 : GCC is distributed in the hope that it will be useful,
12 : but WITHOUT ANY WARRANTY; without even the implied warranty of
13 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 : GNU General Public License for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "rtl.h"
25 : #include "tree.h"
26 : #include "gimple.h"
27 : #include "cfghooks.h"
28 : #include "tree-pass.h"
29 : #include "ssa.h"
30 : #include "cgraph.h"
31 : #include "diagnostic-core.h"
32 : #include "fold-const.h"
33 : #include "calls.h"
34 : #include "except.h"
35 : #include "cfganal.h"
36 : #include "cfgcleanup.h"
37 : #include "tree-eh.h"
38 : #include "gimple-iterator.h"
39 : #include "tree-cfg.h"
40 : #include "tree-into-ssa.h"
41 : #include "tree-ssa.h"
42 : #include "tree-inline.h"
43 : #include "langhooks.h"
44 : #include "cfgloop.h"
45 : #include "gimple-low.h"
46 : #include "stringpool.h"
47 : #include "attribs.h"
48 : #include "asan.h"
49 : #include "gimplify.h"
50 :
/* In some instances a tree and a gimple need to be stored in the same
   table, e.g. as keys of the same hash table.  This union makes that
   possible.  */
53 : typedef union {tree *tp; tree t; gimple *g;} treemple;
54 :
55 : /* Misc functions used in this file. */
56 :
57 : /* Remember and lookup EH landing pad data for arbitrary statements.
58 : Really this means any statement that could_throw_p. We could
59 : stuff this information into the stmt_ann data structure, but:
60 :
61 : (1) We absolutely rely on this information being kept until
62 : we get to rtl. Once we're done with lowering here, if we lose
63 : the information there's no way to recover it!
64 :
65 : (2) There are many more statements that *cannot* throw as
66 : compared to those that can. We should be saving some amount
67 : of space by only allocating memory for those that can throw. */
68 :
69 : /* Add statement T in function IFUN to landing pad NUM. */
70 :
71 : static void
72 6499675 : add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
73 : {
74 6499675 : gcc_assert (num != 0);
75 :
76 6499675 : if (!get_eh_throw_stmt_table (ifun))
77 384755 : set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));
78 :
79 6499675 : bool existed = get_eh_throw_stmt_table (ifun)->put (t, num);
80 6499675 : gcc_assert (!existed);
81 6499675 : }
82 :
83 : /* Add statement T in the current function (cfun) to EH landing pad NUM. */
84 :
/* Add statement T in the current function (cfun) to EH landing pad NUM.
   Convenience wrapper around add_stmt_to_eh_lp_fn.  */

void
add_stmt_to_eh_lp (gimple *t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
90 :
91 : /* Add statement T to the single EH landing pad in REGION. */
92 :
/* Add statement T to the single EH landing pad in REGION.  A NULL
   REGION is a no-op.  MUST_NOT_THROW regions have no landing pad and
   are recorded with the negated region index instead of a landing-pad
   index (see the sign convention in lookup_stmt_eh_lp_fn).  */

static void
record_stmt_eh_region (eh_region region, gimple *t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
	lp = gen_eh_landing_pad (region);
      else
	/* As the function name says: the region must have at most one
	   landing pad.  */
	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
110 :
111 :
112 : /* Remove statement T in function IFUN from its EH landing pad. */
113 :
114 : bool
115 292520105 : remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
116 : {
117 292520105 : if (!get_eh_throw_stmt_table (ifun))
118 : return false;
119 :
120 133406001 : if (!get_eh_throw_stmt_table (ifun)->get (t))
121 : return false;
122 :
123 5668125 : get_eh_throw_stmt_table (ifun)->remove (t);
124 5668125 : return true;
125 : }
126 :
127 :
128 : /* Remove statement T in the current function (cfun) from its
129 : EH landing pad. */
130 :
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  Returns true if an entry existed and was removed.  */

bool
remove_stmt_from_eh_lp (gimple *t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
136 :
137 : /* Determine if statement T is inside an EH region in function IFUN.
138 : Positive numbers indicate a landing pad index; negative numbers
139 : indicate a MUST_NOT_THROW region index; zero indicates that the
140 : statement is not recorded in the region table. */
141 :
142 : int
143 15255699765 : lookup_stmt_eh_lp_fn (struct function *ifun, const gimple *t)
144 : {
145 15255699765 : if (ifun->eh->throw_stmt_table == NULL)
146 : return 0;
147 :
148 8225914870 : int *lp_nr = ifun->eh->throw_stmt_table->get (const_cast <gimple *> (t));
149 8225914870 : return lp_nr ? *lp_nr : 0;
150 : }
151 :
152 : /* Likewise, but always use the current function. */
153 :
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (const gimple *t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  Outside of a function there is trivially no
     EH region, so 0 is the right answer.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
163 :
164 : /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
165 : nodes and LABEL_DECL nodes. We will use this during the second phase to
166 : determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
167 :
/* One entry in the finally tree: CHILD (a GIMPLE_TRY_FINALLY or a
   LABEL_DECL) together with the GIMPLE_TRY_FINALLY that immediately
   encloses it (PARENT, NULL at the outermost level).  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gtry *parent;
};
177 :
178 : /* Hashtable helpers. */
179 :
/* Hashtable helpers.  Nodes are hashed and compared by their CHILD
   pointer only; PARENT plays no part in identity.  */

struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
{
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
			    const finally_tree_node *);
};

inline hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
  /* Shift out the low-order bits of the pointer, which are presumably
     mostly zero due to object alignment and so carry little entropy.  */
  return (intptr_t)v->child.t >> 4;
}

inline bool
finally_tree_hasher::equal (const finally_tree_node *v,
			    const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}
199 :
200 : /* Note that this table is *not* marked GTY. It is short-lived. */
201 : static hash_table<finally_tree_hasher> *finally_tree;
202 :
203 : static void
204 18063455 : record_in_finally_tree (treemple child, gtry *parent)
205 : {
206 18063455 : struct finally_tree_node *n;
207 18063455 : finally_tree_node **slot;
208 :
209 18063455 : n = XNEW (struct finally_tree_node);
210 18063455 : n->child = child;
211 18063455 : n->parent = parent;
212 :
213 18063455 : slot = finally_tree->find_slot (n, INSERT);
214 18063455 : gcc_assert (!*slot);
215 18063455 : *slot = n;
216 18063455 : }
217 :
218 : static void
219 : collect_finally_tree (gimple *stmt, gtry *region);
220 :
221 : /* Go through the gimple sequence. Works with collect_finally_tree to
222 : record all GIMPLE_LABEL and GIMPLE_TRY statements. */
223 :
224 : static void
225 8768088 : collect_finally_tree_1 (gimple_seq seq, gtry *region)
226 : {
227 8768088 : gimple_stmt_iterator gsi;
228 :
229 98105399 : for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
230 89337311 : collect_finally_tree (gsi_stmt (gsi), region);
231 4882577 : }
232 :
/* Record STMT in the finally tree if it is a label or a try/finally,
   then recurse into any sub-sequences it owns.  REGION is the
   innermost GIMPLE_TRY_FINALLY enclosing STMT (NULL if none); note
   that for a TRY_FINALLY's own eval part the region becomes the
   statement itself, while its cleanup part stays in the outer
   region.  */

static void
collect_finally_tree (gimple *stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  /* The eval body is inside this try/finally; the cleanup
	     body is not.  */
	  collect_finally_tree_1 (gimple_try_eval (stmt),
				  as_a <gtry *> (stmt));
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
				 as_a <gcatch *> (stmt)),
			      region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
	collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
285 :
286 :
287 : /* Use the finally tree to determine if a jump from START to TARGET
288 : would leave the try_finally node that START lives in. */
289 :
290 : static bool
291 8802563 : outside_finally_tree (treemple start, gimple *target)
292 : {
293 9502676 : struct finally_tree_node n, *p;
294 :
295 9502676 : do
296 : {
297 9502676 : n.child = start;
298 9502676 : p = finally_tree->find (&n);
299 9502676 : if (!p)
300 : return true;
301 8835831 : start.g = p->parent;
302 : }
303 8835831 : while (start.g != target);
304 :
305 : return false;
306 : }
307 :
308 : /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
309 : nodes into a set of gotos, magic labels, and eh regions.
310 : The eh region creation is straight-forward, but frobbing all the gotos
311 : and such into shape isn't. */
312 :
313 : /* The sequence into which we record all EH stuff. This will be
314 : placed at the end of the function when we're all done. */
315 : static gimple_seq eh_seq;
316 :
317 : /* Record whether an EH region contains something that can throw,
318 : indexed by EH region number. */
319 : static bitmap eh_region_may_contain_throw_map;
320 :
321 : /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
322 : statements that are seen to escape this GIMPLE_TRY_FINALLY node.
323 : The idea is to record a gimple statement for everything except for
324 : the conditionals, which get their labels recorded. Since labels are
325 : of type 'tree', we need this node to store both gimple and tree
326 : objects. REPL_STMT is the sequence used to replace the goto/return
327 : statement. CONT_STMT is used to store the statement that allows
328 : the return/goto to jump to the original destination. */
329 :
struct goto_queue_node
{
  /* The escaping goto/return statement (or, for conditionals, the
     address of the label operand) -- see the comment above.  */
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple *cont_stmt;
  /* For labels: index into leh_tf_state::dest_array of the target
     label; -1 for a return.  */
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
341 :
342 : /* State of the world while lowering. */
343 :
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;

  /* Outer non-cleanup region.  */
  eh_region outer_non_cleanup;
};
364 :
/* Extra lowering state for a GIMPLE_TRY_FINALLY node.  */

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue: GOTO_QUEUE_SIZE is the allocated capacity,
     GOTO_QUEUE_ACTIVE the number of entries in use.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple *, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
411 :
412 : static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
413 :
414 : /* Search for STMT in the goto queue. Return the replacement,
415 : or null if the statement isn't in the queue. */
416 :
417 : #define LARGE_GOTO_QUEUE 20
418 :
419 : static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
420 :
/* Search for STMT in TF's goto queue.  Return the replacement
   sequence, or NULL if the statement isn't in the queue.  Small
   queues are scanned linearly; once the queue passes
   LARGE_GOTO_QUEUE entries a hash map is built lazily (after which
   the queue must not grow -- see the assert in
   record_in_goto_queue).  */

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
	if (tf->goto_queue[i].stmt.g == stmt.g)
	  return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
      for (i = 0; i < tf->goto_queue_active; i++)
	{
	  bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
						  &tf->goto_queue[i]);
	  gcc_assert (!existed);
	}
    }

  goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
  if (slot != NULL)
    return ((*slot)->repl_stmt);

  return NULL;
}
454 :
455 : /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
456 : lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
457 : then we can just splat it in, otherwise we add the new stmts immediately
458 : after the GIMPLE_COND and redirect. */
459 :
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  TP points at the label operand
   (then or else clause) of the GIMPLE_COND at *GSI.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  /* A single GOTO replacement: just retarget the clause's label.  */
  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND */
  *tp = label;

  /* Insert the replacement sequence (behind a fresh label) right after
     the condition; a copy is used since the queue entry may be reused.  */
  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
488 :
489 : /* The real work of replace_goto_queue. Returns with TSI updated to
490 : point to the next statement. */
491 :
492 : static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
493 :
/* The real work of replace_goto_queue.  Rewrite one statement,
   substituting queued replacement sequences for escaping gotos and
   returns.  Returns with GSI updated to point to the next statement.  */

static void
replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  /* Splice in a copy of the replacement (stamped with this
	     statement's location) and delete the original; GSI then
	     already points at the next statement, so return without
	     the trailing gsi_next.  */
	  gimple_stmt_iterator i;
	  seq = gimple_seq_copy (seq);
	  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
	    gimple_set_location (gsi_stmt (i), gimple_location (stmt));
	  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_ASM:
      if (int n = gimple_asm_nlabels (as_a <gasm *> (stmt)))
	{
	  temp.g = stmt;
	  gasm *asm_stmt = as_a <gasm *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree bypass_label = NULL_TREE;
	  for (int i = 0; i < n; ++i)
	    {
	      tree elt = gimple_asm_label_op (asm_stmt, i);
	      temp.tp = &TREE_VALUE (elt);
	      seq = find_goto_replacement (tf, temp);
	      if (!seq)
		continue;
	      /* A pure goto replacement: just retarget the asm label.  */
	      if (gimple_seq_singleton_p (seq)
		  && gimple_code (gimple_seq_first_stmt (seq)) == GIMPLE_GOTO)
		{
		  TREE_VALUE (elt)
		    = gimple_goto_dest (gimple_seq_first_stmt (seq));
		  continue;
		}

	      /* Otherwise the replacement code is placed after the asm;
		 normal fallthrough must jump around it.  */
	      if (bypass_label == NULL_TREE)
		{
		  bypass_label = create_artificial_label (loc);
		  gsi_insert_after (gsi, gimple_build_goto (bypass_label),
				    GSI_CONTINUE_LINKING);
		}

	      tree label = create_artificial_label (loc);
	      TREE_VALUE (elt) = label;
	      gsi_insert_after (gsi, gimple_build_label (label),
				GSI_CONTINUE_LINKING);
	      gsi_insert_seq_after (gsi, gimple_seq_copy (seq),
				    GSI_CONTINUE_LINKING);
	    }
	  if (bypass_label)
	    gsi_insert_after (gsi, gimple_build_label (bypass_label),
			      GSI_CONTINUE_LINKING);
	}
      break;

    case GIMPLE_COND:
      /* Operands 2 and 3 are the then and else labels.  */
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
				      as_a <gcatch *> (stmt)),
				    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
				      tf);
	replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
				      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
596 :
597 : /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
598 :
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  A while
   loop is used (not a for loop) because replace_goto_queue_1 advances
   GSI itself -- and may remove the current statement.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
607 :
608 : /* Replace all goto queue members. */
609 :
610 : static void
611 503194 : replace_goto_queue (struct leh_tf_state *tf)
612 : {
613 503194 : if (tf->goto_queue_active == 0)
614 : return;
615 500486 : replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
616 500486 : replace_goto_queue_stmt_list (&eh_seq, tf);
617 : }
618 :
619 : /* Add a new record to the goto queue contained in TF. NEW_STMT is the
620 : data to be added, IS_LABEL indicates whether NEW_STMT is a label or
621 : a gimple return. */
622 :
623 : static void
624 666845 : record_in_goto_queue (struct leh_tf_state *tf,
625 : treemple new_stmt,
626 : int index,
627 : bool is_label,
628 : location_t location)
629 : {
630 666845 : size_t active, size;
631 666845 : struct goto_queue_node *q;
632 :
633 666845 : gcc_assert (!tf->goto_queue_map);
634 :
635 666845 : active = tf->goto_queue_active;
636 666845 : size = tf->goto_queue_size;
637 666845 : if (active >= size)
638 : {
639 500521 : size = (size ? size * 2 : 32);
640 500521 : tf->goto_queue_size = size;
641 500521 : tf->goto_queue
642 500521 : = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
643 : }
644 :
645 666845 : q = &tf->goto_queue[active];
646 666845 : tf->goto_queue_active = active + 1;
647 :
648 666845 : memset (q, 0, sizeof (*q));
649 666845 : q->stmt = new_stmt;
650 666845 : q->index = index;
651 666845 : q->location = location;
652 666845 : q->is_label = is_label;
653 666845 : }
654 :
655 : /* Record the LABEL label in the goto queue contained in TF.
656 : TF is not null. */
657 :
/* Record the LABEL label in the goto queue contained in TF, if the
   jump actually escapes TF's try/finally.  STMT is the treemple to
   queue (the goto itself, or the address of a cond/asm label
   operand).  TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
			    location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  /* Find (or add) LABEL's slot in dest_array; INDEX identifies the
     destination in the queue entry.  */
  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
	if (tf->dest_array[index] == label)
	  break;
      if (index == n)
	tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
701 :
702 : /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
703 : node, and if so record that fact in the goto queue associated with that
704 : try_finally node. */
705 :
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  GIMPLE_CONDs and asm-goto labels are handled by
   recording the address of the label operand instead of the statement.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  /* Not inside a try/finally: nothing can escape.  */
  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Operands 2 and 3 are the true and false labels.  */
	new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_true_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
	new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_false_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
      }
      break;

    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
				  gimple_location (stmt));
      break;

    case GIMPLE_ASM:
      if (int n = gimple_asm_nlabels (as_a <gasm *> (stmt)))
	{
	  new_stmt.g = stmt;
	  gasm *asm_stmt = as_a <gasm *> (stmt);
	  for (int i = 0; i < n; ++i)
	    {
	      tree elt = gimple_asm_label_op (asm_stmt, i);
	      new_stmt.tp = &TREE_VALUE (elt);
	      record_in_goto_queue_label (tf, new_stmt, TREE_VALUE (elt),
					  gimple_location (stmt));
	    }
	}
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
762 :
763 :
764 : #if CHECKING_P
765 : /* We do not process GIMPLE_SWITCHes for now. As long as the original source
766 : was in fact structured, and we've not yet done jump threading, then none
767 : of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
768 :
769 : static void
770 50517 : verify_norecord_switch_expr (struct leh_state *state,
771 : gswitch *switch_expr)
772 : {
773 50517 : struct leh_tf_state *tf = state->tf;
774 50517 : size_t i, n;
775 :
776 50517 : if (!tf)
777 : return;
778 :
779 30138 : n = gimple_switch_num_labels (switch_expr);
780 :
781 249080 : for (i = 0; i < n; ++i)
782 : {
783 218942 : treemple temp;
784 218942 : tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
785 218942 : temp.t = lab;
786 218942 : gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
787 : }
788 : }
789 : #else
790 : #define verify_norecord_switch_expr(state, switch_expr)
791 : #endif
792 :
793 : /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
794 : non-null, insert it before the new branch. */
795 :
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  The original return
   becomes Q->cont_stmt so it can be re-emitted after the finally
   block runs.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple *x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happens because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
830 :
831 : /* Similar, but easier, for GIMPLE_GOTO. */
832 :
833 : static void
834 666845 : do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
835 : struct leh_tf_state *tf)
836 : {
837 666845 : ggoto *x;
838 :
839 666845 : gcc_assert (q->is_label);
840 :
841 666845 : q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
842 :
843 666845 : if (mod)
844 1314 : gimple_seq_add_seq (&q->repl_stmt, mod);
845 :
846 666845 : x = gimple_build_goto (finlab);
847 666845 : gimple_set_location (x, q->location);
848 666845 : gimple_seq_add_stmt (&q->repl_stmt, x);
849 666845 : }
850 :
851 : /* Emit a standard landing pad sequence into SEQ for REGION. */
852 :
853 : static void
854 855417 : emit_post_landing_pad (gimple_seq *seq, eh_region region)
855 : {
856 855417 : eh_landing_pad lp = region->landing_pads;
857 855417 : glabel *x;
858 :
859 855417 : if (lp == NULL)
860 0 : lp = gen_eh_landing_pad (region);
861 :
862 855417 : lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
863 855417 : EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
864 :
865 855417 : x = gimple_build_label (lp->post_landing_pad);
866 855417 : gimple_seq_add_stmt (seq, x);
867 855417 : }
868 :
869 : /* Emit a RESX statement into SEQ for REGION. */
870 :
871 : static void
872 855415 : emit_resx (gimple_seq *seq, eh_region region)
873 : {
874 855415 : gresx *x = gimple_build_resx (region->index);
875 855415 : gimple_seq_add_stmt (seq, x);
876 855415 : if (region->outer)
877 506794 : record_stmt_eh_region (region->outer, x);
878 855415 : }
879 :
880 : /* Note that the current EH region may contain a throw, or a
881 : call to a function which itself may contain a throw. */
882 :
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  Propagates
   the mark outward; bitmap_set_bit returns true only when the bit
   changed, so the walk stops as soon as an already-marked region is
   reached (its ancestors are then marked too), at a MUST_NOT_THROW
   region (which does not propagate), or at the outermost level.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
	break;
      region = region->outer;
      if (region == NULL)
	break;
    }
}
895 :
896 : /* Check if REGION has been marked as containing a throw. If REGION is
897 : NULL, this predicate is false. */
898 :
899 : static inline bool
900 1246415 : eh_region_may_contain_throw (eh_region r)
901 : {
902 1246415 : return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
903 : }
904 :
905 : /* We want to transform
906 : try { body; } catch { stuff; }
907 : to
908 : normal_sequence:
909 : body;
910 : over:
911 : eh_sequence:
912 : landing_pad:
913 : stuff;
914 : goto over;
915 :
916 : TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
917 : should be placed before the second operand, or NULL. OVER is
918 : an existing label that should be put at the exit, or NULL. */
919 :
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple *x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  /* If the cleanup can fall off its end, it must branch back to the
     code after the try instead of falling into whatever follows in
     eh_seq.  */
  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
950 :
951 : /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
952 : Make sure to record all new labels found. */
953 :
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  Statements without a
   location of their own inherit LOC.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
			     location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  /* Re-set the block after changing the location, since
	     gimple_set_location overwrites the whole location (which
	     includes the block).  */
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, loc);
	  gimple_set_block (stmt, block);
	}
    }

  /* The copy contains fresh labels; enter them into the finally tree
     under the enclosing try/finally, if any.  */
  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
981 :
982 : /* A subroutine of lower_try_finally. Create a fallthru label for
983 : the given try_finally state. The only tricky bit here is that
984 : we have to make sure to record the label in our outer context. */
985 :
986 : static tree
987 113873 : lower_try_finally_fallthru_label (struct leh_tf_state *tf)
988 : {
989 113873 : tree label = tf->fallthru_label;
990 113873 : treemple temp;
991 :
992 113873 : if (!label)
993 : {
994 113873 : label = create_artificial_label (gimple_location (tf->try_finally_expr));
995 113873 : tf->fallthru_label = label;
996 113873 : if (tf->outer->tf)
997 : {
998 55158 : temp.t = label;
999 55158 : record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
1000 : }
1001 : }
1002 113873 : return label;
1003 : }
1004 :
1005          : /* A subroutine of lower_try_finally.  If FINALLY consists of a
1006          :    GIMPLE_EH_ELSE node, return it.  */
1007 :
1008 : static inline geh_else *
1009 2612944 : get_eh_else (gimple_seq finally)
1010 : {
1011 2612944 : gimple *x = gimple_seq_first_stmt (finally);
1012 2612944 : if (x && gimple_code (x) == GIMPLE_EH_ELSE)
1013 : {
1014 711 : gcc_assert (gimple_seq_singleton_p (finally));
1015 711 : return as_a <geh_else *> (x);
1016 : }
1017 : return NULL;
1018 : }
1019 :
1020 : /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
1021 : langhook returns non-null, then the language requires that the exception
1022 : path out of a try_finally be treated specially. To wit: the code within
1023 : the finally block may not itself throw an exception. We have two choices
1024 : here. First we can duplicate the finally block and wrap it in a
1025 : must_not_throw region. Second, we can generate code like
1026 :
1027 : try {
1028 : finally_block;
1029 : } catch {
1030 : if (fintmp == eh_edge)
1031 : protect_cleanup_actions;
1032 : }
1033 :
1034 : where "fintmp" is the temporary used in the switch statement generation
1035 : alternative considered below. For the nonce, we always choose the first
1036 : option.
1037 :
1038 : THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
1039 :
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  gimple_seq finally = gimple_try_cleanup (tf->top_p);

  /* EH_ELSE doesn't come from user code; only compiler generated stuff.
     It does need to be handled here, so as to separate the (different)
     EH path from the normal path.  But we should not attempt to wrap
     it with a must-not-throw node (which indeed gets in the way).  */
  if (geh_else *eh_else = get_eh_else (finally))
    {
      /* Keep the normal-path body as the cleanup; only the EH-path
	 ("else") body is routed into the exception sequence below.  */
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
      finally = gimple_eh_else_e_body (eh_else);

      /* Let the ELSE see the exception that's being processed, but
	 since the cleanup is outside the try block, process it with
	 outer_state, otherwise it may be used as a cleanup for
	 itself, and Bad Things (TM) ensue.  */
      eh_region save_ehp = outer_state->ehp_region;
      outer_state->ehp_region = this_state->cur_region;
      lower_eh_constructs_1 (outer_state, &finally);
      outer_state->ehp_region = save_ehp;
    }
  else
    {
      /* First check for nothing to do.  Returning early here leaves
	 tf->may_throw set, so the caller emits the EH path normally.  */
      if (lang_hooks.eh_protect_cleanup_actions == NULL)
	return;
      tree actions = lang_hooks.eh_protect_cleanup_actions ();
      if (actions == NULL)
	return;

      /* THIS_STATE is null for a try-cleanup; only a real try-finally
	 needs a duplicate of the cleanup for the EH path.  */
      if (this_state)
	finally = lower_try_finally_dup_block (finally, outer_state,
	  gimple_location (tf->try_finally_expr));

      /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
	 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
	 to be in an enclosing scope, but needs to be implemented at this level
	 to avoid a nesting violation (see wrap_temporary_cleanups in
	 cp/decl.cc).  Since it's logically at an outer level, we should call
	 terminate before we get to it, so strip it away before adding the
	 MUST_NOT_THROW filter.  */
      gimple_stmt_iterator gsi = gsi_start (finally);
      gimple *x = !gsi_end_p (gsi) ? gsi_stmt (gsi) : NULL;
      if (x
	  && gimple_code (x) == GIMPLE_TRY
	  && gimple_try_kind (x) == GIMPLE_TRY_CATCH
	  && gimple_try_catch_is_cleanup (x))
	{
	  gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
	  gsi_remove (&gsi, false);
	}

      /* Wrap the block with protect_cleanup_actions as the action.  */
      geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
      gtry *try_stmt = gimple_build_try (finally,
					 gimple_seq_alloc_with_stmt (eh_mnt),
					 GIMPLE_TRY_CATCH);
      finally = lower_eh_must_not_throw (outer_state, try_stmt);
    }

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (gimple_seq_may_fallthru (finally))
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
1114 :
1115 : /* A subroutine of lower_try_finally. We have determined that there is
1116 : no fallthru edge out of the finally block. This means that there is
1117 : no outgoing edge corresponding to any incoming edge. Restructure the
1118 : try_finally node for this special case. */
1119 :
static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple *x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY. */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  /* Redirect every queued edge to the label at the head of the finally
     block; a negative index marks a return, otherwise it's a goto.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      /* The normal-path body runs inline after the label...  */
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  /* ...while the EH-path body goes into the exception
	     sequence after the landing pad.  */
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  /* The EH path branches to LAB, sharing the single copy of
	     the finally block emitted above.  */
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
1181 :
1182 : /* A subroutine of lower_try_finally. We have determined that there is
1183 : exactly one destination of the finally block. Restructure the
1184 : try_finally node for this special case. */
1185 :
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple *x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (eh_else);
      else
	finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  /* Statements without a location inherit the try_finally's location;
     keep each statement's lexical block across the change.  */
  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
	  gimple_set_block (stmt, block);
	}
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  /* Otherwise the finally block is reached only by redirected gotos or
     returns; emit it once behind a fresh label.  */
  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block. */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
1283 :
1284 : /* A subroutine of lower_try_finally. There are multiple edges incoming
1285 : and outgoing from the finally block. Implement this by duplicating the
1286 : finally block for every destination. */
1287 :
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      /* One copy of the finally block for the fallthru edge, followed
	 by a jump to the shared fallthru label.  */
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
	 since it is only emitted once.  */
      if (eh_else)
	seq = gimple_eh_else_e_body (eh_else);
      else
	seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      /* LABELS maps each distinct destination to the first queue node
	 targeting it; all returns share the slot RETURN_INDEX.  */
      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      /* Emit one copy of the finally block per reached destination,
	 each behind a fresh label that redirected edges will target,
	 followed by the original continuation statement.  */
      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state, q->location);
	  lower_eh_constructs_1 (state, &seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      /* Redirect the remaining queue nodes to the labels created
	 above; they share the copies already emitted.  */
      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
1412 :
1413 : /* A subroutine of lower_try_finally. There are multiple edges incoming
1414 : and outgoing from the finally block. Implement this by instrumenting
1415 : each incoming edge and creating a switch statement at the end of the
1416 : finally block that branches to the appropriate destination. */
1417 :
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  auto_vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  gimple *switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple *> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  Case indices are laid
     out as: goto destinations first, then return, then EH, then
     fallthru, each of the latter present only if that edge exists.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      /* Falling off the try body sets FINALLY_TMP to FALLTHRU_INDEX;
	 the matching case branches to the shared fallthru label.  */
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node,
					      fallthru_index));
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (finally_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	  emit_resx (&eh_seq, tf->region);
	}

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      /* The EH path sets FINALLY_TMP to EH_INDEX, runs the finally
	 block, and resumes unwinding via the matching RESX case.  */
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge. */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
	{
	  /* A negative index marks a return edge.  */
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node,
						  return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
	{
	  tree case_lab;
	  tmp = build_int_cst (integer_type_node, switch_id);
	  case_lab = build_case_label (tmp, NULL,
				       create_artificial_label (tf_loc));
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  */
	  if (!cont_map)
	    cont_map = new hash_map<tree, gimple *>;
	  cont_map->put (case_lab, q->cont_stmt);
	  case_label_vec.quick_push (case_lab);
	}
    }
  /* Emit the body of each goto-destination case: the case label
     followed by the continuation statement saved above.  */
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple *cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
				     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
1623 :
1624          : /* Decide whether or not we are going to duplicate the finally block.
1625          :    There are several considerations.
1626          :
1627          :    Chief among them, we'd like to prevent egregious code growth.  One
1628          :    way to do this is to estimate the size of the finally block, multiply
1629          :    that by the number of copies we'd need to make, and compare against
1630          :    the estimate of the size of the switch machinery we'd have to add.  */
1631 :
1632 : static bool
1633 208872 : decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1634 : {
1635 208872 : int f_estimate, sw_estimate;
1636 208872 : geh_else *eh_else;
1637 :
1638 : /* If there's an EH_ELSE involved, the exception path is separate
1639 : and really doesn't come into play for this computation. */
1640 208872 : eh_else = get_eh_else (finally);
1641 208872 : if (eh_else)
1642 : {
1643 12 : ndests -= may_throw;
1644 12 : finally = gimple_eh_else_n_body (eh_else);
1645 : }
1646 :
1647 208872 : if (!optimize)
1648 : {
1649 16686 : gimple_stmt_iterator gsi;
1650 :
1651 16686 : if (ndests == 1)
1652 : return true;
1653 :
1654 42207 : for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1655 : {
1656 : /* Duplicate __builtin_stack_restore in the hope of eliminating it
1657 : on the EH paths and, consequently, useless cleanups. */
1658 29041 : gimple *stmt = gsi_stmt (gsi);
1659 29041 : if (!is_gimple_debug (stmt)
1660 29041 : && !gimple_clobber_p (stmt)
1661 32678 : && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
1662 : return false;
1663 : }
1664 : return true;
1665 : }
1666 :
1667 : /* Finally estimate N times, plus N gotos. */
1668 192186 : f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
1669 192186 : f_estimate = (f_estimate + 1) * ndests;
1670 :
1671 : /* Switch statement (cost 10), N variable assignments, N gotos. */
1672 192186 : sw_estimate = 10 + 2 * ndests;
1673 :
1674 : /* Optimize for size clearly wants our best guess. */
1675 192186 : if (optimize_function_for_size_p (cfun))
1676 2382 : return f_estimate < sw_estimate;
1677 :
1678 : /* ??? These numbers are completely made up so far. */
1679 189804 : if (optimize > 1)
1680 372474 : return f_estimate < 100 || f_estimate < sw_estimate * 2;
1681 : else
1682 7127 : return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1683 : }
1684 :
1685          : /* STATE's outer_non_cleanup region is the enclosing region for a
1686          :    possible cleanup region, or the region itself.  Returns TRUE if such
1687          :    a region would be unreachable.
1688 :
1689 : Cleanup regions within a must-not-throw region aren't actually reachable
1690 : even if there are throwing stmts within them, because the personality
1691 : routine will call terminate before unwinding. */
1692 :
1693 : static bool
1694 1378157 : cleanup_is_dead_in (leh_state *state)
1695 : {
1696 1378157 : if (flag_checking)
1697 : {
1698 1378145 : eh_region reg = state->cur_region;
1699 26986823 : while (reg && reg->type == ERT_CLEANUP)
1700 25608678 : reg = reg->outer;
1701 :
1702 1378145 : gcc_assert (reg == state->outer_non_cleanup);
1703 : }
1704 :
1705 1378157 : eh_region reg = state->outer_non_cleanup;
1706 1378157 : return (reg && reg->type == ERT_MUST_NOT_THROW);
1707 : }
1708 :
1709 : /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
1710 : to a sequence of labels and blocks, plus the exception region trees
1711 : that record all the magic. This is complicated by the need to
1712 : arrange for the FINALLY block to be executed on all exits. */
1713 :
static gimple_seq
lower_try_finally (struct leh_state *state, gtry *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      /* No reachable cleanup region is needed; lower within the
	 current region.  */
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.outer_non_cleanup = state->outer_non_cleanup;
  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  /* Collect this nest's EH statements in a fresh eh_seq; the old one
     is restored and spliced back together at the end.  */
  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
				    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0. */
      gimple *x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    delete this_tf.goto_queue_map;

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
	eh_seq = old_eh_seq;
      else
	{
	  gimple_seq new_eh_seq = eh_seq;
	  eh_seq = old_eh_seq;
	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
	}
    }

  return this_tf.top_p_seq;
}
1817 :
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
1821 :
static gimple_seq
lower_catch (struct leh_state *state, gtry *tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple *x;
  geh_dispatch *eh_dispatch;
  location_t try_catch_loc = gimple_location (tp);
  location_t catch_loc = UNKNOWN_LOCATION;

  /* Open a TRY region so statements lowered within the body are
     recorded against this try/catch construct.  */
  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
      this_state.outer_non_cleanup = this_state.cur_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* If nothing in the body can throw, the handlers are unreachable;
     return just the lowered body.  */
  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  /* The handler sequence starts with an EH_DISPATCH for this region,
     followed by a resx for exceptions no handler accepts.  */
  new_seq = NULL;
  eh_dispatch = gimple_build_eh_dispatch (try_region->index);
  gimple_seq_add_stmt (&new_seq, eh_dispatch);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.outer_non_cleanup = state->outer_non_cleanup;
  this_state.ehp_region = try_region;

  /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
     itself, so that e.g. for coverage purposes the nested cleanups don't
     appear before the cleanup body.  See PR64634 for details.  */
  gimple_seq old_eh_seq = eh_seq;
  eh_seq = NULL;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gcatch *catch_stmt;
      gimple_seq handler;

      catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
      /* Remember the first handler's location for the dispatch stmt.  */
      if (catch_loc == UNKNOWN_LOCATION)
	catch_loc = gimple_location (catch_stmt);
      c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));

      handler = gimple_catch_handler (catch_stmt);
      lower_eh_constructs_1 (&this_state, &handler);

      /* Each handler gets its own artificial entry label.  */
      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      /* A handler that can fall through needs a goto past the remaining
	 handlers to a shared exit label.  */
      if (gimple_seq_may_fallthru (new_seq))
	{
	  if (!out_label)
	    out_label = create_artificial_label (try_catch_loc);

	  x = gimple_build_goto (out_label);
	  gimple_seq_add_stmt (&new_seq, x);
	}
      /* A catch-all handler terminates the list; anything after it
	 would be unreachable.  */
      if (!c->type_list)
	break;
    }

  /* Try to set a location on the dispatching construct to avoid inheriting
     the location of the previous statement.  */
  gimple_set_location (eh_dispatch, catch_loc);

  gimple_try_set_cleanup (tp, new_seq);

  /* Splice the eh_seq accumulated while lowering the handlers after the
     outer eh_seq saved above (see the PR64634 note).  */
  gimple_seq new_eh_seq = eh_seq;
  eh_seq = old_eh_seq;
  gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
  gimple_seq_add_seq (&eh_seq, new_eh_seq);
  return ret_seq;
}
1910 :
1911 : /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1912 : GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1913 : region trees that record all the magic. */
1914 :
static gimple_seq
lower_eh_filter (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple *inner, *x;
  gimple_seq new_seq;

  /* The cleanup sequence holds a single GIMPLE_EH_FILTER statement.  */
  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  /* Open an ALLOWED_EXCEPTIONS region carrying the filter's type list.  */
  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
					   gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
      this_state.outer_non_cleanup = this_state.cur_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* If the body cannot throw, the filter never fires; drop it.  */
  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  /* Build the dispatch: an EH_DISPATCH for the region followed by a resx
     for exceptions the filter does allow to propagate.  */
  new_seq = NULL;
  x = gimple_build_eh_dispatch (this_region->index);
  gimple_set_location (x, gimple_location (tp));
  gimple_seq_add_stmt (&new_seq, x);
  emit_resx (&new_seq, this_region);

  /* Label the entry to the filter-failure code.  */
  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  /* Lower and append the failure action (e.g. calling unexpected).  */
  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
1958 :
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */
1962 :
1963 : static gimple_seq
1964 1232209 : lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
1965 : {
1966 1232209 : struct leh_state this_state = *state;
1967 :
1968 1232209 : if (flag_exceptions)
1969 : {
1970 1232209 : gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1971 1232209 : eh_region this_region;
1972 :
1973 1232209 : this_region = gen_eh_region_must_not_throw (state->cur_region);
1974 1232209 : this_region->u.must_not_throw.failure_decl
1975 1232209 : = gimple_eh_must_not_throw_fndecl (
1976 1232209 : as_a <geh_mnt *> (inner));
1977 1232209 : this_region->u.must_not_throw.failure_loc
1978 1232209 : = LOCATION_LOCUS (gimple_location (tp));
1979 :
1980 : /* In order to get mangling applied to this decl, we must mark it
1981 : used now. Otherwise, pass_ipa_free_lang_data won't think it
1982 : needs to happen. */
1983 1232209 : TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1984 :
1985 1232209 : this_state.cur_region = this_region;
1986 1232209 : this_state.outer_non_cleanup = this_state.cur_region;
1987 : }
1988 :
1989 1232209 : lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1990 :
1991 1232209 : return gimple_try_eval (tp);
1992 : }
1993 :
1994 : /* Implement a cleanup expression. This is similar to try-finally,
1995 : except that we only execute the cleanup block for exception edges. */
1996 :
static gimple_seq
lower_cleanup (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state);

  /* Only open a cleanup region when the cleanup could actually run.  */
  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
      this_state.outer_non_cleanup = state->outer_non_cleanup;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* If the body cannot throw, the cleanup has no EH path to run on;
     return just the lowered body.  */
  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
					fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
	{
	  gimple *x = gimple_build_label (fake_tf.fallthru_label);
	  gimple_seq_add_stmt (&result, x);
	}
    }
  return result;
}
2051 :
2052 : /* Main loop for lowering eh constructs. Also moves gsi to the next
2053 : statement. */
2054 :
static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple *x;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	tree fndecl = gimple_call_fndecl (stmt);
	tree rhs, lhs;

	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_POINTER:
	      /* The front end may have generated a call to
		 __builtin_eh_pointer (0) within a catch region.  Replace
		 this zero argument with the current catch region number.  */
	      if (state->ehp_region)
		{
		  tree nr = build_int_cst (integer_type_node,
					   state->ehp_region->index);
		  gimple_call_set_arg (stmt, 0, nr);
		}
	      else
		{
		  /* The user has done something silly.  Remove it.  */
		  rhs = null_pointer_node;
		  goto do_replace;
		}
	      break;

	    case BUILT_IN_EH_FILTER:
	      /* ??? This should never appear, but since it's a builtin it
		 is accessible to abuse by users.  Just remove it and
		 replace the use with the arbitrary value zero.  */
	      rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	    do_replace:
	      lhs = gimple_call_lhs (stmt);
	      x = gimple_build_assign (lhs, rhs);
	      gsi_insert_before (gsi, x, GSI_SAME_STMT);
	      /* FALLTHRU */

	    case BUILT_IN_EH_COPY_VALUES:
	      /* Likewise this should not appear.  Remove it.  */
	      gsi_remove (gsi, true);
	      return;

	    default:
	      break;
	    }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw, use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
	 available on the EH edge.  Only do so for statements that
	 potentially fall through (no noreturn calls e.g.), otherwise
	 this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (cfun, stmt)
	  && gimple_has_lhs (stmt)
	  && gimple_stmt_may_fallthru (stmt)
	  && !tree_could_throw_p (gimple_get_lhs (stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  tree tmp = create_tmp_var (TREE_TYPE (lhs));
	  gimple *s = gimple_build_assign (lhs, tmp);
	  gimple_set_location (s, gimple_location (stmt));
	  gimple_set_block (s, gimple_block (stmt));
	  gimple_set_lhs (stmt, tmp);
	  gsi_insert_after (gsi, s, GSI_SAME_STMT);
	}
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (cfun, stmt))
	{
	  record_stmt_eh_region (state->cur_region, stmt);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_ASM:
      /* These may transfer control out of a protected region; record
	 them so try/finally lowering can redirect them.  */
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
      break;

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  replace = lower_try_finally (state, try_stmt);
	else
	  {
	    /* Dispatch on the first statement of the cleanup to pick
	       the matching lowering routine.  */
	    x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
	    if (!x)
	      {
		replace = gimple_try_eval (try_stmt);
		lower_eh_constructs_1 (state, &replace);
	      }
	    else
	      switch (gimple_code (x))
		{
		case GIMPLE_CATCH:
		  replace = lower_catch (state, try_stmt);
		  break;
		case GIMPLE_EH_FILTER:
		  replace = lower_eh_filter (state, try_stmt);
		  break;
		case GIMPLE_EH_MUST_NOT_THROW:
		  replace = lower_eh_must_not_throw (state, try_stmt);
		  break;
		case GIMPLE_EH_ELSE:
		  /* This code is only valid with GIMPLE_TRY_FINALLY.  */
		  gcc_unreachable ();
		default:
		  replace = lower_cleanup (state, try_stmt);
		  break;
		}
	  }
      }

      /* Remove the old stmt and insert the transformed sequence
	 instead. */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next () */
      return;

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */
      gcc_unreachable ();

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
2206 :
2207 : /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2208 :
2209 : static void
2210 7768158 : lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2211 : {
2212 7768158 : gimple_stmt_iterator gsi;
2213 105248157 : for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2214 89828982 : lower_eh_constructs_2 (state, &gsi);
2215 7768158 : }
2216 :
namespace {

/* Descriptor for the EH lowering pass ("eh"): requires lowered control
   flow (PROP_gimple_lcf) and provides lowered EH (PROP_gimple_leh).  */
const pass_data pass_data_lower_eh =
{
  GIMPLE_PASS, /* type */
  "eh", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  PROP_gimple_leh, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh : public gimple_opt_pass
{
public:
  pass_lower_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_lower_eh

/* Pass entry point: lower all EH constructs in the current function's
   body, append the accumulated eh_seq, and set a default personality
   routine when the function needs one.  */

unsigned int
pass_lower_eh::execute (function *fun)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  /* Set up the file-level state used by the lowering helpers.  */
  finally_tree = new hash_table<finally_tree_hasher> (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  /* Tear down the per-function state again.  */
  delete finally_tree;
  finally_tree = NULL;
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (fun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_eh (gcc::context *ctxt)
{
  return new pass_lower_eh (ctxt);
}
2294 :
2295 : /* Create the multiple edges from an EH_DISPATCH statement to all of
2296 : the possible handlers for its EH region. Return true if there's
2297 : no fallthru edge; false if there is. */
2298 :
2299 : bool
2300 50677 : make_eh_dispatch_edges (geh_dispatch *stmt)
2301 : {
2302 50677 : eh_region r;
2303 50677 : eh_catch c;
2304 50677 : basic_block src, dst;
2305 :
2306 50677 : r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2307 50677 : src = gimple_bb (stmt);
2308 :
2309 50677 : switch (r->type)
2310 : {
2311 50001 : case ERT_TRY:
2312 56661 : for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2313 : {
2314 53168 : dst = label_to_block (cfun, c->label);
2315 53168 : make_edge (src, dst, 0);
2316 :
2317 : /* A catch-all handler doesn't have a fallthru. */
2318 53168 : if (c->type_list == NULL)
2319 : return false;
2320 : }
2321 : break;
2322 :
2323 676 : case ERT_ALLOWED_EXCEPTIONS:
2324 676 : dst = label_to_block (cfun, r->u.allowed.label);
2325 676 : make_edge (src, dst, 0);
2326 676 : break;
2327 :
2328 0 : default:
2329 0 : gcc_unreachable ();
2330 : }
2331 :
2332 : return true;
2333 : }
2334 :
2335 : /* Create the single EH edge from STMT to its nearest landing pad,
2336 : if there is such a landing pad within the current function. */
2337 :
2338 : edge
2339 5931669 : make_eh_edge (gimple *stmt)
2340 : {
2341 5931669 : basic_block src, dst;
2342 5931669 : eh_landing_pad lp;
2343 5931669 : int lp_nr;
2344 :
2345 5931669 : lp_nr = lookup_stmt_eh_lp (stmt);
2346 5931669 : if (lp_nr <= 0)
2347 : return NULL;
2348 :
2349 3751331 : lp = get_eh_landing_pad_from_number (lp_nr);
2350 3751331 : gcc_assert (lp != NULL);
2351 :
2352 3751331 : src = gimple_bb (stmt);
2353 3751331 : dst = label_to_block (cfun, lp->post_landing_pad);
2354 3751331 : return make_edge (src, dst, EDGE_EH);
2355 : }
2356 :
2357 : /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2358 : do not actually perform the final edge redirection.
2359 :
2360 : CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2361 : we intend to change the destination EH region as well; this means
2362 : EH_LANDING_PAD_NR must already be set on the destination block label.
2363 : If false, we're being called from generic cfg manipulation code and we
2364 : should preserve our place within the region tree. */
2365 :
static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple *throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  /* Find the landing pad the edge currently targets.  */
  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  /* The throwing statement is the last statement of the source block.  */
  throw_stmt = *gsi_last_bb (edge_in->src);
  gcc_checking_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.
     After the loop, E is non-NULL iff some other EH edge remains.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
2443 :
2444 : /* Redirect EH edge E to NEW_BB. */
2445 :
2446 : edge
2447 966720 : redirect_eh_edge (edge edge_in, basic_block new_bb)
2448 : {
2449 966720 : redirect_eh_edge_1 (edge_in, new_bb, false);
2450 966720 : return ssa_redirect_edge (edge_in, new_bb);
2451 : }
2452 :
2453 : /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2454 : labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2455 : The actual edge update will happen in the caller. */
2456 :
2457 : void
2458 0 : redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
2459 : {
2460 0 : tree new_lab = gimple_block_label (new_bb);
2461 0 : bool any_changed = false;
2462 0 : basic_block old_bb;
2463 0 : eh_region r;
2464 0 : eh_catch c;
2465 :
2466 0 : r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2467 0 : switch (r->type)
2468 : {
2469 0 : case ERT_TRY:
2470 0 : for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2471 : {
2472 0 : old_bb = label_to_block (cfun, c->label);
2473 0 : if (old_bb == e->dest)
2474 : {
2475 0 : c->label = new_lab;
2476 0 : any_changed = true;
2477 : }
2478 : }
2479 : break;
2480 :
2481 0 : case ERT_ALLOWED_EXCEPTIONS:
2482 0 : old_bb = label_to_block (cfun, r->u.allowed.label);
2483 0 : gcc_assert (old_bb == e->dest);
2484 0 : r->u.allowed.label = new_lab;
2485 0 : any_changed = true;
2486 0 : break;
2487 :
2488 0 : default:
2489 0 : gcc_unreachable ();
2490 : }
2491 :
2492 0 : gcc_assert (any_changed);
2493 0 : }
2494 :
2495 : /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2496 :
bool
operation_could_trap_helper_p (enum tree_code op,
			       bool fp_operation,
			       bool honor_trapv,
			       bool honor_nans,
			       bool honor_snans,
			       tree divisor,
			       bool *handled)
{
  /* Assume OP is classified below; the cases that can't decide reset
     *HANDLED to false.  */
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* Integer division traps when the divisor is zero or unknown.  */
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
	return true;
      if (TREE_CODE (divisor) == VECTOR_CST)
	{
	  /* Inspired by initializer_each_zero_or_onep.  */
	  unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (divisor);
	  if (VECTOR_CST_STEPPED_P (divisor)
	      && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (divisor))
		  .is_constant (&nelts))
	    return true;
	  /* Any zero element makes the whole vector division trap.  */
	  for (unsigned int i = 0; i < nelts; ++i)
	    {
	      tree elt = vector_cst_elt (divisor, i);
	      if (integer_zerop (elt))
		return true;
	    }
	}
      return false;

    case RDIV_EXPR:
      if (fp_operation)
	{
	  if (honor_snans)
	    return true;
	  return flag_trapping_math;
	}
      /* Fixed point operations also use RDIV_EXPR.  */
      if (!TREE_CONSTANT (divisor) || fixed_zerop (divisor))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
    /* MIN/MAX similar as LT/LE/GT/GE. */
    case MIN_EXPR:
    case MAX_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* These comparisons trap only on signaling NaNs.  */
      return honor_snans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case ABSU_EXPR:
      /* ABSU_EXPR never traps.  */
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case COMPLEX_EXPR:
    case CONSTRUCTOR:
    case VEC_DUPLICATE_EXPR:
    case PAREN_EXPR:
      /* Constructing an object cannot trap.  */
      return false;

    case FIX_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
      /* The FIX_TRUNC family are always potentially trapping.  */
      return flag_trapping_math;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Whether *COND_EXPR can trap depends on whether the
	 first argument can trap, so signal it as not handled.
	 Whether lhs is floating or not doesn't matter.  */
      *handled = false;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      *handled = false;
      return false;
    }
}
2624 :
2625 : /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2626 : on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2627 : type operands that may trap. If OP is a division operator, DIVISOR contains
2628 : the value of the divisor. */
2629 :
2630 : bool
2631 4060536974 : operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2632 : tree divisor)
2633 : {
2634 44679579 : bool honor_nans = (fp_operation && flag_trapping_math
2635 4093569417 : && !flag_finite_math_only);
2636 44679579 : bool honor_snans = fp_operation && flag_signaling_nans != 0;
2637 4060536974 : bool handled;
2638 :
2639 : /* This function cannot tell whether or not COND_EXPR could trap,
2640 : because that depends on its condition op. */
2641 4060536974 : gcc_assert (op != COND_EXPR);
2642 :
2643 4060536974 : if (TREE_CODE_CLASS (op) != tcc_comparison
2644 : && TREE_CODE_CLASS (op) != tcc_unary
2645 4060536974 : && TREE_CODE_CLASS (op) != tcc_binary)
2646 : return false;
2647 :
2648 272069666 : return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2649 : honor_nans, honor_snans, divisor,
2650 272069666 : &handled);
2651 : }
2652 :
2653 :
2654 : /* Returns true if it is possible to prove that the index of
2655 : an array access REF (an ARRAY_REF expression) falls into the
2656 : array bounds. */
2657 :
2658 : static bool
2659 249912504 : in_array_bounds_p (tree ref)
2660 : {
2661 249912504 : tree idx = TREE_OPERAND (ref, 1);
2662 249912504 : tree min, max;
2663 :
2664 249912504 : if (TREE_CODE (idx) != INTEGER_CST)
2665 : return false;
2666 :
2667 246885422 : min = array_ref_low_bound (ref);
2668 246885422 : max = array_ref_up_bound (ref);
2669 246885422 : if (!min
2670 246885422 : || !max
2671 246868952 : || TREE_CODE (min) != INTEGER_CST
2672 246868952 : || TREE_CODE (max) != INTEGER_CST)
2673 : return false;
2674 :
2675 246864084 : if (tree_int_cst_lt (idx, min)
2676 246864084 : || tree_int_cst_lt (max, idx))
2677 54321 : return false;
2678 :
2679 : return true;
2680 : }
2681 :
2682 : /* Returns true if it is possible to prove that the range of
2683 : an array access REF (an ARRAY_RANGE_REF expression) falls
2684 : into the array bounds. */
2685 :
2686 : static bool
2687 1884 : range_in_array_bounds_p (tree ref)
2688 : {
2689 1884 : tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2690 1884 : tree range_min, range_max, min, max;
2691 :
2692 1884 : range_min = TYPE_MIN_VALUE (domain_type);
2693 1884 : range_max = TYPE_MAX_VALUE (domain_type);
2694 1884 : if (!range_min
2695 1884 : || !range_max
2696 1884 : || TREE_CODE (range_min) != INTEGER_CST
2697 1884 : || TREE_CODE (range_max) != INTEGER_CST)
2698 : return false;
2699 :
2700 1884 : min = array_ref_low_bound (ref);
2701 1884 : max = array_ref_up_bound (ref);
2702 1884 : if (!min
2703 1884 : || !max
2704 1884 : || TREE_CODE (min) != INTEGER_CST
2705 1884 : || TREE_CODE (max) != INTEGER_CST)
2706 : return false;
2707 :
2708 1884 : if (tree_int_cst_lt (range_min, min)
2709 1884 : || tree_int_cst_lt (max, range_max))
2710 209 : return false;
2711 :
2712 : return true;
2713 : }
2714 :
2715 : /* Return true iff a BIT_FIELD_REF <(TYPE)???, SIZE, OFFSET> would access a bit
2716 : range that is known to be in bounds for TYPE. */
2717 :
2718 : bool
2719 588201 : access_in_bounds_of_type_p (tree type, poly_uint64 size, poly_uint64 offset)
2720 : {
2721 588201 : tree type_size_tree;
2722 588201 : poly_uint64 type_size_max, min = offset, wid = size, max;
2723 :
2724 588201 : type_size_tree = TYPE_SIZE (type);
2725 588201 : if (!type_size_tree || !poly_int_tree_p (type_size_tree, &type_size_max))
2726 21 : return false;
2727 :
2728 588180 : max = min + wid;
2729 588180 : if (maybe_lt (max, min)
2730 588180 : || maybe_lt (type_size_max, max))
2731 : return false;
2732 :
2733 : return true;
2734 : }
2735 :
2736 : /* Return whether an access at [off, refsz[ to an object spanning [0, size[
2737 : accesses storage outside of the object. */
2738 :
2739 : static bool
2740 243376010 : ref_outside_object_p (tree size, poly_offset_int off, tree refsz)
2741 : {
2742 243376010 : if (size == NULL_TREE
2743 243376010 : || refsz == NULL_TREE
2744 243362877 : || !poly_int_tree_p (size)
2745 243362834 : || !poly_int_tree_p (refsz)
2746 243346334 : || maybe_le (wi::to_poly_offset (size), off)
2747 486667169 : || maybe_gt (off + wi::to_poly_offset (refsz),
2748 : wi::to_poly_offset (size)))
2749 217203 : return true;
2750 : /* Now we are sure the whole base of the access is inside
2751 : the object. */
2752 : return false;
2753 : }
2754 :
2755 : /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2756 : location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2757 : This routine expects only GIMPLE lhs or rhs input. */
2758 :
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  /* In COND_EXPR and VEC_COND_EXPR only the condition may trap, but
     they won't appear as operands in GIMPLE form, so this is just for the
     GENERIC uses where it needs to recurse on the operands and so
     *COND_EXPR itself doesn't trap.  */
  if (TREE_CODE (expr) == COND_EXPR || TREE_CODE (expr) == VEC_COND_EXPR)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  /* Classify the operation: floating-point ops may trap on NaNs etc.,
     and integral ops trap on overflow under -ftrapv.  For comparisons
     the interesting type is that of the operands, not the (boolean)
     result type.  */
  if (t)
    {
      if (COMPARISON_CLASS_P (expr))
	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
	fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  /* For binary operations pass the second operand so a possibly
     trapping division by a constant can be recognized.  */
  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

  /* The operation itself cannot trap; decide whether EXPR as a memory
     reference can.  Wrapping components are peeled off and the switch
     re-entered via the RESTART label.  */
 restart:
  switch (code)
    {
    case BIT_FIELD_REF:
      /* A BIT_FIELD_REF of a decl traps if the selected bit range is
	 not provably within the decl's type.  */
      if (DECL_P (TREE_OPERAND (expr, 0))
	  && !access_in_bounds_of_type_p (TREE_TYPE (TREE_OPERAND (expr, 0)),
					  bit_field_size (expr),
					  bit_field_offset (expr)))
	return true;
      /* Fall through.  */

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      /* These handled components trap only if their base does.  */
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case TARGET_MEM_REF:
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      /* We cannot prove that the access is in-bounds when we have
	 variable-index TARGET_MEM_REFs.  */
      if (code == TARGET_MEM_REF
	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
	return true;
      /* A constant-offset access off the address of a decl traps only
	 if it provably leaves the underlying object.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
	  poly_offset_int off = mem_ref_offset (expr);
	  if (maybe_lt (off, 0))
	    return true;
	  if (TREE_CODE (base) == STRING_CST)
	    return maybe_le (TREE_STRING_LENGTH (base), off);
	  tree size = DECL_SIZE_UNIT (base);
	  tree refsz = TYPE_SIZE_UNIT (TREE_TYPE (expr));
	  return ref_outside_object_p (size, off, refsz);
	}
      /* Likewise an access through the incoming first argument of a
	 METHOD_TYPE function (the `this' pointer), bounded by the size
	 of the method's base type.  */
      if (cfun
	  && TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
	  && ((TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (expr, 0))
	       && (SSA_NAME_VAR (TREE_OPERAND (expr, 0))
		   == DECL_ARGUMENTS (cfun->decl)))
	      || TREE_OPERAND (expr, 0) == DECL_ARGUMENTS (cfun->decl)))
	{
	  poly_offset_int off = mem_ref_offset (expr);
	  if (maybe_lt (off, 0))
	    return true;
	  tree size = TYPE_SIZE_UNIT
	    (TYPE_METHOD_BASETYPE (TREE_TYPE (cfun->decl)));
	  tree refsz = TYPE_SIZE_UNIT (TREE_TYPE (expr));
	  return ref_outside_object_p (size, off, refsz);
	}
      /* Any other base: conservatively assume the access may trap.  */
      return true;

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      /* Internal function calls do not trap.  */
      if (CALL_EXPR_FN (expr) == NULL_TREE)
	return false;
      t = get_callee_fndecl (expr);
      /* Assume that indirect and calls to weak functions may trap.  */
      if (!t || !DECL_P (t))
	return true;
      if (DECL_WEAK (t))
	return tree_could_trap_p (t);
      return false;

    case FUNCTION_DECL:
      /* Assume that accesses to weak functions may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  cgraph_node *node = cgraph_node::get (expr);
	  if (node)
	    node = node->function_symbol ();
	  return !(node && node->in_other_partition);
	}
      return false;

    case VAR_DECL:
      /* Assume that accesses to weak vars may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  varpool_node *node = varpool_node::get (expr);
	  if (node)
	    node = node->ultimate_alias_target ();
	  return !(node && node->in_other_partition);
	}
      return false;

    default:
      return false;
    }
}
2920 :
2921 : /* Return non-NULL if there is an integer operation with trapping overflow
2922 : we can rewrite into non-trapping. Called via walk_tree from
2923 : rewrite_to_non_trapping_overflow. */
2924 :
2925 : static tree
2926 270 : find_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2927 : {
2928 270 : if (EXPR_P (*tp)
2929 159 : && ANY_INTEGRAL_TYPE_P (TREE_TYPE (*tp))
2930 407 : && !operation_no_trapping_overflow (TREE_TYPE (*tp), TREE_CODE (*tp)))
2931 9 : return *tp;
2932 261 : if (IS_TYPE_OR_DECL_P (*tp)
2933 258 : || (TREE_CODE (*tp) == SAVE_EXPR && data == NULL))
2934 3 : *walk_subtrees = 0;
2935 : return NULL_TREE;
2936 : }
2937 :
2938 : /* Rewrite selected operations into unsigned arithmetics, so that they
2939 : don't trap on overflow. */
2940 :
static tree
replace_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
{
  if (find_trapping_overflow (tp, walk_subtrees, data))
    {
      tree type = TREE_TYPE (*tp);
      tree utype = unsigned_type_for (type);
      /* We handle this node ourselves, including its operands, so
	 tell walk_tree not to recurse further.  */
      *walk_subtrees = 0;
      int len = TREE_OPERAND_LENGTH (*tp);
      /* First rewrite any trapping subexpressions of the operands.  */
      for (int i = 0; i < len; ++i)
	walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
		   data, (hash_set<tree> *) data);

      if (TREE_CODE (*tp) == ABS_EXPR)
	{
	  /* ABS_EXPR has a dedicated non-trapping form: ABSU_EXPR,
	     which yields the unsigned type; convert back afterwards.  */
	  TREE_SET_CODE (*tp, ABSU_EXPR);
	  TREE_TYPE (*tp) = utype;
	  *tp = fold_convert (type, *tp);
	}
      else
	{
	  /* Otherwise perform the operation in the corresponding
	     unsigned type (where overflow wraps instead of trapping)
	     and convert the result back to the original type.  */
	  TREE_TYPE (*tp) = utype;
	  len = TREE_OPERAND_LENGTH (*tp);
	  for (int i = 0; i < len; ++i)
	    TREE_OPERAND (*tp, i)
	      = fold_convert (utype, TREE_OPERAND (*tp, i));
	  *tp = fold_convert (type, *tp);
	}
    }
  return NULL_TREE;
}
2972 :
2973 : /* If any subexpression of EXPR can trap due to -ftrapv, rewrite it
2974 : using unsigned arithmetics to avoid traps in it. */
2975 :
2976 : tree
2977 46667 : rewrite_to_non_trapping_overflow (tree expr)
2978 : {
2979 46667 : if (!flag_trapv)
2980 46630 : return expr;
2981 37 : hash_set<tree> pset;
2982 37 : if (!walk_tree (&expr, find_trapping_overflow, &pset, &pset))
2983 34 : return expr;
2984 3 : expr = unshare_expr (expr);
2985 3 : pset.empty ();
2986 3 : walk_tree (&expr, replace_trapping_overflow, &pset, &pset);
2987 3 : return expr;
2988 37 : }
2989 :
2990 : /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be a
2991 : an assignment or a conditional) may throw. */
2992 :
static bool
stmt_could_throw_1_p (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  /* Derive the trapping context from the operation's type: for
     comparisons look at the operand type (the LHS is boolean), for
     unary/binary ops at the result type.  FP ops may trap on NaNs;
     integral ops may trap on overflow under -ftrapv.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
      else
	t = TREE_TYPE (gimple_assign_lhs (stmt));
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

  /* First check the LHS.  */
  if (tree_could_trap_p (gimple_assign_lhs (stmt)))
    return true;

  /* Check if the main expression may trap.  */
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
				       honor_nans, honor_snans,
				       gimple_assign_rhs2 (stmt),
				       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  */
  for (i = 1; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
3043 :
3044 :
3045 : /* Return true if statement STMT within FUN could throw an exception. */
3046 :
3047 : bool
3048 16592082202 : stmt_could_throw_p (function *fun, gimple *stmt)
3049 : {
3050 16592082202 : if (!flag_exceptions)
3051 : return false;
3052 :
3053 : /* The only statements that can throw an exception are assignments,
3054 : conditionals, calls, resx, and asms. */
3055 12346532045 : switch (gimple_code (stmt))
3056 : {
3057 : case GIMPLE_RESX:
3058 : return true;
3059 :
3060 1023516635 : case GIMPLE_CALL:
3061 1023516635 : return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
3062 :
3063 193722784 : case GIMPLE_COND:
3064 193722784 : {
3065 193722784 : if (fun && !fun->can_throw_non_call_exceptions)
3066 : return false;
3067 67312230 : gcond *cond = as_a <gcond *> (stmt);
3068 67312230 : tree lhs = gimple_cond_lhs (cond);
3069 67312230 : return operation_could_trap_p (gimple_cond_code (cond),
3070 67312230 : FLOAT_TYPE_P (TREE_TYPE (lhs)),
3071 67312230 : false, NULL_TREE);
3072 : }
3073 :
3074 3817116016 : case GIMPLE_ASSIGN:
3075 3817116016 : if ((fun && !fun->can_throw_non_call_exceptions)
3076 5497207082 : || gimple_clobber_p (stmt))
3077 : return false;
3078 1632121396 : return stmt_could_throw_1_p (as_a <gassign *> (stmt));
3079 :
3080 769692 : case GIMPLE_ASM:
3081 769692 : if (fun && !fun->can_throw_non_call_exceptions)
3082 : return false;
3083 59371 : return gimple_asm_volatile_p (as_a <gasm *> (stmt));
3084 :
3085 : default:
3086 : return false;
3087 : }
3088 : }
3089 :
3090 : /* Return true if STMT in function FUN must be assumed necessary because of
3091 : non-call exceptions. */
3092 :
3093 : bool
3094 25417438 : stmt_unremovable_because_of_non_call_eh_p (function *fun, gimple *stmt)
3095 : {
3096 25417438 : return (fun->can_throw_non_call_exceptions
3097 5782290 : && !fun->can_delete_dead_exceptions
3098 31178220 : && stmt_could_throw_p (fun, stmt));
3099 : }
3100 :
3101 : /* Return true if expression T could throw an exception. */
3102 :
3103 : bool
3104 41760013 : tree_could_throw_p (tree t)
3105 : {
3106 41760013 : if (!flag_exceptions)
3107 : return false;
3108 35236882 : if (TREE_CODE (t) == MODIFY_EXPR)
3109 : {
3110 0 : if (cfun->can_throw_non_call_exceptions
3111 0 : && tree_could_trap_p (TREE_OPERAND (t, 0)))
3112 : return true;
3113 0 : t = TREE_OPERAND (t, 1);
3114 : }
3115 :
3116 35236882 : if (TREE_CODE (t) == WITH_SIZE_EXPR)
3117 0 : t = TREE_OPERAND (t, 0);
3118 35236882 : if (TREE_CODE (t) == CALL_EXPR)
3119 406648 : return (call_expr_flags (t) & ECF_NOTHROW) == 0;
3120 34830234 : if (cfun->can_throw_non_call_exceptions)
3121 22274700 : return tree_could_trap_p (t);
3122 : return false;
3123 : }
3124 :
3125 : /* Return true if STMT can throw an exception that is not caught within its
3126 : function FUN. FUN can be NULL but the function is extra conservative
3127 : then. */
3128 :
3129 : bool
3130 865583787 : stmt_can_throw_external (function *fun, gimple *stmt)
3131 : {
3132 865583787 : int lp_nr;
3133 :
3134 865583787 : if (!stmt_could_throw_p (fun, stmt))
3135 : return false;
3136 45334613 : if (!fun)
3137 : return true;
3138 :
3139 45328318 : lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
3140 45328318 : return lp_nr == 0;
3141 : }
3142 :
3143 : /* Return true if STMT can throw an exception that is caught within its
3144 : function FUN. */
3145 :
3146 : bool
3147 14295619837 : stmt_can_throw_internal (function *fun, gimple *stmt)
3148 : {
3149 14295619837 : int lp_nr;
3150 :
3151 14295619837 : gcc_checking_assert (fun);
3152 14295619837 : if (!stmt_could_throw_p (fun, stmt))
3153 : return false;
3154 :
3155 606894417 : lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
3156 606894417 : return lp_nr > 0;
3157 : }
3158 :
3159 : /* Given a statement STMT in IFUN, if STMT can no longer throw, then
3160 : remove any entry it might have from the EH table. Return true if
3161 : any change was made. */
3162 :
3163 : bool
3164 186869334 : maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
3165 : {
3166 186869334 : if (stmt_could_throw_p (ifun, stmt))
3167 : return false;
3168 170298419 : return remove_stmt_from_eh_lp_fn (ifun, stmt);
3169 : }
3170 :
3171 : /* Likewise, but always use the current function. */
3172 :
3173 : bool
3174 186869334 : maybe_clean_eh_stmt (gimple *stmt)
3175 : {
3176 186869334 : return maybe_clean_eh_stmt_fn (cfun, stmt);
3177 : }
3178 :
3179 : /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
3180 : OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
3181 : in the table if it should be in there. Return TRUE if a replacement was
3182 : done that my require an EH edge purge. */
3183 :
3184 : bool
3185 46531070 : maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
3186 : {
3187 46531070 : int lp_nr = lookup_stmt_eh_lp (old_stmt);
3188 :
3189 46531070 : if (lp_nr != 0)
3190 : {
3191 1484137 : bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);
3192 :
3193 1484137 : if (new_stmt == old_stmt && new_stmt_could_throw)
3194 : return false;
3195 :
3196 319556 : remove_stmt_from_eh_lp (old_stmt);
3197 319556 : if (new_stmt_could_throw)
3198 : {
3199 314 : add_stmt_to_eh_lp (new_stmt, lp_nr);
3200 314 : return false;
3201 : }
3202 : else
3203 : return true;
3204 : }
3205 :
3206 : return false;
3207 : }
3208 :
3209 : /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
3210 : in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
3211 : operand is the return value of duplicate_eh_regions. */
3212 :
bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
			    struct function *old_fun, gimple *old_stmt,
			    hash_map<void *, void *> *map,
			    int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;

  if (!stmt_could_throw_p (new_fun, new_stmt))
    return false;

  /* Positive EH numbers name landing pads, negative numbers name
     regions (e.g. MUST_NOT_THROW), zero means no entry.  */
  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
	return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      eh_landing_pad old_lp, new_lp;

      /* Remap the old landing pad through MAP (as produced by
	 duplicate_eh_regions).  */
      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
      new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
      new_lp_nr = new_lp->index;
    }
  else
    {
      eh_region old_r, new_r;

      /* Likewise remap the old region, keeping the negative
	 encoding.  */
      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
      new_r = static_cast<eh_region> (*map->get (old_r));
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}
3251 :
3252 : /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
3253 : and thus no remapping is required. */
3254 :
3255 : bool
3256 22237820 : maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
3257 : {
3258 22237820 : int lp_nr;
3259 :
3260 22237820 : if (!stmt_could_throw_p (cfun, new_stmt))
3261 : return false;
3262 :
3263 72333 : lp_nr = lookup_stmt_eh_lp (old_stmt);
3264 72333 : if (lp_nr == 0)
3265 : return false;
3266 :
3267 7098 : add_stmt_to_eh_lp (new_stmt, lp_nr);
3268 7098 : return true;
3269 : }
3270 :
3271 : /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
3272 : GIMPLE_TRY) that are similar enough to be considered the same. Currently
3273 : this only handles handlers consisting of a single call, as that's the
3274 : important case for C++: a destructor call for a particular object showing
3275 : up in multiple handlers. */
3276 :
3277 : static bool
3278 1 : same_handler_p (gimple_seq oneh, gimple_seq twoh)
3279 : {
3280 1 : gimple_stmt_iterator gsi;
3281 1 : gimple *ones, *twos;
3282 1 : unsigned int ai;
3283 :
3284 1 : gsi = gsi_start (oneh);
3285 1 : if (!gsi_one_before_end_p (gsi))
3286 : return false;
3287 1 : ones = gsi_stmt (gsi);
3288 :
3289 1 : gsi = gsi_start (twoh);
3290 1 : if (!gsi_one_before_end_p (gsi))
3291 : return false;
3292 1 : twos = gsi_stmt (gsi);
3293 :
3294 1 : if (!is_gimple_call (ones)
3295 1 : || !is_gimple_call (twos)
3296 1 : || gimple_call_lhs (ones)
3297 1 : || gimple_call_lhs (twos)
3298 1 : || gimple_call_chain (ones)
3299 1 : || gimple_call_chain (twos)
3300 1 : || !gimple_call_same_target_p (ones, twos)
3301 2 : || gimple_call_num_args (ones) != gimple_call_num_args (twos))
3302 0 : return false;
3303 :
3304 2 : for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3305 1 : if (!operand_equal_p (gimple_call_arg (ones, ai),
3306 1 : gimple_call_arg (twos, ai), 0))
3307 : return false;
3308 :
3309 : return true;
3310 : }
3311 :
3312 : /* Optimize
3313 : try { A() } finally { try { ~B() } catch { ~A() } }
3314 : try { ... } finally { ~A() }
3315 : into
3316 : try { A() } catch { ~B() }
3317 : try { ~B() ... } finally { ~A() }
3318 :
3319 : This occurs frequently in C++, where A is a local variable and B is a
3320 : temporary used in the initializer for A. */
3321 :
3322 : static void
3323 51732 : optimize_double_finally (gtry *one, gtry *two)
3324 : {
3325 51732 : gimple *oneh;
3326 51732 : gimple_stmt_iterator gsi;
3327 51732 : gimple_seq cleanup;
3328 :
3329 51732 : cleanup = gimple_try_cleanup (one);
3330 51732 : gsi = gsi_start (cleanup);
3331 51732 : if (!gsi_one_before_end_p (gsi))
3332 51732 : return;
3333 :
3334 50414 : oneh = gsi_stmt (gsi);
3335 50414 : if (gimple_code (oneh) != GIMPLE_TRY
3336 50414 : || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3337 : return;
3338 :
3339 1 : if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3340 : {
3341 1 : gimple_seq seq = gimple_try_eval (oneh);
3342 :
3343 1 : gimple_try_set_cleanup (one, seq);
3344 1 : gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3345 1 : seq = copy_gimple_seq_and_replace_locals (seq);
3346 1 : gimple_seq_add_seq (&seq, gimple_try_eval (two));
3347 1 : gimple_try_set_eval (two, seq);
3348 : }
3349 : }
3350 :
3351 : /* Perform EH refactoring optimizations that are simpler to do when code
3352 : flow has been lowered but EH structures haven't. */
3353 :
static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple *one, *two;

  /* Walk SEQ with a sliding window of two consecutive statements
     (ONE, TWO); TWO becomes NULL once the iterator has run off the
     end, and the final iteration processes the last statement with
     no successor.  */
  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
	two = NULL;
      else
	two = gsi_stmt (gsi);
      /* Two adjacent TRY_FINALLYs are candidates for the
	 double-finally optimization.  */
      if (one && two)
	if (gtry *try_one = dyn_cast <gtry *> (one))
	  if (gtry *try_two = dyn_cast <gtry *> (two))
	    if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
		&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
	      optimize_double_finally (try_one, try_two);
      /* Recurse into the nested statement sequences of ONE.  */
      if (one)
	switch (gimple_code (one))
	  {
	  case GIMPLE_TRY:
	    refactor_eh_r (gimple_try_eval (one));
	    refactor_eh_r (gimple_try_cleanup (one));
	    break;
	  case GIMPLE_CATCH:
	    refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
	    break;
	  case GIMPLE_EH_FILTER:
	    refactor_eh_r (gimple_eh_filter_failure (one));
	    break;
	  case GIMPLE_EH_ELSE:
	    {
	      geh_else *eh_else_stmt = as_a <geh_else *> (one);
	      refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
	      refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
	    }
	    break;
	  default:
	    break;
	  }
      if (two)
	gsi_next (&gsi);
      else
	break;
    }
}
3405 :
namespace {

const pass_data pass_data_refactor_eh =
{
  GIMPLE_PASS, /* type */
  "ehopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Gimple pass running refactor_eh_r over the whole function body;
   gated on exceptions being enabled at all.  */

class pass_refactor_eh : public gimple_opt_pass
{
public:
  pass_refactor_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_refactor_eh, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return flag_exceptions != 0; }
  unsigned int execute (function *) final override
    {
      refactor_eh_r (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_refactor_eh

} // anon namespace
3439 :
3440 : gimple_opt_pass *
3441 285722 : make_pass_refactor_eh (gcc::context *ctxt)
3442 : {
3443 285722 : return new pass_refactor_eh (ctxt);
3444 : }
3445 :
3446 : /* At the end of gimple optimization, we can lower RESX. */
3447 :
static bool
lower_resx (basic_block bb, gresx *stmt,
	    hash_map<eh_region, tree> *mnt_map)
{
  int lp_nr;
  eh_region src_r, dst_r;
  gimple_stmt_iterator gsi;
  gcall *x;
  tree fn, src_nr;
  /* RET becomes true when the rewrite may invalidate dominance info
     (the caller frees it in that case).  */
  bool ret = false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr != 0)
    dst_r = get_eh_region_from_lp_number (lp_nr);
  else
    dst_r = NULL;

  src_r = get_eh_region_from_number (gimple_resx_region (stmt));
  gsi = gsi_last_bb (bb);

  if (src_r == NULL)
    {
      /* We can wind up with no source region when pass_cleanup_eh shows
	 that there are no entries into an eh region and deletes it, but
	 then the block that contains the resx isn't removed.  This can
	 happen without optimization when the switch statement created by
	 lower_try_finally_switch isn't simplified to remove the eh case.

	 Resolve this by expanding the resx node to an abort.  */

      fn = builtin_decl_implicit (BUILT_IN_TRAP);
      x = gimple_build_call (fn, 0);
      gimple_call_set_ctrl_altering (x, true);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      while (EDGE_COUNT (bb->succs) > 0)
	remove_edge (EDGE_SUCC (bb, 0));
    }
  else if (dst_r)
    {
      /* When we have a destination region, we resolve this by copying
	 the excptr and filter values into place, and changing the edge
	 to immediately after the landing pad.  */
      edge e;

      if (lp_nr < 0)
	{
	  basic_block new_bb;
	  tree lab;

	  /* We are resuming into a MUST_NOT_CALL region.  Expand a call to
	     the failure decl into a new block, if needed.  The block is
	     cached in MNT_MAP so each region gets only one.  */
	  gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);

	  tree *slot = mnt_map->get (dst_r);
	  if (slot == NULL)
	    {
	      gimple_stmt_iterator gsi2;

	      new_bb = create_empty_bb (bb);
	      new_bb->count = bb->count;
	      add_bb_to_loop (new_bb, bb->loop_father);
	      lab = gimple_block_label (new_bb);
	      gsi2 = gsi_start_bb (new_bb);

	      /* Handle failure fns that expect either no arguments or the
		 exception pointer.  */
	      fn = dst_r->u.must_not_throw.failure_decl;
	      if (TYPE_ARG_TYPES (TREE_TYPE (fn)) != void_list_node)
		{
		  tree epfn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
		  src_nr = build_int_cst (integer_type_node, src_r->index);
		  x = gimple_build_call (epfn, 1, src_nr);
		  tree var = create_tmp_var (ptr_type_node);
		  var = make_ssa_name (var, x);
		  gimple_call_set_lhs (x, var);
		  gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
		  x = gimple_build_call (fn, 1, var);
		}
	      else
		x = gimple_build_call (fn, 0);
	      gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
	      gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);

	      mnt_map->put (dst_r, lab);
	    }
	  else
	    {
	      /* Reuse the failure block created for this region.  */
	      lab = *slot;
	      new_bb = label_to_block (cfun, lab);
	    }

	  gcc_assert (EDGE_COUNT (bb->succs) == 0);
	  e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
	}
      else
	{
	  edge_iterator ei;
	  tree dst_nr = build_int_cst (integer_type_node, dst_r->index);

	  fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 2, dst_nr, src_nr);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  /* Update the flags for the outgoing edge.  */
	  e = single_succ_edge (bb);
	  gcc_assert (e->flags & EDGE_EH);
	  e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
	  e->probability = profile_probability::always ();

	  /* If there are no more EH users of the landing pad, delete it.  */
	  FOR_EACH_EDGE (e, ei, e->dest->preds)
	    if (e->flags & EDGE_EH)
	      break;
	  if (e == NULL)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      remove_eh_landing_pad (lp);
	    }
	}

      ret = true;
    }
  else
    {
      tree var;

      /* When we don't have a destination region, this exception escapes
	 up the call chain.  We resolve this by generating a call to the
	 _Unwind_Resume library function.  */

      /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
	 with no arguments for C++.  Check for that.  */
      if (src_r->use_cxa_end_cleanup)
	{
	  fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
	  x = gimple_build_call (fn, 0);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}
      else
	{
	  fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 1, src_nr);
	  var = create_tmp_var (ptr_type_node);
	  var = make_ssa_name (var, x);
	  gimple_call_set_lhs (x, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  /* When exception handling is delegated to a caller function, we
	     have to guarantee that shadow memory variables living on stack
	     will be cleaner before control is given to a parent function.  */
	  if (sanitize_flags_p (SANITIZE_ADDRESS))
	    {
	      tree decl
		= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
	      gimple *g = gimple_build_call (decl, 0);
	      gimple_set_location (g, gimple_location (stmt));
	      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	    }

	  fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
	  x = gimple_build_call (fn, 1, var);
	  gimple_call_set_ctrl_altering (x, true);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}

      gcc_assert (EDGE_COUNT (bb->succs) == 0);
    }

  /* The original RESX statement has been replaced above; remove it.  */
  gsi_remove (&gsi, true);

  return ret;
}
3623 :
namespace {

const pass_data pass_data_lower_resx =
{
  GIMPLE_PASS, /* type */
  "resx", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Gimple pass lowering every block-terminating RESX via lower_resx;
   gated on exceptions being enabled.  */

class pass_lower_resx : public gimple_opt_pass
{
public:
  pass_lower_resx (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_resx, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return flag_exceptions != 0; }
  unsigned int execute (function *) final override;

}; // class pass_lower_resx

unsigned
pass_lower_resx::execute (function *fun)
{
  basic_block bb;
  bool dominance_invalidated = false;
  bool any_rewritten = false;

  /* Shared across all blocks so that each MUST_NOT_THROW region gets
     at most one failure-call block.  */
  hash_map<eh_region, tree> mnt_map;

  FOR_EACH_BB_FN (bb, fun)
    {
      /* RESX statements only ever terminate a basic block.  */
      if (gresx *last = safe_dyn_cast <gresx *> (*gsi_last_bb (bb)))
	{
	  dominance_invalidated |= lower_resx (bb, last, &mnt_map);
	  any_rewritten = true;
	}
    }

  /* lower_resx may have redirected edges; recompute dominators
     lazily.  */
  if (dominance_invalidated)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
    }

  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}

} // anon namespace
3680 :
     : /* Pass-manager factory for the RESX lowering pass. */
3681 : gimple_opt_pass *
3682 285722 : make_pass_lower_resx (gcc::context *ctxt)
3683 : {
3684 285722 : return new pass_lower_resx (ctxt);
3685 : }
3686 :
3687 : /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3688 : external throw. */
3689 :
3690 : static void
3691 604925 : optimize_clobbers (basic_block bb)
3692 : {
3693 604925 : gimple_stmt_iterator gsi = gsi_last_bb (bb);
3694 604925 : bool any_clobbers = false;
3695 604925 : bool seen_stack_restore = false;
3696 604925 : edge_iterator ei;
3697 604925 : edge e;
3698 :
3699 : /* Only optimize anything if the bb contains at least one clobber,
3700 : ends with resx (checked by caller), optionally contains some
3701 : debug stmts or labels, or at most one __builtin_stack_restore
3702 : call, and has an incoming EH edge. */
     : /* First walk (backwards from the resx): validate the block shape
     :    without modifying anything. */
3703 4758482 : for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3704 : {
3705 2362724 : gimple *stmt = gsi_stmt (gsi);
3706 2362724 : if (is_gimple_debug (stmt))
3707 1299309 : continue;
3708 1063415 : if (gimple_clobber_p (stmt))
3709 : {
3710 474591 : any_clobbers = true;
3711 474591 : continue;
3712 : }
3713 589240 : if (!seen_stack_restore
3714 588824 : && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3715 : {
3716 416 : seen_stack_restore = true;
3717 416 : continue;
3718 : }
3719 588408 : if (gimple_code (stmt) == GIMPLE_LABEL)
3720 : break;
     : /* Any other kind of statement disqualifies the block. */
3721 417728 : return;
3722 : }
3723 340657 : if (!any_clobbers)
3724 : return;
     : /* Require an incoming EH edge; FOR_EACH_EDGE leaves E == NULL when
     :    the loop runs off the end without the break triggering. */
3725 208651 : FOR_EACH_EDGE (e, ei, bb->preds)
3726 201247 : if (e->flags & EDGE_EH)
3727 : break;
3728 194601 : if (e == NULL)
3729 : return;
     : /* Second walk: actually remove every clobber, releasing its defs. */
3730 187197 : gsi = gsi_last_bb (bb);
3731 1755510 : for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3732 : {
3733 690558 : gimple *stmt = gsi_stmt (gsi);
3734 690558 : if (!gimple_clobber_p (stmt))
3735 351816 : continue;
3736 338742 : unlink_stmt_vdef (stmt);
3737 338742 : gsi_remove (&gsi, true);
3738 338742 : release_defs (stmt);
3739 : }
3740 : }
3741 :
3742 : /* Try to sink var = {v} {CLOBBER} stmts followed just by
3743 : internal throw to successor BB.
3744 : SUNK, if not NULL, is an array of sequences indexed by basic-block
3745 : index to sink to and to pick up sinking opportunities from.
3746 : If FOUND_OPPORTUNITY is not NULL then do not perform the optimization
3747 : but set *FOUND_OPPORTUNITY to true. */
     : /* Returns a set of TODO_* flags for the caller to honor (currently
     :    only TODO_update_ssa_only_virtuals) or 0 when nothing was done. */
3748 :
3749 : static int
3750 638840 : sink_clobbers (basic_block bb,
3751 : gimple_seq *sunk = NULL, bool *found_opportunity = NULL)
3752 : {
3753 638840 : edge e;
3754 638840 : edge_iterator ei;
3755 638840 : gimple_stmt_iterator gsi, dgsi;
3756 638840 : basic_block succbb;
3757 638840 : bool any_clobbers = false;
3758 638840 : unsigned todo = 0;
3759 :
3760 : /* Only optimize if BB has a single EH successor and
3761 : all predecessor edges are EH too. */
3762 1115610 : if (!single_succ_p (bb)
3763 638367 : || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3764 : return 0;
3765 :
3766 2694649 : FOR_EACH_EDGE (e, ei, bb->preds)
3767 : {
3768 2074554 : if ((e->flags & EDGE_EH) == 0)
3769 : return 0;
3770 : }
3771 :
3772 : /* And BB contains only CLOBBER stmts before the final
3773 : RESX. */
3774 620095 : gsi = gsi_last_bb (bb);
3775 5048154 : for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3776 : {
3777 2524077 : gimple *stmt = gsi_stmt (gsi);
3778 2524077 : if (is_gimple_debug (stmt))
3779 1522777 : continue;
3780 1001300 : if (gimple_code (stmt) == GIMPLE_LABEL)
3781 : break;
3782 754844 : if (!gimple_clobber_p (stmt))
3783 : return 0;
3784 : any_clobbers = true;
3785 : }
     : /* Even without local clobbers there may be sequences queued in
     :    SUNK[bb->index] from already-processed predecessors to forward. */
3786 246456 : if (!any_clobbers && (!sunk || gimple_seq_empty_p (sunk[bb->index])))
3787 : return 0;
3788 :
3789 : /* If this was a dry run, tell it we found clobbers to sink. */
3790 164722 : if (found_opportunity)
3791 : {
3792 3125 : *found_opportunity = true;
3793 3125 : return 0;
3794 : }
3795 :
3796 161597 : edge succe = single_succ_edge (bb);
3797 161597 : succbb = succe->dest;
3798 :
3799 : /* See if there is a virtual PHI node to take an updated virtual
3800 : operand from. */
3801 161597 : gphi *vphi = NULL;
3802 161597 : for (gphi_iterator gpi = gsi_start_phis (succbb);
3803 161966 : !gsi_end_p (gpi); gsi_next (&gpi))
3804 : {
3805 129457 : tree res = gimple_phi_result (gpi.phi ());
3806 258914 : if (virtual_operand_p (res))
3807 : {
3808 : vphi = gpi.phi ();
3809 : break;
3810 : }
3811 : }
3812 :
     : /* Choose the insertion point: either the successor's pending SUNK
     :    sequence (when it has not been visited yet) or directly after
     :    its labels. */
3813 161597 : gimple *first_sunk = NULL;
3814 161597 : gimple *last_sunk = NULL;
3815 161597 : if (sunk && !(succbb->flags & BB_VISITED))
3816 8925 : dgsi = gsi_start (sunk[succbb->index]);
3817 : else
3818 154319 : dgsi = gsi_after_labels (succbb);
3819 161597 : gsi = gsi_last_bb (bb);
3820 826362 : for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3821 : {
3822 413181 : gimple *stmt = gsi_stmt (gsi);
3823 413181 : tree lhs;
3824 413181 : if (is_gimple_debug (stmt))
3825 61475 : continue;
3826 351706 : if (gimple_code (stmt) == GIMPLE_LABEL)
3827 : break;
3828 190109 : lhs = gimple_assign_lhs (stmt);
3829 : /* Unfortunately we don't have dominance info updated at this
3830 : point, so checking if
3831 : dominated_by_p (CDI_DOMINATORS, succbb,
3832 : gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3833 : would be too costly. Thus, avoid sinking any clobbers that
3834 : refer to non-(D) SSA_NAMEs. */
3835 190116 : if (TREE_CODE (lhs) == MEM_REF
3836 79 : && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3837 190159 : && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3838 : {
3839 7 : unlink_stmt_vdef (stmt);
3840 7 : gsi_remove (&gsi, true);
3841 7 : release_defs (stmt);
3842 7 : continue;
3843 : }
3844 :
3845 : /* As we do not change stmt order when sinking across a
3846 : forwarder edge we can keep virtual operands in place. */
3847 190102 : gsi_remove (&gsi, false);
3848 190102 : gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3849 190102 : if (!first_sunk)
3850 161584 : first_sunk = stmt;
3851 : last_sunk = stmt;
3852 : }
     : /* Also forward any sequence previously queued for this block. */
3853 161597 : if (sunk && !gimple_seq_empty_p (sunk[bb->index]))
3854 : {
3855 1705 : if (!first_sunk)
3856 6 : first_sunk = gsi_stmt (gsi_last (sunk[bb->index]));
3857 1705 : last_sunk = gsi_stmt (gsi_start (sunk[bb->index]));
3858 1705 : gsi_insert_seq_before_without_update (&dgsi,
3859 : sunk[bb->index], GSI_NEW_STMT);
3860 1705 : sunk[bb->index] = NULL;
3861 : }
3862 161597 : if (first_sunk)
3863 : {
3864 : /* If there isn't a single predecessor but no virtual PHI node
3865 : create one and arrange for virtual operands to be renamed as
3866 : we cannot be sure all incoming edges will updated from sinking
3867 : something. */
3868 194092 : if (!vphi && !single_pred_p (succbb))
3869 : {
3870 10177 : vphi = create_phi_node (gimple_vop (cfun), succbb);
3871 99024 : FOR_EACH_EDGE (e, ei, succbb->preds)
3872 88847 : add_phi_arg (vphi, gimple_vop (cfun), e, UNKNOWN_LOCATION);
3873 10177 : mark_virtual_operands_for_renaming (cfun);
3874 10177 : todo |= TODO_update_ssa_only_virtuals;
3875 : }
3876 : /* Adjust virtual operands if we sunk across a virtual PHI. */
3877 161590 : if (vphi)
3878 : {
3879 139265 : imm_use_iterator iter;
3880 139265 : use_operand_p use_p;
3881 139265 : gimple *use_stmt;
3882 139265 : tree phi_def = gimple_phi_result (vphi);
3883 408389 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, phi_def)
3884 389615 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3885 259756 : SET_USE (use_p, gimple_vdef (first_sunk));
3886 139265 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def))
3887 : {
3888 0 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (first_sunk)) = 1;
3889 0 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def) = 0;
3890 : }
3891 278530 : SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe),
3892 : gimple_vuse (last_sunk));
3893 139265 : SET_USE (gimple_vuse_op (last_sunk), phi_def);
3894 : }
3895 : }
3896 :
3897 161597 : return todo;
3898 : }
3899 :
3900 : /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3901 : we have found some duplicate labels and removed some edges. */
3902 :
3903 : static bool
3904 41809 : lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
3905 : {
3906 41809 : gimple_stmt_iterator gsi;
3907 41809 : int region_nr;
3908 41809 : eh_region r;
3909 41809 : tree filter, fn;
3910 41809 : gimple *x;
3911 41809 : bool redirected = false;
3912 :
3913 41809 : region_nr = gimple_eh_dispatch_region (stmt);
3914 41809 : r = get_eh_region_from_number (region_nr);
3915 :
3916 41809 : gsi = gsi_last_bb (src);
3917 :
3918 41809 : switch (r->type)
3919 : {
     : /* A try/catch region becomes a switch on the __builtin_eh_filter
     :    value, one case label per distinct catch filter. */
3920 41523 : case ERT_TRY:
3921 41523 : {
3922 41523 : auto_vec<tree> labels;
3923 41523 : tree default_label = NULL;
3924 41523 : eh_catch c;
3925 41523 : edge_iterator ei;
3926 41523 : edge e;
3927 41523 : hash_set<tree> seen_values;
3928 :
3929 : /* Collect the labels for a switch. Zero the post_landing_pad
3930 : field because we'll no longer have anything keeping these labels
3931 : in existence and the optimizer will be free to merge these
3932 : blocks at will. */
3933 47283 : for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3934 : {
3935 44350 : tree tp_node, flt_node, lab = c->label;
3936 44350 : bool have_label = false;
3937 :
3938 44350 : c->label = NULL;
3939 44350 : tp_node = c->type_list;
3940 44350 : flt_node = c->filter_list;
3941 :
     : /* A NULL type list is a catch-all; nothing after it can run. */
3942 44350 : if (tp_node == NULL)
3943 : {
3944 : default_label = lab;
3945 : break;
3946 : }
3947 5760 : do
3948 : {
3949 : /* Filter out duplicate labels that arise when this handler
3950 : is shadowed by an earlier one. When no labels are
3951 : attached to the handler anymore, we remove
3952 : the corresponding edge and then we delete unreachable
3953 : blocks at the end of this pass. */
3954 5760 : if (! seen_values.contains (TREE_VALUE (flt_node)))
3955 : {
3956 5739 : tree t = build_case_label (TREE_VALUE (flt_node),
3957 5739 : NULL, lab);
3958 5739 : labels.safe_push (t);
3959 5739 : seen_values.add (TREE_VALUE (flt_node));
3960 5739 : have_label = true;
3961 : }
3962 :
3963 5760 : tp_node = TREE_CHAIN (tp_node);
3964 5760 : flt_node = TREE_CHAIN (flt_node);
3965 : }
3966 5760 : while (tp_node);
3967 5760 : if (! have_label)
3968 : {
3969 21 : remove_edge (find_edge (src, label_to_block (cfun, lab)));
3970 21 : redirected = true;
3971 : }
3972 : }
3973 :
3974 : /* Clean up the edge flags. */
3975 88785 : FOR_EACH_EDGE (e, ei, src->succs)
3976 : {
3977 47262 : if (e->flags & EDGE_FALLTHRU)
3978 : {
3979 : /* If there was no catch-all, use the fallthru edge. */
3980 2933 : if (default_label == NULL)
3981 2933 : default_label = gimple_block_label (e->dest)
3982 2933 : e->flags &= ~EDGE_FALLTHRU;
3983 : }
3984 : }
3985 41523 : gcc_assert (default_label != NULL);
3986 :
3987 : /* Don't generate a switch if there's only a default case.
3988 : This is common in the form of try { A; } catch (...) { B; }. */
3989 41523 : if (!labels.exists ())
3990 : {
3991 36784 : e = single_succ_edge (src);
3992 36784 : e->flags |= EDGE_FALLTHRU;
3993 : }
3994 : else
3995 : {
3996 4739 : fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3997 4739 : x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3998 4739 : region_nr));
3999 4739 : filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
4000 4739 : filter = make_ssa_name (filter, x);
4001 4739 : gimple_call_set_lhs (x, filter);
4002 4739 : gimple_set_location (x, gimple_location (stmt));
4003 4739 : gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4004 :
4005 : /* Turn the default label into a default case. */
4006 4739 : default_label = build_case_label (NULL, NULL, default_label);
4007 4739 : sort_case_labels (labels);
4008 :
4009 4739 : x = gimple_build_switch (filter, default_label, labels);
4010 4739 : gimple_set_location (x, gimple_location (stmt));
4011 4739 : gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4012 : }
4013 41523 : }
4014 41523 : break;
4015 :
     : /* An exception-specification region becomes a comparison of the
     :    filter value against the region's allowed filter. */
4016 286 : case ERT_ALLOWED_EXCEPTIONS:
4017 286 : {
4018 286 : edge b_e = BRANCH_EDGE (src);
4019 286 : edge f_e = FALLTHRU_EDGE (src);
4020 :
4021 286 : fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
4022 286 : x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
4023 286 : region_nr));
4024 286 : filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
4025 286 : filter = make_ssa_name (filter, x);
4026 286 : gimple_call_set_lhs (x, filter);
4027 286 : gimple_set_location (x, gimple_location (stmt));
4028 286 : gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4029 :
4030 286 : r->u.allowed.label = NULL;
4031 286 : x = gimple_build_cond (EQ_EXPR, filter,
4032 286 : build_int_cst (TREE_TYPE (filter),
4033 286 : r->u.allowed.filter),
4034 : NULL_TREE, NULL_TREE);
4035 286 : gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4036 :
4037 286 : b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
4038 286 : f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
4039 : }
4040 286 : break;
4041 :
4042 0 : default:
4043 0 : gcc_unreachable ();
4044 : }
4045 :
4046 : /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
4047 41809 : gsi_remove (&gsi, true);
4048 41809 : return redirected;
4049 : }
4050 :
4051 : namespace {
4052 :
4053 : const pass_data pass_data_lower_eh_dispatch =
4054 : {
4055 : GIMPLE_PASS, /* type */
4056 : "ehdisp", /* name */
4057 : OPTGROUP_NONE, /* optinfo_flags */
4058 : TV_TREE_EH, /* tv_id */
4059 : PROP_gimple_lcf, /* properties_required */
4060 : 0, /* properties_provided */
4061 : 0, /* properties_destroyed */
4062 : 0, /* todo_flags_start */
4063 : 0, /* todo_flags_finish */
4064 : };
4065 :
4066 : class pass_lower_eh_dispatch : public gimple_opt_pass
4067 : {
4068 : public:
4069 285722 : pass_lower_eh_dispatch (gcc::context *ctxt)
4070 571444 : : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
4071 : {}
4072 :
4073 : /* opt_pass methods: */
4074 1472320 : bool gate (function *fun) final override
4075 : {
4076 1472320 : return fun->eh->region_tree != NULL;
4077 : }
4078 : unsigned int execute (function *) final override;
4079 :
4080 : }; // class pass_lower_eh_dispatch
4081 :
     : /* Phase 1: lower all EH_DISPATCH statements, clean up clobbers on
     :    externally-throwing RESX blocks and probe for clobber-sinking
     :    opportunities.  Phase 2 (only when the probe fired): revisit
     :    internally-throwing RESX blocks in RPO order and actually sink
     :    clobbers, flushing per-block pending sequences along the way. */
4082 : unsigned
4083 141076 : pass_lower_eh_dispatch::execute (function *fun)
4084 : {
4085 141076 : basic_block bb;
4086 141076 : int flags = 0;
4087 141076 : bool redirected = false;
4088 141076 : bool any_resx_to_process = false;
4089 :
4090 141076 : assign_filter_values ();
4091 :
4092 5255824 : FOR_EACH_BB_FN (bb, fun)
4093 : {
4094 5114748 : gimple *last = *gsi_last_bb (bb);
4095 5114748 : if (last == NULL)
4096 72973 : continue;
4097 5041775 : if (gimple_code (last) == GIMPLE_EH_DISPATCH)
4098 : {
4099 41809 : redirected |= lower_eh_dispatch (bb,
4100 : as_a <geh_dispatch *> (last));
4101 41809 : flags |= TODO_update_ssa_only_virtuals;
4102 : }
4103 4999966 : else if (gimple_code (last) == GIMPLE_RESX)
4104 : {
4105 204036 : if (stmt_can_throw_external (fun, last))
4106 98017 : optimize_clobbers (bb);
4107 106019 : else if (!any_resx_to_process)
4108 99346 : sink_clobbers (bb, NULL, &any_resx_to_process);
4109 : }
4110 5041775 : bb->flags &= ~BB_VISITED;
4111 : }
4112 141076 : if (redirected)
4113 : {
4114 12 : free_dominance_info (CDI_DOMINATORS);
4115 12 : delete_unreachable_blocks ();
4116 : }
4117 :
4118 141076 : if (any_resx_to_process)
4119 : {
4120 : /* Make sure to catch all secondary sinking opportunities by processing
4121 : blocks in RPO order and after all CFG modifications from lowering
4122 : and unreachable block removal. */
4123 3125 : int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun));
4124 3125 : int rpo_n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
4125 3125 : gimple_seq *sunk = XCNEWVEC (gimple_seq, last_basic_block_for_fn (fun));
4126 115237 : for (int i = 0; i < rpo_n; ++i)
4127 : {
4128 112112 : bb = BASIC_BLOCK_FOR_FN (fun, rpo[i]);
4129 112112 : gimple *last = *gsi_last_bb (bb);
4130 112112 : if (last
4131 110561 : && gimple_code (last) == GIMPLE_RESX
4132 126366 : && !stmt_can_throw_external (fun, last))
4133 10755 : flags |= sink_clobbers (bb, sunk);
4134 : /* If there were any clobbers sunk into this BB, insert them now. */
4135 112112 : if (!gimple_seq_empty_p (sunk[bb->index]))
4136 : {
4137 3926 : gimple_stmt_iterator gsi = gsi_after_labels (bb);
4138 3926 : gsi_insert_seq_before (&gsi, sunk[bb->index], GSI_NEW_STMT);
4139 3926 : sunk[bb->index] = NULL;
4140 : }
4141 112112 : bb->flags |= BB_VISITED;
4142 : }
4143 3125 : free (rpo);
4144 3125 : free (sunk);
4145 : }
4146 :
4147 141076 : return flags;
4148 : }
4149 :
4150 : } // anon namespace
4152 : gimple_opt_pass *
4153 285722 : make_pass_lower_eh_dispatch (gcc::context *ctxt)
4154 : {
4155 285722 : return new pass_lower_eh_dispatch (ctxt);
4156 : }
4157 :
4158 : /* Walk statements, see what regions and, optionally, landing pads
4159 : are really referenced.
4160 :
4161 : Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
4162 : and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.
4163 :
4164 : Passing NULL for LP_REACHABLE is valid, in this case only reachable
4165 : regions are marked.
4166 :
4167 : The caller is responsible for freeing the returned sbitmaps. */
4168 :
4169 : static void
4170 1365164 : mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
4171 : {
4172 1365164 : sbitmap r_reachable, lp_reachable;
4173 1365164 : basic_block bb;
4174 1365164 : bool mark_landing_pads = (lp_reachablep != NULL);
4175 1365164 : gcc_checking_assert (r_reachablep != NULL);
4176 :
4177 1365164 : r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
4178 1365164 : bitmap_clear (r_reachable);
4179 1365164 : *r_reachablep = r_reachable;
4180 :
4181 1365164 : if (mark_landing_pads)
4182 : {
4183 1096057 : lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
4184 1096057 : bitmap_clear (lp_reachable);
4185 1096057 : *lp_reachablep = lp_reachable;
4186 : }
4187 : else
4188 : lp_reachable = NULL;
4189 :
     : /* Single pass over every statement in the function. */
4190 20323727 : FOR_EACH_BB_FN (bb, cfun)
4191 : {
4192 18958563 : gimple_stmt_iterator gsi;
4193 :
4194 174147909 : for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4195 : {
4196 136230783 : gimple *stmt = gsi_stmt (gsi);
4197 :
4198 136230783 : if (mark_landing_pads)
4199 : {
4200 81960709 : int lp_nr = lookup_stmt_eh_lp (stmt);
4201 :
4202 : /* Negative LP numbers are MUST_NOT_THROW regions which
4203 : are not considered BB enders. */
4204 81960709 : if (lp_nr < 0)
4205 88666 : bitmap_set_bit (r_reachable, -lp_nr);
4206 :
4207 : /* Positive LP numbers are real landing pads, and BB enders. */
4208 81872043 : else if (lp_nr > 0)
4209 : {
4210 3277978 : gcc_assert (gsi_one_before_end_p (gsi));
4211 3277978 : eh_region region = get_eh_region_from_lp_number (lp_nr);
4212 3277978 : bitmap_set_bit (r_reachable, region->index);
4213 3277978 : bitmap_set_bit (lp_reachable, lp_nr);
4214 : }
4215 : }
4216 :
4217 : /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
4218 136230783 : switch (gimple_code (stmt))
4219 : {
4220 1069539 : case GIMPLE_RESX:
4221 2139078 : bitmap_set_bit (r_reachable,
4222 1069539 : gimple_resx_region (as_a <gresx *> (stmt)));
4223 1069539 : break;
4224 64979 : case GIMPLE_EH_DISPATCH:
4225 129958 : bitmap_set_bit (r_reachable,
4226 : gimple_eh_dispatch_region (
4227 64979 : as_a <geh_dispatch *> (stmt)));
4228 64979 : break;
     : /* __builtin_eh_copy_values references two regions by number. */
4229 7511121 : case GIMPLE_CALL:
4230 7511121 : if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
4231 3504 : for (int i = 0; i < 2; ++i)
4232 : {
4233 2336 : tree rt = gimple_call_arg (stmt, i);
4234 2336 : HOST_WIDE_INT ri = tree_to_shwi (rt);
4235 :
4236 2336 : gcc_assert (ri == (int)ri);
4237 2336 : bitmap_set_bit (r_reachable, ri);
4238 : }
4239 : break;
4240 : default:
4241 : break;
4242 : }
4243 : }
4244 : }
4245 1365164 : }
4246 :
4247 : /* Remove unreachable handlers and unreachable landing pads. */
     : /* Marks reachable regions/landing pads with mark_reachable_handlers,
     :    then deletes every EH region and landing pad not marked, and
     :    finally verifies the EH tree when checking is enabled. */
4248 :
4249 : static void
4250 1096057 : remove_unreachable_handlers (void)
4251 : {
4252 1096057 : sbitmap r_reachable, lp_reachable;
4253 1096057 : eh_region region;
4254 1096057 : eh_landing_pad lp;
4255 1096057 : unsigned i;
4256 :
4257 1096057 : mark_reachable_handlers (&r_reachable, &lp_reachable);
4258 :
4259 1096057 : if (dump_file)
4260 : {
4261 3 : fprintf (dump_file, "Before removal of unreachable regions:\n");
4262 3 : dump_eh_tree (dump_file, cfun);
4263 3 : fprintf (dump_file, "Reachable regions: ");
4264 3 : dump_bitmap_file (dump_file, r_reachable);
4265 3 : fprintf (dump_file, "Reachable landing pads: ");
4266 3 : dump_bitmap_file (dump_file, lp_reachable);
4267 : }
4268 :
4269 1096057 : if (dump_file)
4270 : {
4271 18 : FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
4272 15 : if (region && !bitmap_bit_p (r_reachable, region->index))
4273 6 : fprintf (dump_file,
4274 : "Removing unreachable region %d\n",
4275 : region->index);
4276 : }
4277 :
4278 1096057 : remove_unreachable_eh_regions (r_reachable);
4279 :
4280 5511443 : FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
4281 3319329 : if (lp && !bitmap_bit_p (lp_reachable, lp->index))
4282 : {
4283 10357 : if (dump_file)
4284 0 : fprintf (dump_file,
4285 : "Removing unreachable landing pad %d\n",
4286 : lp->index);
4287 10357 : remove_eh_landing_pad (lp);
4288 : }
4289 :
4290 1096057 : if (dump_file)
4291 : {
4292 3 : fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
4293 3 : dump_eh_tree (dump_file, cfun);
4294 3 : fprintf (dump_file, "\n\n");
4295 : }
4296 :
4297 1096057 : sbitmap_free (r_reachable);
4298 1096057 : sbitmap_free (lp_reachable);
4299 :
4300 1096057 : if (flag_checking)
4301 1096042 : verify_eh_tree (cfun);
4302 1096057 : }
4303 :
4304 : /* Remove unreachable handlers if any landing pads have been removed after
4305 : last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
     : /* Cheap guard: only runs the full cleanup when some landing pad has
     :    lost its post_landing_pad label or the label no longer maps to a
     :    basic block. */
4306 :
4307 : void
4308 1713090 : maybe_remove_unreachable_handlers (void)
4309 : {
4310 1713090 : eh_landing_pad lp;
4311 1713090 : unsigned i;
4312 :
4313 1713090 : if (cfun->eh == NULL)
4314 : return;
4315 :
4316 5139280 : FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
4317 3429974 : if (lp
4318 3429974 : && (lp->post_landing_pad == NULL_TREE
4319 349031 : || label_to_block (cfun, lp->post_landing_pad) == NULL))
4320 : {
4321 3784 : remove_unreachable_handlers ();
4322 3784 : return;
4323 : }
4324 : }
4325 :
4326 : /* Remove regions that do not have landing pads. This assumes
4327 : that remove_unreachable_handlers has already been run, and
4328 : that we've just manipulated the landing pads since then.
4329 :
4330 : Preserve regions with landing pads and regions that prevent
4331 : exceptions from propagating further, even if these regions
4332 : are not reachable. */
4333 :
4334 : static void
4335 269107 : remove_unreachable_handlers_no_lp (void)
4336 : {
4337 269107 : eh_region region;
4338 269107 : sbitmap r_reachable;
4339 269107 : unsigned i;
4340 :
     : /* Regions only; landing pads are not recomputed here. */
4341 269107 : mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
4342 :
4343 2875419 : FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
4344 : {
4345 2337205 : if (! region)
4346 1480053 : continue;
4347 :
4348 857152 : if (region->landing_pads != NULL
4349 560472 : || region->type == ERT_MUST_NOT_THROW)
4350 327439 : bitmap_set_bit (r_reachable, region->index);
4351 :
4352 857152 : if (dump_file
4353 857152 : && !bitmap_bit_p (r_reachable, region->index))
4354 6 : fprintf (dump_file,
4355 : "Removing unreachable region %d\n",
4356 : region->index);
4357 : }
4358 :
4359 269107 : remove_unreachable_eh_regions (r_reachable);
4360 :
4361 269107 : sbitmap_free (r_reachable);
4362 269107 : }
4363 :
4364 : /* Undo critical edge splitting on an EH landing pad. Earlier, we
4365 : optimisticaly split all sorts of edges, including EH edges. The
4366 : optimization passes in between may not have needed them; if not,
4367 : we should undo the split.
4368 :
4369 : Recognize this case by having one EH edge incoming to the BB and
4370 : one normal edge outgoing; BB should be empty apart from the
4371 : post_landing_pad label.
4372 :
4373 : Note that this is slightly different from the empty handler case
4374 : handled by cleanup_empty_eh, in that the actual handler may yet
4375 : have actual code but the landing pad has been separated from the
4376 : handler. As such, cleanup_empty_eh relies on this transformation
4377 : having been done first. */
     : /* Returns true when the landing pad was unsplit (CFG changed). */
4378 :
4379 : static bool
4380 1651925 : unsplit_eh (eh_landing_pad lp)
4381 : {
4382 1651925 : basic_block bb = label_to_block (cfun, lp->post_landing_pad);
4383 1651925 : gimple_stmt_iterator gsi;
4384 1651925 : edge e_in, e_out;
4385 :
4386 : /* Quickly check the edge counts on BB for singularity. */
4387 3383657 : if (!single_pred_p (bb) || !single_succ_p (bb))
4388 : return false;
4389 1219633 : e_in = single_pred_edge (bb);
4390 1219633 : e_out = single_succ_edge (bb);
4391 :
4392 : /* Input edge must be EH and output edge must be normal. */
4393 1219633 : if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4394 : return false;
4395 :
4396 : /* The block must be empty except for the labels and debug insns. */
4397 943549 : gsi = gsi_after_labels (bb);
4398 943549 : if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4399 3709 : gsi_next_nondebug (&gsi);
4400 943549 : if (!gsi_end_p (gsi))
4401 : return false;
4402 :
4403 : /* The destination block must not already have a landing pad
4404 : for a different region. */
4405 2611094 : for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4406 : {
4407 1721786 : glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4408 868746 : tree lab;
4409 868746 : int lp_nr;
4410 :
4411 868746 : if (!label_stmt)
4412 : break;
4413 868746 : lab = gimple_label_label (label_stmt);
4414 868746 : lp_nr = EH_LANDING_PAD_NR (lab);
4415 868746 : if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4416 : return false;
4417 : }
4418 :
4419 : /* The new destination block must not already be a destination of
4420 : the source block, lest we merge fallthru and eh edges and get
4421 : all sorts of confused. */
4422 871174 : if (find_edge (e_in->src, e_out->dest))
4423 : return false;
4424 :
4425 : /* ??? We can get degenerate phis due to cfg cleanups. I would have
4426 : thought this should have been cleaned up by a phicprop pass, but
4427 : that doesn't appear to handle virtuals. Propagate by hand. */
4428 871170 : if (!gimple_seq_empty_p (phi_nodes (bb)))
4429 : {
4430 10 : for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
4431 : {
4432 5 : gimple *use_stmt;
4433 5 : gphi *phi = gpi.phi ();
4434 5 : tree lhs = gimple_phi_result (phi);
4435 5 : tree rhs = gimple_phi_arg_def (phi, 0);
4436 5 : use_operand_p use_p;
4437 5 : imm_use_iterator iter;
4438 :
4439 19 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4440 : {
4441 27 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4442 9 : SET_USE (use_p, rhs);
4443 5 : }
4444 :
4445 5 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4446 0 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4447 :
4448 5 : remove_phi_node (&gpi, true);
4449 : }
4450 : }
4451 :
4452 871170 : if (dump_file && (dump_flags & TDF_DETAILS))
4453 0 : fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4454 0 : lp->index, e_out->dest->index);
4455 :
4456 : /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4457 : a successor edge, humor it. But do the real CFG change with the
4458 : predecessor of E_OUT in order to preserve the ordering of arguments
4459 : to the PHI nodes in E_OUT->DEST. */
4460 871170 : redirect_eh_edge_1 (e_in, e_out->dest, false);
4461 871170 : redirect_edge_pred (e_out, e_in->src);
4462 871170 : e_out->flags = e_in->flags;
4463 871170 : e_out->probability = e_in->probability;
4464 871170 : remove_edge (e_in);
4465 :
4466 871170 : return true;
4467 : }
4468 :
4469 : /* Examine each landing pad block and see if it matches unsplit_eh. */
     : /* Returns true if any landing pad was unsplit.  Index 0 of lp_array
     :    is skipped (the loop starts at i = 1); array slots may be NULL. */
4470 :
4471 : static bool
4472 541270 : unsplit_all_eh (void)
4473 : {
4474 541270 : bool changed = false;
4475 541270 : eh_landing_pad lp;
4476 541270 : int i;
4477 :
4478 2881367 : for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4479 2340097 : if (lp)
4480 1651925 : changed |= unsplit_eh (lp);
4481 :
4482 541270 : return changed;
4483 : }
4484 :
4485 : /* Wrapper around unsplit_all_eh that makes it usable everywhere. */
4486 :
4487 : void
4488 240953 : unsplit_eh_edges (void)
4489 : {
4490 240953 : bool changed;
4491 :
4492 : /* unsplit_all_eh can die looking up unreachable landing pads. */
4493 240953 : maybe_remove_unreachable_handlers ();
4494 :
4495 240953 : changed = unsplit_all_eh ();
4496 :
4497 : /* If EH edges have been unsplit, delete unreachable forwarder blocks. */
     : /* Dominance info is stale after the CFG change, so drop it before
     :    removing the dead blocks. */
4498 240953 : if (changed)
4499 : {
4500 9441 : free_dominance_info (CDI_DOMINATORS);
4501 9441 : free_dominance_info (CDI_POST_DOMINATORS);
4502 9441 : delete_unreachable_blocks ();
4503 : }
4504 240953 : }
4505 :
4506 : /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4507 : to OLD_BB to NEW_BB; return true on success, false on failure.
4508 :
4509 : OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4510 : PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4511 : Virtual PHIs may be deleted and marked for renaming. */
4512 :
4513 : static bool
4514 251473 : cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
4515 : edge old_bb_out, bool change_region)
4516 : {
4517 251473 : gphi_iterator ngsi, ogsi;
4518 251473 : edge_iterator ei;
4519 251473 : edge e;
4520 251473 : bitmap ophi_handled;
4521 :
4522 : /* The destination block must not be a regular successor for any
4523 : of the preds of the landing pad. Thus, avoid turning
4524 : <..>
4525 : | \ EH
4526 : | <..>
4527 : | /
4528 : <..>
4529 : into
4530 : <..>
4531 : | | EH
4532 : <..>
4533 : which CFG verification would choke on. See PR45172 and PR51089. */
4534 251473 : if (!single_pred_p (new_bb))
4535 1038346 : FOR_EACH_EDGE (e, ei, old_bb->preds)
4536 821800 : if (find_edge (e->src, new_bb))
4537 : return false;
4538 :
 : /* Discard any stale redirection data left on the incoming edges. */
4539 1415011 : FOR_EACH_EDGE (e, ei, old_bb->preds)
4540 1163548 : redirect_edge_var_map_clear (e);
4541 :
4542 251463 : ophi_handled = BITMAP_ALLOC (NULL);
4543 :
4544 : /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4545 : for the edges we're going to move. */
4546 473164 : for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4547 : {
4548 221703 : gphi *ophi, *nphi = ngsi.phi ();
4549 221703 : tree nresult, nop;
4550 :
4551 221703 : nresult = gimple_phi_result (nphi);
4552 221703 : nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4553 :
4554 : /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4555 : the source ssa_name. */
4556 221703 : ophi = NULL;
4557 238871 : for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4558 : {
4559 89985 : ophi = ogsi.phi ();
4560 89985 : if (gimple_phi_result (ophi) == nop)
4561 : break;
4562 17168 : ophi = NULL;
4563 : }
4564 :
4565 : /* If we did find the corresponding PHI, copy those inputs. */
4566 221703 : if (ophi)
4567 : {
4568 : /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4569 72817 : if (!has_single_use (nop))
4570 : {
4571 13687 : imm_use_iterator imm_iter;
4572 13687 : use_operand_p use_p;
4573 :
 : /* Debug binds and other PHIs in NEW_BB are the only uses
 : we tolerate; any other use would become stale. */
4574 54820 : FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4575 : {
4576 27448 : if (!gimple_debug_bind_p (USE_STMT (use_p))
4577 27448 : && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4578 27446 : || gimple_bb (USE_STMT (use_p)) != new_bb))
4579 2 : goto fail;
4580 13687 : }
4581 : }
4582 72815 : bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4583 725470 : FOR_EACH_EDGE (e, ei, old_bb->preds)
4584 : {
4585 652655 : location_t oloc;
4586 652655 : tree oop;
4587 :
4588 652655 : if ((e->flags & EDGE_EH) == 0)
4589 2835 : continue;
4590 649820 : oop = gimple_phi_arg_def (ophi, e->dest_idx);
4591 649820 : oloc = gimple_phi_arg_location (ophi, e->dest_idx);
4592 649820 : redirect_edge_var_map_add (e, nresult, oop, oloc);
4593 : }
4594 : }
4595 : /* If we didn't find the PHI, if it's a real variable or a VOP, we know
4596 : from the fact that OLD_BB is tree_empty_eh_handler_p that the
4597 : variable is unchanged from input to the block and we can simply
4598 : re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4599 : else
4600 : {
4601 148886 : location_t nloc
4602 148886 : = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4603 397246 : FOR_EACH_EDGE (e, ei, old_bb->preds)
4604 248360 : redirect_edge_var_map_add (e, nresult, nop, nloc);
4605 : }
4606 : }
4607 :
4608 : /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4609 : we don't know what values from the other edges into NEW_BB to use. */
4610 323976 : for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4611 : {
4612 92477 : gphi *ophi = ogsi.phi ();
4613 92477 : tree oresult = gimple_phi_result (ophi);
4614 92477 : if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4615 19962 : goto fail;
4616 : }
4617 :
4618 : /* Finally, move the edges and update the PHIs. */
4619 1052227 : for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4620 820728 : if (e->flags & EDGE_EH)
4621 : {
4622 : /* ??? CFG manipulation routines do not try to update loop
4623 : form on edge redirection. Do so manually here for now. */
4624 : /* If we redirect a loop entry or latch edge that will either create
4625 : a multiple entry loop or rotate the loop. If the loops merge
4626 : we may have created a loop with multiple latches.
4627 : All of this isn't easily fixed thus cancel the affected loop
4628 : and mark the other loop as possibly having multiple latches. */
4629 819599 : if (e->dest == e->dest->loop_father->header)
4630 : {
4631 0 : mark_loop_for_removal (e->dest->loop_father);
4632 0 : new_bb->loop_father->latch = NULL;
4633 0 : loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4634 : }
4635 819599 : redirect_eh_edge_1 (e, new_bb, change_region);
4636 819599 : redirect_edge_succ (e, new_bb);
 : /* Commit the PHI argument values recorded above onto the
 : redirected edge. */
4637 819599 : flush_pending_stmts (e);
4638 : }
4639 : else
4640 1129 : ei_next (&ei);
4641 :
4642 231499 : BITMAP_FREE (ophi_handled);
4643 231499 : return true;
4644 :
4645 19964 : fail:
 : /* On failure, undo the pending edge_var_map entries added above so
 : nothing stale is left behind; no CFG changes have happened yet. */
4646 362784 : FOR_EACH_EDGE (e, ei, old_bb->preds)
4647 342820 : redirect_edge_var_map_clear (e);
4648 19964 : BITMAP_FREE (ophi_handled);
4649 19964 : return false;
4650 : }
4651 :
4652 : /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4653 : old region to NEW_REGION at BB. */
4654 :
4655 : static void
4656 9601 : cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4657 : eh_landing_pad lp, eh_region new_region)
4658 : {
4659 9601 : gimple_stmt_iterator gsi;
4660 9601 : eh_landing_pad *pp;
4661 :
 : /* Unlink LP from its current region's singly-linked landing pad
 : list. */
4662 9601 : for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4663 0 : continue;
4664 9601 : *pp = lp->next_lp;
4665 :
 : /* Push LP onto the head of NEW_REGION's landing pad list. */
4666 9601 : lp->region = new_region;
4667 9601 : lp->next_lp = new_region->landing_pads;
4668 9601 : new_region->landing_pads = lp;
4669 :
4670 : /* Delete the RESX that was matched within the empty handler block. */
4671 9601 : gsi = gsi_last_bb (bb);
4672 9601 : unlink_stmt_vdef (gsi_stmt (gsi));
4673 9601 : gsi_remove (&gsi, true);
4674 :
4675 : /* Clean up E_OUT for the fallthru. */
4676 9601 : e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4677 9601 : e_out->probability = profile_probability::always ();
4678 0 : }
4679 :
4680 : /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4681 : unsplitting than unsplit_eh was prepared to handle, e.g. when
4682 : multiple incoming edges and phis are involved. */
4683 :
4684 : static bool
4685 13874 : cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4686 : {
4687 13874 : gimple_stmt_iterator gsi;
4688 13874 : tree lab;
4689 :
4690 : /* We really ought not have totally lost everything following
4691 : a landing pad label. Given that BB is empty, there had better
4692 : be a successor. */
4693 13874 : gcc_assert (e_out != NULL);
4694 :
4695 : /* The destination block must not already have a landing pad
4696 : for a different region. */
4697 13874 : lab = NULL;
4698 40772 : for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4699 : {
4700 26898 : glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
4701 13024 : int lp_nr;
4702 :
 : /* Labels cluster at the start of a block; the first non-label
 : ends the scan. */
4703 13024 : if (!stmt)
4704 : break;
4705 13024 : lab = gimple_label_label (stmt);
4706 13024 : lp_nr = EH_LANDING_PAD_NR (lab);
4707 13024 : if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4708 : return false;
4709 : }
4710 :
4711 : /* Attempt to move the PHIs into the successor block. */
4712 13874 : if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4713 : {
4714 4306 : if (dump_file && (dump_flags & TDF_DETAILS))
4715 0 : fprintf (dump_file,
4716 : "Unsplit EH landing pad %d to block %i "
4717 : "(via cleanup_empty_eh).\n",
4718 0 : lp->index, e_out->dest->index);
4719 4306 : return true;
4720 : }
4721 :
4722 : return false;
4723 : }
4724 :
4725 : /* Return true if edge E_FIRST is part of an empty infinite loop
4726 : or leads to such a loop through a series of single successor
4727 : empty bbs. */
4728 :
4729 : static bool
4730 13924 : infinite_empty_loop_p (edge e_first)
4731 : {
4732 13924 : bool inf_loop = false;
4733 13924 : edge e;
4734 :
 : /* A direct self-loop is trivially an empty infinite loop. */
4735 13924 : if (e_first->dest == e_first->src)
4736 : return true;
4737 :
 : /* Walk the chain of empty single-successor blocks, marking each
 : visited block via its AUX field; reaching a marked block means
 : we closed a cycle. */
4738 13922 : e_first->src->aux = (void *) 1;
4739 13994 : for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4740 : {
4741 7868 : gimple_stmt_iterator gsi;
4742 7868 : if (e->dest->aux)
4743 : {
4744 : inf_loop = true;
4745 7796 : break;
4746 : }
4747 7820 : e->dest->aux = (void *) 1;
4748 7820 : gsi = gsi_after_labels (e->dest);
4749 7820 : if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4750 2923 : gsi_next_nondebug (&gsi);
 : /* A block with any real (non-debug) statement ends the chain. */
4751 7820 : if (!gsi_end_p (gsi))
4752 : break;
4753 : }
 : /* Clear the AUX marks again, following the same chain. */
4754 13922 : e_first->src->aux = NULL;
4755 21742 : for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4756 7820 : e->dest->aux = NULL;
4757 :
4758 : return inf_loop;
4759 : }
4760 :
4761 : /* Examine the block associated with LP to determine if it's an empty
4762 : handler for its EH region. If so, attempt to redirect EH edges to
4763 : an outer region. Return true if the CFG was updated in any way. This
4764 : is similar to jump forwarding, just across EH edges. */
4765 :
4766 : static bool
4767 1304114 : cleanup_empty_eh (eh_landing_pad lp)
4768 : {
4769 1304114 : basic_block bb = label_to_block (cfun, lp->post_landing_pad);
4770 1304114 : gimple_stmt_iterator gsi;
4771 1304114 : gimple *resx;
4772 1304114 : eh_region new_region;
4773 1304114 : edge_iterator ei;
4774 1304114 : edge e, e_out;
4775 1304114 : bool has_non_eh_pred;
4776 1304114 : bool ret = false;
4777 1304114 : int new_lp_nr;
4778 :
4779 : /* There can be zero or one edges out of BB. This is the quickest test. */
4780 1304114 : switch (EDGE_COUNT (bb->succs))
4781 : {
4782 : case 0:
4783 : e_out = NULL;
4784 : break;
4785 677378 : case 1:
4786 677378 : e_out = single_succ_edge (bb);
4787 677378 : break;
4788 : default:
4789 : return false;
4790 : }
4791 :
 : /* If the block ends in a RESX, first try to remove or sink any
 : clobbers preceding it; sinking them counts as a CFG change. */
4792 1185520 : gsi = gsi_last_nondebug_bb (bb);
4793 1185520 : resx = gsi_stmt (gsi);
4794 1185520 : if (resx && is_gimple_resx (resx))
4795 : {
4796 1035647 : if (stmt_can_throw_external (cfun, resx))
4797 506908 : optimize_clobbers (bb);
4798 528739 : else if (sink_clobbers (bb))
4799 1185520 : ret = true;
4800 : }
4801 :
4802 1185520 : gsi = gsi_after_labels (bb);
4803 :
4804 : /* Make sure to skip debug statements. */
4805 1185520 : if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4806 432809 : gsi_next_nondebug (&gsi);
4807 :
4808 : /* If the block is totally empty, look for more unsplitting cases. */
4809 1185520 : if (gsi_end_p (gsi))
4810 : {
4811 : /* For the degenerate case of an infinite loop bail out.
4812 : If bb has no successors and is totally empty, which can happen e.g.
4813 : because of incorrect noreturn attribute, bail out too. */
4814 13924 : if (e_out == NULL
4815 13924 : || infinite_empty_loop_p (e_out))
4816 50 : return ret;
4817 :
4818 13874 : return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4819 : }
4820 :
4821 : /* The block should consist only of a single RESX statement, modulo a
4822 : preceding call to __builtin_stack_restore if there is no outgoing
4823 : edge, since the call can be eliminated in this case. */
4824 1171596 : resx = gsi_stmt (gsi);
4825 1171596 : if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4826 : {
4827 318 : gsi_next_nondebug (&gsi);
4828 318 : resx = gsi_stmt (gsi);
4829 : }
4830 1171596 : if (!is_gimple_resx (resx))
4831 : return ret;
4832 532913 : gcc_assert (gsi_one_nondebug_before_end_p (gsi));
4833 :
4834 : /* Determine if there are non-EH edges, or resx edges into the handler. */
4835 532913 : has_non_eh_pred = false;
4836 3123684 : FOR_EACH_EDGE (e, ei, bb->preds)
4837 2590771 : if (!(e->flags & EDGE_EH))
4838 5858 : has_non_eh_pred = true;
4839 :
4840 : /* Find the handler that's outer of the empty handler by looking at
4841 : where the RESX instruction was vectored. */
4842 532913 : new_lp_nr = lookup_stmt_eh_lp (resx);
4843 532913 : new_region = get_eh_region_from_lp_number (new_lp_nr);
4844 :
4845 : /* If there's no destination region within the current function,
4846 : redirection is trivial via removing the throwing statements from
4847 : the EH region, removing the EH edges, and allowing the block
4848 : to go unreachable. */
4849 532913 : if (new_region == NULL)
4850 : {
4851 295309 : gcc_assert (e_out == NULL);
4852 1915863 : for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4853 1620554 : if (e->flags & EDGE_EH)
4854 : {
4855 1617971 : gimple *stmt = *gsi_last_bb (e->src);
4856 1617971 : remove_stmt_from_eh_lp (stmt);
4857 1617971 : remove_edge (e);
4858 : }
4859 : else
4860 2583 : ei_next (&ei);
4861 295309 : goto succeed;
4862 : }
4863 :
4864 : /* If the destination region is a MUST_NOT_THROW, allow the runtime
4865 : to handle the abort and allow the blocks to go unreachable. */
4866 237604 : if (new_region->type == ERT_MUST_NOT_THROW)
4867 : {
4868 11 : for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4869 6 : if (e->flags & EDGE_EH)
4870 : {
4871 6 : gimple *stmt = *gsi_last_bb (e->src);
4872 6 : remove_stmt_from_eh_lp (stmt);
4873 6 : add_stmt_to_eh_lp (stmt, new_lp_nr);
4874 6 : remove_edge (e);
4875 : }
4876 : else
4877 0 : ei_next (&ei);
4878 5 : goto succeed;
4879 : }
4880 :
4881 : /* Try to redirect the EH edges and merge the PHIs into the destination
4882 : landing pad block. If the merge succeeds, we'll already have redirected
4883 : all the EH edges. The handler itself will go unreachable if there were
4884 : no normal edges. */
4885 237599 : if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4886 227193 : goto succeed;
4887 :
4888 : /* Finally, if all input edges are EH edges, then we can (potentially)
4889 : reduce the number of transfers from the runtime by moving the landing
4890 : pad from the original region to the new region. This is a win when
4891 : we remove the last CLEANUP region along a particular exception
4892 : propagation path. Since nothing changes except for the region with
4893 : which the landing pad is associated, the PHI nodes do not need to be
4894 : adjusted at all. */
4895 10406 : if (!has_non_eh_pred)
4896 : {
4897 9601 : cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4898 9601 : if (dump_file && (dump_flags & TDF_DETAILS))
4899 0 : fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4900 : lp->index, new_region->index);
4901 :
4902 : /* ??? The CFG didn't change, but we may have rendered the
4903 : old EH region unreachable. Trigger a cleanup there. */
4904 9601 : return true;
4905 : }
4906 :
4907 : return ret;
4908 :
4909 522507 : succeed:
4910 522507 : if (dump_file && (dump_flags & TDF_DETAILS))
4911 6 : fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4912 522507 : remove_eh_landing_pad (lp);
4913 522507 : return true;
4914 : }
4915 :
4916 : /* Do a post-order traversal of the EH region tree. Examine each
4917 : post_landing_pad block and see if we can eliminate it as empty. */
4918 :
4919 : static bool
4920 327289 : cleanup_all_empty_eh (void)
4921 : {
4922 327289 : bool changed = false;
4923 327289 : eh_landing_pad lp;
4924 327289 : int i;
4925 :
4926 : /* The post-order traversal may lead to quadratic behavior in the
4927 : redirection of incoming EH edges from inner LPs, so first try to walk
4928 : the region tree from inner to outer LPs in order to eliminate these
 : edges. Removed landing pads leave NULL slots in lp_array, hence the
 : per-slot check below. */
4929 2614621 : for (i = vec_safe_length (cfun->eh->lp_array) - 1; i >= 1; --i)
4930 : {
4931 1960043 : lp = (*cfun->eh->lp_array)[i];
4932 1960043 : if (lp)
4933 914811 : changed |= cleanup_empty_eh (lp);
4934 : }
4935 :
4936 : /* Now do the post-order traversal to eliminate outer empty LPs. */
4937 2287911 : for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4938 1960622 : if (lp)
4939 389303 : changed |= cleanup_empty_eh (lp);
4940 :
4941 327289 : return changed;
4942 : }
4943 :
4944 : /* Perform cleanups and lowering of exception handling
4945 : 1) cleanups regions with handlers doing nothing are optimized out
4946 : 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4947 : 3) Info about regions that are containing instructions, and regions
4948 : reachable via local EH edges is collected
4949 : 4) Eh tree is pruned for regions no longer necessary.
4950 :
4951 : TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4952 : Unify those that have the same failure decl and locus.
4953 : */
4954 :
4955 : static unsigned int
4956 1092273 : execute_cleanup_eh_1 (void)
4957 : {
4958 : /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4959 : looking up unreachable landing pads. */
4960 1092273 : remove_unreachable_handlers ();
4961 :
4962 : /* Watch out for the region tree vanishing due to all unreachable. */
4963 1092273 : if (cfun->eh->region_tree)
4964 : {
4965 327289 : bool changed = false;
4966 :
 : /* unsplit_all_eh is only worthwhile (and only run) when
 : optimizing. */
4967 327289 : if (optimize)
4968 300317 : changed |= unsplit_all_eh ();
4969 327289 : changed |= cleanup_all_empty_eh ();
4970 :
4971 327289 : if (changed)
4972 : {
4973 269107 : free_dominance_info (CDI_DOMINATORS);
4974 269107 : free_dominance_info (CDI_POST_DOMINATORS);
4975 :
4976 : /* We delayed all basic block deletion, as we may have performed
4977 : cleanups on EH edges while non-EH edges were still present. */
4978 269107 : delete_unreachable_blocks ();
4979 :
4980 : /* We manipulated the landing pads. Remove any region that no
4981 : longer has a landing pad. */
4982 269107 : remove_unreachable_handlers_no_lp ();
4983 :
 : /* Report the follow-up work to the pass manager as TODO flags. */
4984 269107 : return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4985 : }
4986 : }
4987 :
4988 : return 0;
4989 : }
4990 :
4991 : namespace {
4992 :
 : /* Static metadata describing the "ehcleanup" GIMPLE pass. */
4993 : const pass_data pass_data_cleanup_eh =
4994 : {
4995 : GIMPLE_PASS, /* type */
4996 : "ehcleanup", /* name */
4997 : OPTGROUP_NONE, /* optinfo_flags */
4998 : TV_TREE_EH, /* tv_id */
4999 : PROP_gimple_lcf, /* properties_required */
5000 : 0, /* properties_provided */
5001 : 0, /* properties_destroyed */
5002 : 0, /* todo_flags_start */
5003 : 0, /* todo_flags_finish */
5004 : };
5005 :
5006 : class pass_cleanup_eh : public gimple_opt_pass
5007 : {
5008 : public:
5009 571444 : pass_cleanup_eh (gcc::context *ctxt)
5010 1142888 : : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
5011 : {}
5012 :
5013 : /* opt_pass methods: */
5014 285722 : opt_pass * clone () final override { return new pass_cleanup_eh (m_ctxt); }
 : /* Only run when the function actually has an EH region tree. */
5015 3884578 : bool gate (function *fun) final override
5016 : {
5017 3884578 : return fun->eh != NULL && fun->eh->region_tree != NULL;
5018 : }
5019 :
5020 : unsigned int execute (function *) final override;
5021 :
5022 : }; // class pass_cleanup_eh
5023 :
5024 : unsigned int
5025 1092273 : pass_cleanup_eh::execute (function *fun)
5026 : {
5027 1092273 : int ret = execute_cleanup_eh_1 ();
5028 :
5029 : /* If the function no longer needs an EH personality routine
5030 : clear it. This exposes cross-language inlining opportunities
5031 : and avoids references to a never defined personality routine. */
5032 1092273 : if (DECL_FUNCTION_PERSONALITY (current_function_decl)
5033 1092273 : && function_needs_eh_personality (fun) != eh_personality_lang)
5034 713823 : DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
5035 :
5036 1092273 : return ret;
5037 : }
5038 :
5039 : } // anon namespace
5040 :
 : /* Factory entry point used by the pass manager to instantiate the
 : ehcleanup pass. */
5041 : gimple_opt_pass *
5042 285722 : make_pass_cleanup_eh (gcc::context *ctxt)
5043 : {
5044 285722 : return new pass_cleanup_eh (ctxt);
5045 : }
5046 :
5047 : /* Disable warnings about missing quoting in GCC diagnostics for
5048 : the verification errors. Their format strings don't follow GCC
5049 : diagnostic conventions but are only used for debugging. */
5050 : #if __GNUC__ >= 10
5051 : # pragma GCC diagnostic push
5052 : # pragma GCC diagnostic ignored "-Wformat-diag"
5053 : #endif
5054 :
5055 : /* Verify that BB containing STMT as the last statement, has precisely the
5056 : edge that make_eh_edge would create. */
5057 :
5058 : DEBUG_FUNCTION bool
5059 1800942887 : verify_eh_edges (gimple *stmt)
5060 : {
 : /* Returns true if a problem was found, false if the edges are OK. */
5061 1800942887 : basic_block bb = gimple_bb (stmt);
5062 1800942887 : eh_landing_pad lp = NULL;
5063 1800942887 : int lp_nr;
5064 1800942887 : edge_iterator ei;
5065 1800942887 : edge e, eh_edge;
5066 :
5067 1800942887 : lp_nr = lookup_stmt_eh_lp (stmt);
5068 1800942887 : if (lp_nr > 0)
5069 187618688 : lp = get_eh_landing_pad_from_number (lp_nr);
5070 :
 : /* At most one EH successor edge may exist. */
5071 1800942887 : eh_edge = NULL;
5072 4380224393 : FOR_EACH_EDGE (e, ei, bb->succs)
5073 : {
5074 2579281506 : if (e->flags & EDGE_EH)
5075 : {
5076 187618688 : if (eh_edge)
5077 : {
5078 0 : error ("BB %i has multiple EH edges", bb->index);
5079 0 : return true;
5080 : }
5081 : else
5082 : eh_edge = e;
5083 : }
5084 : }
5085 :
 : /* A statement with no landing pad must not have an EH edge. */
5086 1800942887 : if (lp == NULL)
5087 : {
5088 1613324199 : if (eh_edge)
5089 : {
5090 0 : error ("BB %i cannot throw but has an EH edge", bb->index);
5091 0 : return true;
5092 : }
5093 : return false;
5094 : }
5095 :
5096 187618688 : if (!stmt_could_throw_p (cfun, stmt))
5097 : {
5098 0 : error ("BB %i last statement has incorrectly set lp", bb->index);
5099 0 : return true;
5100 : }
5101 :
5102 187618688 : if (eh_edge == NULL)
5103 : {
5104 0 : error ("BB %i is missing an EH edge", bb->index);
5105 0 : return true;
5106 : }
5107 :
 : /* The EH edge must lead to this landing pad's post-landing-pad
 : block. */
5108 187618688 : if (eh_edge->dest != label_to_block (cfun, lp->post_landing_pad))
5109 : {
5110 0 : error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
5111 0 : return true;
5112 : }
5113 :
5114 : return false;
5115 : }
5116 :
5117 : /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
5118 :
5119 : DEBUG_FUNCTION bool
5120 1879927 : verify_eh_dispatch_edge (geh_dispatch *stmt)
5121 : {
 : /* Returns true if a problem was found, false if the edges are OK. */
5122 1879927 : eh_region r;
5123 1879927 : eh_catch c;
5124 1879927 : basic_block src, dst;
5125 1879927 : bool want_fallthru = true;
5126 1879927 : edge_iterator ei;
5127 1879927 : edge e, fall_edge;
5128 :
5129 1879927 : r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
5130 1879927 : src = gimple_bb (stmt);
5131 :
 : /* The AUX fields are used as scratch marks below; they must start
 : out clear. */
5132 4005805 : FOR_EACH_EDGE (e, ei, src->succs)
5133 2125878 : gcc_assert (e->aux == NULL);
5134 :
 : /* First pass: mark (via AUX) the edge expected for each handler of
 : the dispatch region. */
5135 1879927 : switch (r->type)
5136 : {
5137 1871059 : case ERT_TRY:
5138 2108142 : for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5139 : {
5140 1980456 : dst = label_to_block (cfun, c->label);
5141 1980456 : e = find_edge (src, dst);
5142 1980456 : if (e == NULL)
5143 : {
5144 0 : error ("BB %i is missing an edge", src->index);
5145 0 : return true;
5146 : }
5147 1980456 : e->aux = (void *)e;
5148 :
5149 : /* A catch-all handler doesn't have a fallthru. */
5150 1980456 : if (c->type_list == NULL)
5151 : {
5152 : want_fallthru = false;
5153 : break;
5154 : }
5155 : }
5156 : break;
5157 :
5158 8868 : case ERT_ALLOWED_EXCEPTIONS:
5159 8868 : dst = label_to_block (cfun, r->u.allowed.label);
5160 8868 : e = find_edge (src, dst);
5161 8868 : if (e == NULL)
5162 : {
5163 0 : error ("BB %i is missing an edge", src->index);
5164 0 : return true;
5165 : }
5166 8868 : e->aux = (void *)e;
5167 8868 : break;
5168 :
5169 0 : default:
5170 0 : gcc_unreachable ();
5171 : }
5172 :
 : /* Second pass: each successor must be either the single fallthru or
 : one of the edges marked above; clear the AUX marks as we go.
 : NOTE(review): the early error returns above and below leave any
 : already-set AUX marks in place -- acceptable for a DEBUG_FUNCTION,
 : but worth confirming no caller relies on AUX being clear after a
 : failed verification. */
5173 1879927 : fall_edge = NULL;
5174 4005805 : FOR_EACH_EDGE (e, ei, src->succs)
5175 : {
5176 2125878 : if (e->flags & EDGE_FALLTHRU)
5177 : {
5178 136554 : if (fall_edge != NULL)
5179 : {
5180 0 : error ("BB %i too many fallthru edges", src->index);
5181 0 : return true;
5182 : }
5183 : fall_edge = e;
5184 : }
5185 1989324 : else if (e->aux)
5186 1989324 : e->aux = NULL;
5187 : else
5188 : {
5189 0 : error ("BB %i has incorrect edge", src->index);
5190 0 : return true;
5191 : }
5192 : }
 : /* The fallthru must be present exactly when a non-catch-all handler
 : chain wants one. */
5193 1879927 : if ((fall_edge != NULL) ^ want_fallthru)
5194 : {
5195 0 : error ("BB %i has incorrect fallthru edge", src->index);
5196 0 : return true;
5197 : }
5198 :
5199 : return false;
5200 : }
5201 :
5202 : #if __GNUC__ >= 10
5203 : # pragma GCC diagnostic pop
5204 : #endif
|