Line data Source code
1 : /* Implements exception handling.
2 : Copyright (C) 1989-2026 Free Software Foundation, Inc.
3 : Contributed by Mike Stump <mrs@cygnus.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 :
22 : /* An exception is an event that can be "thrown" from within a
23 : function. This event can then be "caught" by the callers of
24 : the function.
25 :
26 : The representation of exceptions changes several times during
27 : the compilation process:
28 :
29 : In the beginning, in the front end, we have the GENERIC trees
30 : TRY_CATCH_EXPR, TRY_FINALLY_EXPR, EH_ELSE_EXPR, WITH_CLEANUP_EXPR,
31 : CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
32 :
33 : During initial gimplification (gimplify.cc) these are lowered to the
34 : GIMPLE_TRY, GIMPLE_CATCH, GIMPLE_EH_ELSE, and GIMPLE_EH_FILTER
35 : nodes. The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are
36 : converted into GIMPLE_TRY_FINALLY nodes; the others are a more
37 : direct 1-1 conversion.
38 :
39 : During pass_lower_eh (tree-eh.cc) we record the nested structure
40 : of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41 : We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42 : regions at this time. We can then flatten the statements within
43 : the TRY nodes to straight-line code. Statements that had been within
44 : TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45 : so that we may remember what action is supposed to be taken if
46 : a given statement does throw. During this lowering process,
47 : we create an EH_LANDING_PAD node for each EH_REGION that has
48 : some code within the function that needs to be executed if a
49 : throw does happen. We also create RESX statements that are
50 : used to transfer control from an inner EH_REGION to an outer
51 : EH_REGION. We also create EH_DISPATCH statements as placeholders
52 : for a runtime type comparison that should be made in order to
53 : select the action to perform among different CATCH and EH_FILTER
54 : regions.
55 :
56 : During pass_lower_eh_dispatch (tree-eh.cc), which is run after
57 : all inlining is complete, we are able to run assign_filter_values,
58 : which allows us to map the set of types manipulated by all of the
59 : CATCH and EH_FILTER regions to a set of integers. This set of integers
60 : will be how the exception runtime communicates with the code generated
61 : within the function. We then expand the GIMPLE_EH_DISPATCH statements
62 : to a switch or conditional branches that use the argument provided by
63 : the runtime (__builtin_eh_filter) and the set of integers we computed
64 : in assign_filter_values.
65 :
66 : During pass_lower_resx (tree-eh.cc), which is run near the end
67 : of optimization, we expand RESX statements. If the eh region
68 : that is outer to the RESX statement is a MUST_NOT_THROW, then
69 : the RESX expands to some form of abort statement. If the eh
70 : region that is outer to the RESX statement is within the current
71 : function, then the RESX expands to a bookkeeping call
72 : (__builtin_eh_copy_values) and a goto. Otherwise, the next
73 : handler for the exception must be within a function somewhere
74 : up the call chain, so we call back into the exception runtime
75 : (__builtin_unwind_resume).
76 :
77 : During pass_expand (cfgexpand.cc), we generate REG_EH_REGION notes
78 : that create an rtl to eh_region mapping that corresponds to the
79 : gimple to eh_region mapping that had been recorded in the
80 : THROW_STMT_TABLE.
81 :
82 : Then, via finish_eh_generation, we generate the real landing pads
83 : to which the runtime will actually transfer control. These new
84 : landing pads perform whatever bookkeeping is needed by the target
85 : backend in order to resume execution within the current function.
86 : Each of these new landing pads falls through into the post_landing_pad
87 : label which had been used within the CFG up to this point. All
88 : exception edges within the CFG are redirected to the new landing pads.
89 : If the target uses setjmp to implement exceptions, the various extra
90 : calls into the runtime to register and unregister the current stack
91 : frame are emitted at this time.
92 :
93 : During pass_convert_to_eh_region_ranges (except.cc), we transform
94 : the REG_EH_REGION notes attached to individual insns into
95 : non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96 : and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
97 : same associated action within the exception region tree, meaning
98 : that (1) the exception is caught by the same landing pad within the
99 : current function, (2) the exception is blocked by the runtime with
100 : a MUST_NOT_THROW region, or (3) the exception is not handled at all
101 : within the current function.
102 :
103 : Finally, during assembly generation, we call
104 : output_function_exception_table (except.cc) to emit the tables with
105 : which the exception runtime can determine if a given stack frame
106 : handles a given exception, and if so what filter value to provide
107 : to the function when the non-local control transfer is effected.
108 : If the target uses dwarf2 unwinding to implement exceptions, then
109 : output_call_frame_info (dwarf2out.cc) emits the required unwind data. */
110 :
111 :
112 : #include "config.h"
113 : #include "system.h"
114 : #include "coretypes.h"
115 : #include "backend.h"
116 : #include "target.h"
117 : #include "rtl.h"
118 : #include "tree.h"
119 : #include "cfghooks.h"
120 : #include "tree-pass.h"
121 : #include "memmodel.h"
122 : #include "tm_p.h"
123 : #include "stringpool.h"
124 : #include "expmed.h"
125 : #include "optabs.h"
126 : #include "emit-rtl.h"
127 : #include "cgraph.h"
128 : #include "diagnostic.h"
129 : #include "fold-const.h"
130 : #include "stor-layout.h"
131 : #include "explow.h"
132 : #include "stmt.h"
133 : #include "expr.h"
134 : #include "calls.h"
135 : #include "libfuncs.h"
136 : #include "except.h"
137 : #include "output.h"
138 : #include "dwarf2asm.h"
139 : #include "dwarf2.h"
140 : #include "common/common-target.h"
141 : #include "langhooks.h"
142 : #include "cfgrtl.h"
143 : #include "tree-pretty-print.h"
144 : #include "cfgloop.h"
145 : #include "builtins.h"
146 : #include "tree-hash-traits.h"
147 : #include "flags.h"
148 :
/* Running base added to call-site indices when action records are
   emitted (see add_call_site / the output routines below).  */
static GTY(()) int call_site_base;

/* Cache mapping a source TYPE to the runtime object that represents it
   in the exception tables; filled by add_type_for_runtime via
   lang_hooks.eh_runtime_type.  */
static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;

/* FUNCTION_DECL for the library "setjmp"; only built when
   DONT_USE_BUILTIN_SETJMP is defined (see init_eh).  */
static GTY(()) tree setjmp_fn;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
/* Cached byte offsets of the interesting fields of that structure,
   computed once in init_eh for easy access from rtl.  */
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
162 :
163 :
/* One entry of the call-site table: the landing pad rtx to which
   control transfers for this call site, and its action-table index.  */
struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

   0:	null action record, non-null landing pad; implies cleanups
   -1:	null action record, null landing pad; implies no action
   -2:	no call-site entry; implies must_not_throw
   -3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;  /* Not examined by the hasher below; filled in when the
		  record is emitted.  */
  int filter;  /* Filter value (see add_ttypes_entry).  */
  int next;    /* 1-based index of the next record; 0 ends the chain.  */
};

/* Hashtable helpers.  */

/* Hash action_records by (filter, next) so that identical chains are
   shared rather than duplicated in the action table.  */
struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};
196 :
197 : inline hashval_t
198 669313 : action_record_hasher::hash (const action_record *entry)
199 : {
200 669313 : return entry->next * 1009 + entry->filter;
201 : }
202 :
203 : inline bool
204 334707 : action_record_hasher::equal (const action_record *entry,
205 : const action_record *data)
206 : {
207 334707 : return entry->filter == data->filter && entry->next == data->next;
208 : }
209 :
typedef hash_table<action_record_hasher> action_hash_type;

/* Forward declarations for static routines defined later in this file.  */

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

/* LEB128 encoding helpers and call-site table sizing/output, for both
   the dwarf2 and setjmp/longjmp exception schemes.  */
static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);
226 :
227 :
/* One-time initialization of the EH machinery for this translation
   unit.  Does nothing unless -fexceptions is in effect.  For SjLj
   targets this additionally lays out the SjLj_Function_Context record
   and caches its field offsets in the sjlj_fc_*_ofs globals.  */

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* unwind_word __data[4];  */
      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
			      (targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      /* void *__jbuf[N]; where N was computed above.  */
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);

#ifdef DONT_USE_BUILTIN_SETJMP
      /* Declare an external "setjmp" taking the jump buffer, for targets
	 that cannot use __builtin_setjmp.  */
      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
				      NULL);
      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
			      get_identifier ("setjmp"), tmp);
      TREE_PUBLIC (setjmp_fn) = 1;
      DECL_EXTERNAL (setjmp_fn) = 1;
      DECL_ASSEMBLER_NAME (setjmp_fn);
#endif
    }
}
349 :
350 : void
351 203970954 : init_eh_for_function (void)
352 : {
353 203970954 : cfun->eh = ggc_cleared_alloc<eh_status> ();
354 :
355 : /* Make sure zero'th entries are used. */
356 203970954 : vec_safe_push (cfun->eh->region_array, (eh_region)0);
357 203970954 : vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
358 203970954 : }
359 :
360 : /* Routines to generate the exception tree somewhat directly.
361 : These are used from tree-eh.cc when processing exception related
362 : nodes during tree optimization. */
363 :
364 : static eh_region
365 2702183 : gen_eh_region (enum eh_region_type type, eh_region outer)
366 : {
367 2702183 : eh_region new_eh;
368 :
369 : /* Insert a new blank region as a leaf in the tree. */
370 2702183 : new_eh = ggc_cleared_alloc<eh_region_d> ();
371 2702183 : new_eh->type = type;
372 2702183 : new_eh->outer = outer;
373 2702183 : if (outer)
374 : {
375 1181632 : new_eh->next_peer = outer->inner;
376 1181632 : outer->inner = new_eh;
377 : }
378 : else
379 : {
380 1520551 : new_eh->next_peer = cfun->eh->region_tree;
381 1520551 : cfun->eh->region_tree = new_eh;
382 : }
383 :
384 2702183 : new_eh->index = vec_safe_length (cfun->eh->region_array);
385 2702183 : vec_safe_push (cfun->eh->region_array, new_eh);
386 :
387 : /* Copy the language's notion of whether to use __cxa_end_cleanup. */
388 2702183 : if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
389 0 : new_eh->use_cxa_end_cleanup = true;
390 :
391 2702183 : return new_eh;
392 : }
393 :
/* Generate a new ERT_CLEANUP region nested inside OUTER.  */

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}
399 :
/* Generate a new ERT_TRY region nested inside OUTER; catch handlers
   are attached afterwards with gen_eh_region_catch.  */

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
405 :
406 : eh_catch
407 53208 : gen_eh_region_catch (eh_region t, tree type_or_list)
408 : {
409 53208 : eh_catch c, l;
410 53208 : tree type_list, type_node;
411 :
412 53208 : gcc_assert (t->type == ERT_TRY);
413 :
414 : /* Ensure to always end up with a type list to normalize further
415 : processing, then register each type against the runtime types map. */
416 53208 : type_list = type_or_list;
417 53208 : if (type_or_list)
418 : {
419 6665 : if (TREE_CODE (type_or_list) != TREE_LIST)
420 5810 : type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
421 :
422 6665 : type_node = type_list;
423 13330 : for (; type_node; type_node = TREE_CHAIN (type_node))
424 6665 : add_type_for_runtime (TREE_VALUE (type_node));
425 : }
426 :
427 53208 : c = ggc_cleared_alloc<eh_catch_d> ();
428 53208 : c->type_list = type_list;
429 53208 : l = t->u.eh_try.last_catch;
430 53208 : c->prev_catch = l;
431 53208 : if (l)
432 3167 : l->next_catch = c;
433 : else
434 50041 : t->u.eh_try.first_catch = c;
435 53208 : t->u.eh_try.last_catch = c;
436 :
437 53208 : return c;
438 : }
439 :
440 : eh_region
441 5332 : gen_eh_region_allowed (eh_region outer, tree allowed)
442 : {
443 5332 : eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
444 5332 : region->u.allowed.type_list = allowed;
445 :
446 5499 : for (; allowed ; allowed = TREE_CHAIN (allowed))
447 167 : add_type_for_runtime (TREE_VALUE (allowed));
448 :
449 5332 : return region;
450 : }
451 :
/* Generate a new ERT_MUST_NOT_THROW region nested inside OUTER.  */

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
457 :
458 : eh_landing_pad
459 1726990 : gen_eh_landing_pad (eh_region region)
460 : {
461 1726990 : eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
462 :
463 1726990 : lp->next_lp = region->landing_pads;
464 1726990 : lp->region = region;
465 1726990 : lp->index = vec_safe_length (cfun->eh->lp_array);
466 1726990 : region->landing_pads = lp;
467 :
468 1726990 : vec_safe_push (cfun->eh->lp_array, lp);
469 :
470 1726990 : return lp;
471 : }
472 :
/* Return the eh_region with index I in function IFUN.  */

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}
478 :
/* Return the eh_region with index I in the current function.  */

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}
484 :
/* Return the eh_landing_pad with index I in function IFUN.  */

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}
490 :
/* Return the eh_landing_pad with index I in the current function.  */

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}
496 :
497 : eh_region
498 11270299 : get_eh_region_from_lp_number_fn (struct function *ifun, int i)
499 : {
500 11270299 : if (i < 0)
501 56692 : return (*ifun->eh->region_array)[-i];
502 11213607 : else if (i == 0)
503 : return NULL;
504 : else
505 : {
506 4786677 : eh_landing_pad lp;
507 4786677 : lp = (*ifun->eh->lp_array)[i];
508 4786677 : return lp->region;
509 : }
510 : }
511 :
/* As get_eh_region_from_lp_number_fn, for the current function.  */

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
517 :
518 : /* Returns true if the current function has exception handling regions. */
519 :
bool
current_function_has_exception_handlers (void)
{
  /* A non-empty region tree is built by pass_lower_eh whenever the
     function contains EH constructs.  */
  return cfun->eh->region_tree != NULL;
}
525 :
526 : /* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
527 : Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
528 :
/* Bookkeeping shared by duplicate_eh_regions and its recursive worker.  */
struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;	/* Callback remapping labels.  */
  void *label_map_data;			/* Opaque cookie for LABEL_MAP.  */
  hash_map<void *, void *> *eh_map;	/* Old region/lp -> new copies.  */
};
535 :
/* Recursive worker for duplicate_eh_regions: copy the region OLD_R and
   its whole subtree into the current function, rooted under OUTER.
   Records old->new mappings for both regions and used landing pads in
   DATA->EH_MAP, remapping labels through DATA->LABEL_MAP.  */

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  bool existed = data->eh_map->put (old_r, new_r);
  gcc_assert (!existed);

  /* Copy the kind-specific payload of the region.  */
  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	  = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      /* Keep only the locus part of the failure location.  */
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      bool existed = data->eh_map->put (old_lp, new_lp);
      gcc_assert (!existed);

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  /* Recurse over OLD_R's children, rooting the copies at NEW_R.  */
  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
604 :
605 : /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
606 : the current function and root the tree below OUTER_REGION.
607 : The special case of COPY_REGION of NULL means all regions.
608 : Remap labels using MAP/MAP_DATA callback. Return a pointer map
609 : that allows the caller to remap uses of both EH regions and
610 : EH landing pads. */
611 :
612 : hash_map<void *, void *> *
613 4550826 : duplicate_eh_regions (struct function *ifun,
614 : eh_region copy_region, int outer_lp,
615 : duplicate_eh_regions_map map, void *map_data)
616 : {
617 4550826 : struct duplicate_eh_regions_data data;
618 4550826 : eh_region outer_region;
619 :
620 4550826 : if (flag_checking)
621 4550814 : verify_eh_tree (ifun);
622 :
623 4550826 : data.label_map = map;
624 4550826 : data.label_map_data = map_data;
625 4550826 : data.eh_map = new hash_map<void *, void *>;
626 :
627 4550826 : outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
628 :
629 : /* Copy all the regions in the subtree. */
630 4550826 : if (copy_region)
631 1998 : duplicate_eh_regions_1 (&data, copy_region, outer_region);
632 : else
633 : {
634 4548828 : eh_region r;
635 4751160 : for (r = ifun->eh->region_tree; r ; r = r->next_peer)
636 202332 : duplicate_eh_regions_1 (&data, r, outer_region);
637 : }
638 :
639 4550826 : if (flag_checking)
640 4550814 : verify_eh_tree (cfun);
641 :
642 4550826 : return data.eh_map;
643 : }
644 :
645 : /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
646 :
647 : eh_region
648 249 : eh_region_outermost (struct function *ifun, eh_region region_a,
649 : eh_region region_b)
650 : {
651 249 : gcc_assert (ifun->eh->region_array);
652 249 : gcc_assert (ifun->eh->region_tree);
653 :
654 249 : auto_sbitmap b_outer (ifun->eh->region_array->length ());
655 249 : bitmap_clear (b_outer);
656 :
657 600 : do
658 : {
659 600 : bitmap_set_bit (b_outer, region_b->index);
660 600 : region_b = region_b->outer;
661 : }
662 600 : while (region_b);
663 :
664 489 : do
665 : {
666 489 : if (bitmap_bit_p (b_outer, region_a->index))
667 : break;
668 241 : region_a = region_a->outer;
669 : }
670 241 : while (region_a);
671 :
672 249 : return region_a;
673 249 : }
674 :
675 : void
676 6834 : add_type_for_runtime (tree type)
677 : {
678 : /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
679 6834 : if (TREE_CODE (type) == NOP_EXPR)
680 12 : return;
681 :
682 6822 : bool existed = false;
683 6822 : tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
684 6822 : if (!existed)
685 2962 : *slot = lang_hooks.eh_runtime_type (type);
686 : }
687 :
688 : tree
689 28911 : lookup_type_for_runtime (tree type)
690 : {
691 : /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
692 28911 : if (TREE_CODE (type) == NOP_EXPR)
693 : return type;
694 :
695 : /* We should have always inserted the data earlier. */
696 28911 : return *type_to_runtime_map->get (type);
697 : }
698 :
699 :
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;	/* The type (or type list) this entry stands for.  */
  int filter;	/* The filter value assigned to it.  */
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

/* Hash by the identity of the stored tree node.  */

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
732 :
733 :
/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  /* Rotate-and-add each list element's hash; order-sensitive, matching
     the order-sensitive equality above.  */
  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;
767 :
768 :
769 : /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
770 : to speed up the search. Return the filter value to be used. */
771 :
772 : static int
773 44569 : add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
774 : {
775 44569 : struct ttypes_filter **slot, *n;
776 :
777 44569 : slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
778 : INSERT);
779 :
780 44569 : if ((n = *slot) == NULL)
781 : {
782 : /* Filter value is a 1 based table index. */
783 :
784 22574 : n = XNEW (struct ttypes_filter);
785 22574 : n->t = type;
786 22574 : n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
787 22574 : *slot = n;
788 :
789 22574 : vec_safe_push (cfun->eh->ttype_data, type);
790 : }
791 :
792 44569 : return n->filter;
793 : }
794 :
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack dummy so we only allocate on a miss.  */
  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    /* The ARM EABI scheme stores the type nodes themselves.  */
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
845 :
846 : /* Generate the action filter values to be used for CATCH and
847 : ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
848 : we use lots of landing pads, and so every type or list can share
849 : the same filter value, which saves table space. */
850 :
void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  /* Hash tables used to share filter values between regions.  */
  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  /* Walk every region of the function; index 0 is always unused.  */
  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}
920 :
/* Emit SEQ into a new basic block just before INSN (which is assumed
   to be the first instruction of some existing BB) and return the
   newly produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *next, *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);

  /* Make sure to put the location of INSN or a subsequent instruction on SEQ
     to avoid inheriting the location of the previous instruction.  */
  next = insn;
  while (next && !NONDEBUG_INSN_P (next))
    next = NEXT_INSN (next);
  if (next)
    last = emit_insn_before_setloc (seq, insn, INSN_LOCATION (next));
  else
    last = emit_insn_before (seq, insn);
  /* A trailing barrier would end the new block prematurely; back up
     over it so the block covers the whole emitted sequence.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  /* Mark the block BB_SUPERBLOCK; finish_eh_generation calls
     break_superblocks afterwards to split it up properly.  */
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
957 :
958 : /* A subroutine of dw2_build_landing_pads, also used for edge splitting
959 : at the rtl level. Emit the code required by the target at a landing
960 : pad for the given region. */
961 :
962 : static void
963 204366 : expand_dw2_landing_pad_for_region (eh_region region)
964 : {
965 204366 : if (targetm.have_exception_receiver ())
966 0 : emit_insn (targetm.gen_exception_receiver ());
967 204366 : else if (targetm.have_nonlocal_goto_receiver ())
968 0 : emit_insn (targetm.gen_nonlocal_goto_receiver ());
969 : else
970 : { /* Nothing */ }
971 :
972 204366 : if (region->exc_ptr_reg)
973 : {
974 176887 : rtx exc_ptr_reg;
975 176887 : if (EH_RETURN_DATA_REGNO (0) != INVALID_REGNUM)
976 176887 : exc_ptr_reg = gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0));
977 : else
978 : /* The target must be doing something special. Submit a dummy. */
979 : exc_ptr_reg = constm1_rtx;
980 176887 : emit_move_insn (region->exc_ptr_reg, exc_ptr_reg);
981 : }
982 204366 : if (region->filter_reg)
983 : {
984 109173 : rtx filter_reg;
985 109173 : if (EH_RETURN_DATA_REGNO (1) != INVALID_REGNUM)
986 109173 : filter_reg = gen_rtx_REG (targetm.eh_return_filter_mode (),
987 : EH_RETURN_DATA_REGNO (1));
988 : else
989 : /* The target must be doing something special. Submit a dummy. */
990 : filter_reg = constm1_rtx;
991 109173 : emit_move_insn (region->filter_reg, filter_reg);
992 : }
993 204366 : }
994 :
995 : /* Expand the extra code needed at landing pads for dwarf2 unwinding. */
996 :
static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  /* Landing pad index 0 is unused.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;

      /* Skip deleted pads and pads with no reachable handler.  */
      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      /* Emit the landing pad label and the EH register loads into a
	 detached sequence.  */
      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = end_sequence ();

      /* Place the new block just before the post-landing-pad block
	 and fall through into it.  */
      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      bb->count = bb->next_bb->count;
      make_single_succ_edge (bb, bb->next_bb, e_flags);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}
1044 :
1045 :
/* Map from landing pad index to the call-site value assigned by
   sjlj_assign_call_site_values; grown in sjlj_build_landing_pads and
   released there once dispatch emission is done.  */
static vec<int> sjlj_lp_call_site_index;
1047 :
1048 : /* Process all active landing pads. Assign each one a compact dispatch
1049 : index, and a call-site index. */
1050 :
/* Returns the number of landing pads that received a dispatch index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  /* Landing pad index 0 is unused.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}
1089 :
1090 : /* Emit code to record the current call-site index before every
1091 : insn that can throw. */
1092 :
static void
sjlj_mark_call_sites (void)
{
  /* -2 never matches a real call-site value, so the first throwing
     insn always gets its call site stored.  */
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      /* Skip the store when the call-site value is unchanged since the
	 last throwing insn in this extended basic block.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
1168 :
1169 : /* Construct the SjLj_Function_Context. */
1170 :
static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  /* Store the LSDA label's address in the context, or zero if this
     function does not use an LSDA.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  /* When there are landing pads, set up the jump buffer through which
     unwinding re-enters the function at DISPATCH_LABEL.  */
  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label,
			       profile_probability::unlikely ());
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  /* Register this function's context with the SJLJ runtime.  */
  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (fc, 0), Pmode);

  seq = end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Find NOTE_INSN_FUNCTION_BEG and track whether it still lies
     outside any basic block.  */
  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }
    /* assign_params can indirectly call emit_block_move_via_loop, e.g.
       for g++.dg/torture/pr85627.C for 16-bit targets.  */
    else if (JUMP_P (fn_begin))
      fn_begin_outside_block = true;

#ifdef DONT_USE_BUILTIN_SETJMP
  if (dispatch_label)
    {
      /* The sequence contains a branch in the middle so we need to force
	 the creation of a new basic block by means of BB_SUPERBLOCK.  */
      if (fn_begin_outside_block)
	{
	  basic_block bb
	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  if (JUMP_P (BB_END (bb)))
	    emit_insn_before (seq, BB_END (bb));
	  else
	    emit_insn_after (seq, BB_END (bb));
	}
      else
	emit_insn_after (seq, fn_begin);

      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
      return;
    }
#endif

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}
1275 :
1276 : /* Call back from expand_function_end to know where we should put
1277 : the call to unwind_sjlj_unregister_libfunc if needed. */
1278 :
void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  /* Record where sjlj_emit_function_exit should place the call to
     unwind_sjlj_unregister_libfunc.  */
  crtl->eh.sjlj_exit_after = after;
}
1284 :
static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  /* Unregister this function's context from the SJLJ runtime.  */
  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  /* Use the insertion point recorded by sjlj_emit_function_exit_after,
     stepping past a label so the call lands inside its block.  */
  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
1307 :
/* Emit the SJLJ dispatch code at DISPATCH_LABEL: reload the exception
   pointer and filter from the function context and branch to one of
   the NUM_DISPATCH reachable landing pad regions.  */

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
  scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	/* All pads share the single dispatch label under SJLJ.  */
	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	/* Hand the reloaded values to the region's registers.  */
	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	if (current_loops)
	  {
	    class loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      if (current_loops)
	{
	  class loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
1462 :
static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  /* Size the side table so indexing by lp->index is in bounds.  */
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch, true);

  /* Now the count of landing pads that actually need dispatching.  */
  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      /* Allocate the SjLj_Function_Context on the stack.  */
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}
1510 :
1511 : /* Update the sjlj function context. This function should be called
1512 : whenever we allocate or deallocate dynamic stack space. */
1513 :
1514 : void
1515 0 : update_sjlj_context (void)
1516 : {
1517 0 : if (!flag_exceptions)
1518 : return;
1519 :
1520 0 : emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
1521 : }
1522 :
1523 : /* After initial rtl generation, call back to finish generating
1524 : exception support code. */
1525 :
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  /* The pad builders marked their new blocks BB_SUPERBLOCK; split
     them back into proper basic blocks now.  */
  break_superblocks ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();
}
1574 :
1575 : /* This section handles removing dead code for flow. */
1576 :
1577 : void
1578 1317767 : remove_eh_landing_pad (eh_landing_pad lp)
1579 : {
1580 1317767 : eh_landing_pad *pp;
1581 :
1582 29946628 : for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1583 28628861 : continue;
1584 1317767 : *pp = lp->next_lp;
1585 :
1586 1317767 : if (lp->post_landing_pad)
1587 1311188 : EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1588 1317767 : (*cfun->eh->lp_array)[lp->index] = NULL;
1589 28628861 : }
1590 :
1591 : /* Splice the EH region at PP from the region tree. */
1592 :
static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  /* Retire every landing pad belonging to the region being removed:
     clear the label back-pointers and the lp_array slots.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  /* Promote REGION's children one level: re-parent them to REGION's
     outer region and splice them into the peer list in its place.  */
  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  /* Close the peer list over the removed region; if children were
     spliced in, PP now points at the last child's next_peer link.  */
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}
1624 :
1625 : /* Splice a single EH region REGION from the region tree.
1626 :
1627 : To unlink REGION, we need to find the pointer to it with a relatively
1628 : expensive search in REGION's outer region. If you are going to
1629 : remove a number of handlers, using remove_unreachable_eh_regions may
1630 : be a better option. */
1631 :
1632 : void
1633 0 : remove_eh_handler (eh_region region)
1634 : {
1635 0 : eh_region *pp, *pp_start, p, outer;
1636 :
1637 0 : outer = region->outer;
1638 0 : if (outer)
1639 0 : pp_start = &outer->inner;
1640 : else
1641 0 : pp_start = &cfun->eh->region_tree;
1642 0 : for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1643 0 : continue;
1644 :
1645 0 : remove_eh_handler_splicer (pp);
1646 0 : }
1647 :
1648 : /* Worker for remove_unreachable_eh_regions.
1649 : PP is a pointer to the region to start a region tree depth-first
1650 : search from. R_REACHABLE is the set of regions that have to be
1651 : preserved. */
1652 :
static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  /* Walk the peer list rooted at *PP.  Children are processed first so
     unreachable regions are spliced from the leaves upward.  */
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	/* Not reachable: splice REGION out.  *PP now names its
	   replacement (first child or next peer), which the next
	   iteration revisits.  */
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}
1666 :
1667 : /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1668 : Do this by traversing the EH tree top-down and splice out regions that
1669 : are not marked. By removing regions from the leaves, we avoid costly
1670 : searches in the region tree. */
1671 :
void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  /* Depth-first splice of every region whose index is not set in
     R_REACHABLE, starting from the root of the region tree.  */
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}
1677 :
1678 : /* Invokes CALLBACK for every exception handler landing pad label.
1679 : Only used by reload hackery; should not be used by new code. */
1680 :
1681 : void
1682 0 : for_each_eh_label (void (*callback) (rtx))
1683 : {
1684 0 : eh_landing_pad lp;
1685 0 : int i;
1686 :
1687 0 : for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1688 : {
1689 0 : if (lp)
1690 : {
1691 0 : rtx_code_label *lab = lp->landing_pad;
1692 0 : if (lab && LABEL_P (lab))
1693 0 : (*callback) (lab);
1694 : }
1695 : }
1696 0 : }
1697 :
1698 : /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1699 : call insn.
1700 :
1701 : At the gimple level, we use LP_NR
1702 : > 0 : The statement transfers to landing pad LP_NR
1703 : = 0 : The statement is outside any EH region
1704 : < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1705 :
1706 : At the rtl level, we use LP_NR
1707 : > 0 : The insn transfers to landing pad LP_NR
1708 : = 0 : The insn cannot throw
1709 : < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1710 : = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1711 : missing note: The insn is outside any EH region.
1712 :
1713 : ??? This difference probably ought to be avoided. We could stand
1714 : to record nothrow for arbitrary gimple statements, and so avoid
1715 : some moderately complex lookups in stmt_could_throw_p. Perhaps
1716 : NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1717 : no-nonlocal-goto property should be recorded elsewhere as a bit
1718 : on the call_insn directly. Perhaps we should make more use of
1719 : attaching the trees to call_insns (reachable via symbol_ref in
1720 : direct call cases) and just pull the data out of the trees. */
1721 :
1722 : void
1723 6711436 : make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1724 : {
1725 6711436 : rtx value;
1726 6711436 : if (ecf_flags & ECF_NOTHROW)
1727 2350769 : value = const0_rtx;
1728 4360667 : else if (lp_nr != 0)
1729 776415 : value = GEN_INT (lp_nr);
1730 : else
1731 : return;
1732 3127184 : add_reg_note (insn, REG_EH_REGION, value);
1733 : }
1734 :
1735 : /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1736 : nor perform a non-local goto. Replace the region note if it
1737 : already exists. */
1738 :
1739 : void
1740 493783 : make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1741 : {
1742 493783 : rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1743 493783 : rtx intmin = GEN_INT (INT_MIN);
1744 :
1745 493783 : if (note != 0)
1746 229159 : XEXP (note, 0) = intmin;
1747 : else
1748 264624 : add_reg_note (insn, REG_EH_REGION, intmin);
1749 493783 : }
1750 :
1751 : /* Return true if INSN could throw, assuming no REG_EH_REGION note
1752 : to the contrary. */
1753 :
1754 : bool
1755 3148392003 : insn_could_throw_p (const_rtx insn)
1756 : {
1757 3148392003 : if (!flag_exceptions)
1758 : return false;
1759 2621870410 : if (CALL_P (insn))
1760 : return true;
1761 2249788500 : if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1762 1922168686 : return may_trap_p (PATTERN (insn));
1763 : return false;
1764 : }
1765 :
1766 : /* Copy an REG_EH_REGION note to each insn that might throw beginning
1767 : at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1768 : to look for a note, or the note itself. */
1769 :
void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  /* When given an insn, extract its REG_EH_REGION note; nothing to
     copy if it has none.  */
  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  /* A non-INSN_P insn (e.g. a note or barrier) carries no region
     note either.  */
  else if (is_a <rtx_insn *> (note_or_insn))
    return;
  note = XEXP (note, 0);

  /* Copy onto each potentially-throwing insn in [FIRST, LAST) that
     does not already carry a REG_EH_REGION note.  */
  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	&& insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}
1791 :
1792 : /* Likewise, but iterate backward. */
1793 :
void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  /* When given an insn, extract its REG_EH_REGION note; nothing to
     copy if it has none.  */
  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  /* A non-INSN_P insn carries no region note either.  */
  else if (is_a <rtx_insn *> (note_or_insn))
    return;
  note = XEXP (note, 0);

  /* Copy onto each potentially-throwing insn in (FIRST, LAST],
     walking backward.  NOTE(review): unlike the forward variant,
     this does not skip insns that already have a REG_EH_REGION
     note — confirm whether callers guarantee none exist.  */
  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}
1814 :
1815 :
1816 : /* Extract all EH information from INSN. Return true if the insn
1817 : was marked NOTHROW. */
1818 :
static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  /* Non-insns (labels, notes, barriers) have no EH information and
     are not nothrow-marked either.  */
  if (! INSN_P (insn))
    goto egress;

  /* For a SEQUENCE pattern, look at the first insn inside it
     (presumably a delay-slot sequence — the EH note lives there).  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* No note: the insn is nothrow exactly when it could not have
	 thrown in the first place.  */
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  /* Decode the note per the encoding described above
     make_reg_eh_region_note: 0 and INT_MIN mean nothrow, a negative
     value names a MUST_NOT_THROW region, a positive value names a
     landing pad.  */
  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
1863 :
1864 : /* Return the landing pad to which INSN may go, or NULL if it does not
1865 : have a reachable landing pad within this function. */
1866 :
1867 : eh_landing_pad
1868 2183446552 : get_eh_landing_pad_from_rtx (const_rtx insn)
1869 : {
1870 2183446552 : eh_landing_pad lp;
1871 2183446552 : eh_region r;
1872 :
1873 2183446552 : get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1874 2183446552 : return lp;
1875 : }
1876 :
1877 : /* Return the region to which INSN may go, or NULL if it does not
1878 : have a reachable region within this function. */
1879 :
1880 : eh_region
1881 3486263 : get_eh_region_from_rtx (const_rtx insn)
1882 : {
1883 3486263 : eh_landing_pad lp;
1884 3486263 : eh_region r;
1885 :
1886 3486263 : get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1887 3486263 : return r;
1888 : }
1889 :
1890 : /* Return true if INSN throws and is caught by something in this function. */
1891 :
1892 : bool
1893 2145139517 : can_throw_internal (const_rtx insn)
1894 : {
1895 2145139517 : return get_eh_landing_pad_from_rtx (insn) != NULL;
1896 : }
1897 :
/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  /* Non-insns cannot throw at all.  */
  if (! INSN_P (insn))
    return false;

  /* A delay-slot SEQUENCE throws externally if any element does.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
1942 :
1943 : /* Return true if INSN cannot throw at all. */
1944 :
1945 : bool
1946 1172806502 : insn_nothrow_p (const_rtx insn)
1947 : {
1948 1172806502 : eh_landing_pad lp;
1949 1172806502 : eh_region r;
1950 :
1951 1172806502 : if (! INSN_P (insn))
1952 : return true;
1953 :
1954 1172806502 : if (NONJUMP_INSN_P (insn)
1955 1172806502 : && GET_CODE (PATTERN (insn)) == SEQUENCE)
1956 : {
1957 0 : rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1958 0 : int i, n = seq->len ();
1959 :
1960 0 : for (i = 0; i < n; i++)
1961 0 : if (!insn_nothrow_p (seq->element (i)))
1962 : return false;
1963 :
1964 : return true;
1965 : }
1966 :
1967 1172806502 : return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1968 : }
1969 :
1970 : /* Return true if INSN can perform a non-local goto. */
1971 : /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1972 :
1973 : bool
1974 397663899 : can_nonlocal_goto (const rtx_insn *insn)
1975 : {
1976 397663899 : if (nonlocal_goto_handler_labels && CALL_P (insn))
1977 : {
1978 89209 : rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1979 89209 : if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1980 : return true;
1981 : }
1982 : return false;
1983 : }
1984 :
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  /* Without -fexceptions nothing throws; leave the flags as set above.  */
  if (! flag_exceptions)
    return 0;

  /* Scan every insn; one externally-throwing non-sibcall insn settles
     both flags, so we can stop early in that case.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  /* If the body proved nothrow and this definition is the one callers
     will actually use, propagate the fact into the callgraph so callers
     can be optimized too.  */
  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
2039 :
namespace {

/* Pass descriptor for the "nothrow" RTL pass; no timing, no required
   or provided properties.  */
const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that runs set_nothrow_function_flags on each function.  */
class pass_set_nothrow_function_flags : public rtl_opt_pass
{
public:
  pass_set_nothrow_function_flags (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return set_nothrow_function_flags ();
  }

}; // class pass_set_nothrow_function_flags

} // anon namespace
2071 :
/* Factory entry point used by the pass manager to instantiate the
   nothrow-discovery pass.  */

rtl_opt_pass *
make_pass_set_nothrow_function_flags (gcc::context *ctxt)
{
  return new pass_set_nothrow_function_flags (ctxt);
}
2077 :
2078 :
2079 : /* Various hooks for unwind library. */
2080 :
2081 : /* Expand the EH support builtin functions:
2082 : __builtin_eh_pointer and __builtin_eh_filter. */
2083 :
2084 : static eh_region
2085 284799 : expand_builtin_eh_common (tree region_nr_t)
2086 : {
2087 284799 : HOST_WIDE_INT region_nr;
2088 284799 : eh_region region;
2089 :
2090 284799 : gcc_assert (tree_fits_shwi_p (region_nr_t));
2091 284799 : region_nr = tree_to_shwi (region_nr_t);
2092 :
2093 284799 : region = (*cfun->eh->region_array)[region_nr];
2094 :
2095 : /* ??? We shouldn't have been able to delete a eh region without
2096 : deleting all the code that depended on it. */
2097 284799 : gcc_assert (region != NULL);
2098 :
2099 284799 : return region;
2100 : }
2101 :
2102 : /* Expand to the exc_ptr value from the given eh region. */
2103 :
2104 : rtx
2105 94836 : expand_builtin_eh_pointer (tree exp)
2106 : {
2107 94836 : eh_region region
2108 94836 : = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2109 94836 : if (region->exc_ptr_reg == NULL)
2110 63147 : region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2111 94836 : return region->exc_ptr_reg;
2112 : }
2113 :
2114 : /* Expand to the filter value from the given eh region. */
2115 :
2116 : rtx
2117 5031 : expand_builtin_eh_filter (tree exp)
2118 : {
2119 5031 : eh_region region
2120 5031 : = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2121 5031 : if (region->filter_reg == NULL)
2122 3884 : region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2123 5031 : return region->filter_reg;
2124 : }
2125 :
2126 : /* Copy the exc_ptr and filter values from one landing pad's registers
2127 : to another. This is used to inline the resx statement. */
2128 :
2129 : rtx
2130 92466 : expand_builtin_eh_copy_values (tree exp)
2131 : {
2132 92466 : eh_region dst
2133 92466 : = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2134 92466 : eh_region src
2135 92466 : = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2136 92466 : scalar_int_mode fmode = targetm.eh_return_filter_mode ();
2137 :
2138 92466 : if (dst->exc_ptr_reg == NULL)
2139 68940 : dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2140 92466 : if (src->exc_ptr_reg == NULL)
2141 50016 : src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2142 :
2143 92466 : if (dst->filter_reg == NULL)
2144 68942 : dst->filter_reg = gen_reg_rtx (fmode);
2145 92466 : if (src->filter_reg == NULL)
2146 49381 : src->filter_reg = gen_reg_rtx (fmode);
2147 :
2148 92466 : emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2149 92466 : emit_move_insn (dst->filter_reg, src->filter_reg);
2150 :
2151 92466 : return const0_rtx;
2152 : }
2153 :
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  /* Target macro hook; e.g. emits a register-window flush on SPARC,
     a no-op on most targets.  */
  SETUP_FRAME_ADDRESSES ();
}
2166 :
/* Map a non-negative number to an eh return data register number; expands
   to -1 if no return data register is associated with the input number.
   At least the inputs 0 and 1 must be mapped; the target may provide more.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a literal integer constant.  */
  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  /* An out-of-range (huge or negative) constant maps to -1.  */
  if (!tree_fits_uhwi_p (which))
    return constm1_rtx;

  iwhich = tree_to_uhwi (which);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering the unwinder
     (DWARF) or debugger uses.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DEBUGGER_REGNO (iwhich);
#endif

  return GEN_INT (iwhich);
}
2199 :
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* VOIDmode means a constant, which needs no conversion.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
  rtx mask = MASK_RETURN_ADDR;
  if (mask)
    expand_and (Pmode, addr, mask, addr);

  /* Then adjust to find the real return address.  */
  if (RETURN_ADDR_OFFSET)
    addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);

  return addr;
}
2229 :
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  /* Undo the offset that expand_builtin_extract_return_addr applies.  */
  if (RETURN_ADDR_OFFSET)
    {
      addr = force_reg (Pmode, addr);
      addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
    }

  return addr;
}
2249 :
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Materialize the stack adjustment into a pseudo the epilogue will
     read; reuse the existing pseudo on subsequent calls.  */
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  /* Likewise the handler address.  */
  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* All __builtin_eh_return calls share one label; expand_eh_return
     emits the actual target-specific return sequence there.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
2281 :
/* Expand __builtin_eh_return.  This exit path from the function loads up
   the eh return data registers, adjusts the stack, and branches to a
   given PC other than the normal return address.  */

void
expand_eh_return (void)
{
  rtx_code_label *around_label;

  /* Nothing to do unless expand_builtin_eh_return was ever called.  */
  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  /* On the normal (non-EH) path, the stack adjustment is zero.  */
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

#ifdef EH_RETURN_TAKEN_RTX
  emit_move_insn (EH_RETURN_TAKEN_RTX, const0_rtx);
#endif

  /* Skip over the EH-return sequence on the normal path.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

#ifdef EH_RETURN_TAKEN_RTX
  emit_move_insn (EH_RETURN_TAKEN_RTX, const1_rtx);
#endif

  /* Prefer the target's dedicated eh_return pattern; otherwise fall
     back to storing the handler in EH_RETURN_HANDLER_RTX.  */
  if (targetm.have_eh_return ())
    emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
  else
    {
      if (rtx handler = EH_RETURN_HANDLER_RTX)
	emit_move_insn (handler, crtl->eh.ehr_handler);
      else
	error ("%<__builtin_eh_return%> not supported on this target");
    }

#ifdef EH_RETURN_TAKEN_RTX
  rtx_code_label *eh_done_label = gen_label_rtx ();
  emit_jump (eh_done_label);
#endif

  emit_label (around_label);

#ifdef EH_RETURN_TAKEN_RTX
  /* On the normal path the EH registers are dead; clobber them so the
     register allocator knows.  */
  for (rtx tmp : { EH_RETURN_STACKADJ_RTX, EH_RETURN_HANDLER_RTX })
    if (tmp && REG_P (tmp))
      emit_clobber (tmp);
  emit_label (eh_done_label);
#endif
}
2342 :
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
2362 :
/* Record an action (FILTER, NEXT) pair in the action table, reusing an
   existing identical record if one exists.  Return the record's
   1-based offset within crtl->eh.action_record_data.  */

static int
add_action_record (action_hash_type *ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = ar_hash->find_slot (&tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = crtl->eh.action_record_data->length () + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= crtl->eh.action_record_data->length () + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
2393 :
/* Build the chain of action records describing what happens when an
   exception reaches REGION.  Returns the 1-based offset of the first
   action record, 0 for a pure-cleanup chain, -1 when no action (and no
   landing pad) is needed, or -2 for a bare must-not-throw region.  */

static int
collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
2501 :
2502 : static int
2503 399898 : add_call_site (rtx landing_pad, int action, int section)
2504 : {
2505 399898 : call_site_record record;
2506 :
2507 399898 : record = ggc_alloc<call_site_record_d> ();
2508 399898 : record->landing_pad = landing_pad;
2509 399898 : record->action = action;
2510 :
2511 399898 : vec_safe_push (crtl->eh.call_site_record_v[section], record);
2512 :
2513 399898 : return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2514 : }
2515 :
/* Emit a NOTE_INSN_EH_REGION_END note immediately after INSN and
   return it.  */

static rtx_note *
emit_note_eh_region_end (rtx_insn *insn)
{
  return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}
2521 :
/* Add NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section starts
   with landing pad.
   With landing pad being at offset 0 from the start label of the section
   we would miss EH delivery because 0 is special and means no landing pad.  */

static bool
maybe_add_nop_after_section_switch (void)
{
  /* Only relevant when an LSDA exists and the cold section (index 1)
     has call-site records of its own.  */
  if (!crtl->uses_eh_lsda
      || !crtl->eh.call_site_record_v[1])
    return false;
  int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
  hash_set<rtx_insn *> visited;

  for (int i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs
	= (*crtl->eh.call_site_record_v[1])[i];
      if (cs->landing_pad)
	{
	  rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
	  /* Walk backward from the landing pad looking for a real insn
	     before the section switch; stop early if one is found.  */
	  while (true)
	    {
	      /* Landing pads have LABEL_PRESERVE_P flag set.  This check
		 makes sure that we do not walk past a landing pad visited
		 earlier which would result in possible quadratic
		 behaviour.  */
	      if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
		  && visited.add (insn))
		break;

	      /* Conservatively assume that ASM insn may be empty.  We have
		 no way to tell what they contain.  */
	      if (active_insn_p (insn)
		  && GET_CODE (PATTERN (insn)) != ASM_INPUT
		  && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
		break;

	      /* If we reached the start of hot section, then NOP will be
		 needed.  */
	      if (GET_CODE (insn) == NOTE
		  && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
		{
		  emit_insn_after (gen_nop (), insn);
		  break;
		}

	      /* We visit only labels from cold section.  We should never hit
		 beginning of the insn stream here.  */
	      insn = PREV_INSN (insn);
	    }
	}
    }
  return false;
}
2576 :
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.

   LAST_ACTION uses a small protocol: -3 means no action seen yet,
   -2 means must-not-throw, -1 means a no-action region, >= 0 is a
   real action-record offset (see collect_one_action_chain).  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn;
  rtx_insn *iter;
  rtx_note *note;
  action_hash_type ar_hash (31);
  int last_action = -3;
  rtx_insn *last_action_insn = NULL;
  rtx last_landing_pad = NULL_RTX;
  rtx_insn *first_no_action_insn = NULL;
  int call_site = 0;
  int cur_sec = 0;
  rtx_insn *section_switch_note = NULL;
  rtx_insn *first_no_action_insn_before_switch = NULL;
  rtx_insn *last_no_action_insn_before_switch = NULL;
  int saved_call_site_base = call_site_base;

  vec_alloc (crtl->eh.action_record_data, 64);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx_code_label *this_landing_pad;

	insn = iter;
	/* For a delay-slot SEQUENCE, the EH note is on the first insn.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (&ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note
		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL;
		last_no_action_insn_before_switch = NULL;
		call_site_base++;
	      }
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL;
		  }

		note = emit_note_eh_region_end (last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	/* Hot/cold partitioning: at most one section switch exists.  */
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	/* Queue any pending no-action region; it must be emitted into
	   the section it started in, not the new one.  */
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	if (crtl->eh.call_site_record_v[cur_sec])
	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
      }

  /* Close the final open region, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_eh_region_end (last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  return 0;
}
2737 :
namespace {

/* Pass descriptor for the "eh_ranges" RTL pass.  */
const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper running convert_to_eh_region_ranges followed by the
   cold-section NOP fixup.  */
class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override;
  unsigned int execute (function *) final override
  {
    int ret = convert_to_eh_region_ranges ();
    maybe_add_nop_after_section_switch ();
    return ret;
  }

}; // class pass_convert_to_eh_region_ranges

bool
pass_convert_to_eh_region_ranges::gate (function *)
{
  /* Nothing to do for SJLJ exceptions or if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return false;
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    return false;
  return true;
}

} // anon namespace
2783 :
/* Factory entry point used by the pass manager to instantiate the
   eh_ranges pass.  */

rtl_opt_pass *
make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
{
  return new pass_convert_to_eh_region_ranges (ctxt);
}
2789 :
2790 : static void
2791 128 : push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2792 : {
2793 128 : do
2794 : {
2795 128 : unsigned char byte = value & 0x7f;
2796 128 : value >>= 7;
2797 128 : if (value)
2798 0 : byte |= 0x80;
2799 128 : vec_safe_push (*data_area, byte);
2800 : }
2801 128 : while (value);
2802 128 : }
2803 :
2804 : static void
2805 51060 : push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2806 : {
2807 51060 : unsigned char byte;
2808 51060 : int more;
2809 :
2810 51060 : do
2811 : {
2812 51060 : byte = value & 0x7f;
2813 51060 : value >>= 7;
2814 51060 : more = ! ((value == 0 && (byte & 0x40) == 0)
2815 5145 : || (value == -1 && (byte & 0x40) != 0));
2816 : if (more)
2817 0 : byte |= 0x80;
2818 51060 : vec_safe_push (*data_area, byte);
2819 : }
2820 51060 : while (more);
2821 51060 : }
2822 :
2823 :
2824 : static int
2825 0 : dw2_size_of_call_site_table (int section)
2826 : {
2827 0 : int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2828 0 : int size = n * (4 + 4 + 4);
2829 0 : int i;
2830 :
2831 0 : for (i = 0; i < n; ++i)
2832 : {
2833 0 : struct call_site_record_d *cs =
2834 0 : (*crtl->eh.call_site_record_v[section])[i];
2835 0 : size += size_of_uleb128 (cs->action);
2836 : }
2837 :
2838 0 : return size;
2839 : }
2840 :
2841 : static int
2842 0 : sjlj_size_of_call_site_table (void)
2843 : {
2844 0 : int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2845 0 : int size = 0;
2846 0 : int i;
2847 :
2848 0 : for (i = 0; i < n; ++i)
2849 : {
2850 0 : struct call_site_record_d *cs =
2851 0 : (*crtl->eh.call_site_record_v[0])[i];
2852 0 : size += size_of_uleb128 (INTVAL (cs->landing_pad));
2853 0 : size += size_of_uleb128 (cs->action);
2854 : }
2855 :
2856 0 : return size;
2857 : }
2858 :
/* Emit the DWARF2-style call-site table for SECTION (0 = hot or only
   part, 1 = cold part) using CS_FORMAT (uleb128 or udata4) for the
   offsets.  Each entry gives the code range of a call site relative to
   the section begin label, the landing pad offset (0 if none), and the
   action record offset.  Advances the global CALL_SITE_BASE past the
   records emitted so label numbering stays unique across sections.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  /* Offsets are relative to the start of the section holding the code,
     so pick the matching begin label.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    /* No handler for this call site.  */
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
2919 :
2920 : static void
2921 0 : sjlj_output_call_site_table (void)
2922 : {
2923 0 : int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2924 0 : int i;
2925 :
2926 0 : for (i = 0; i < n; ++i)
2927 : {
2928 0 : struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2929 :
2930 0 : dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2931 : "region %d landing pad", i);
2932 0 : dw2_asm_output_data_uleb128 (cs->action, "action");
2933 : }
2934 :
2935 0 : call_site_base += n;
2936 0 : }
2937 :
2938 : /* Switch to the section that should be used for exception tables. */
2939 :
static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section
      /* Don't use the cached section for comdat if it will be different.  */
#ifdef HAVE_LD_EH_GC_SECTIONS
      && !(targetm_common.have_named_sections
	   && DECL_COMDAT_GROUP (current_function_decl)
	   && HAVE_COMDAT_GROUP)
#endif
      )
    s = exception_section;
  else
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  /* The table may stay read-only unless we are generating PIC
	     and the @TType encoding uses absolute or aligned pointers,
	     which would need to be relocated at load time.  */
	  int tt_format =
	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	  flags = ((! flag_pic
		    || ((tt_format & 0x70) != DW_EH_PE_absptr
			&& (tt_format & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections
	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
	    {
	      /* Per-function section name; not cached since it depends
		 on FNNAME.  */
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      /* The EH table must match the code section, so only mark
		 it linkonce if we have COMDAT groups to tie them together.  */
	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
		flags |= SECTION_LINKONCE;
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, current_function_decl);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
2999 :
3000 : /* Output a reference from an exception table to the type_info object TYPE.
3001 : TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3002 : the value. */
3003 :
static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    /* NULL_TREE means no type (catch-all); emit a null entry.  */
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (VAR_P (type))
	    /* Visibility of the rtti object decides whether the
	       reference may need to be indirect/public.  */
	    is_public = TREE_PUBLIC (type);
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    /* Plain pointer-sized value; no special DWARF encoding needed.  */
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
3048 :
3049 : /* Output an exception table for the current function according to SECTION.
3050 :
3051 : If the function has been partitioned into hot and cold parts, value 0 for
3052 : SECTION refers to the table associated with the hot part while value 1
3053 : refers to the table associated with the cold part. If the function has
3054 : not been partitioned, value 0 refers to the single exception table. */
3055 :
static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
  int call_site_len;
  int have_tt_data;
  int tt_format_size = 0;

  /* The @TType table is needed if we have type data for catch clauses
     or exception-specification data.  */
  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
		  || (targetm.arm_eabi_unwinder
		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
		      : vec_safe_length (cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
      if (HAVE_AS_LEB128)
	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
				     section ? "LLSDATTC" : "LLSDATT",
				     current_function_funcdef_no);

      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  /* Label at which the unwinder finds this function's LSDA.  */
  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

  if (!HAVE_AS_LEB128)
    {
      /* Without assembler leb128 support we must compute the call-site
	 table size ourselves, to derive the @TType displacement.  */
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	call_site_len = sjlj_size_of_call_site_table ();
      else
	call_site_len = dw2_size_of_call_site_table (section);
    }

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
      if (HAVE_AS_LEB128)
	{
	  /* Let the assembler compute the displacement as a label delta.  */
	  char ttype_after_disp_label[32];
	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
				       section ? "LLSDATTDC" : "LLSDATTD",
				       current_function_funcdef_no);
	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
					"@TType base offset");
	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
	}
      else
	{
	  /* Ug.  Alignment queers things.  */
	  unsigned int before_disp, after_disp, last_disp, disp;

	  before_disp = 1 + 1;
	  after_disp = (1 + size_of_uleb128 (call_site_len)
			+ call_site_len
			+ vec_safe_length (crtl->eh.action_record_data)
			+ (vec_safe_length (cfun->eh->ttype_data)
			   * tt_format_size));

	  /* The uleb128 size of DISP affects the alignment padding,
	     which in turn affects DISP; iterate to a fixed point.  */
	  disp = after_disp;
	  do
	    {
	      unsigned int disp_size, pad;

	      last_disp = disp;
	      disp_size = size_of_uleb128 (disp);
	      pad = before_disp + disp_size + after_disp;
	      if (pad % tt_format_size)
		pad = tt_format_size - (pad % tt_format_size);
	      else
		pad = 0;
	      disp = after_disp + pad;
	    }
	  while (disp != last_disp);

	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
	}
    }

  /* Indicate the format of the call-site offsets.  */
  if (HAVE_AS_LEB128)
    cs_format = DW_EH_PE_uleb128;
  else
    cs_format = DW_EH_PE_udata4;

  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

  if (HAVE_AS_LEB128)
    {
      /* Bracket the table with labels so the assembler computes its
	 length for us.  */
      ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
				   section ? "LLSDACSBC" : "LLSDACSB",
				   current_function_funcdef_no);
      ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
				   section ? "LLSDACSEC" : "LLSDACSE",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				    "Call-site table length");
      ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	sjlj_output_call_site_table ();
      else
	dw2_output_call_site_table (cs_format, section);
      ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
    }
  else
    {
      dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
	sjlj_output_call_site_table ();
      else
	dw2_output_call_site_table (cs_format, section);
    }

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* Emit the @TType entries in reverse index order (in the LSDA the
     type table is referenced backwards from its end).  */
  i = vec_safe_length (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = (*cfun->eh->ttype_data)[i];
      output_ttype (type, tt_format, tt_format_size);
    }

  if (HAVE_AS_LEB128 && have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      /* ARM EABI keeps exception-spec data as trees.  */
      tree type;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
	output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      /* Otherwise the exception-spec data is a pre-encoded byte stream.  */
      uchar uc;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
	dw2_asm_output_data (1, uc,
			     i ? NULL : "Exception specification table");
    }
}
3233 :
3234 : /* Output an exception table for the current function according to SECTION,
3235 : switching back and forth from the function section appropriately.
3236 :
3237 : If the function has been partitioned into hot and cold parts, value 0 for
3238 : SECTION refers to the table associated with the hot part while value 1
3239 : refers to the table associated with the cold part. If the function has
3240 : not been partitioned, value 0 refers to the single exception table. */
3241 :
void
output_function_exception_table (int section)
{
  /* Not all functions need anything.  */
  if (!crtl->uses_eh_lsda
      || targetm_common.except_unwind_info (&global_options) == UI_NONE)
    return;

  /* No need to emit any boilerplate stuff for the cold part.  */
  if (section == 1 && !crtl->eh.call_site_record_v[1])
    return;

  const char *fnname = get_fnname_from_decl (current_function_decl);
  rtx personality = get_personality_function (current_function_decl);

  if (personality)
    {
      /* Make sure the personality routine is declared as an external
	 reference in the assembly output.  */
      assemble_external_libcall (personality);

      if (targetm.asm_out.emit_except_personality)
	targetm.asm_out.emit_except_personality (personality);
    }

  switch_to_exception_section (fnname);

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.emit_except_table_label (asm_out_file);

  /* Do the real work.  */
  output_one_function_exception_table (section);

  /* Return to the function's own section for any further output.  */
  switch_to_section (current_function_section ());
}
3275 :
/* Install TABLE as FUN's throw-statement table (maps each statement
   that may throw to an integer; apparently an EH region/landing pad
   number — see the overview comment at the top of this file).  */
void
set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
  fun->eh->throw_stmt_table = table;
}
3281 :
/* Return FUN's throw-statement table, or NULL if none was set.  */
hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3287 :
3288 : /* Determine if the function needs an EH personality function. */
3289 :
3290 : enum eh_personality_kind
3291 5316426 : function_needs_eh_personality (struct function *fn)
3292 : {
3293 5316426 : enum eh_personality_kind kind = eh_personality_none;
3294 5316426 : eh_region i;
3295 :
3296 6217577 : FOR_ALL_EH_REGION_FN (i, fn)
3297 : {
3298 1823667 : switch (i->type)
3299 : {
3300 901151 : case ERT_CLEANUP:
3301 : /* Can do with any personality including the generic C one. */
3302 901151 : kind = eh_personality_any;
3303 901151 : break;
3304 :
3305 : case ERT_TRY:
3306 : case ERT_ALLOWED_EXCEPTIONS:
3307 : /* Always needs a EH personality function. The generic C
3308 : personality doesn't handle these even for empty type lists. */
3309 : return eh_personality_lang;
3310 :
3311 : case ERT_MUST_NOT_THROW:
3312 : /* Always needs a EH personality function. The language may specify
3313 : what abort routine that must be used, e.g. std::terminate. */
3314 : return eh_personality_lang;
3315 : }
3316 : }
3317 :
3318 : return kind;
3319 : }
3320 :
3321 : /* Dump EH information to OUT. */
3322 :
void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  /* Printable names indexed by enum eh_region_type.  */
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Depth-first walk over the region tree: inner regions first, then
     peers, then back up through the outer links.  */
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);

      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      /* In GIMPLE the post-landing-pad is a tree label.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      /* In RTL, print insn UIDs, flagging deleted labels.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      /* Region-type-specific details.  */
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
3437 :
3438 : /* Dump the EH tree for FN on stderr. */
3439 :
/* Convenience wrapper intended for invocation from a debugger
   (DEBUG_FUNCTION keeps it from being optimized away).  */
DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
3445 :
3446 : /* Verify invariants on EH datastructures. */
3447 :
DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* First pass: every non-NULL entry in region_array must be stored at
     the slot matching its own index.  Count them for cross-checking
     against the tree walk below.  */
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("%<region_array%> is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Same check for the landing pad array.  */
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("%<lp_array%> is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Second pass: depth-first walk of the region tree, verifying the
     array back-links, the outer pointers, and the landing pad region
     links, while counting visited nodes.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("%<region_array%> is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("%<lp_array%> is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Inner first, then peers, then climb back out.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  /* The walk must end balanced and must have seen exactly the nodes
     recorded in the arrays.  */
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("%<region_array%> does not match %<region_tree%>");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("%<lp_array%> does not match %<region_tree%>");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("%qs failed", __func__);
    }
}
3564 :
3565 : #include "gt-except.h"
|