Branch data Line data Source code
1 : : /* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
2 : :
3 : : Copyright (C) 2003-2024 Free Software Foundation, Inc.
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "fold-const.h"
29 : : #include "tree-nested.h"
30 : : #include "calls.h"
31 : : #include "gimple-iterator.h"
32 : : #include "gimple-low.h"
33 : : #include "predict.h"
34 : : #include "gimple-predict.h"
35 : : #include "gimple-fold.h"
36 : : #include "cgraph.h"
37 : : #include "tree-ssa.h"
38 : : #include "value-range.h"
39 : : #include "stringpool.h"
40 : : #include "tree-ssanames.h"
41 : : #include "tree-inline.h"
42 : : #include "gimple-walk.h"
43 : : #include "attribs.h"
44 : :
45 : : /* The differences between High GIMPLE and Low GIMPLE are the
46 : : following:
47 : :
48 : : 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
49 : :
50 : : 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
51 : : flow and exception regions are built as an on-the-side region
52 : : hierarchy (See tree-eh.cc:lower_eh_constructs).
53 : :
54 : : 3- Multiple identical return statements are grouped into a single
55 : : return, with duplicates replaced by gotos to the unique return site (see the sketch below). */
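/* As an illustrative sketch of difference 3 (an editor's example, not
   part of the sources), a High GIMPLE body along the lines of

       if (a) goto <T>; else goto <F>;
       <T>: return x;
       <F>: ...
            return x;

   is lowered so that each duplicate return becomes a goto to a shared
   label emitted at the end of the function:

       if (a) goto <T>; else goto <F>;
       <T>: goto <Lret>;
       <F>: ...
            goto <Lret>;
       <Lret>: return x;

   The labels <T>, <F> and <Lret> are hypothetical; lower_gimple_return
   and lower_function_body below implement the actual transformation.  */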
56 : :
57 : : /* Match a return statement with a label. During lowering, we identify
58 : : identical return statements and replace duplicates with a jump to
59 : : the corresponding label. */
60 : : struct return_statements_t
61 : : {
62 : : tree label;
63 : : greturn *stmt;
64 : : };
65 : : typedef struct return_statements_t return_statements_t;
66 : :
67 : :
68 : : struct lower_data
69 : : {
70 : : /* Block the current statement belongs to. */
71 : : tree block;
72 : :
73 : : /* A vector of label and return statements to be moved to the end
74 : : of the function. */
75 : : vec<return_statements_t> return_statements;
76 : :
77 : : /* True if the current statement cannot fall through. */
78 : : bool cannot_fallthru;
79 : : };
80 : :
81 : : static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
82 : : static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
83 : : static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
84 : : static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
85 : : static void lower_builtin_setjmp (gimple_stmt_iterator *);
86 : : static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
87 : : static void lower_builtin_assume_aligned (gimple_stmt_iterator *);
88 : :
89 : :
90 : : /* Lower the body of current_function_decl from High GIMPLE into Low
91 : : GIMPLE. */
92 : :
93 : : static unsigned int
94 : 2682190 : lower_function_body (void)
95 : : {
96 : 2682190 : struct lower_data data;
97 : 2682190 : gimple_seq body = gimple_body (current_function_decl);
98 : 2682190 : gimple_seq lowered_body;
99 : 2682190 : gimple_stmt_iterator i;
100 : 2682190 : gimple *bind;
101 : 2682190 : gimple *x;
102 : :
103 : : /* The gimplifier should've left a body of exactly one statement,
104 : : namely a GIMPLE_BIND. */
105 : 2682190 : gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
106 : : && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
107 : :
108 : 2682190 : memset (&data, 0, sizeof (data));
109 : 2682190 : data.block = DECL_INITIAL (current_function_decl);
110 : 2682190 : BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
111 : 2682190 : BLOCK_CHAIN (data.block) = NULL_TREE;
112 : 2682190 : TREE_ASM_WRITTEN (data.block) = 1;
113 : 2682190 : data.return_statements.create (8);
114 : :
115 : 2682190 : bind = gimple_seq_first_stmt (body);
116 : 2682190 : lowered_body = NULL;
117 : 2682190 : gimple_seq_add_stmt (&lowered_body, bind);
118 : 2682190 : i = gsi_start (lowered_body);
119 : 2682190 : lower_gimple_bind (&i, &data);
120 : :
121 : 2682190 : i = gsi_last (lowered_body);
122 : :
123 : : /* If we had begin stmt markers from e.g. PCH, but this compilation
124 : : doesn't want them, lower_stmt will have cleaned them up; we can
125 : : now clear the flag that indicates we had them. */
126 : 2682190 : if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
127 : : {
128 : : /* This counter need not be exact, but before lowering it
129 : : most certainly will be. */
130 : 0 : gcc_assert (cfun->debug_marker_count == 0);
131 : 0 : cfun->debug_nonbind_markers = false;
132 : : }
133 : :
134 : : /* If the function falls off the end, we need a null return statement.
135 : : If we've already got one in the return_statements vector, we don't
136 : : need to do anything special. Otherwise build one by hand. */
137 : 2682190 : bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
138 : 2682190 : if (may_fallthru
139 : 2728752 : && (data.return_statements.is_empty ()
140 : 46562 : || (gimple_return_retval (data.return_statements.last().stmt)
141 : : != NULL)))
142 : : {
143 : 1159140 : x = gimple_build_return (NULL);
144 : 1159140 : gimple_set_location (x, cfun->function_end_locus);
145 : 1159140 : gimple_set_block (x, DECL_INITIAL (current_function_decl));
146 : 1159140 : gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
147 : 1159140 : may_fallthru = false;
148 : : }
149 : :
150 : : /* If we lowered any return statements, emit the representative
151 : : at the end of the function. */
152 : 4195619 : while (!data.return_statements.is_empty ())
153 : : {
154 : 1513429 : return_statements_t t = data.return_statements.pop ();
155 : 1513429 : x = gimple_build_label (t.label);
156 : 1513429 : gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
157 : 1513429 : gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
158 : 1513429 : if (may_fallthru)
159 : : {
160 : : /* Remove the line number from the representative return statement.
161 : : It now fills in for the fallthru too. Failure to remove this
162 : : will result in incorrect results for coverage analysis. */
163 : 33947 : gimple_set_location (t.stmt, UNKNOWN_LOCATION);
164 : 33947 : may_fallthru = false;
165 : : }
166 : : }
167 : :
168 : : /* Once the old body has been lowered, replace it with the new
169 : : lowered sequence. */
170 : 2682190 : gimple_set_body (current_function_decl, lowered_body);
171 : :
172 : 2682190 : gcc_assert (data.block == DECL_INITIAL (current_function_decl));
173 : 2682190 : BLOCK_SUBBLOCKS (data.block)
174 : 2682190 : = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
175 : :
176 : 2682190 : clear_block_marks (data.block);
177 : 2682190 : data.return_statements.release ();
178 : 2682190 : return 0;
179 : : }
180 : :
181 : : namespace {
182 : :
183 : : const pass_data pass_data_lower_cf =
184 : : {
185 : : GIMPLE_PASS, /* type */
186 : : "lower", /* name */
187 : : OPTGROUP_NONE, /* optinfo_flags */
188 : : TV_NONE, /* tv_id */
189 : : PROP_gimple_any, /* properties_required */
190 : : PROP_gimple_lcf, /* properties_provided */
191 : : 0, /* properties_destroyed */
192 : : 0, /* todo_flags_start */
193 : : 0, /* todo_flags_finish */
194 : : };
195 : :
196 : : class pass_lower_cf : public gimple_opt_pass
197 : : {
198 : : public:
199 : 280130 : pass_lower_cf (gcc::context *ctxt)
200 : 560260 : : gimple_opt_pass (pass_data_lower_cf, ctxt)
201 : : {}
202 : :
203 : : /* opt_pass methods: */
204 : 2682190 : unsigned int execute (function *) final override
205 : : {
206 : 2682190 : return lower_function_body ();
207 : : }
208 : :
209 : : }; // class pass_lower_cf
210 : :
211 : : } // anon namespace
212 : :
213 : : gimple_opt_pass *
214 : 280130 : make_pass_lower_cf (gcc::context *ctxt)
215 : : {
216 : 280130 : return new pass_lower_cf (ctxt);
217 : : }
218 : :
219 : : /* Lower sequence SEQ. Unlike gimplification the statements are not relowered
220 : : when they are changed -- if this has to be done, the lowering routine must
221 : : do it explicitly. DATA is passed through the recursion. */
222 : :
223 : : static void
224 : 11584887 : lower_sequence (gimple_seq *seq, struct lower_data *data)
225 : : {
226 : 11584887 : gimple_stmt_iterator gsi;
227 : :
228 : 106034083 : for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
229 : 83077895 : lower_stmt (&gsi, data);
230 : 11584887 : }
231 : :
232 : :
233 : : /* Lower the OpenMP directive statement pointed by GSI. DATA is
234 : : passed through the recursion. */
235 : :
236 : : static void
237 : 62395 : lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
238 : : {
239 : 62395 : gimple *stmt;
240 : :
241 : 62395 : stmt = gsi_stmt (*gsi);
242 : :
243 : 62395 : lower_sequence (gimple_omp_body_ptr (stmt), data);
244 : 62395 : gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
245 : 62395 : gimple_omp_set_body (stmt, NULL);
246 : 62395 : gsi_next (gsi);
247 : 62395 : }
248 : :
249 : : /* Create an artificial FUNCTION_DECL for assumption at LOC. */
250 : :
251 : : static tree
252 : 108 : create_assumption_fn (location_t loc)
253 : : {
254 : 108 : tree name = clone_function_name_numbered (current_function_decl, "_assume");
255 : : /* Temporarily, until we determine all the arguments. */
256 : 108 : tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
257 : 108 : tree decl = build_decl (loc, FUNCTION_DECL, name, type);
258 : 108 : TREE_STATIC (decl) = 1;
259 : 108 : TREE_USED (decl) = 1;
260 : 108 : DECL_ARTIFICIAL (decl) = 1;
261 : 108 : DECL_IGNORED_P (decl) = 1;
262 : 108 : DECL_NAMELESS (decl) = 1;
263 : 108 : TREE_PUBLIC (decl) = 0;
264 : 108 : DECL_UNINLINABLE (decl) = 1;
265 : 108 : DECL_EXTERNAL (decl) = 0;
266 : 108 : DECL_CONTEXT (decl) = NULL_TREE;
267 : 108 : DECL_INITIAL (decl) = make_node (BLOCK);
268 : 108 : tree attributes = DECL_ATTRIBUTES (current_function_decl);
269 : 108 : if (lookup_attribute ("noipa", attributes) == NULL)
270 : : {
271 : 103 : attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
272 : 103 : if (lookup_attribute ("noinline", attributes) == NULL)
273 : 103 : attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
274 : 103 : if (lookup_attribute ("noclone", attributes) == NULL)
275 : 103 : attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
276 : 103 : if (lookup_attribute ("no_icf", attributes) == NULL)
277 : 103 : attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
278 : : }
279 : 108 : DECL_ATTRIBUTES (decl) = attributes;
280 : 108 : BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
281 : 216 : DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
282 : 108 : = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
283 : 216 : DECL_FUNCTION_SPECIFIC_TARGET (decl)
284 : 108 : = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
285 : 108 : tree t = build_decl (DECL_SOURCE_LOCATION (decl),
286 : : RESULT_DECL, NULL_TREE, boolean_type_node);
287 : 108 : DECL_ARTIFICIAL (t) = 1;
288 : 108 : DECL_IGNORED_P (t) = 1;
289 : 108 : DECL_CONTEXT (t) = decl;
290 : 108 : DECL_RESULT (decl) = t;
291 : 108 : push_struct_function (decl);
292 : 108 : cfun->function_end_locus = loc;
293 : 108 : init_tree_ssa (cfun);
294 : 108 : return decl;
295 : : }
296 : :
297 : 324 : struct lower_assumption_data
298 : : {
299 : : copy_body_data id;
300 : : tree return_false_label;
301 : : tree guard_copy;
302 : : auto_vec<tree> decls;
303 : : };
304 : :
305 : : /* Helper function for lower_assumption. Find local vars and labels
306 : : in the assumption sequence and remove debug stmts. */
307 : :
308 : : static tree
309 : 664 : find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
310 : : struct walk_stmt_info *wi)
311 : : {
312 : 664 : lower_assumption_data *data = (lower_assumption_data *) wi->info;
313 : 664 : gimple *stmt = gsi_stmt (*gsi_p);
314 : 664 : tree lhs = gimple_get_lhs (stmt);
315 : 664 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
316 : : {
317 : 0 : gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
318 : 0 : data->id.decl_map->put (lhs, NULL_TREE);
319 : 0 : data->decls.safe_push (lhs);
320 : : }
321 : 664 : switch (gimple_code (stmt))
322 : : {
323 : 114 : case GIMPLE_BIND:
324 : 114 : for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
325 : 251 : var; var = DECL_CHAIN (var))
326 : 137 : if (VAR_P (var)
327 : 137 : && !DECL_EXTERNAL (var)
328 : 274 : && DECL_CONTEXT (var) == data->id.src_fn)
329 : : {
330 : 137 : data->id.decl_map->put (var, var);
331 : 137 : data->decls.safe_push (var);
332 : : }
333 : 114 : break;
334 : 118 : case GIMPLE_LABEL:
335 : 118 : {
336 : 118 : tree label = gimple_label_label (as_a <glabel *> (stmt));
337 : 118 : data->id.decl_map->put (label, label);
338 : 118 : break;
339 : : }
340 : 3 : case GIMPLE_RETURN:
341 : : /* If something in the assumption tries to return from the parent
342 : : function, reaching it in the hypothetical evaluation would be UB,
343 : : so transform such returns into return false; */
344 : 3 : {
345 : 3 : gimple *g = gimple_build_assign (data->guard_copy, boolean_false_node);
346 : 3 : gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
347 : 3 : gimple_return_set_retval (as_a <greturn *> (stmt), data->guard_copy);
348 : 3 : break;
349 : : }
350 : 0 : case GIMPLE_DEBUG:
351 : : /* As assumptions won't be emitted, debug info stmts in them
352 : : are useless. */
353 : 0 : gsi_remove (gsi_p, true);
354 : 0 : wi->removed_stmt = true;
355 : 0 : break;
356 : : default:
357 : : break;
358 : : }
359 : 664 : return NULL_TREE;
360 : : }
361 : :
362 : : /* Create a new PARM_DECL that is identical in all respects to DECL, which
363 : : can be either a VAR_DECL, a PARM_DECL or a RESULT_DECL. The original
364 : : DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
365 : :
366 : : static tree
367 : 148 : assumption_copy_decl (tree decl, copy_body_data *id)
368 : : {
369 : 148 : tree type = TREE_TYPE (decl);
370 : :
371 : 148 : if (is_global_var (decl))
372 : : return decl;
373 : :
374 : 139 : gcc_assert (VAR_P (decl)
375 : : || TREE_CODE (decl) == PARM_DECL
376 : : || TREE_CODE (decl) == RESULT_DECL);
377 : 139 : if (TREE_THIS_VOLATILE (decl))
378 : 4 : type = build_pointer_type (type);
379 : 139 : tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
380 : 139 : PARM_DECL, DECL_NAME (decl), type);
381 : 139 : if (DECL_PT_UID_SET_P (decl))
382 : 0 : SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
383 : 139 : TREE_THIS_VOLATILE (copy) = 0;
384 : 139 : if (TREE_THIS_VOLATILE (decl))
385 : 4 : TREE_READONLY (copy) = 1;
386 : : else
387 : : {
388 : 135 : TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
389 : 135 : TREE_READONLY (copy) = TREE_READONLY (decl);
390 : 135 : DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
391 : 135 : DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
392 : : }
393 : 139 : DECL_ARG_TYPE (copy) = type;
394 : 139 : ((lower_assumption_data *) id)->decls.safe_push (decl);
395 : 139 : return copy_decl_for_dup_finish (id, decl, copy);
396 : : }
397 : :
398 : : /* Transform gotos out of the assumption into return false. */
399 : :
400 : : static tree
401 : 667 : adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
402 : : struct walk_stmt_info *wi)
403 : : {
404 : 667 : lower_assumption_data *data = (lower_assumption_data *) wi->info;
405 : 667 : gimple *stmt = gsi_stmt (*gsi_p);
406 : 667 : tree lab = NULL_TREE;
407 : 667 : unsigned int idx = 0;
408 : 667 : if (gimple_code (stmt) == GIMPLE_GOTO)
409 : 25 : lab = gimple_goto_dest (stmt);
410 : 642 : else if (gimple_code (stmt) == GIMPLE_COND)
411 : : {
412 : 59 : repeat:
413 : 118 : if (idx == 0)
414 : 59 : lab = gimple_cond_true_label (as_a <gcond *> (stmt));
415 : : else
416 : 59 : lab = gimple_cond_false_label (as_a <gcond *> (stmt));
417 : : }
418 : 583 : else if (gimple_code (stmt) == GIMPLE_LABEL)
419 : : {
420 : 118 : tree label = gimple_label_label (as_a <glabel *> (stmt));
421 : 118 : DECL_CONTEXT (label) = current_function_decl;
422 : : }
423 : 726 : if (lab)
424 : : {
425 : 143 : if (!data->id.decl_map->get (lab))
426 : : {
427 : 3 : if (!data->return_false_label)
428 : 3 : data->return_false_label
429 : 3 : = create_artificial_label (UNKNOWN_LOCATION);
430 : 3 : if (gimple_code (stmt) == GIMPLE_GOTO)
431 : 3 : gimple_goto_set_dest (as_a <ggoto *> (stmt),
432 : : data->return_false_label);
433 : 0 : else if (idx == 0)
434 : 0 : gimple_cond_set_true_label (as_a <gcond *> (stmt),
435 : : data->return_false_label);
436 : : else
437 : 0 : gimple_cond_set_false_label (as_a <gcond *> (stmt),
438 : : data->return_false_label);
439 : : }
440 : 143 : if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
441 : : {
442 : 59 : idx = 1;
443 : 59 : goto repeat;
444 : : }
445 : : }
446 : 667 : return NULL_TREE;
447 : : }
448 : :
449 : : /* Adjust trees in the assumption body. Called through walk_tree. */
450 : :
451 : : static tree
452 : 1410 : adjust_assumption_stmt_op (tree *tp, int *, void *datap)
453 : : {
454 : 1410 : struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
455 : 1410 : lower_assumption_data *data = (lower_assumption_data *) wi->info;
456 : 1410 : tree t = *tp;
457 : 1410 : tree *newt;
458 : 1410 : switch (TREE_CODE (t))
459 : : {
460 : 0 : case SSA_NAME:
461 : 0 : newt = data->id.decl_map->get (t);
462 : : /* There shouldn't be SSA_NAMEs other than ones defined in the
463 : : assumption's body. */
464 : 0 : gcc_assert (newt);
465 : 0 : *tp = *newt;
466 : 0 : break;
467 : 261 : case LABEL_DECL:
468 : 261 : newt = data->id.decl_map->get (t);
469 : 261 : if (newt)
470 : 258 : *tp = *newt;
471 : : break;
472 : 667 : case VAR_DECL:
473 : 667 : case PARM_DECL:
474 : 667 : case RESULT_DECL:
475 : 667 : *tp = remap_decl (t, &data->id);
476 : 667 : if (TREE_THIS_VOLATILE (t) && *tp != t)
477 : : {
478 : 4 : *tp = build_simple_mem_ref (*tp);
479 : 4 : TREE_THIS_NOTRAP (*tp) = 1;
480 : : }
481 : : break;
482 : : default:
483 : : break;
484 : : }
485 : 1410 : return NULL_TREE;
486 : : }
487 : :
488 : : /* Lower assumption.
489 : : The gimplifier transformed:
490 : : .ASSUME (cond);
491 : : into:
492 : : [[assume (guard)]]
493 : : {
494 : : guard = cond;
495 : : }
496 : : which we should transform into:
497 : : .ASSUME (&artificial_fn, args...);
498 : : where artificial_fn will look like:
499 : : bool artificial_fn (args...)
500 : : {
501 : : guard = cond;
502 : : return guard;
503 : : }
504 : : with any debug stmts in the block removed and jumps out of
505 : : the block or return stmts replaced with return false; */
506 : :
507 : : static void
508 : 108 : lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
509 : : {
510 : 108 : gimple *stmt = gsi_stmt (*gsi);
511 : 108 : tree guard = gimple_assume_guard (stmt);
512 : 108 : gimple *bind = gimple_assume_body (stmt);
513 : 108 : location_t loc = gimple_location (stmt);
514 : 108 : gcc_assert (gimple_code (bind) == GIMPLE_BIND);
515 : :
516 : 108 : lower_assumption_data lad;
517 : 108 : hash_map<tree, tree> decl_map;
518 : 108 : memset (&lad.id, 0, sizeof (lad.id));
519 : 108 : lad.return_false_label = NULL_TREE;
520 : 108 : lad.id.src_fn = current_function_decl;
521 : 108 : lad.id.dst_fn = create_assumption_fn (loc);
522 : 108 : lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
523 : 108 : lad.id.decl_map = &decl_map;
524 : 108 : lad.id.copy_decl = assumption_copy_decl;
525 : 108 : lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
526 : 108 : lad.id.transform_parameter = true;
527 : 108 : lad.id.do_not_unshare = true;
528 : 108 : lad.id.do_not_fold = true;
529 : 108 : cfun->curr_properties = lad.id.src_cfun->curr_properties;
530 : 108 : lad.guard_copy = create_tmp_var (boolean_type_node);
531 : 108 : decl_map.put (lad.guard_copy, lad.guard_copy);
532 : 108 : decl_map.put (guard, lad.guard_copy);
533 : 108 : cfun->assume_function = 1;
534 : :
535 : : /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND. */
536 : 108 : gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
537 : 108 : struct walk_stmt_info wi;
538 : 108 : memset (&wi, 0, sizeof (wi));
539 : 108 : wi.info = (void *) &lad;
540 : 108 : walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
541 : 108 : unsigned int sz = lad.decls.length ();
542 : 245 : for (unsigned i = 0; i < sz; ++i)
543 : : {
544 : 137 : tree v = lad.decls[i];
545 : 137 : tree newv;
546 : : /* SSA_NAMEs defined in the assume condition should be replaced
547 : : by new SSA_NAMEs in the artificial function. */
548 : 137 : if (TREE_CODE (v) == SSA_NAME)
549 : : {
550 : 0 : newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
551 : 0 : decl_map.put (v, newv);
552 : : }
553 : : /* Local vars should have context and type adjusted to the
554 : : new artificial function. */
555 : 137 : else if (VAR_P (v))
556 : : {
557 : 137 : if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
558 : 3 : DECL_ASSEMBLER_NAME (v);
559 : 137 : TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
560 : 137 : DECL_CONTEXT (v) = current_function_decl;
561 : : }
562 : : }
563 : : /* References to other automatic vars should be replaced by
564 : : PARM_DECLs to the artificial function. */
565 : 108 : memset (&wi, 0, sizeof (wi));
566 : 108 : wi.info = (void *) &lad;
567 : 108 : walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
568 : : adjust_assumption_stmt_op, &wi);
569 : :
570 : : /* At the start prepend guard = false; */
571 : 108 : gimple_seq body = NULL;
572 : 108 : gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
573 : 108 : gimple_seq_add_stmt (&body, g);
574 : 108 : gimple_seq_add_stmt (&body, bind);
575 : : /* At the end add return guard; */
576 : 108 : greturn *gr = gimple_build_return (lad.guard_copy);
577 : 108 : gimple_seq_add_stmt (&body, gr);
578 : : /* If there were any jumps to labels outside of the condition,
579 : : replace them with a jump to
580 : : return_false_label:
581 : : guard = false;
582 : : return guard; */
583 : 108 : if (lad.return_false_label)
584 : : {
585 : 3 : g = gimple_build_label (lad.return_false_label);
586 : 3 : gimple_seq_add_stmt (&body, g);
587 : 3 : g = gimple_build_assign (lad.guard_copy, boolean_false_node);
588 : 3 : gimple_seq_add_stmt (&body, g);
589 : 3 : gr = gimple_build_return (lad.guard_copy);
590 : 3 : gimple_seq_add_stmt (&body, gr);
591 : : }
592 : 108 : bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
593 : 108 : body = NULL;
594 : 108 : gimple_seq_add_stmt (&body, bind);
595 : 108 : gimple_set_body (current_function_decl, body);
596 : 108 : pop_cfun ();
597 : :
598 : 108 : tree parms = NULL_TREE;
599 : 108 : tree parmt = void_list_node;
600 : 108 : auto_vec<tree, 8> vargs;
601 : 210 : vargs.safe_grow (1 + (lad.decls.length () - sz), true);
602 : : /* First argument to IFN_ASSUME will be address of the
603 : : artificial function. */
604 : 108 : vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
605 : 355 : for (unsigned i = lad.decls.length (); i > sz; --i)
606 : : {
607 : 139 : tree *v = decl_map.get (lad.decls[i - 1]);
608 : 139 : gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
609 : 139 : DECL_CHAIN (*v) = parms;
610 : 139 : parms = *v;
611 : 139 : parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
612 : : /* Remaining arguments will be the variables/parameters
613 : : mentioned in the condition. */
614 : 139 : vargs[i - sz] = lad.decls[i - 1];
615 : 139 : if (TREE_THIS_VOLATILE (lad.decls[i - 1]))
616 : : {
617 : 4 : TREE_ADDRESSABLE (lad.decls[i - 1]) = 1;
618 : 4 : vargs[i - sz] = build_fold_addr_expr (lad.decls[i - 1]);
619 : : }
620 : : /* If they have gimple types, we might need to regimplify
621 : : them to make the IFN_ASSUME call valid. */
622 : 139 : if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
623 : 139 : && !is_gimple_val (vargs[i - sz]))
624 : : {
625 : 6 : tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
626 : 6 : g = gimple_build_assign (t, vargs[i - sz]);
627 : 6 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
628 : 6 : vargs[i - sz] = t;
629 : : }
630 : : }
631 : 108 : DECL_ARGUMENTS (lad.id.dst_fn) = parms;
632 : 108 : TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);
633 : :
634 : 108 : cgraph_node::add_new_function (lad.id.dst_fn, false);
635 : :
636 : 245 : for (unsigned i = 0; i < sz; ++i)
637 : : {
638 : 137 : tree v = lad.decls[i];
639 : 137 : if (TREE_CODE (v) == SSA_NAME)
640 : 0 : release_ssa_name (v);
641 : : }
642 : :
643 : 108 : data->cannot_fallthru = false;
644 : : /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call. */
645 : 108 : gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
646 : 108 : gimple_set_location (call, loc);
647 : 108 : gsi_replace (gsi, call, true);
648 : 108 : }
649 : :
650 : : /* Lower statement GSI. DATA is passed through the recursion. We try to
651 : : track the fallthruness of statements and get rid of unreachable return
652 : : statements in order to prevent the EH lowering pass from adding useless
653 : : edges that can cause bogus warnings to be issued later; this guess need
654 : : not be 100% accurate, simply be conservative and reset cannot_fallthru
655 : : to false if we don't know. */
656 : :
657 : : static void
658 : 83077895 : lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
659 : : {
660 : 83077895 : gimple *stmt = gsi_stmt (*gsi);
661 : :
662 : 83077895 : gimple_set_block (stmt, data->block);
663 : :
664 : 83077895 : switch (gimple_code (stmt))
665 : : {
666 : 4350979 : case GIMPLE_BIND:
667 : 4350979 : lower_gimple_bind (gsi, data);
668 : : /* Propagate fallthruness. */
669 : 4350979 : return;
670 : :
671 : 8138174 : case GIMPLE_COND:
672 : 8138174 : case GIMPLE_GOTO:
673 : 8138174 : case GIMPLE_SWITCH:
674 : 8138174 : data->cannot_fallthru = true;
675 : 8138174 : gsi_next (gsi);
676 : 8138174 : return;
677 : :
678 : 2059766 : case GIMPLE_RETURN:
679 : 2059766 : if (data->cannot_fallthru)
680 : : {
681 : 331 : gsi_remove (gsi, false);
682 : : /* Propagate fallthruness. */
683 : : }
684 : : else
685 : : {
686 : 2059435 : lower_gimple_return (gsi, data);
687 : 2059435 : data->cannot_fallthru = true;
688 : : }
689 : : return;
690 : :
691 : 2242918 : case GIMPLE_TRY:
692 : 2242918 : if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
693 : 769430 : lower_try_catch (gsi, data);
694 : : else
695 : : {
696 : : /* It must be a GIMPLE_TRY_FINALLY. */
697 : 1473488 : bool cannot_fallthru;
698 : 1473488 : lower_sequence (gimple_try_eval_ptr (stmt), data);
699 : 1473488 : cannot_fallthru = data->cannot_fallthru;
700 : :
701 : : /* The finally clause is always executed after the try clause,
702 : : so if it does not fall through, then the try-finally will not
703 : : fall through. Otherwise, if the try clause does not fall
704 : : through, then when the finally clause falls through it will
705 : : resume execution wherever the try clause was going. So the
706 : : whole try-finally will only fall through if both the try
707 : : clause and the finally clause fall through. */
708 : 1473488 : data->cannot_fallthru = false;
709 : 1473488 : lower_sequence (gimple_try_cleanup_ptr (stmt), data);
710 : 1473488 : data->cannot_fallthru |= cannot_fallthru;
711 : 1473488 : gsi_next (gsi);
712 : : }
713 : : return;
714 : :
715 : 85 : case GIMPLE_EH_ELSE:
716 : 85 : {
717 : 85 : geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
718 : 85 : lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
719 : 85 : lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
720 : : }
721 : 85 : break;
722 : :
723 : 2578473 : case GIMPLE_DEBUG:
724 : 2578473 : gcc_checking_assert (cfun->debug_nonbind_markers);
725 : : /* We can't possibly have debug bind stmts before lowering, we
726 : : first emit them when entering SSA. */
727 : 2578473 : gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
728 : : /* Propagate fallthruness. */
729 : : /* If the function (e.g. from PCH) had debug stmts, but they're
730 : : disabled for this compilation, remove them. */
731 : 2578473 : if (!MAY_HAVE_DEBUG_MARKER_STMTS)
732 : 0 : gsi_remove (gsi, true);
733 : : else
734 : 2578473 : gsi_next (gsi);
735 : : return;
736 : :
737 : 0 : case GIMPLE_OMP_STRUCTURED_BLOCK:
738 : : /* These are supposed to be removed already in OMP lowering. */
739 : 0 : gcc_unreachable ();
740 : :
741 : : case GIMPLE_NOP:
742 : : case GIMPLE_ASM:
743 : : case GIMPLE_ASSIGN:
744 : : case GIMPLE_PREDICT:
745 : : case GIMPLE_LABEL:
746 : : case GIMPLE_EH_MUST_NOT_THROW:
747 : : case GIMPLE_OMP_FOR:
748 : : case GIMPLE_OMP_SCOPE:
749 : : case GIMPLE_OMP_DISPATCH:
750 : : case GIMPLE_OMP_SECTIONS:
751 : : case GIMPLE_OMP_SECTIONS_SWITCH:
752 : : case GIMPLE_OMP_SECTION:
753 : : case GIMPLE_OMP_SINGLE:
754 : : case GIMPLE_OMP_MASTER:
755 : : case GIMPLE_OMP_MASKED:
756 : : case GIMPLE_OMP_TASKGROUP:
757 : : case GIMPLE_OMP_ORDERED:
758 : : case GIMPLE_OMP_SCAN:
759 : : case GIMPLE_OMP_CRITICAL:
760 : : case GIMPLE_OMP_RETURN:
761 : : case GIMPLE_OMP_ATOMIC_LOAD:
762 : : case GIMPLE_OMP_ATOMIC_STORE:
763 : : case GIMPLE_OMP_CONTINUE:
764 : : break;
765 : :
766 : 10048467 : case GIMPLE_CALL:
767 : 10048467 : {
768 : 10048467 : tree decl = gimple_call_fndecl (stmt);
769 : 10048467 : unsigned i;
770 : :
771 : 28298491 : for (i = 0; i < gimple_call_num_args (stmt); i++)
772 : : {
773 : 18250024 : tree arg = gimple_call_arg (stmt, i);
774 : 18250024 : if (EXPR_P (arg))
775 : 4688692 : TREE_SET_BLOCK (arg, data->block);
776 : : }
777 : :
778 : 10048467 : if (decl
779 : 10048467 : && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
780 : : {
781 : 2065781 : if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
782 : : {
783 : 796 : lower_builtin_setjmp (gsi);
784 : 796 : data->cannot_fallthru = false;
785 : 796 : return;
786 : : }
787 : 2064985 : else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
788 : 119 : && flag_tree_bit_ccp
789 : 2065091 : && gimple_builtin_call_types_compatible_p (stmt, decl))
790 : : {
791 : 34 : lower_builtin_posix_memalign (gsi);
792 : 34 : return;
793 : : }
794 : 2064951 : else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
795 : 2064951 : && !optimize)
796 : : {
797 : 77 : lower_builtin_assume_aligned (gsi);
798 : 77 : data->cannot_fallthru = false;
799 : 77 : gsi_next (gsi);
800 : 77 : return;
801 : : }
802 : : }
803 : :
804 : 10047560 : if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
805 : : {
806 : 1506970 : data->cannot_fallthru = true;
807 : 1506970 : gsi_next (gsi);
808 : 1506970 : return;
809 : : }
810 : :
811 : 8540590 : if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
812 : : {
813 : 4554 : tree base = gimple_call_arg (stmt, 1);
814 : 4554 : gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
815 : 4554 : tree decl = TREE_OPERAND (base, 0);
816 : 4554 : if (VAR_P (decl) && TREE_STATIC (decl))
817 : : {
818 : : /* Don't poison a variable with static storage; it might have
819 : : gotten marked before gimplify_init_constructor promoted it
820 : : to static. */
821 : 46 : gsi_remove (gsi, true);
822 : 46 : return;
823 : : }
824 : : }
825 : :
826 : : /* We delay folding of builtin calls from gimplification to
827 : : here so the IL is in a consistent state for the diagnostic
828 : : machinery's job. */
829 : 8540544 : if (gimple_call_builtin_p (stmt))
830 : 1380625 : fold_stmt (gsi);
831 : : }
832 : : break;
833 : :
834 : 62395 : case GIMPLE_OMP_PARALLEL:
835 : 62395 : case GIMPLE_OMP_TASK:
836 : 62395 : case GIMPLE_OMP_TARGET:
837 : 62395 : case GIMPLE_OMP_TEAMS:
838 : 62395 : data->cannot_fallthru = false;
839 : 62395 : lower_omp_directive (gsi, data);
840 : 62395 : data->cannot_fallthru = false;
841 : 62395 : return;
842 : :
843 : 108 : case GIMPLE_ASSUME:
844 : 108 : lower_assumption (gsi, data);
845 : 108 : return;
846 : :
847 : 478 : case GIMPLE_TRANSACTION:
848 : 478 : lower_sequence (gimple_transaction_body_ptr (
849 : : as_a <gtransaction *> (stmt)),
850 : : data);
851 : 478 : break;
852 : :
853 : 0 : default:
854 : 0 : gcc_unreachable ();
855 : : }
856 : :
857 : 62137159 : data->cannot_fallthru = false;
858 : 62137159 : gsi_next (gsi);
859 : : }
860 : :
861 : : /* Lower a bind_expr TSI. DATA is passed through the recursion. */
862 : :
863 : : static void
864 : 7033169 : lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
865 : : {
866 : 7033169 : tree old_block = data->block;
867 : 7033169 : gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
868 : 7033169 : tree new_block = gimple_bind_block (stmt);
869 : :
870 : 7033169 : if (new_block)
871 : : {
872 : 5601346 : if (new_block == old_block)
873 : : {
874 : : /* The outermost block of the original function may not be the
875 : : outermost statement chain of the gimplified function. So we
876 : : may see the outermost block just inside the function. */
877 : 1451478 : gcc_assert (new_block == DECL_INITIAL (current_function_decl));
878 : : new_block = NULL;
879 : : }
880 : : else
881 : : {
882 : : /* We do not expect to handle duplicate blocks. */
883 : 4149868 : gcc_assert (!TREE_ASM_WRITTEN (new_block));
884 : 4149868 : TREE_ASM_WRITTEN (new_block) = 1;
885 : :
886 : : /* Block tree may get clobbered by inlining. Normally this would
887 : : be fixed in rest_of_decl_compilation using block notes, but
888 : : since we are not going to emit them, it is up to us. */
889 : 4149868 : BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
890 : 4149868 : BLOCK_SUBBLOCKS (old_block) = new_block;
891 : 4149868 : BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
892 : 4149868 : BLOCK_SUPERCONTEXT (new_block) = old_block;
893 : :
894 : 4149868 : data->block = new_block;
895 : : }
896 : : }
897 : :
898 : 7033169 : record_vars (gimple_bind_vars (stmt));
899 : :
900 : : /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
901 : : need gimple_bind_vars. */
902 : 7033169 : tree next;
903 : : /* BLOCK_VARS and gimple_bind_vars share a common sub-chain. Find
904 : : it by marking all BLOCK_VARS. */
905 : 7033169 : if (gimple_bind_block (stmt))
906 : 11999721 : for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
907 : 6398375 : TREE_VISITED (t) = 1;
908 : 7033169 : for (tree var = gimple_bind_vars (stmt);
909 : 10767585 : var && ! TREE_VISITED (var); var = next)
910 : : {
911 : 3734416 : next = DECL_CHAIN (var);
912 : 3734416 : DECL_CHAIN (var) = NULL_TREE;
913 : : }
914 : : /* Unmark BLOCK_VARS. */
915 : 7033169 : if (gimple_bind_block (stmt))
916 : 11999721 : for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
917 : 6398375 : TREE_VISITED (t) = 0;
918 : :
919 : 7033169 : lower_sequence (gimple_bind_body_ptr (stmt), data);
920 : :
921 : 7033169 : if (new_block)
922 : : {
923 : 4149868 : gcc_assert (data->block == new_block);
924 : :
925 : 4149868 : BLOCK_SUBBLOCKS (new_block)
926 : 4149868 : = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
927 : 4149868 : data->block = old_block;
928 : : }
929 : :
930 : : /* The GIMPLE_BIND no longer carries any useful information -- kill it. */
931 : 7033169 : gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
932 : 7033169 : gsi_remove (gsi, false);
933 : 7033169 : }
934 : :
935 : : /* Same as above, but for a GIMPLE_TRY_CATCH. */
936 : :
937 : : static void
938 : 769430 : lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
939 : : {
940 : 769430 : bool cannot_fallthru;
941 : 769430 : gimple *stmt = gsi_stmt (*gsi);
942 : 769430 : gimple_stmt_iterator i;
943 : :
944 : : /* We don't handle GIMPLE_TRY_FINALLY. */
945 : 769430 : gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
946 : :
947 : 769430 : lower_sequence (gimple_try_eval_ptr (stmt), data);
948 : 769430 : cannot_fallthru = data->cannot_fallthru;
949 : :
950 : 769430 : i = gsi_start (*gimple_try_cleanup_ptr (stmt));
951 : 769430 : switch (gimple_code (gsi_stmt (i)))
952 : : {
953 : : case GIMPLE_CATCH:
954 : : /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
955 : : catch expression and a body. The whole try/catch may fall
956 : : through iff any of the catch bodies falls through. */
957 : 72851 : for (; !gsi_end_p (i); gsi_next (&i))
958 : : {
959 : 37845 : data->cannot_fallthru = false;
960 : 37845 : lower_sequence (gimple_catch_handler_ptr (
961 : : as_a <gcatch *> (gsi_stmt (i))),
962 : : data);
963 : 37845 : if (!data->cannot_fallthru)
964 : 14805 : cannot_fallthru = false;
965 : : }
966 : : break;
967 : :
968 : 4280 : case GIMPLE_EH_FILTER:
969 : : /* The exception filter expression only matters if there is an
970 : : exception. If the exception does not match EH_FILTER_TYPES,
971 : : we will execute EH_FILTER_FAILURE, and we will fall through
972 : : if that falls through. If the exception does match
973 : : EH_FILTER_TYPES, the stack unwinder will continue up the
974 : : stack, so we will not fall through. We don't know whether we
975 : : will throw an exception which matches EH_FILTER_TYPES or not,
976 : : so we just ignore EH_FILTER_TYPES and assume that we might
977 : : throw an exception which doesn't match. */
978 : 4280 : data->cannot_fallthru = false;
979 : 4280 : lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
980 : 4280 : if (!data->cannot_fallthru)
981 : 769430 : cannot_fallthru = false;
982 : : break;
983 : :
984 : 0 : case GIMPLE_DEBUG:
985 : 0 : gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
986 : : break;
987 : :
988 : 730144 : default:
989 : : /* This case represents statements to be executed when an
990 : : exception occurs. Those statements are implicitly followed
991 : : by a GIMPLE_RESX to resume execution after the exception. So
992 : : in this case the try/catch never falls through. */
993 : 730144 : data->cannot_fallthru = false;
994 : 730144 : lower_sequence (gimple_try_cleanup_ptr (stmt), data);
995 : 730144 : break;
996 : : }
997 : :
998 : 769430 : data->cannot_fallthru = cannot_fallthru;
999 : 769430 : gsi_next (gsi);
1000 : 769430 : }
1001 : :
1002 : :
1003 : : /* Try to determine whether a TRY_CATCH expression can fall through.
1004 : : This is a subroutine of gimple_stmt_may_fallthru. */
1005 : :
1006 : : static bool
1007 : 408588 : gimple_try_catch_may_fallthru (gtry *stmt)
1008 : : {
1009 : 408588 : gimple_stmt_iterator i;
1010 : :
1011 : : /* We don't handle GIMPLE_TRY_FINALLY. */
1012 : 408588 : gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
1013 : :
1014 : : /* If the TRY block can fall through, the whole TRY_CATCH can
1015 : : fall through. */
1016 : 408588 : if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
1017 : : return true;
1018 : :
1019 : 6342 : i = gsi_start (*gimple_try_cleanup_ptr (stmt));
1020 : 6342 : switch (gimple_code (gsi_stmt (i)))
1021 : : {
1022 : : case GIMPLE_CATCH:
1023 : : /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
1024 : : catch expression and a body. The whole try/catch may fall
1025 : : through iff any of the catch bodies falls through. */
1026 : 7910 : for (; !gsi_end_p (i); gsi_next (&i))
1027 : : {
1028 : 4310 : if (gimple_seq_may_fallthru (gimple_catch_handler (
1029 : 4310 : as_a <gcatch *> (gsi_stmt (i)))))
1030 : : return true;
1031 : : }
1032 : : return false;
1033 : :
1034 : 82 : case GIMPLE_EH_FILTER:
1035 : : /* The exception filter expression only matters if there is an
1036 : : exception. If the exception does not match EH_FILTER_TYPES,
1037 : : we will execute EH_FILTER_FAILURE, and we will fall through
1038 : : if that falls through. If the exception does match
1039 : : EH_FILTER_TYPES, the stack unwinder will continue up the
1040 : : stack, so we will not fall through. We don't know whether we
1041 : : will throw an exception which matches EH_FILTER_TYPES or not,
1042 : : so we just ignore EH_FILTER_TYPES and assume that we might
1043 : : throw an exception which doesn't match. */
1044 : 82 : return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));
1045 : :
1046 : : default:
1047 : : /* This case represents statements to be executed when an
1048 : : exception occurs. Those statements are implicitly followed
1049 : : by a GIMPLE_RESX to resume execution after the exception. So
1050 : : in this case the try/catch never falls through. */
1051 : : return false;
1052 : : }
1053 : : }
1054 : :
1055 : :
1056 : : /* Try to determine if we can continue executing the statement
1057 : : immediately following STMT. This guess need not be 100% accurate;
1058 : : simply be conservative and return true if we don't know. This is
1059 : : used only to avoid stupidly generating extra code. If we're wrong,
1060 : : we'll just delete the extra code later. */
1061 : :
1062 : : bool
1063 : 16119179 : gimple_stmt_may_fallthru (gimple *stmt)
1064 : : {
1065 : 16119179 : if (!stmt)
1066 : : return true;
1067 : :
1068 : 15880323 : switch (gimple_code (stmt))
1069 : : {
1070 : : case GIMPLE_GOTO:
1071 : : case GIMPLE_RETURN:
1072 : : case GIMPLE_RESX:
1073 : : /* Easy cases. If the last statement of the seq implies
1074 : : control transfer, then we can't fall through. */
1075 : : return false;
1076 : :
1077 : : case GIMPLE_SWITCH:
1078 : : /* Switch has already been lowered and represents a branch
1079 : : to a selected label and hence can't fall through. */
1080 : : return false;
1081 : :
1082 : : case GIMPLE_COND:
1083 : : /* GIMPLE_COND's are already lowered into a two-way branch. They
1084 : : can't fall through. */
1085 : : return false;
1086 : :
1087 : 371552 : case GIMPLE_BIND:
1088 : 371552 : return gimple_seq_may_fallthru (
1089 : 371552 : gimple_bind_body (as_a <gbind *> (stmt)));
1090 : :
1091 : 1259216 : case GIMPLE_TRY:
1092 : 1259216 : if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
1093 : 408588 : return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
1094 : :
1095 : : /* It must be a GIMPLE_TRY_FINALLY. */
1096 : :
1097 : : /* The finally clause is always executed after the try clause,
1098 : : so if it does not fall through, then the try-finally will not
1099 : : fall through. Otherwise, if the try clause does not fall
1100 : : through, then when the finally clause falls through it will
1101 : : resume execution wherever the try clause was going. So the
1102 : : whole try-finally will only fall through if both the try
1103 : : clause and the finally clause fall through. */
1104 : 850628 : return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
1105 : 1342730 : && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
1106 : :
1107 : 379 : case GIMPLE_EH_ELSE:
1108 : 379 : {
1109 : 379 : geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
1110 : 379 : return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
1111 : 379 : || gimple_seq_may_fallthru (gimple_eh_else_e_body (
1112 : : eh_else_stmt)));
1113 : : }
1114 : :
1115 : 4058855 : case GIMPLE_CALL:
1116 : : /* Functions that do not return do not fall through. */
1117 : 4058855 : return !gimple_call_noreturn_p (stmt);
1118 : :
1119 : : default:
1120 : : return true;
1121 : : }
1122 : : }
1123 : :
1124 : :
1125 : : /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
1126 : :
1127 : : bool
1128 : 13667387 : gimple_seq_may_fallthru (gimple_seq seq)
1129 : : {
1130 : 13667387 : return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
1131 : : }
1132 : :
1133 : :
1134 : : /* Lower a GIMPLE_RETURN GSI. DATA is passed through the recursion. */
1135 : :
1136 : : static void
1137 : 2059435 : lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
1138 : : {
1139 : 2059435 : greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
1140 : 2059435 : gimple *t;
1141 : 2059435 : int i;
1142 : 2059435 : return_statements_t tmp_rs;
1143 : :
1144 : : /* Match this up with an existing return statement that's been created. */
1145 : 4118897 : for (i = data->return_statements.length () - 1;
1146 : 2059462 : i >= 0; i--)
1147 : : {
1148 : 546033 : tmp_rs = data->return_statements[i];
1149 : :
1150 : 546033 : if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
1151 : : {
1152 : : /* Remove the line number from the representative return statement.
1153 : : It now fills in for many such returns. Failure to remove this
1154 : : will result in incorrect results for coverage analysis. */
1155 : 546006 : gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);
1156 : :
1157 : 546006 : goto found;
1158 : : }
1159 : : }
1160 : :
1161 : : /* Not found. Create a new label and record the return statement. */
1162 : 1513429 : tmp_rs.label = create_artificial_label (cfun->function_end_locus);
1163 : 1513429 : tmp_rs.stmt = stmt;
1164 : 1513429 : data->return_statements.safe_push (tmp_rs);
1165 : :
1166 : : /* Generate a goto statement and remove the return statement. */
1167 : 2059435 : found:
1168 : : /* When not optimizing, make sure user returns are preserved. */
1169 : 2059435 : if (!optimize && gimple_has_location (stmt))
1170 : 253331 : DECL_ARTIFICIAL (tmp_rs.label) = 0;
1171 : 2059435 : t = gimple_build_goto (tmp_rs.label);
1172 : : /* location includes block. */
1173 : 2059435 : gimple_set_location (t, gimple_location (stmt));
1174 : 2059435 : gsi_insert_before (gsi, t, GSI_SAME_STMT);
1175 : 2059435 : gsi_remove (gsi, false);
1176 : 2059435 : }
1177 : :
1178 : : /* Lower a __builtin_setjmp GSI.
1179 : :
1180 : : __builtin_setjmp is passed a pointer to an array of five words (not
1181 : : all will be used on all machines). It operates similarly to the C
1182 : : library function of the same name, but is more efficient.
1183 : :
1184 : : It is lowered into 2 other builtins, namely __builtin_setjmp_setup
1185 : : and __builtin_setjmp_receiver.
1186 : :
1187 : : After full lowering, the body of the function should look like:
1188 : :
1189 : : {
1190 : : int D.1844;
1191 : : int D.2844;
1192 : :
1193 : : [...]
1194 : :
1195 : : __builtin_setjmp_setup (&buf, &<D1847>);
1196 : : D.1844 = 0;
1197 : : goto <D1846>;
1198 : : <D1847>:;
1199 : : __builtin_setjmp_receiver (&<D1847>);
1200 : : D.1844 = 1;
1201 : : <D1846>:;
1202 : : if (D.1844 == 0) goto <D1848>; else goto <D1849>;
1203 : :
1204 : : [...]
1205 : :
1206 : : __builtin_setjmp_setup (&buf, &<D2847>);
1207 : : D.2844 = 0;
1208 : : goto <D2846>;
1209 : : <D2847>:;
1210 : : __builtin_setjmp_receiver (&<D2847>);
1211 : : D.2844 = 1;
1212 : : <D2846>:;
1213 : : if (D.2844 == 0) goto <D2848>; else goto <D2849>;
1214 : :
1215 : : [...]
1216 : :
1217 : : <D3850>:;
1218 : : return;
1219 : : }
1220 : :
1221 : : During cfg creation an extra per-function (or per-OpenMP region)
1222 : : block with an ABNORMAL_DISPATCHER internal call will be added; it is
1223 : : the unique destination of all the abnormal call edges and the unique
1224 : : source of all the abnormal edges to the receivers, thus keeping the
1225 : : complexity explosion localized. */
1226 : :
1227 : : static void
1228 : 796 : lower_builtin_setjmp (gimple_stmt_iterator *gsi)
1229 : : {
1230 : 796 : gimple *stmt = gsi_stmt (*gsi);
1231 : 796 : location_t loc = gimple_location (stmt);
1232 : 796 : tree cont_label = create_artificial_label (loc);
1233 : 796 : tree next_label = create_artificial_label (loc);
1234 : 796 : tree dest, t, arg;
1235 : 796 : gimple *g;
1236 : :
1237 : : /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
1238 : : these builtins are modelled as non-local label jumps to the label
1239 : : that is passed to these two builtins, so pretend we have a non-local
1240 : : label during GIMPLE passes too. See PR60003. */
1241 : 796 : cfun->has_nonlocal_label = 1;
1242 : :
1243 : : /* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is
1244 : : passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */
1245 : 796 : FORCED_LABEL (next_label) = 1;
1246 : :
1247 : 796 : tree orig_dest = dest = gimple_call_lhs (stmt);
1248 : 796 : if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
1249 : 725 : dest = create_tmp_reg (TREE_TYPE (orig_dest));
1250 : :
1251 : : /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */
1252 : 796 : arg = build_addr (next_label);
1253 : 796 : t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
1254 : 796 : g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
1255 : : /* location includes block. */
1256 : 796 : gimple_set_location (g, loc);
1257 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1258 : :
1259 : : /* Build 'DEST = 0' and insert. */
1260 : 796 : if (dest)
1261 : : {
1262 : 745 : g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
1263 : 745 : gimple_set_location (g, loc);
1264 : 745 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1265 : : }
1266 : :
1267 : : /* Build 'goto CONT_LABEL' and insert. */
1268 : 796 : g = gimple_build_goto (cont_label);
1269 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1270 : :
1271 : : /* Build 'NEXT_LABEL:' and insert. */
1272 : 796 : g = gimple_build_label (next_label);
1273 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1274 : :
1275 : : /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */
1276 : 796 : arg = build_addr (next_label);
1277 : 796 : t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
1278 : 796 : g = gimple_build_call (t, 1, arg);
1279 : 796 : gimple_set_location (g, loc);
1280 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1281 : :
1282 : : /* Build 'DEST = 1' and insert. */
1283 : 796 : if (dest)
1284 : : {
1285 : 745 : g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
1286 : : integer_one_node));
1287 : 745 : gimple_set_location (g, loc);
1288 : 745 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1289 : : }
1290 : :
1291 : : /* Build 'CONT_LABEL:' and insert. */
1292 : 796 : g = gimple_build_label (cont_label);
1293 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1294 : :
1295 : : /* Build orig_dest = dest if necessary. */
1296 : 796 : if (dest != orig_dest)
1297 : : {
1298 : 725 : g = gimple_build_assign (orig_dest, dest);
1299 : 725 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1300 : : }
1301 : :
1302 : : /* Remove the call to __builtin_setjmp. */
1303 : 796 : gsi_remove (gsi, false);
1304 : 796 : }
1305 : :
1306 : : /* Lower calls to posix_memalign to
1307 : : res = posix_memalign (ptr, align, size);
1308 : : if (res == 0)
1309 : : *ptr = __builtin_assume_aligned (*ptr, align);
1310 : : or to
1311 : : void *tem;
1312 : : res = posix_memalign (&tem, align, size);
1313 : : if (res == 0)
1314 : : ptr = __builtin_assume_aligned (tem, align);
1315 : : in case the first argument was &ptr. That way we can get at the
1316 : : alignment of the heap pointer in CCP. */
1317 : :
1318 : : static void
1319 : 34 : lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
1320 : : {
1321 : 34 : gimple *stmt, *call = gsi_stmt (*gsi);
1322 : 34 : tree pptr = gimple_call_arg (call, 0);
1323 : 34 : tree align = gimple_call_arg (call, 1);
1324 : 34 : tree res = gimple_call_lhs (call);
1325 : 34 : tree ptr = create_tmp_reg (ptr_type_node);
1326 : 34 : if (TREE_CODE (pptr) == ADDR_EXPR)
1327 : : {
1328 : 34 : tree tem = create_tmp_var (ptr_type_node);
1329 : 34 : TREE_ADDRESSABLE (tem) = 1;
1330 : 34 : gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
1331 : 34 : stmt = gimple_build_assign (ptr, tem);
1332 : : }
1333 : : else
1334 : 0 : stmt = gimple_build_assign (ptr,
1335 : 0 : fold_build2 (MEM_REF, ptr_type_node, pptr,
1336 : : build_int_cst (ptr_type_node, 0)));
1337 : 34 : if (res == NULL_TREE)
1338 : : {
1339 : 0 : res = create_tmp_reg (integer_type_node);
1340 : 0 : gimple_call_set_lhs (call, res);
1341 : : }
1342 : 34 : tree align_label = create_artificial_label (UNKNOWN_LOCATION);
1343 : 34 : tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
1344 : 34 : gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
1345 : : align_label, noalign_label);
1346 : 34 : gsi_insert_after (gsi, cond, GSI_NEW_STMT);
1347 : 34 : gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
1348 : 34 : gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1349 : 68 : stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
1350 : : 2, ptr, align);
1351 : 34 : gimple_call_set_lhs (stmt, ptr);
1352 : 34 : gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1353 : 34 : stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
1354 : : build_int_cst (ptr_type_node, 0)),
1355 : : ptr);
1356 : 34 : gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1357 : 34 : gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
1358 : 34 : }
1359 : :
1360 : : /* Lower calls to __builtin_assume_aligned when not optimizing. */
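/* A hypothetical sketch of the effect (an editor's example, not part of
   the sources): given a call whose result is an SSA pointer, e.g.

       p_1 = __builtin_assume_aligned (q_2, 16);

   the routine below simply records alignment 16 (and misalignment 0) in
   p_1's ptr_info_def via set_ptr_info_alignment and leaves the call in
   place; the call itself is only elided at RTL expansion time.  The SSA
   names p_1 and q_2 are made up for the example.  */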
1361 : :
1362 : : static void
1363 : 77 : lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
1364 : : {
1365 : 77 : gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1366 : :
1367 : 77 : tree lhs = gimple_call_lhs (call);
1368 : 77 : if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
1369 : : return;
1370 : :
1371 : 2 : tree align = gimple_call_arg (call, 1);
1372 : 2 : tree misalign = (gimple_call_num_args (call) > 2
1373 : 2 : ? gimple_call_arg (call, 2) : NULL_TREE);
1374 : 2 : if (!tree_fits_uhwi_p (align)
1375 : 2 : || (misalign && !tree_fits_uhwi_p (misalign)))
1376 : : return;
1377 : :
1378 : 2 : unsigned aligni = TREE_INT_CST_LOW (align);
1379 : 2 : unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
1380 : 2 : if (aligni <= 1
1381 : 1 : || (aligni & (aligni - 1)) != 0
1382 : 1 : || (misaligni & ~(aligni - 1)) != 0)
1383 : : return;
1384 : :
1385 : : /* For lowering we simply transfer alignment information to the
1386 : : result and leave the call otherwise unchanged; it will be elided
1387 : : at RTL expansion time. */
1388 : 1 : ptr_info_def *pi = get_ptr_info (lhs);
1389 : 1 : set_ptr_info_alignment (pi, aligni, misaligni);
1390 : : }
1391 : :
1392 : :
1393 : : /* Record the variables in VARS into function FN. */
1394 : :
1395 : : void
1396 : 23331533 : record_vars_into (tree vars, tree fn)
1397 : : {
1398 : 45699890 : for (; vars; vars = DECL_CHAIN (vars))
1399 : : {
1400 : 22368357 : tree var = vars;
1401 : :
1402 : : /* BIND_EXPRs also contain function/type/constant declarations
1403 : : that we don't need to care about. */
1404 : 22368357 : if (!VAR_P (var))
1405 : 562936 : continue;
1406 : :
1407 : : /* Nothing to do in this case. */
1408 : 21805421 : if (DECL_EXTERNAL (var))
1409 : 2544 : continue;
1410 : :
1411 : : /* Record the variable. */
1412 : 21802877 : add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
1413 : : }
1414 : 23331533 : }
1415 : :
1416 : :
1417 : : /* Record the variables in VARS into current_function_decl. */
1418 : :
1419 : : void
1420 : 23227040 : record_vars (tree vars)
1421 : : {
1422 : 23227040 : record_vars_into (vars, current_function_decl);
1423 : 23227040 : }
|