Branch data Line data Source code
1 : : /* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
2 : :
3 : : Copyright (C) 2003-2025 Free Software Foundation, Inc.
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : : #include "config.h"
22 : : #include "system.h"
23 : : #include "coretypes.h"
24 : : #include "backend.h"
25 : : #include "tree.h"
26 : : #include "gimple.h"
27 : : #include "tree-pass.h"
28 : : #include "fold-const.h"
29 : : #include "tree-nested.h"
30 : : #include "calls.h"
31 : : #include "gimple-iterator.h"
32 : : #include "gimple-low.h"
33 : : #include "predict.h"
34 : : #include "gimple-predict.h"
35 : : #include "gimple-fold.h"
36 : : #include "cgraph.h"
37 : : #include "tree-ssa.h"
38 : : #include "value-range.h"
39 : : #include "stringpool.h"
40 : : #include "tree-ssanames.h"
41 : : #include "tree-inline.h"
42 : : #include "gimple-walk.h"
43 : : #include "attribs.h"
44 : : #include "diagnostic-core.h"
45 : :
46 : : /* The differences between High GIMPLE and Low GIMPLE are the
47 : : following:
48 : :
49 : : 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
50 : :
51 : : 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
52 : : flow and exception regions are built as an on-the-side region
53 : : hierarchy (See tree-eh.cc:lower_eh_constructs).
54 : :
55 : : 3- Multiple identical return statements are grouped into a single
56 : : return statement, with gotos to the unique return site. */
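
/* As a rough illustration of points 1 and 3 -- a sketch, not a literal
   GIMPLE dump, and the label names are made up -- a function such as

     int f (int x)
     {
       if (x)
         return 0;
       return 0;
     }

   where both returns are identical, ends up shaped roughly like

     int f (int x)
     {
       if (x != 0) goto <then>; else goto <else>;
       <then>:
       goto <retlab>;
       <else>:
       goto <retlab>;
       <retlab>:
       return 0;
     }

   with the enclosing GIMPLE_BINDs flattened away and a single
   representative return emitted at the end of the function (see
   lower_gimple_return and lower_function_body below).  */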
57 : :
58 : : /* Match a return statement with a label. During lowering, we identify
59 : : identical return statements and replace duplicates with a jump to
60 : : the corresponding label. */
61 : : struct return_statements_t
62 : : {
63 : : tree label;
64 : : greturn *stmt;
65 : : };
66 : : typedef struct return_statements_t return_statements_t;
67 : :
68 : :
69 : : struct lower_data
70 : : {
71 : : /* Block the current statement belongs to. */
72 : : tree block;
73 : :
74 : : /* A vector of label and return statements to be moved to the end
75 : : of the function. */
76 : : vec<return_statements_t> return_statements;
77 : :
78 : : /* True if the current statement cannot fall through. */
79 : : bool cannot_fallthru;
80 : : };
81 : :
82 : : /* Bitmap of LABEL_DECL uids for user labels moved into outlined assume
83 : : functions. */
84 : : static bitmap assume_labels;
85 : :
86 : : static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
87 : : static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
88 : : static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
89 : : static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
90 : : static void lower_builtin_setjmp (gimple_stmt_iterator *);
91 : : static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
92 : : static void lower_builtin_assume_aligned (gimple_stmt_iterator *);
93 : :
94 : :
95 : : /* Helper function for lower_function_body, called via walk_gimple_seq.
96 : : Diagnose uses of user labels defined inside of assume attribute
97 : : expressions. */
98 : :
99 : : static tree
100 : 160 : diagnose_assume_labels (tree *tp, int *, void *data)
101 : : {
102 : 160 : if (TREE_CODE (*tp) == LABEL_DECL
103 : 56 : && !DECL_ARTIFICIAL (*tp)
104 : 24 : && DECL_NAME (*tp)
105 : 184 : && bitmap_bit_p (assume_labels, DECL_UID (*tp)))
106 : : {
107 : 24 : struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
108 : 24 : auto_diagnostic_group d;
109 : 24 : error_at (gimple_location (gsi_stmt (wi->gsi)),
110 : : "reference to label %qD defined inside of %<assume%> "
111 : : "attribute expression from outside of the attribute", *tp);
112 : 24 : inform (DECL_SOURCE_LOCATION (*tp), "%qD defined here", *tp);
113 : 24 : }
114 : 160 : return NULL_TREE;
115 : : }
116 : :
117 : :
118 : : /* Lower the body of current_function_decl from High GIMPLE into Low
119 : : GIMPLE. */
120 : :
121 : : static unsigned int
122 : 2896091 : lower_function_body (void)
123 : : {
124 : 2896091 : struct lower_data data;
125 : 2896091 : gimple_seq body = gimple_body (current_function_decl);
126 : 2896091 : gimple_seq lowered_body;
127 : 2896091 : gimple_stmt_iterator i;
128 : 2896091 : gimple *bind;
129 : 2896091 : gimple *x;
130 : :
131 : : /* The gimplifier should've left a body of exactly one statement,
132 : : namely a GIMPLE_BIND. */
133 : 2896091 : gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
134 : : && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
135 : :
136 : 2896091 : memset (&data, 0, sizeof (data));
137 : 2896091 : data.block = DECL_INITIAL (current_function_decl);
138 : 2896091 : BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
139 : 2896091 : BLOCK_CHAIN (data.block) = NULL_TREE;
140 : 2896091 : TREE_ASM_WRITTEN (data.block) = 1;
141 : 2896091 : data.return_statements.create (8);
142 : :
143 : 2896091 : bind = gimple_seq_first_stmt (body);
144 : 2896091 : lowered_body = NULL;
145 : 2896091 : gimple_seq_add_stmt (&lowered_body, bind);
146 : 2896091 : i = gsi_start (lowered_body);
147 : 2896091 : lower_gimple_bind (&i, &data);
148 : :
149 : 2896091 : i = gsi_last (lowered_body);
150 : :
151 : : /* If we had begin stmt markers from e.g. PCH, but this compilation
152 : : doesn't want them, lower_stmt will have cleaned them up; we can
153 : : now clear the flag that indicates we had them. */
154 : 2896091 : if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
155 : : {
156 : : /* This counter need not be exact, but before lowering it will
157 : : most certainly be. */
158 : 0 : gcc_assert (cfun->debug_marker_count == 0);
159 : 0 : cfun->debug_nonbind_markers = false;
160 : : }
161 : :
162 : : /* If the function falls off the end, we need a null return statement.
163 : : If we've already got one in the return_statements vector, we don't
164 : : need to do anything special. Otherwise build one by hand. */
165 : 2896091 : bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
166 : 2896091 : if (may_fallthru
167 : 2945759 : && (data.return_statements.is_empty ()
168 : 49668 : || (gimple_return_retval (data.return_statements.last().stmt)
169 : : != NULL)))
170 : : {
171 : 1238072 : x = gimple_build_return (NULL);
172 : 1238072 : gimple_set_location (x, cfun->function_end_locus);
173 : 1238072 : gimple_set_block (x, DECL_INITIAL (current_function_decl));
174 : 1238072 : gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
175 : 1238072 : may_fallthru = false;
176 : : }
177 : :
178 : : /* If we lowered any return statements, emit the representative
179 : : at the end of the function. */
180 : 4542592 : while (!data.return_statements.is_empty ())
181 : : {
182 : 1646501 : return_statements_t t = data.return_statements.pop ();
183 : 1646501 : x = gimple_build_label (t.label);
184 : 1646501 : gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
185 : 1646501 : gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
186 : 1646501 : if (may_fallthru)
187 : : {
188 : : /* Remove the line number from the representative return statement.
189 : : It now fills in for the fallthru too. Failure to remove this
190 : : will result in incorrect results for coverage analysis. */
191 : 36919 : gimple_set_location (t.stmt, UNKNOWN_LOCATION);
192 : 36919 : may_fallthru = false;
193 : : }
194 : : }
195 : :
196 : : /* Once the old body has been lowered, replace it with the new
197 : : lowered sequence. */
198 : 2896091 : gimple_set_body (current_function_decl, lowered_body);
199 : :
200 : 2896091 : if (assume_labels)
201 : : {
202 : 8 : struct walk_stmt_info wi;
203 : :
204 : 8 : memset (&wi, 0, sizeof (wi));
205 : 8 : walk_gimple_seq (lowered_body, NULL, diagnose_assume_labels, &wi);
206 : 8 : BITMAP_FREE (assume_labels);
207 : : }
208 : :
209 : 2896091 : gcc_assert (data.block == DECL_INITIAL (current_function_decl));
210 : 2896091 : BLOCK_SUBBLOCKS (data.block)
211 : 2896091 : = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
212 : :
213 : 2896091 : clear_block_marks (data.block);
214 : 2896091 : data.return_statements.release ();
215 : 2896091 : return 0;
216 : : }
217 : :
218 : : namespace {
219 : :
220 : : const pass_data pass_data_lower_cf =
221 : : {
222 : : GIMPLE_PASS, /* type */
223 : : "lower", /* name */
224 : : OPTGROUP_NONE, /* optinfo_flags */
225 : : TV_NONE, /* tv_id */
226 : : PROP_gimple_any, /* properties_required */
227 : : PROP_gimple_lcf, /* properties_provided */
228 : : 0, /* properties_destroyed */
229 : : 0, /* todo_flags_start */
230 : : 0, /* todo_flags_finish */
231 : : };
232 : :
233 : : class pass_lower_cf : public gimple_opt_pass
234 : : {
235 : : public:
236 : 285097 : pass_lower_cf (gcc::context *ctxt)
237 : 570194 : : gimple_opt_pass (pass_data_lower_cf, ctxt)
238 : : {}
239 : :
240 : : /* opt_pass methods: */
241 : 2896091 : unsigned int execute (function *) final override
242 : : {
243 : 2896091 : return lower_function_body ();
244 : : }
245 : :
246 : : }; // class pass_lower_cf
247 : :
248 : : } // anon namespace
249 : :
250 : : gimple_opt_pass *
251 : 285097 : make_pass_lower_cf (gcc::context *ctxt)
252 : : {
253 : 285097 : return new pass_lower_cf (ctxt);
254 : : }
255 : :
256 : : /* Lower sequence SEQ. Unlike gimplification the statements are not relowered
257 : : when they are changed -- if this has to be done, the lowering routine must
258 : : do it explicitly. DATA is passed through the recursion. */
259 : :
260 : : static void
261 : 12576838 : lower_sequence (gimple_seq *seq, struct lower_data *data)
262 : : {
263 : 12576838 : gimple_stmt_iterator gsi;
264 : :
265 : 112518154 : for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
266 : 87592094 : lower_stmt (&gsi, data);
267 : 12576838 : }
268 : :
269 : :
270 : : /* Lower the OpenMP directive statement pointed by GSI. DATA is
271 : : passed through the recursion. */
272 : :
273 : : static void
274 : 64435 : lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
275 : : {
276 : 64435 : gimple *stmt;
277 : :
278 : 64435 : stmt = gsi_stmt (*gsi);
279 : :
280 : 64435 : lower_sequence (gimple_omp_body_ptr (stmt), data);
281 : 64435 : gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
282 : 64435 : gimple_omp_set_body (stmt, NULL);
283 : 64435 : gsi_next (gsi);
284 : 64435 : }
285 : :
286 : : /* Create an artificial FUNCTION_DECL for an assumption at LOC. */
287 : :
288 : : static tree
289 : 124 : create_assumption_fn (location_t loc)
290 : : {
291 : 124 : tree name = clone_function_name_numbered (current_function_decl, "_assume");
292 : : /* Temporarily, until we determine all the arguments. */
293 : 124 : tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
294 : 124 : tree decl = build_decl (loc, FUNCTION_DECL, name, type);
295 : 124 : TREE_STATIC (decl) = 1;
296 : 124 : TREE_USED (decl) = 1;
297 : 124 : DECL_ARTIFICIAL (decl) = 1;
298 : 124 : DECL_IGNORED_P (decl) = 1;
299 : 124 : DECL_NAMELESS (decl) = 1;
300 : 124 : TREE_PUBLIC (decl) = 0;
301 : 124 : DECL_UNINLINABLE (decl) = 1;
302 : 124 : DECL_EXTERNAL (decl) = 0;
303 : 124 : DECL_CONTEXT (decl) = NULL_TREE;
304 : 124 : DECL_INITIAL (decl) = make_node (BLOCK);
305 : 124 : tree attributes = DECL_ATTRIBUTES (current_function_decl);
306 : 124 : if (lookup_attribute ("noipa", attributes) == NULL)
307 : : {
308 : 119 : attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
309 : 119 : if (lookup_attribute ("noinline", attributes) == NULL)
310 : 119 : attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
311 : 119 : if (lookup_attribute ("noclone", attributes) == NULL)
312 : 119 : attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
313 : 119 : if (lookup_attribute ("no_icf", attributes) == NULL)
314 : 119 : attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
315 : : }
316 : 124 : DECL_ATTRIBUTES (decl) = attributes;
317 : 124 : BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
318 : 248 : DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
319 : 124 : = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
320 : 248 : DECL_FUNCTION_SPECIFIC_TARGET (decl)
321 : 124 : = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
322 : 124 : tree t = build_decl (DECL_SOURCE_LOCATION (decl),
323 : : RESULT_DECL, NULL_TREE, boolean_type_node);
324 : 124 : DECL_ARTIFICIAL (t) = 1;
325 : 124 : DECL_IGNORED_P (t) = 1;
326 : 124 : DECL_CONTEXT (t) = decl;
327 : 124 : DECL_RESULT (decl) = t;
328 : 124 : push_struct_function (decl);
329 : 124 : cfun->function_end_locus = loc;
330 : 124 : init_tree_ssa (cfun);
331 : 124 : return decl;
332 : : }
333 : :
334 : 372 : struct lower_assumption_data
335 : : {
336 : : copy_body_data id;
337 : : tree return_false_label;
338 : : tree guard_copy;
339 : : auto_vec<tree> decls;
340 : : };
341 : :
342 : : /* Helper function for lower_assumption. Find local vars and labels
343 : : in the assumption sequence and remove debug stmts. */
344 : :
345 : : static tree
346 : 768 : find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
347 : : struct walk_stmt_info *wi)
348 : : {
349 : 768 : lower_assumption_data *data = (lower_assumption_data *) wi->info;
350 : 768 : gimple *stmt = gsi_stmt (*gsi_p);
351 : 768 : tree lhs = gimple_get_lhs (stmt);
352 : 768 : if (lhs && TREE_CODE (lhs) == SSA_NAME)
353 : : {
354 : 0 : gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
355 : 0 : data->id.decl_map->put (lhs, NULL_TREE);
356 : 0 : data->decls.safe_push (lhs);
357 : : }
358 : 768 : switch (gimple_code (stmt))
359 : : {
360 : 134 : case GIMPLE_BIND:
361 : 134 : for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
362 : 307 : var; var = DECL_CHAIN (var))
363 : 173 : if (VAR_P (var)
364 : 173 : && !DECL_EXTERNAL (var)
365 : 346 : && DECL_CONTEXT (var) == data->id.src_fn)
366 : : {
367 : 173 : data->id.decl_map->put (var, var);
368 : 173 : data->decls.safe_push (var);
369 : : }
370 : 134 : break;
371 : 134 : case GIMPLE_LABEL:
372 : 134 : {
373 : 134 : tree label = gimple_label_label (as_a <glabel *> (stmt));
374 : 134 : data->id.decl_map->put (label, label);
375 : 134 : if (DECL_NAME (label) && !DECL_ARTIFICIAL (label))
376 : : {
377 : 16 : if (assume_labels == NULL)
378 : 8 : assume_labels = BITMAP_ALLOC (NULL);
379 : 16 : bitmap_set_bit (assume_labels, DECL_UID (label));
380 : : }
381 : 134 : break;
382 : : }
383 : 3 : case GIMPLE_RETURN:
384 : : /* If something in the assumption tries to return from the parent
385 : : function, reaching it in hypothetical evaluation would be UB,
386 : : so transform such returns into return false; */
387 : 3 : {
388 : 3 : gimple *g = gimple_build_assign (data->guard_copy, boolean_false_node);
389 : 3 : gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
390 : 3 : gimple_return_set_retval (as_a <greturn *> (stmt), data->guard_copy);
391 : 3 : break;
392 : : }
393 : 0 : case GIMPLE_DEBUG:
394 : : /* As assumptions won't be emitted, debug info stmts in them
395 : : are useless. */
396 : 0 : gsi_remove (gsi_p, true);
397 : 0 : wi->removed_stmt = true;
398 : 0 : break;
399 : : default:
400 : : break;
401 : : }
402 : 768 : return NULL_TREE;
403 : : }
404 : :
405 : : /* Create a new PARM_DECL that is otherwise identical to DECL, which can
406 : : be either a VAR_DECL, a PARM_DECL or a RESULT_DECL. The original
407 : : DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
408 : :
409 : : static tree
410 : 164 : assumption_copy_decl (tree decl, copy_body_data *id)
411 : : {
412 : 164 : tree type = TREE_TYPE (decl);
413 : :
414 : 164 : if (is_global_var (decl))
415 : : return decl;
416 : :
417 : 139 : gcc_assert (VAR_P (decl)
418 : : || TREE_CODE (decl) == PARM_DECL
419 : : || TREE_CODE (decl) == RESULT_DECL);
420 : 139 : if (TREE_THIS_VOLATILE (decl))
421 : 4 : type = build_pointer_type (type);
422 : 139 : tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
423 : 139 : PARM_DECL, DECL_NAME (decl), type);
424 : 139 : if (DECL_PT_UID_SET_P (decl))
425 : 0 : SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
426 : 139 : TREE_THIS_VOLATILE (copy) = 0;
427 : 139 : if (TREE_THIS_VOLATILE (decl))
428 : 4 : TREE_READONLY (copy) = 1;
429 : : else
430 : : {
431 : 135 : TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
432 : 135 : TREE_READONLY (copy) = TREE_READONLY (decl);
433 : 135 : DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
434 : 135 : DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
435 : : }
436 : 139 : DECL_ARG_TYPE (copy) = type;
437 : 139 : ((lower_assumption_data *) id)->decls.safe_push (decl);
438 : 139 : return copy_decl_for_dup_finish (id, decl, copy);
439 : : }
440 : :
441 : : /* Transform gotos out of the assumption into return false. */
442 : :
443 : : static tree
444 : 771 : adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
445 : : struct walk_stmt_info *wi)
446 : : {
447 : 771 : lower_assumption_data *data = (lower_assumption_data *) wi->info;
448 : 771 : gimple *stmt = gsi_stmt (*gsi_p);
449 : 771 : tree lab = NULL_TREE;
450 : 771 : unsigned int idx = 0;
451 : 771 : if (gimple_code (stmt) == GIMPLE_GOTO)
452 : 25 : lab = gimple_goto_dest (stmt);
453 : 746 : else if (gimple_code (stmt) == GIMPLE_COND)
454 : : {
455 : 59 : repeat:
456 : 118 : if (idx == 0)
457 : 59 : lab = gimple_cond_true_label (as_a <gcond *> (stmt));
458 : : else
459 : 59 : lab = gimple_cond_false_label (as_a <gcond *> (stmt));
460 : : }
461 : 687 : else if (gimple_code (stmt) == GIMPLE_LABEL)
462 : : {
463 : 134 : tree label = gimple_label_label (as_a <glabel *> (stmt));
464 : 134 : DECL_CONTEXT (label) = current_function_decl;
465 : : }
466 : 830 : if (lab)
467 : : {
468 : 143 : if (!data->id.decl_map->get (lab))
469 : : {
470 : 3 : if (!data->return_false_label)
471 : 3 : data->return_false_label
472 : 3 : = create_artificial_label (UNKNOWN_LOCATION);
473 : 3 : if (gimple_code (stmt) == GIMPLE_GOTO)
474 : 3 : gimple_goto_set_dest (as_a <ggoto *> (stmt),
475 : : data->return_false_label);
476 : 0 : else if (idx == 0)
477 : 0 : gimple_cond_set_true_label (as_a <gcond *> (stmt),
478 : : data->return_false_label);
479 : : else
480 : 0 : gimple_cond_set_false_label (as_a <gcond *> (stmt),
481 : : data->return_false_label);
482 : : }
483 : 143 : if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
484 : : {
485 : 59 : idx = 1;
486 : 59 : goto repeat;
487 : : }
488 : : }
489 : 771 : return NULL_TREE;
490 : : }
491 : :
492 : : /* Adjust trees in the assumption body. Called through walk_tree. */
493 : :
494 : : static tree
495 : 1578 : adjust_assumption_stmt_op (tree *tp, int *, void *datap)
496 : : {
497 : 1578 : struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
498 : 1578 : lower_assumption_data *data = (lower_assumption_data *) wi->info;
499 : 1578 : tree t = *tp;
500 : 1578 : tree *newt;
501 : 1578 : switch (TREE_CODE (t))
502 : : {
503 : 0 : case SSA_NAME:
504 : 0 : newt = data->id.decl_map->get (t);
505 : : /* There shouldn't be SSA_NAMEs other than ones defined in the
506 : : assumption's body. */
507 : 0 : gcc_assert (newt);
508 : 0 : *tp = *newt;
509 : 0 : break;
510 : 277 : case LABEL_DECL:
511 : 277 : newt = data->id.decl_map->get (t);
512 : 277 : if (newt)
513 : 274 : *tp = *newt;
514 : : break;
515 : 787 : case VAR_DECL:
516 : 787 : case PARM_DECL:
517 : 787 : case RESULT_DECL:
518 : 787 : *tp = remap_decl (t, &data->id);
519 : 787 : if (TREE_THIS_VOLATILE (t) && *tp != t)
520 : : {
521 : 4 : *tp = build_simple_mem_ref (*tp);
522 : 4 : TREE_THIS_NOTRAP (*tp) = 1;
523 : : }
524 : : break;
525 : : default:
526 : : break;
527 : : }
528 : 1578 : return NULL_TREE;
529 : : }
530 : :
531 : : /* Lower assumption.
532 : : The gimplifier transformed:
533 : : .ASSUME (cond);
534 : : into:
535 : : [[assume (guard)]]
536 : : {
537 : : guard = cond;
538 : : }
539 : : which we should transform into:
540 : : .ASSUME (&artificial_fn, args...);
541 : : where artificial_fn will look like:
542 : : bool artificial_fn (args...)
543 : : {
544 : : guard = cond;
545 : : return guard;
546 : : }
547 : : with any debug stmts in the block removed and jumps out of
548 : : the block or return stmts replaced with return false; */
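
/* For instance -- a sketch with made-up names, not a literal dump -- for

     int x;
     [[assume (x > 42)]];

   inside a function f, the outlined artificial function looks roughly
   like

     bool f._assume.0 (int x)
     {
       _guard = 0;
       _guard = x > 42;
       return _guard;
     }

   and the GIMPLE_ASSUME statement is replaced by the internal call

     .ASSUME (&f._assume.0, x);

   where x, being an automatic variable of the enclosing function, has
   been turned into a PARM_DECL of the artificial function and passed
   along as an argument (see assumption_copy_decl).  */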
549 : :
550 : : static void
551 : 124 : lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
552 : : {
553 : 124 : gimple *stmt = gsi_stmt (*gsi);
554 : 124 : tree guard = gimple_assume_guard (stmt);
555 : 124 : gimple *bind = gimple_assume_body (stmt);
556 : 124 : location_t loc = gimple_location (stmt);
557 : 124 : gcc_assert (gimple_code (bind) == GIMPLE_BIND);
558 : :
559 : 124 : lower_assumption_data lad;
560 : 124 : hash_map<tree, tree> decl_map;
561 : 124 : memset (&lad.id, 0, sizeof (lad.id));
562 : 124 : lad.return_false_label = NULL_TREE;
563 : 124 : lad.id.src_fn = current_function_decl;
564 : 124 : lad.id.dst_fn = create_assumption_fn (loc);
565 : 124 : lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
566 : 124 : lad.id.decl_map = &decl_map;
567 : 124 : lad.id.copy_decl = assumption_copy_decl;
568 : 124 : lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
569 : 124 : lad.id.transform_parameter = true;
570 : 124 : lad.id.do_not_unshare = true;
571 : 124 : lad.id.do_not_fold = true;
572 : 124 : cfun->curr_properties = lad.id.src_cfun->curr_properties;
573 : 124 : lad.guard_copy = create_tmp_var (boolean_type_node);
574 : 124 : decl_map.put (lad.guard_copy, lad.guard_copy);
575 : 124 : decl_map.put (guard, lad.guard_copy);
576 : 124 : cfun->assume_function = 1;
577 : :
578 : : /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND. */
579 : 124 : gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
580 : 124 : struct walk_stmt_info wi;
581 : 124 : memset (&wi, 0, sizeof (wi));
582 : 124 : wi.info = (void *) &lad;
583 : 124 : walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
584 : 124 : unsigned int sz = lad.decls.length ();
585 : 297 : for (unsigned i = 0; i < sz; ++i)
586 : : {
587 : 173 : tree v = lad.decls[i];
588 : 173 : tree newv;
589 : : /* SSA_NAMEs defined in the assume condition should be replaced
590 : : by new SSA_NAMEs in the artificial function. */
591 : 173 : if (TREE_CODE (v) == SSA_NAME)
592 : : {
593 : 0 : newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
594 : 0 : decl_map.put (v, newv);
595 : : }
596 : : /* Local vars should have context and type adjusted to the
597 : : new artificial function. */
598 : 173 : else if (VAR_P (v))
599 : : {
600 : 173 : if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
601 : 3 : DECL_ASSEMBLER_NAME (v);
602 : 173 : TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
603 : 173 : DECL_CONTEXT (v) = current_function_decl;
604 : : }
605 : : }
606 : : /* References to other automatic vars should be replaced by
607 : : PARM_DECLs to the artificial function. */
608 : 124 : memset (&wi, 0, sizeof (wi));
609 : 124 : wi.info = (void *) &lad;
610 : 124 : walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
611 : : adjust_assumption_stmt_op, &wi);
612 : :
613 : : /* At the start prepend guard = false; */
614 : 124 : gimple_seq body = NULL;
615 : 124 : gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
616 : 124 : gimple_seq_add_stmt (&body, g);
617 : 124 : gimple_seq_add_stmt (&body, bind);
618 : : /* At the end add return guard; */
619 : 124 : greturn *gr = gimple_build_return (lad.guard_copy);
620 : 124 : gimple_seq_add_stmt (&body, gr);
621 : : /* If there were any jumps to labels outside of the condition,
622 : : replace them with a jump to
623 : : return_false_label:
624 : : guard = false;
625 : : return guard; */
626 : 124 : if (lad.return_false_label)
627 : : {
628 : 3 : g = gimple_build_label (lad.return_false_label);
629 : 3 : gimple_seq_add_stmt (&body, g);
630 : 3 : g = gimple_build_assign (lad.guard_copy, boolean_false_node);
631 : 3 : gimple_seq_add_stmt (&body, g);
632 : 3 : gr = gimple_build_return (lad.guard_copy);
633 : 3 : gimple_seq_add_stmt (&body, gr);
634 : : }
635 : 124 : bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
636 : 124 : body = NULL;
637 : 124 : gimple_seq_add_stmt (&body, bind);
638 : 124 : gimple_set_body (current_function_decl, body);
639 : 124 : pop_cfun ();
640 : :
641 : 124 : tree parms = NULL_TREE;
642 : 124 : tree parmt = void_list_node;
643 : 124 : auto_vec<tree, 8> vargs;
644 : 242 : vargs.safe_grow (1 + (lad.decls.length () - sz), true);
645 : : /* The first argument to IFN_ASSUME will be the address of the
646 : : artificial function. */
647 : 124 : vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
648 : 387 : for (unsigned i = lad.decls.length (); i > sz; --i)
649 : : {
650 : 139 : tree *v = decl_map.get (lad.decls[i - 1]);
651 : 139 : gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
652 : 139 : DECL_CHAIN (*v) = parms;
653 : 139 : parms = *v;
654 : 139 : parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
655 : : /* Remaining arguments will be the variables/parameters
656 : : mentioned in the condition. */
657 : 139 : vargs[i - sz] = lad.decls[i - 1];
658 : 139 : if (TREE_THIS_VOLATILE (lad.decls[i - 1]))
659 : : {
660 : 4 : TREE_ADDRESSABLE (lad.decls[i - 1]) = 1;
661 : 4 : vargs[i - sz] = build_fold_addr_expr (lad.decls[i - 1]);
662 : : }
663 : : /* If they have gimple types, we might need to regimplify
664 : : them to make the IFN_ASSUME call valid. */
665 : 139 : if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
666 : 139 : && !is_gimple_val (vargs[i - sz]))
667 : : {
668 : 6 : tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
669 : 6 : g = gimple_build_assign (t, vargs[i - sz]);
670 : 6 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
671 : 6 : vargs[i - sz] = t;
672 : : }
673 : : }
674 : 124 : DECL_ARGUMENTS (lad.id.dst_fn) = parms;
675 : 124 : TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);
676 : :
677 : 124 : cgraph_node::add_new_function (lad.id.dst_fn, false);
678 : :
679 : 297 : for (unsigned i = 0; i < sz; ++i)
680 : : {
681 : 173 : tree v = lad.decls[i];
682 : 173 : if (TREE_CODE (v) == SSA_NAME)
683 : 0 : release_ssa_name (v);
684 : : }
685 : :
686 : 124 : data->cannot_fallthru = false;
687 : : /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call. */
688 : 124 : gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
689 : 124 : gimple_set_location (call, loc);
690 : 124 : gsi_replace (gsi, call, true);
691 : 124 : }
692 : :
693 : : /* Lower statement GSI. DATA is passed through the recursion. We try to
694 : : track the fallthruness of statements and get rid of unreachable return
695 : : statements in order to prevent the EH lowering pass from adding useless
696 : : edges that can cause bogus warnings to be issued later; this guess need
697 : : not be 100% accurate, simply be conservative and reset cannot_fallthru
698 : : to false if we don't know. */
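
/* For example, in a sequence like

     __builtin_abort ();
     return;

   the call is ECF_NORETURN, so cannot_fallthru is set, and the
   now-unreachable GIMPLE_RETURN that follows is simply deleted rather
   than lowered into a goto (see the GIMPLE_CALL and GIMPLE_RETURN
   cases below).  This only sketches the common case; the flag is
   conservatively reset to false whenever fallthruness is unknown.  */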
699 : :
700 : : static void
701 : 87592094 : lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
702 : : {
703 : 87592094 : gimple *stmt = gsi_stmt (*gsi);
704 : :
705 : 87592094 : gimple_set_block (stmt, data->block);
706 : :
707 : 87592094 : switch (gimple_code (stmt))
708 : : {
709 : 4551110 : case GIMPLE_BIND:
710 : 4551110 : lower_gimple_bind (gsi, data);
711 : : /* Propagate fallthruness. */
712 : 4551110 : return;
713 : :
714 : 8554685 : case GIMPLE_COND:
715 : 8554685 : case GIMPLE_GOTO:
716 : 8554685 : case GIMPLE_SWITCH:
717 : 8554685 : data->cannot_fallthru = true;
718 : 8554685 : gsi_next (gsi);
719 : 8554685 : return;
720 : :
721 : 2233493 : case GIMPLE_RETURN:
722 : 2233493 : if (data->cannot_fallthru)
723 : : {
724 : 387 : gsi_remove (gsi, false);
725 : : /* Propagate fallthruness. */
726 : : }
727 : : else
728 : : {
729 : 2233106 : lower_gimple_return (gsi, data);
730 : 2233106 : data->cannot_fallthru = true;
731 : : }
732 : : return;
733 : :
734 : 2530764 : case GIMPLE_TRY:
735 : 2530764 : if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
736 : 823598 : lower_try_catch (gsi, data);
737 : : else
738 : : {
739 : : /* It must be a GIMPLE_TRY_FINALLY. */
740 : 1707166 : bool cannot_fallthru;
741 : 1707166 : lower_sequence (gimple_try_eval_ptr (stmt), data);
742 : 1707166 : cannot_fallthru = data->cannot_fallthru;
743 : :
744 : : /* The finally clause is always executed after the try clause,
745 : : so if it does not fall through, then the try-finally will not
746 : : fall through. Otherwise, if the try clause does not fall
747 : : through, then when the finally clause falls through it will
748 : : resume execution wherever the try clause was going. So the
749 : : whole try-finally will only fall through if both the try
750 : : clause and the finally clause fall through. */
751 : 1707166 : data->cannot_fallthru = false;
752 : 1707166 : lower_sequence (gimple_try_cleanup_ptr (stmt), data);
753 : 1707166 : data->cannot_fallthru |= cannot_fallthru;
754 : 1707166 : gsi_next (gsi);
755 : : }
756 : : return;
757 : :
758 : 87 : case GIMPLE_EH_ELSE:
759 : 87 : {
760 : 87 : geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
761 : 87 : lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
762 : 87 : lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
763 : : }
764 : 87 : break;
765 : :
766 : 2652796 : case GIMPLE_DEBUG:
767 : 2652796 : gcc_checking_assert (cfun->debug_nonbind_markers);
768 : : /* We can't possibly have debug bind stmts before lowering; we
769 : : first emit them when entering SSA. */
770 : 2652796 : gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
771 : : /* Propagate fallthruness. */
772 : : /* If the function (e.g. from PCH) had debug stmts, but they're
773 : : disabled for this compilation, remove them. */
774 : 2652796 : if (!MAY_HAVE_DEBUG_MARKER_STMTS)
775 : 0 : gsi_remove (gsi, true);
776 : : else
777 : 2652796 : gsi_next (gsi);
778 : : return;
779 : :
780 : 0 : case GIMPLE_OMP_STRUCTURED_BLOCK:
781 : : /* These are supposed to be removed already in OMP lowering. */
782 : 0 : gcc_unreachable ();
783 : :
784 : : case GIMPLE_NOP:
785 : : case GIMPLE_ASM:
786 : : case GIMPLE_ASSIGN:
787 : : case GIMPLE_PREDICT:
788 : : case GIMPLE_LABEL:
789 : : case GIMPLE_EH_MUST_NOT_THROW:
790 : : case GIMPLE_OMP_FOR:
791 : : case GIMPLE_OMP_SCOPE:
792 : : case GIMPLE_OMP_DISPATCH:
793 : : case GIMPLE_OMP_INTEROP:
794 : : case GIMPLE_OMP_SECTIONS:
795 : : case GIMPLE_OMP_SECTIONS_SWITCH:
796 : : case GIMPLE_OMP_SECTION:
797 : : case GIMPLE_OMP_SINGLE:
798 : : case GIMPLE_OMP_MASTER:
799 : : case GIMPLE_OMP_MASKED:
800 : : case GIMPLE_OMP_TASKGROUP:
801 : : case GIMPLE_OMP_ORDERED:
802 : : case GIMPLE_OMP_SCAN:
803 : : case GIMPLE_OMP_CRITICAL:
804 : : case GIMPLE_OMP_RETURN:
805 : : case GIMPLE_OMP_ATOMIC_LOAD:
806 : : case GIMPLE_OMP_ATOMIC_STORE:
807 : : case GIMPLE_OMP_CONTINUE:
808 : : break;
809 : :
810 : 10923655 : case GIMPLE_CALL:
811 : 10923655 : {
812 : 10923655 : tree decl = gimple_call_fndecl (stmt);
813 : 10923655 : unsigned i;
814 : :
815 : 30799465 : for (i = 0; i < gimple_call_num_args (stmt); i++)
816 : : {
817 : 19875810 : tree arg = gimple_call_arg (stmt, i);
818 : 19875810 : if (EXPR_P (arg))
819 : 5238686 : TREE_SET_BLOCK (arg, data->block);
820 : : }
821 : :
822 : 10923655 : if (decl
823 : 10923655 : && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
824 : : {
825 : 2196174 : if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
826 : : {
827 : 796 : lower_builtin_setjmp (gsi);
828 : 796 : data->cannot_fallthru = false;
829 : 796 : return;
830 : : }
831 : 2195378 : else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
832 : 119 : && flag_tree_bit_ccp
833 : 2195484 : && gimple_builtin_call_types_compatible_p (stmt, decl))
834 : : {
835 : 34 : lower_builtin_posix_memalign (gsi);
836 : 34 : return;
837 : : }
838 : 2195344 : else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
839 : 2195344 : && !optimize)
840 : : {
841 : 77 : lower_builtin_assume_aligned (gsi);
842 : 77 : data->cannot_fallthru = false;
843 : 77 : gsi_next (gsi);
844 : 77 : return;
845 : : }
846 : : }
847 : :
848 : 10922748 : if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
849 : : {
850 : 1623490 : data->cannot_fallthru = true;
851 : 1623490 : gsi_next (gsi);
852 : 1623490 : return;
853 : : }
854 : :
855 : 9299258 : if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
856 : : {
857 : 4525 : tree base = gimple_call_arg (stmt, 1);
858 : 4525 : gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
859 : 4525 : tree decl = TREE_OPERAND (base, 0);
860 : 4525 : if (VAR_P (decl) && TREE_STATIC (decl))
861 : : {
862 : : /* Don't poison a variable with static storage; it might have
863 : : gotten marked before gimplify_init_constructor promoted it
864 : : to static. */
865 : 62 : gsi_remove (gsi, true);
866 : 62 : return;
867 : : }
868 : : }
869 : :
870 : : /* We delay folding of builtin calls from gimplification to
871 : : here so the IL is in a consistent state for the diagnostic
872 : : machinery's job. */
873 : 9299196 : if (gimple_call_builtin_p (stmt))
874 : 1421044 : fold_stmt (gsi);
875 : : }
876 : : break;
877 : :
878 : 64435 : case GIMPLE_OMP_PARALLEL:
879 : 64435 : case GIMPLE_OMP_TASK:
880 : 64435 : case GIMPLE_OMP_TARGET:
881 : 64435 : case GIMPLE_OMP_TEAMS:
882 : 64435 : data->cannot_fallthru = false;
883 : 64435 : lower_omp_directive (gsi, data);
884 : 64435 : data->cannot_fallthru = false;
885 : 64435 : return;
886 : :
887 : 124 : case GIMPLE_ASSUME:
888 : 124 : lower_assumption (gsi, data);
889 : 124 : return;
890 : :
891 : 478 : case GIMPLE_TRANSACTION:
892 : 478 : lower_sequence (gimple_transaction_body_ptr (
893 : : as_a <gtransaction *> (stmt)),
894 : : data);
895 : 478 : break;
896 : :
897 : 0 : default:
898 : 0 : gcc_unreachable ();
899 : : }
900 : :
901 : 65380228 : data->cannot_fallthru = false;
902 : 65380228 : gsi_next (gsi);
903 : : }
904 : :
905 : : /* Lower a GIMPLE_BIND GSI. DATA is passed through the recursion. */
906 : :
907 : : static void
908 : 7447201 : lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
909 : : {
910 : 7447201 : tree old_block = data->block;
911 : 7447201 : gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
912 : 7447201 : tree new_block = gimple_bind_block (stmt);
913 : :
914 : 7447201 : if (new_block)
915 : : {
916 : 5879206 : if (new_block == old_block)
917 : : {
918 : : /* The outermost block of the original function may not be the
919 : : outermost statement chain of the gimplified function. So we
920 : : may see the outermost block just inside the function. */
921 : 1532717 : gcc_assert (new_block == DECL_INITIAL (current_function_decl));
922 : : new_block = NULL;
923 : : }
924 : : else
925 : : {
926 : : /* We do not expect to handle duplicate blocks. */
927 : 4346489 : gcc_assert (!TREE_ASM_WRITTEN (new_block));
928 : 4346489 : TREE_ASM_WRITTEN (new_block) = 1;
929 : :
930 : : /* Block tree may get clobbered by inlining. Normally this would
931 : : be fixed in rest_of_decl_compilation using block notes, but
932 : : since we are not going to emit them, it is up to us. */
933 : 4346489 : BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
934 : 4346489 : BLOCK_SUBBLOCKS (old_block) = new_block;
935 : 4346489 : BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
936 : 4346489 : BLOCK_SUPERCONTEXT (new_block) = old_block;
937 : :
938 : 4346489 : data->block = new_block;
939 : : }
940 : : }
941 : :
942 : 7447201 : record_vars (gimple_bind_vars (stmt));
943 : :
944 : : /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
945 : : need gimple_bind_vars. */
946 : 7447201 : tree next;
947 : : /* BLOCK_VARS and gimple_bind_vars share a common sub-chain. Find
948 : : it by marking all BLOCK_VARS. */
949 : 7447201 : if (gimple_bind_block (stmt))
950 : 12730470 : for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
951 : 6851264 : TREE_VISITED (t) = 1;
952 : 7447201 : for (tree var = gimple_bind_vars (stmt);
953 : 11542869 : var && ! TREE_VISITED (var); var = next)
954 : : {
955 : 4095668 : next = DECL_CHAIN (var);
956 : 4095668 : DECL_CHAIN (var) = NULL_TREE;
957 : : }
958 : : /* Unmark BLOCK_VARS. */
959 : 7447201 : if (gimple_bind_block (stmt))
960 : 12730470 : for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
961 : 6851264 : TREE_VISITED (t) = 0;
962 : :
963 : 7447201 : lower_sequence (gimple_bind_body_ptr (stmt), data);
964 : :
965 : 7447201 : if (new_block)
966 : : {
967 : 4346489 : gcc_assert (data->block == new_block);
968 : :
969 : 4346489 : BLOCK_SUBBLOCKS (new_block)
970 : 4346489 : = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
971 : 4346489 : data->block = old_block;
972 : : }
973 : :
974 : : /* The GIMPLE_BIND no longer carries any useful information -- kill it. */
975 : 7447201 : gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
976 : 7447201 : gsi_remove (gsi, false);
977 : 7447201 : }
978 : :
979 : : /* Same as above, but for a GIMPLE_TRY_CATCH. */
980 : :
981 : : static void
982 : 823598 : lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
983 : : {
984 : 823598 : bool cannot_fallthru;
985 : 823598 : gimple *stmt = gsi_stmt (*gsi);
986 : 823598 : gimple_stmt_iterator i;
987 : :
988 : : /* We don't handle GIMPLE_TRY_FINALLY. */
989 : 823598 : gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
990 : :
991 : 823598 : lower_sequence (gimple_try_eval_ptr (stmt), data);
992 : 823598 : cannot_fallthru = data->cannot_fallthru;
993 : :
994 : 823598 : i = gsi_start (*gimple_try_cleanup_ptr (stmt));
995 : 823598 : switch (gimple_code (gsi_stmt (i)))
996 : : {
997 : : case GIMPLE_CATCH:
998 : : /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
999 : : catch expression and a body. The whole try/catch may fall
1000 : : through iff any of the catch bodies falls through. */
1001 : 74602 : for (; !gsi_end_p (i); gsi_next (&i))
1002 : : {
1003 : 38812 : data->cannot_fallthru = false;
1004 : 38812 : lower_sequence (gimple_catch_handler_ptr (
1005 : : as_a <gcatch *> (gsi_stmt (i))),
1006 : : data);
1007 : 38812 : if (!data->cannot_fallthru)
1008 : 15609 : cannot_fallthru = false;
1009 : : }
1010 : : break;
1011 : :
1012 : 4269 : case GIMPLE_EH_FILTER:
1013 : : /* The exception filter expression only matters if there is an
1014 : : exception. If the exception does not match EH_FILTER_TYPES,
1015 : : we will execute EH_FILTER_FAILURE, and we will fall through
1016 : : if that falls through. If the exception does match
1017 : : EH_FILTER_TYPES, the stack unwinder will continue up the
1018 : : stack, so we will not fall through. We don't know whether we
1019 : : will throw an exception which matches EH_FILTER_TYPES or not,
1020 : : so we just ignore EH_FILTER_TYPES and assume that we might
1021 : : throw an exception which doesn't match. */
1022 : 4269 : data->cannot_fallthru = false;
1023 : 4269 : lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
1024 : 4269 : if (!data->cannot_fallthru)
1025 : 823598 : cannot_fallthru = false;
1026 : : break;
1027 : :
1028 : 0 : case GIMPLE_DEBUG:
1029 : 0 : gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
1030 : : break;
1031 : :
1032 : 783539 : default:
1033 : : /* This case represents statements to be executed when an
1034 : : exception occurs. Those statements are implicitly followed
1035 : : by a GIMPLE_RESX to resume execution after the exception. So
1036 : : in this case the try/catch never falls through. */
1037 : 783539 : data->cannot_fallthru = false;
1038 : 783539 : lower_sequence (gimple_try_cleanup_ptr (stmt), data);
1039 : 783539 : break;
1040 : : }
1041 : :
1042 : 823598 : data->cannot_fallthru = cannot_fallthru;
1043 : 823598 : gsi_next (gsi);
1044 : 823598 : }
1045 : :
1046 : :
1047 : : /* Try to determine whether a TRY_CATCH expression can fall through.
1048 : : This is a subroutine of gimple_stmt_may_fallthru. */
1049 : :
1050 : : static bool
1051 : 421126 : gimple_try_catch_may_fallthru (gtry *stmt)
1052 : : {
1053 : 421126 : gimple_stmt_iterator i;
1054 : :
1055 : : /* We don't handle GIMPLE_TRY_FINALLY. */
1056 : 421126 : gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
1057 : :
1058 : : /* If the TRY block can fall through, the whole TRY_CATCH can
1059 : : fall through. */
1060 : 421126 : if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
1061 : : return true;
1062 : :
1063 : 5800 : i = gsi_start (*gimple_try_cleanup_ptr (stmt));
1064 : 5800 : switch (gimple_code (gsi_stmt (i)))
1065 : : {
1066 : : case GIMPLE_CATCH:
1067 : : /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
1068 : : catch expression and a body. The whole try/catch may fall
1069 : : through iff any of the catch bodies falls through. */
1070 : 7890 : for (; !gsi_end_p (i); gsi_next (&i))
1071 : : {
1072 : 4302 : if (gimple_seq_may_fallthru (gimple_catch_handler (
1073 : 4302 : as_a <gcatch *> (gsi_stmt (i)))))
1074 : : return true;
1075 : : }
1076 : : return false;
1077 : :
1078 : 82 : case GIMPLE_EH_FILTER:
1079 : : /* The exception filter expression only matters if there is an
1080 : : exception. If the exception does not match EH_FILTER_TYPES,
1081 : : we will execute EH_FILTER_FAILURE, and we will fall through
1082 : : if that falls through. If the exception does match
1083 : : EH_FILTER_TYPES, the stack unwinder will continue up the
1084 : : stack, so we will not fall through. We don't know whether we
1085 : : will throw an exception which matches EH_FILTER_TYPES or not,
1086 : : so we just ignore EH_FILTER_TYPES and assume that we might
1087 : : throw an exception which doesn't match. */
1088 : 82 : return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));
1089 : :
1090 : : default:
1091 : : /* This case represents statements to be executed when an
1092 : : exception occurs. Those statements are implicitly followed
1093 : : by a GIMPLE_RESX to resume execution after the exception. So
1094 : : in this case the try/catch never falls through. */
1095 : : return false;
1096 : : }
1097 : : }
1098 : :
1099 : :
1100 : : /* Try to determine if we can continue executing the statement
1101 : : immediately following STMT. This guess need not be 100% accurate;
1102 : : simply be conservative and return true if we don't know. This is
1103 : : used only to avoid stupidly generating extra code. If we're wrong,
1104 : : we'll just delete the extra code later. */
1105 : :
1106 : : bool
1107 : 17682650 : gimple_stmt_may_fallthru (gimple *stmt)
1108 : : {
1109 : 17682650 : if (!stmt)
1110 : : return true;
1111 : :
1112 : 17427884 : switch (gimple_code (stmt))
1113 : : {
1114 : : case GIMPLE_GOTO:
1115 : : case GIMPLE_RETURN:
1116 : : case GIMPLE_RESX:
1117 : : /* Easy cases. If the last statement of the seq implies
1118 : : control transfer, then we can't fall through. */
1119 : : return false;
1120 : :
1121 : : case GIMPLE_SWITCH:
1122 : : /* Switch has already been lowered and represents a branch
1123 : : to a selected label and hence can't fall through. */
1124 : : return false;
1125 : :
1126 : : case GIMPLE_COND:
1127 : : /* GIMPLE_COND's are already lowered into a two-way branch. They
1128 : : can't fall through. */
1129 : : return false;
1130 : :
1131 : 396606 : case GIMPLE_BIND:
1132 : 396606 : return gimple_seq_may_fallthru (
1133 : 396606 : gimple_bind_body (as_a <gbind *> (stmt)));
1134 : :
1135 : 1382774 : case GIMPLE_TRY:
1136 : 1382774 : if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
1137 : 421126 : return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
1138 : :
1139 : : /* It must be a GIMPLE_TRY_FINALLY. */
1140 : :
1141 : : /* The finally clause is always executed after the try clause,
1142 : : so if it does not fall through, then the try-finally will not
1143 : : fall through. Otherwise, if the try clause does not fall
1144 : : through, then when the finally clause falls through it will
1145 : : resume execution wherever the try clause was going. So the
1146 : : whole try-finally will only fall through if both the try
1147 : : clause and the finally clause fall through. */
1148 : 961648 : return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
1149 : 1518375 : && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
1150 : :
1151 : 379 : case GIMPLE_EH_ELSE:
1152 : 379 : {
1153 : 379 : geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
1154 : 379 : return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
1155 : 379 : || gimple_seq_may_fallthru (gimple_eh_else_e_body (
1156 : : eh_else_stmt)));
1157 : : }
1158 : :
1159 : 4489109 : case GIMPLE_CALL:
1160 : : /* Functions that do not return do not fall through. */
1161 : 4489109 : return !gimple_call_noreturn_p (stmt);
1162 : :
1163 : : default:
1164 : : return true;
1165 : : }
1166 : : }
1167 : :
1168 : :
1169 : : /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
1170 : :
1171 : : bool
1172 : 15073128 : gimple_seq_may_fallthru (gimple_seq seq)
1173 : : {
1174 : 15073128 : return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
1175 : : }
1176 : :
1177 : :
1178 : : /* Lower a GIMPLE_RETURN GSI. DATA is passed through the recursion. */
1179 : :
1180 : : static void
1181 : 2233106 : lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
1182 : : {
1183 : 2233106 : greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
1184 : 2233106 : gimple *t;
1185 : 2233106 : int i;
1186 : 2233106 : return_statements_t tmp_rs;
1187 : :
1188 : : /* Match this up with an existing return statement that's been created. */
1189 : 4466231 : for (i = data->return_statements.length () - 1;
1190 : 2233125 : i >= 0; i--)
1191 : : {
1192 : 586624 : tmp_rs = data->return_statements[i];
1193 : :
1194 : 586624 : if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
1195 : : {
1196 : : /* Remove the line number from the representative return statement.
1197 : : It now fills in for many such returns. Failure to remove this
1198 : : will result in incorrect results for coverage analysis. */
1199 : 586605 : gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);
1200 : :
1201 : 586605 : goto found;
1202 : : }
1203 : : }
1204 : :
1205 : : /* Not found. Create a new label and record the return statement. */
1206 : 1646501 : tmp_rs.label = create_artificial_label (cfun->function_end_locus);
1207 : 1646501 : tmp_rs.stmt = stmt;
1208 : 1646501 : data->return_statements.safe_push (tmp_rs);
1209 : :
1210 : : /* Generate a goto statement and remove the return statement. */
1211 : 2233106 : found:
1212 : : /* When not optimizing, make sure user returns are preserved. */
1213 : 2233106 : if (!optimize && gimple_has_location (stmt))
1214 : 256697 : DECL_ARTIFICIAL (tmp_rs.label) = 0;
1215 : 2233106 : t = gimple_build_goto (tmp_rs.label);
1216 : : /* location includes block. */
1217 : 2233106 : gimple_set_location (t, gimple_location (stmt));
1218 : 2233106 : gsi_insert_before (gsi, t, GSI_SAME_STMT);
1219 : 2233106 : gsi_remove (gsi, false);
1220 : 2233106 : }
1221 : :
1222 : : /* Lower a __builtin_setjmp GSI.
1223 : :
1224 : : __builtin_setjmp is passed a pointer to an array of five words (not
1225 : : all will be used on all machines). It operates similarly to the C
1226 : : library function of the same name, but is more efficient.
1227 : :
1228 : : It is lowered into two other builtins, namely __builtin_setjmp_setup
1229 : : and __builtin_setjmp_receiver.
1230 : :
1231 : : After full lowering, the body of the function should look like:
1232 : :
1233 : : {
1234 : : int D.1844;
1235 : : int D.2844;
1236 : :
1237 : : [...]
1238 : :
1239 : : __builtin_setjmp_setup (&buf, &<D1847>);
1240 : : D.1844 = 0;
1241 : : goto <D1846>;
1242 : : <D1847>:;
1243 : : __builtin_setjmp_receiver (&<D1847>);
1244 : : D.1844 = 1;
1245 : : <D1846>:;
1246 : : if (D.1844 == 0) goto <D1848>; else goto <D1849>;
1247 : :
1248 : : [...]
1249 : :
1250 : : __builtin_setjmp_setup (&buf, &<D2847>);
1251 : : D.2844 = 0;
1252 : : goto <D2846>;
1253 : : <D2847>:;
1254 : : __builtin_setjmp_receiver (&<D2847>);
1255 : : D.2844 = 1;
1256 : : <D2846>:;
1257 : : if (D.2844 == 0) goto <D2848>; else goto <D2849>;
1258 : :
1259 : : [...]
1260 : :
1261 : : <D3850>:;
1262 : : return;
1263 : : }
1264 : :
1265 : : During cfg creation an extra per-function (or per-OpenMP region)
1266 : : block with an ABNORMAL_DISPATCHER internal call will be added; it is
1267 : : the unique destination of all the abnormal call edges and the unique
1268 : : source of all the abnormal edges to the receivers, thus keeping the
1269 : : complexity explosion localized. */
1270 : :
1271 : : static void
1272 : 796 : lower_builtin_setjmp (gimple_stmt_iterator *gsi)
1273 : : {
1274 : 796 : gimple *stmt = gsi_stmt (*gsi);
1275 : 796 : location_t loc = gimple_location (stmt);
1276 : 796 : tree cont_label = create_artificial_label (loc);
1277 : 796 : tree next_label = create_artificial_label (loc);
1278 : 796 : tree dest, t, arg;
1279 : 796 : gimple *g;
1280 : :
1281 : : /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
1282 : : these builtins are modelled as non-local label jumps to the label
1283 : : that is passed to these two builtins, so pretend we have a non-local
1284 : : label during GIMPLE passes too. See PR60003. */
1285 : 796 : cfun->has_nonlocal_label = 1;
1286 : :
1287 : : /* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is
1288 : : passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */
1289 : 796 : FORCED_LABEL (next_label) = 1;
1290 : :
1291 : 796 : tree orig_dest = dest = gimple_call_lhs (stmt);
1292 : 796 : if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
1293 : 725 : dest = create_tmp_reg (TREE_TYPE (orig_dest));
1294 : :
1295 : : /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */
1296 : 796 : arg = build_addr (next_label);
1297 : 796 : t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
1298 : 796 : g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
1299 : : /* location includes block. */
1300 : 796 : gimple_set_location (g, loc);
1301 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1302 : :
1303 : : /* Build 'DEST = 0' and insert. */
1304 : 796 : if (dest)
1305 : : {
1306 : 745 : g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
1307 : 745 : gimple_set_location (g, loc);
1308 : 745 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1309 : : }
1310 : :
1311 : : /* Build 'goto CONT_LABEL' and insert. */
1312 : 796 : g = gimple_build_goto (cont_label);
1313 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1314 : :
1315 : : /* Build 'NEXT_LABEL:' and insert. */
1316 : 796 : g = gimple_build_label (next_label);
1317 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1318 : :
1319 : : /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */
1320 : 796 : arg = build_addr (next_label);
1321 : 796 : t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
1322 : 796 : g = gimple_build_call (t, 1, arg);
1323 : 796 : gimple_set_location (g, loc);
1324 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1325 : :
1326 : : /* Build 'DEST = 1' and insert. */
1327 : 796 : if (dest)
1328 : : {
1329 : 745 : g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
1330 : : integer_one_node));
1331 : 745 : gimple_set_location (g, loc);
1332 : 745 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1333 : : }
1334 : :
1335 : : /* Build 'CONT_LABEL:' and insert. */
1336 : 796 : g = gimple_build_label (cont_label);
1337 : 796 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1338 : :
1339 : : /* Build orig_dest = dest if necessary. */
1340 : 796 : if (dest != orig_dest)
1341 : : {
1342 : 725 : g = gimple_build_assign (orig_dest, dest);
1343 : 725 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
1344 : : }
1345 : :
1346 : : /* Remove the call to __builtin_setjmp. */
1347 : 796 : gsi_remove (gsi, false);
1348 : 796 : }
1349 : :
1350 : : /* Lower calls to posix_memalign to
1351 : : res = posix_memalign (ptr, align, size);
1352 : : if (res == 0)
1353 : : *ptr = __builtin_assume_aligned (*ptr, align);
1354 : : or to
1355 : : void *tem;
1356 : : res = posix_memalign (&tem, align, size);
1357 : : if (res == 0)
1358 : : ptr = __builtin_assume_aligned (tem, align);
1359 : : in case the first argument was &ptr. That way we can get at the
1360 : : alignment of the heap pointer in CCP. */
1361 : :
1362 : : static void
1363 : 34 : lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
1364 : : {
1365 : 34 : gimple *stmt, *call = gsi_stmt (*gsi);
1366 : 34 : tree pptr = gimple_call_arg (call, 0);
1367 : 34 : tree align = gimple_call_arg (call, 1);
1368 : 34 : tree res = gimple_call_lhs (call);
1369 : 34 : tree ptr = create_tmp_reg (ptr_type_node);
1370 : 34 : if (TREE_CODE (pptr) == ADDR_EXPR)
1371 : : {
1372 : 34 : tree tem = create_tmp_var (ptr_type_node);
1373 : 34 : TREE_ADDRESSABLE (tem) = 1;
1374 : 34 : gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
1375 : 34 : stmt = gimple_build_assign (ptr, tem);
1376 : : }
1377 : : else
1378 : 0 : stmt = gimple_build_assign (ptr,
1379 : : fold_build2 (MEM_REF, ptr_type_node, pptr,
1380 : : build_int_cst (ptr_type_node, 0)));
1381 : 34 : if (res == NULL_TREE)
1382 : : {
1383 : 0 : res = create_tmp_reg (integer_type_node);
1384 : 0 : gimple_call_set_lhs (call, res);
1385 : : }
1386 : 34 : tree align_label = create_artificial_label (UNKNOWN_LOCATION);
1387 : 34 : tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
1388 : 34 : gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
1389 : : align_label, noalign_label);
1390 : 34 : gsi_insert_after (gsi, cond, GSI_NEW_STMT);
1391 : 34 : gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
1392 : 34 : gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1393 : 68 : stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
1394 : : 2, ptr, align);
1395 : 34 : gimple_call_set_lhs (stmt, ptr);
1396 : 34 : gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1397 : 34 : stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
1398 : : build_int_cst (ptr_type_node, 0)),
1399 : : ptr);
1400 : 34 : gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1401 : 34 : gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
1402 : 34 : }
1403 : :
1404 : : /* Lower calls to __builtin_assume_aligned when not optimizing. */
1405 : :
1406 : : static void
1407 : 77 : lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
1408 : : {
1409 : 77 : gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1410 : :
1411 : 77 : tree lhs = gimple_call_lhs (call);
1412 : 77 : if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
1413 : : return;
1414 : :
1415 : 2 : tree align = gimple_call_arg (call, 1);
1416 : 2 : tree misalign = (gimple_call_num_args (call) > 2
1417 : 2 : ? gimple_call_arg (call, 2) : NULL_TREE);
1418 : 2 : if (!tree_fits_uhwi_p (align)
1419 : 2 : || (misalign && !tree_fits_uhwi_p (misalign)))
1420 : : return;
1421 : :
1422 : 2 : unsigned aligni = TREE_INT_CST_LOW (align);
1423 : 2 : unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
1424 : 2 : if (aligni <= 1
1425 : 1 : || (aligni & (aligni - 1)) != 0
1426 : 1 : || (misaligni & ~(aligni - 1)) != 0)
1427 : : return;
1428 : :
1429 : : /* For lowering we simply transfer alignment information to the
1430 : : result and leave the call otherwise unchanged; it will be elided
1431 : : at RTL expansion time. */
1432 : 1 : ptr_info_def *pi = get_ptr_info (lhs);
1433 : 1 : set_ptr_info_alignment (pi, aligni, misaligni);
1434 : : }
1435 : :
1436 : :
1437 : : /* Record the variables in VARS into function FN. */
1438 : :
1439 : : void
1440 : 23999356 : record_vars_into (tree vars, tree fn)
1441 : : {
1442 : 47090209 : for (; vars; vars = DECL_CHAIN (vars))
1443 : : {
1444 : 23090853 : tree var = vars;
1445 : :
1446 : : /* BIND_EXPRs also contain function/type/constant declarations
1447 : : that we don't need to care about. */
1448 : 23090853 : if (!VAR_P (var))
1449 : 624200 : continue;
1450 : :
1451 : : /* Nothing to do in this case. */
1452 : 22466653 : if (DECL_EXTERNAL (var))
1453 : 2573 : continue;
1454 : :
1455 : : /* Record the variable. */
1456 : 22464080 : add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
1457 : : }
1458 : 23999356 : }
1459 : :
1460 : :
1461 : : /* Record the variables in VARS into current_function_decl. */
1462 : :
1463 : : void
1464 : 23880513 : record_vars (tree vars)
1465 : : {
1466 : 23880513 : record_vars_into (vars, current_function_decl);
1467 : 23880513 : }